# NOTE(review): this chunk starts mid-statement — the assignment target of the
# os.path.join(current_file_path, relative_test_data_dir) call is cut off
# above this view (presumably something like test_data_dir = os.path.abspath(...)).
os.path.join(current_file_path, relative_test_data_dir))

# Locations and file names for the reference test data set used by the tests.
test_data_dir_raw = os.path.join(test_data_dir, "data")
test_res_file = "20160805_test001_45_cc_01.res"
test_res_file_full = os.path.join(test_data_dir_raw, test_res_file)
test_data_dir_out = os.path.join(test_data_dir, "out")
test_data_dir_cellpy = os.path.join(test_data_dir, "hdf5")
test_cellpy_file = "20160805_test001_45_cc.h5"
test_cellpy_file_tmp = "tmpfile.h5"
test_cellpy_file_full = os.path.join(test_data_dir_cellpy, test_cellpy_file)
test_cellpy_file_tmp_full = os.path.join(test_data_dir_cellpy, test_cellpy_file_tmp)
test_run_name = "20160805_test001_45_cc"

log.setup_logging(default_level="DEBUG")

# Hard-coded path and mass for a newer Arbin raw file used in ad-hoc checks
# (only valid on the original author's machine).
new_arbin_file = (
    r"C:\Scripting\Processing\Celldata\indata\NewArbin\20170907_sic024_01_cc_01.res"
)
new_arbin_mass = 0.824098422


def load_it(cellpy_data_instance):
    """Load the reference .res file with the instance's raw-file loader and
    append the first returned dataset to the instance's dataset list."""
    # cellpy_data_instance.loadcell(test_res_file_full)
    raw_file_loader = cellpy_data_instance.loader
    test = raw_file_loader(test_res_file_full)
    cellpy_data_instance.datasets.append(test[0])


def append_to_it(cellpy_data_instance):
    # NOTE(review): the body of this function is cut off below this view.
import pytest
import logging

from cellpy import log
from cellpy.utils import ocv_rlx
from . import fdv

log.setup_logging(default_level=logging.DEBUG)


@pytest.fixture
def dataset():
    """Return a CellpyData object loaded from the reference cellpy file."""
    from cellpy import cellreader

    d = cellreader.CellpyData()
    d.load(fdv.cellpy_file_path)
    return d


# Expected fitted values for each circuit element of the ocv relaxation model.
@pytest.mark.parametrize(
    "variable,value",
    [
        ("r0", 12.15126),
        ("r1", 15.29991),
        ("ir", 19.36777),
        ("c1", 48.06680),
        ("c0", 7.41526),
        ("ocv", 0.096818),
    ],
)
def test_ocv_rlx_single(dataset, variable, value):
    ocv_fit = ocv_rlx.OcvFit()
    # NOTE(review): the rest of this test function is cut off below this view.
def __init__(self, *args, **kwargs):
    """Initialize the Batch class.

    The initialization accepts arbitrary arguments and keyword arguments.
    It first looks for the file_name and db_reader keyword arguments.

    Usage:
        b = Batch((name, (project)), **kwargs)

    Examples:
        >>> b = Batch("experiment001", "main_project")
        >>> b = Batch("experiment001", "main_project", batch_col="b02")
        >>> b = Batch(name="experiment001", project="main_project", batch_col="b02")
        >>> b = Batch(file_name="cellpydata/batchfiles/cellpy_batch_experiment001.json")

    Keyword Args (priority):
        file_name (str or pathlib.Path): journal file name to load.
        db_reader (str): data-base reader to use (defaults to "default" as
            given in the config-file or prm-class).
    Args:
        *args: name (str) (project (str))

    Keyword Args (other):
        log_level (str): custom log-level (defaults to None (i.e. default
            log-level in cellpy)).
        custom_log_dir (str or pathlib.Path): custom folder for putting the
            log-files.
        force_raw_file (bool): load from raw regardless (defaults to False).
        force_cellpy (bool): load cellpy-files regardless (defaults to False).
        force_recalc (bool): Always recalculate (defaults to False).
        export_cycles (bool): Extract and export individual cycles to csv
            (defaults to True).
        export_raw (bool): Extract and export raw-data to csv (defaults
            to True).
        export_ica (bool): Extract and export individual dQ/dV data to csv
            (defaults to False).
        accept_errors (bool): Continue automatically to next file if error
            is raised (defaults to False).
        nom_cap (float): give a nominal capacity if you want to use another
            value than the one given in the config-file or prm-class.
    """
    # Re-configure logging only if the caller asked for a custom level/dir.
    default_log_level = kwargs.pop("log_level", None)
    custom_log_dir = kwargs.pop("custom_log_dir", None)
    if default_log_level is not None or custom_log_dir is not None:
        log.setup_logging(
            custom_log_dir=custom_log_dir,
            default_level=default_log_level,
            reset_big_log=True,
        )
    db_reader = kwargs.pop("db_reader", "default")
    file_name = kwargs.pop("file_name", None)
    logging.debug("creating CyclingExperiment")
    self.experiment = CyclingExperiment(db_reader=db_reader)
    logging.info("created CyclingExperiment")
    # Transfer the behavioral flags from kwargs onto the experiment.
    self.experiment.force_cellpy = kwargs.pop("force_cellpy", False)
    self.experiment.force_raw = kwargs.pop("force_raw_file", False)
    self.experiment.force_recalc = kwargs.pop("force_recalc", False)
    self.experiment.export_cycles = kwargs.pop("export_cycles", True)
    self.experiment.export_raw = kwargs.pop("export_raw", True)
    self.experiment.export_ica = kwargs.pop("export_ica", False)
    self.experiment.accept_errors = kwargs.pop("accept_errors", False)
    self.experiment.nom_cap = kwargs.pop("nom_cap", None)
    if not file_name:
        # No journal file given: take name/project from positional args,
        # then let explicit keyword args override them.
        if len(args) > 0:
            self.experiment.journal.name = args[0]
        if len(args) > 1:
            self.experiment.journal.project = args[1]
        for key in kwargs:
            if key == "name":
                self.experiment.journal.name = kwargs[key]
            elif key == "project":
                self.experiment.journal.project = kwargs[key]
            elif key == "batch_col":
                self.experiment.journal.batch_col = kwargs[key]
    else:
        # A journal file wins over name/project arguments.
        self.experiment.journal.from_file(file_name=file_name)
    self.exporter = CSVExporter()
    self.exporter._assign_dumper(ram_dumper)
    self.exporter.assign(self.experiment)
    self.plotter = CyclingSummaryPlotter()
    self.plotter.assign(self.experiment)
    self._journal_name = self.journal_name
    self.headers_step_table = get_headers_step_table()
def process_batch(*args, **kwargs):
    """Execute a batch run, either from a given file_name or by giving the
    name and project as input.

    Usage:
        process_batch(file_name | (name, project), **kwargs)

    Args:
        *args: file_name or name and project (both string)

    Optional keyword arguments:
        backend (str): what backend to use when plotting ('bokeh' or
            'matplotlib'). Defaults to 'matplotlib'.
        dpi (int): resolution used when saving matplotlib plot(s).
            Defaults to 300 dpi.
        default_log_level (str): What log-level to use for console output.
            Chose between 'CRITICAL', 'DEBUG', or 'INFO'. The default is
            'CRITICAL' (i.e. usually no log output to console).
        silent (bool): if True, re-raise NullData errors instead of writing
            an abort message (defaults to False).

    Returns:
        cellpy.batch.Batch object (or None if the run was aborted on a
        NullData error with silent=False).
    """
    silent = kwargs.pop("silent", False)
    backend = kwargs.pop("backend", None)
    if backend is not None:
        prms.Batch.backend = backend
    else:
        prms.Batch.backend = "matplotlib"
    dpi = kwargs.pop("dpi", 300)
    default_log_level = kwargs.pop("default_log_level", "CRITICAL")
    if len(args) == 1:
        file_name = args[0]
    else:
        file_name = kwargs.pop("file_name", None)

    log.setup_logging(default_level=default_log_level, reset_big_log=True)
    logging.debug(f"creating Batch(kwargs: {kwargs})")

    if file_name is not None:
        # An explicit journal file wins: drop any db_reader the caller gave.
        kwargs.pop("db_reader", None)
        b = Batch(*args, file_name=file_name, db_reader=None, **kwargs)
        b.create_journal(file_name)
    else:
        b = Batch(*args, **kwargs)
        b.create_journal()

    # Processing stages, executed in insertion order; each value is
    # (callable, *extra-args).
    steps = {
        "paginate": (b.paginate,),
        "update": (b.update,),
        "combine": (b.combine_summaries,),
        "plot": (b.plot_summaries,),
        "save": (_pb_save_plot, b, dpi),
    }

    with tqdm(total=(100 * len(steps) + 20), leave=False, file=sys.stdout) as pbar:
        pbar.update(10)
        # Use .items() and a distinct name for the step's extra arguments so
        # we do not shadow this function's own *args.
        for description, (func, *step_args) in steps.items():
            pbar.set_description(description)
            pbar.update(10)
            try:
                func(*step_args)
            except cellpy.exceptions.NullData as e:
                if not silent:
                    tqdm.write(f"\nEXCEPTION (NullData): {str(e)}")
                    tqdm.write("...aborting")
                    return
                else:
                    # Re-raise with the original traceback intact.
                    raise
        pbar.set_description("final")
        pbar.update(10)
    return b
# Ad-hoc bug-fixing/development script: loads a single Arbin .res file,
# builds step table and summary, and extracts cycle information.
# NOTE(review): paths are hard-coded to the original author's machine.
import os
import sys
import time
from pathlib import Path

print(f"running {sys.argv[0]}")

import cellpy
from cellpy import log
from cellpy import cellreader
from cellpy.parameters import prms

# Configure the reader before loading: no stat-file, cathode cycle mode,
# unsorted raw data.
prms.Reader.use_cellpy_stat_file = False
prms.Reader.cycle_mode = "cathode"
prms.Reader.sorted_data = False

log.setup_logging(default_level="DEBUG", custom_log_dir=os.getcwd())

datapath = "/Users/jepe/scripting/cellpy/dev_data/bugfixing"
filename = Path(datapath) / "20180919_FC_LFP2_cen14_01_cc_01.res"
assert os.path.isfile(filename)

d = cellreader.CellpyData()
d.from_raw(filename)
d.set_mass(0.12)
d.make_step_table()
d.make_summary()

# checking extracting cycles
n = d.get_number_of_cycles()
c = d.get_cycle_numbers()
def test_logger(clean_dir):
    """Exercise log.setup_logging with default, DEBUG, INFO, bad-json and
    custom-log-dir configurations and check the resulting handler levels."""
    test_logging_json = os.path.join(fdv.data_dir, "test_logging.json")
    prms.Paths["filelogdir"] = fdv.log_dir

    # Default configuration: console handler stays at CRITICAL.
    log.setup_logging()
    tmp_logger = logging.getLogger()
    assert tmp_logger.level == logging.DEBUG
    tmp_logger.info("default: testing logger (info)")
    tmp_logger.debug("default: testing logger (debug)")
    tmp_logger.error("default: testing logger (error)")
    for handler in tmp_logger.handlers:
        if handler.name == "console":
            assert handler.level == logging.CRITICAL
        # NOTE(review): the file-handler checks below are a separate
        # if/elif chain, so "console" and one file handler can both match.
        if handler.name == "info_file_handler":
            assert handler.level == logging.INFO
        elif handler.name == "error_file_handler":
            assert handler.level == logging.ERROR
        elif handler.name == "debug_file_handler":
            assert handler.level == logging.DEBUG

    # Explicit DEBUG level: console handler drops to DEBUG.
    log.setup_logging(default_level="DEBUG")
    tmp_logger = logging.getLogger()
    tmp_logger.info("default: testing logger (info)")
    tmp_logger.debug("default: testing logger (debug)")
    tmp_logger.error("default: testing logger (error)")
    for handler in tmp_logger.handlers:
        if handler.name == "console":
            assert handler.level == logging.DEBUG
        if handler.name == "info_file_handler":
            assert handler.level == logging.INFO
        elif handler.name == "error_file_handler":
            assert handler.level == logging.ERROR
        elif handler.name == "debug_file_handler":
            assert handler.level == logging.DEBUG

    # Explicit INFO level: console handler at INFO.
    log.setup_logging(default_level="INFO")
    for handler in logging.getLogger().handlers:
        if handler.name == "console":
            assert handler.level == logging.INFO
        if handler.name == "info_file_handler":
            assert handler.level == logging.INFO
        elif handler.name == "error_file_handler":
            assert handler.level == logging.ERROR
        elif handler.name == "debug_file_handler":
            assert handler.level == logging.DEBUG

    # A non-existing json config should still leave the standard 4 handlers.
    log.setup_logging(default_json_path="./a_file_that_does_not_exist.json")
    assert len(logging.getLogger().handlers) == 4

    log.setup_logging(default_json_path=test_logging_json)

    # Custom log directory (the tmp dir provided by the clean_dir fixture).
    log.setup_logging(custom_log_dir=clean_dir)
    tmp_logger = logging.getLogger()
    tmp_logger.info("customdir, default: testing logger (info)")
    tmp_logger.debug("customdir, default: testing logger (debug)")
    tmp_logger.error("customdir, default: testing logger (error)")
from cellpy.utils.batch_tools import (
    batch_experiments,
    batch_exporters,
    batch_journals,
    batch_plotters,
    dumpers,
    engines,
)

from cellpy import log
from cellpy import prms
from cellpy.utils import batch as batch

from . import fdv

log.setup_logging(default_level="INFO")


@pytest.fixture(scope="module")
def clean_dir():
    """Create and return a fresh temporary directory for output files."""
    new_path = tempfile.mkdtemp()
    return new_path


@pytest.fixture(scope="module")
def batch_instance(clean_dir):
    """Point the cellpy prms paths at the test data set (output into the
    temporary clean_dir)."""
    prms.Paths["db_filename"] = fdv.db_file_name
    prms.Paths["cellpydatadir"] = fdv.cellpy_data_dir
    prms.Paths["outdatadir"] = clean_dir
    prms.Paths["rawdatadir"] = fdv.raw_data_dir
    prms.Paths["db_path"] = fdv.db_dir
    # NOTE(review): the rest of this fixture is cut off below this view.
def check_silicon():
    """Manual check of the Silicon peak-ensemble: hint/parameter syncing,
    parameter updates, peak reset, and a fit against measured dQ/dV data."""
    log.setup_logging(default_level=logging.INFO)
    my_data = cellreader.CellpyData()
    # Relative path to the reference test file — run from the script's dir.
    filename = "../../../testdata/hdf5/20160805_test001_45_cc.h5"
    assert os.path.isfile(filename)
    my_data.load(filename)
    my_data.set_mass(0.1)
    # Charge capacity vs voltage for cycle 2, then incremental capacity.
    cha, volt = my_data.get_ccap(2)
    v, dq = ica.dqdv(volt, cha)
    # log.setup_logging(default_level=logging.DEBUG)

    print("* creating a silicon peak ensemble:")
    silicon = Silicon(shift=-0.1, max_point=dq.max(), sigma_p1=0.06)
    print(f"hint: {silicon.peaks.param_hints['Si02sigma']}\n"
          f"val: {silicon.params['Si02sigma']}")
    print("* syncing hints:")
    silicon.create_hints_from_parameters()
    print(f"hint: {silicon.peaks.param_hints['Si02sigma']}\n"
          f"val: {silicon.params['Si02sigma']}")
    print("* updating the Si02sigma parameter:")
    silicon.set_param("Si02sigma", minimum=0.02, vary=False)
    print(f"hint: {silicon.peaks.param_hints['Si02sigma']}\n"
          f"val: {silicon.params['Si02sigma']}")
    print("* reset peaks:")
    silicon.reset_peaks()
    print(f"hint: {silicon.peaks.param_hints['Si02sigma']}\n"
          f"val: {silicon.params['Si02sigma']}")

    # Repeat the same sequence with sync_model_hints=True to compare.
    print("WITH AUTO SYNC")
    print("* creating a silicon peak ensemble:")
    silicon = Silicon(shift=-0.1, max_point=dq.max(), sigma_p1=0.06,
                      sync_model_hints=True)
    print(f"hint: {silicon.peaks.param_hints['Si02sigma']}\n"
          f"val: {silicon.params['Si02sigma']}")
    print("* syncing hints:")
    silicon.create_hints_from_parameters()
    print(f"hint: {silicon.peaks.param_hints['Si02sigma']}\n"
          f"val: {silicon.params['Si02sigma']}")
    print("* updating the Si02sigma parameter:")
    silicon.set_param("Si02sigma", minimum=0.02, vary=False)
    print(f"hint: {silicon.peaks.param_hints['Si02sigma']}\n"
          f"val: {silicon.params['Si02sigma']}")
    print("* reset peaks:")
    silicon.reset_peaks()
    print(f"hint: {silicon.peaks.param_hints['Si02sigma']}\n"
          f"val: {silicon.params['Si02sigma']}")

    print()
    print(" Fitting ".center(80, "-"))
    silicon = Silicon(shift=-0.1, max_point=dq.max(), sigma_p1=0.06)
    print(silicon)
    # Fit against the negated dq (discharge convention — assumption, confirm).
    res1 = silicon.fit(-dq, x=v)
    print(res1.fit_report())
    print()
    print("New meta params")
    print(silicon)
    print("Setting crystalline")
    silicon.crystalline = True
def check_backprop_composite():
    """Manual check of back-propagation for a CompositeEnsemble built from a
    Silicon and a Graphite peak ensemble."""
    print("Checking back prop for composite ensamble")
    log.setup_logging(default_level=logging.INFO)
    my_data = cellreader.CellpyData()
    # Relative path to the reference test file — run from the script's dir.
    filename = "../../../testdata/hdf5/20160805_test001_45_cc.h5"
    assert os.path.isfile(filename)
    my_data.load(filename)
    my_data.set_mass(0.1)
    cha, volt = my_data.get_ccap(2)
    v, dq = ica.dqdv(volt, cha)
    # log.setup_logging(default_level=logging.DEBUG)

    print("* creating a silicon peak ensemble:")
    si_g_composite = CompositeEnsemble(
        Silicon(shift=-0.1, max_point=dq.max(), sigma_p1=0.06),
        Graphite(shift=-0.03))
    print(si_g_composite)
    print("peak values:")
    print(f"val: {si_g_composite.params['Si01sigma']}")  # sigma_p1
    print(f"val: {si_g_composite.params['Si01center']}")  # center - b
    print(f"val: {si_g_composite.params['G01center']}")  # center - graphite
    print("\nsetting some new values:")
    si_g_composite.set_param("Si01center", value=0.18)
    si_g_composite.set_param("G01center", value=0.14)
    print(f"val: {si_g_composite.params['Si01sigma']}")
    print(f"val: {si_g_composite.params['Si01center']}")
    print(f"val: {si_g_composite.params['G01center']}")  # center - graphite

    print("BACK PROPAGATION")
    si_g_composite.back_propagation()
    # select by order
    si_ensemble = si_g_composite.ensemble[0]
    g_ensemble = si_g_composite.ensemble[1]
    # select by name
    # NOTE(review): these immediately overwrite the selection-by-order above;
    # the two index-based lookups are effectively demonstration-only.
    si_ensemble = si_g_composite.selector["Si"]
    g_ensemble = si_g_composite.selector["G"]
    si_new_shift = si_ensemble.shift
    si_new_max_point = si_ensemble.max_point
    si_new_sigma_p1 = si_ensemble.sigma_p1
    g_new_shift = g_ensemble.shift
    print("- calculated back prop gives the following updated values")
    print(si_g_composite)

    # Re-create a fresh composite from the back-propagated meta parameters.
    print("- setting the values to a new object")
    another_si_g_composite = CompositeEnsemble(
        Silicon(
            shift=si_new_shift,
            max_point=si_new_max_point,
            sigma_p1=si_new_sigma_p1,
            compress=1.0,
            expand=1.0,
        ),
        Graphite(shift=g_new_shift),
    )
    print(another_si_g_composite)
    print(f"val: {another_si_g_composite.params['Si01sigma']}")
    print(f"val: {another_si_g_composite.params['Si01center']}")
    print(f"val: {another_si_g_composite.params['G01center']}")
    print(another_si_g_composite.prefixes)
    print("PARAM NAMES")
    print(another_si_g_composite.param_names)
    print("NAMES")
    print(another_si_g_composite.names)
    print("SELECTED Si")
    print(another_si_g_composite.selector["Si"])