def test_historyHDF5_from_archive():
    """An HDF5 history archive can be re-opened with `from_archive`."""
    sheet = Sheet("3", *three_faces_sheet())
    history = HistoryHdf5(sheet, hf5file="test.hf5")
    for _ in range(3):
        history.record()
    retrieved = HistoryHdf5.from_archive("test.hf5")
    try:
        assert isinstance(retrieved.sheet, type(sheet))
    finally:
        os.remove("test.hf5")
def test_historyHDF5_path_warning():
    """Creating a HistoryHdf5 without a path, or over an existing file,
    emits a UserWarning."""
    sheet = Sheet("3", *three_faces_sheet())
    # First pass: no explicit file; second pass: explicit, pre-existing file.
    for kwargs in ({}, {"hf5file": "out.hf5"}):
        with pytest.warns(UserWarning):
            history = HistoryHdf5(sheet, **kwargs)
            history.record(time_stamp=0)
    for leftover in Path(".").glob("out*.hf5"):
        leftover.unlink()
def test_historyHDF5_path_warning():
    """Creating a HistoryHdf5 with extra_cols but no explicit path, or over
    an existing file, emits a UserWarning.

    NOTE(review): this redefines ``test_historyHDF5_path_warning``; pytest
    only collects the last definition with a given name in a module.
    """
    sheet = Sheet("3", *three_faces_sheet())
    try:
        with pytest.warns(UserWarning):
            history = HistoryHdf5(sheet, extra_cols={"edge": ["dx"]})
            history.record(time_stamp=0)
        with pytest.warns(UserWarning):
            history = HistoryHdf5(
                sheet, extra_cols={"edge": ["dx"]}, hf5file="out.hf5")
            history.record(time_stamp=0)
    finally:
        # Clean up even when an assertion/warning check fails, and do not
        # crash if one of the files was never created.
        for leftover in ("out.hf5", "out0.hf5"):
            if os.path.exists(leftover):
                os.remove(leftover)
def test_historyHDF5_itemsize():
    """String columns recorded with varying lengths are retrieved intact."""
    sheet = Sheet("3", *three_faces_sheet())
    sheet.vert_df["segment"] = "apical"
    history = HistoryHdf5(sheet, hf5file="out.hf5")
    for element, dset in sheet.datasets.items():
        assert dset.shape[0] == history.datasets[element].shape[0]
    sheet.vert_df.loc[0, "segment"] = ""
    history.record(time_stamp=1)
    sheet.vert_df.loc[0, "segment"] = "lateral"
    history.record(time_stamp=2)
    sheet.face_df.loc[0, "area"] = 12.0
    history.record(time_stamp=3, sheet=sheet)
    assert history.retrieve(1).vert_df.loc[0, "segment"] == ""
    assert history.retrieve(2).vert_df.loc[0, "segment"] == "lateral"
    assert history.retrieve(3).face_df.loc[0, "area"] == 12.0
    for leftover in Path(".").glob("out*.hf5"):
        leftover.unlink()
def test_historyHDF5_save_other_sheet():
    """`record(sheet=...)` stores the passed sheet's state for that stamp."""
    sheet = Sheet("3", *three_faces_sheet())
    save_only = {"edge": ["dx"], "face": ["area"], "vert": ["segment"]}
    history = HistoryHdf5(sheet, save_only=save_only, hf5file="out.hf5")
    for element, dset in sheet.datasets.items():
        assert dset.shape[0] == history.datasets[element].shape[0]
    sheet.face_df.loc[0, "area"] = 1.0
    history.record(time_stamp=1)
    sheet.face_df.loc[0, "area"] = 12.0
    history.record(time_stamp=2, sheet=sheet)
    assert history.retrieve(1).face_df.loc[0, "area"] == 1.0
    assert history.retrieve(2).face_df.loc[0, "area"] == 12.0
    for leftover in Path(".").glob("out*.hf5"):
        leftover.unlink()
def test_historyHDF5_itemsize():
    """String columns recorded with varying lengths are retrieved intact.

    NOTE(review): this redefines ``test_historyHDF5_itemsize``; pytest only
    collects the last definition with a given name in a module.
    """
    sheet = Sheet("3", *three_faces_sheet())
    sheet.vert_df["segment"] = "apical"
    history = HistoryHdf5(
        sheet,
        extra_cols={"edge": ["dx"], "face": ["area"], "vert": ["segment"]},
    )
    try:
        for element in sheet.datasets:
            assert (sheet.datasets[element].shape[0]
                    == history.datasets[element].shape[0])
        sheet.vert_df.loc[0, "segment"] = ""
        history.record(time_stamp=1)
        sheet.vert_df.loc[0, "segment"] = "lateral"
        history.record(time_stamp=2)
        sheet.face_df.loc[0, "area"] = 12.0
        history.record(time_stamp=3, sheet=sheet)
        sheet1_ = history.retrieve(1)
        assert sheet1_.vert_df.loc[0, "segment"] == ""
        sheet2_ = history.retrieve(2)
        assert sheet2_.vert_df.loc[0, "segment"] == "lateral"
        sheet3_ = history.retrieve(3)
        assert sheet3_.face_df.loc[0, "area"] == 12.0
    finally:
        # Remove the archive even when an assertion fails, so a leftover
        # file does not make subsequent runs warn or fail spuriously.
        if os.path.exists("out.hf5"):
            os.remove("out.hf5")
def test_historyHDF5_save_every():
    """With save_every=2 and dt=1, in-between stamps resolve to the last
    saved one."""
    sheet = Sheet("3", *three_faces_sheet())
    history = HistoryHdf5(sheet, extra_cols={"edge": ["dx"]},
                          save_every=2, dt=1)
    for element, dset in sheet.datasets.items():
        assert dset.shape[0] == history.datasets[element].shape[0]
    for stamp in range(6):
        history.record(time_stamp=stamp)
    # (queried stamp, stamp actually stored)
    for queried, stored in ((0, 0), (1, 0), (2, 2)):
        sheet_ = history.retrieve(queried)
        for elem, dset in sheet_.datasets.items():
            assert dset.shape[0] == sheet.datasets[elem].shape[0]
            assert dset.time.unique()[0] == stored
    os.remove("out.hf5")
def test_historyHDF5_save_every():
    """With save_every=2 and dt=1, in-between stamps resolve to the last
    saved one."""
    sheet = Sheet("3", *three_faces_sheet())
    history = HistoryHdf5(sheet, save_every=2, dt=1, hf5file="out.hf5")
    for element, dset in sheet.datasets.items():
        assert dset.shape[0] == history.datasets[element].shape[0]
    for stamp in range(6):
        history.record(time_stamp=stamp)
    # (queried stamp, stamp actually stored)
    for queried, stored in ((0, 0), (1, 0), (2, 2)):
        sheet_ = history.retrieve(queried)
        for elem, dset in sheet_.datasets.items():
            assert dset.shape[0] == sheet.datasets[elem].shape[0]
            assert dset.time.unique()[0] == stored
    for leftover in Path(".").glob("out*.hf5"):
        leftover.unlink()
def test_overwrite_tim_hdf5e():
    """Recording twice with the same time stamp overwrites, not appends.

    NOTE(review): the name looks like a typo for ``test_overwrite_time_hdf5``;
    it is kept as-is so external test selections (``-k`` expressions) still
    match.
    """
    sheet = Sheet("3", *three_faces_sheet())
    history = HistoryHdf5(sheet, hf5file="out.hf5")
    try:
        history.record(time_stamp=1)
        history.record(time_stamp=1)
        sheet_ = history.retrieve(1)
        assert sheet_.Nv == sheet.Nv
    finally:
        # Cleanup previously ran before the assert and was skipped entirely
        # if retrieve() raised; finally guarantees it in both cases.
        os.remove("out.hf5")
def test_unsaved_col():
    """Adding a new column after recording started emits a UserWarning."""
    sheet = Sheet("3", *three_faces_sheet())
    history = HistoryHdf5(sheet, hf5file="test.hf5")
    try:
        history.record()
        history.record()
        sheet.face_df["new_col"] = 0
        with pytest.warns(UserWarning):
            history.record()
    finally:
        # Remove the archive even when the warning check fails, so the
        # leftover file does not break subsequent runs.
        os.remove("test.hf5")
def test_change_col_types():
    """Changing a recorded column's dtype makes record() raise ValueError."""
    sheet = Sheet("3", *three_faces_sheet())
    history = HistoryHdf5(sheet, hf5file="test.hf5")
    try:
        history.record()
        history.record()
        # "z" was numeric in the initial records; now it becomes a string.
        sheet.face_df["z"] = "abc"
        with pytest.raises(ValueError):
            history.record()
    finally:
        # Remove the archive even when the raises check fails, so the
        # leftover file does not break subsequent runs.
        os.remove("test.hf5")
def test_to_and_from_archive():
    """An in-memory History archived to HDF5 round-trips via HistoryHdf5."""
    sheet = Sheet("3", *three_faces_sheet())
    history = History(sheet)
    for _ in range(3):
        history.record()
    history.to_archive("test.hf5")
    restored = HistoryHdf5.from_archive("test.hf5")
    sheet_ = restored.retrieve(2)
    try:
        assert sheet_.Nv == sheet.Nv
    finally:
        os.remove("test.hf5")
def test_unsaved_col():
    """Adding a new column after recording started emits a UserWarning.

    Variant that passes every existing column through ``extra_cols``.
    """
    sheet = Sheet("3", *three_faces_sheet())
    all_cols = {
        "face": sheet.face_df.columns,
        "edge": list(sheet.edge_df.columns),
        "vert": list(sheet.vert_df.columns),
    }
    history = HistoryHdf5(sheet, extra_cols=all_cols, hf5file="test.hf5")
    for _ in range(2):
        history.record()
    sheet.face_df["new_col"] = 0
    with pytest.warns(UserWarning):
        history.record()
    os.remove("test.hf5")
def test_change_col_types():
    """Changing a recorded column's dtype makes record() raise ValueError.

    Variant that passes every existing column through ``extra_cols``.
    """
    sheet = Sheet("3", *three_faces_sheet())
    all_cols = {
        "face": sheet.face_df.columns,
        "edge": list(sheet.edge_df.columns),
        "vert": list(sheet.vert_df.columns),
    }
    history = HistoryHdf5(sheet, extra_cols=all_cols, hf5file="test.hf5")
    for _ in range(2):
        history.record()
    sheet.face_df["z"] = "abc"
    with pytest.raises(ValueError):
        history.record()
    os.remove("test.hf5")
def test_historyHDF5_save_other_sheet():
    """`record(sheet=...)` stores the passed sheet's state for that stamp."""
    sheet = Sheet("3", *three_faces_sheet())
    history = HistoryHdf5(sheet,
                          extra_cols={"edge": ["dx"], "face": ["area"]})
    for element, dset in sheet.datasets.items():
        assert dset.shape[0] == history.datasets[element].shape[0]
    sheet.face_df.loc[0, "area"] = 1.0
    history.record(time_stamp=1)
    sheet.face_df.loc[0, "area"] = 12.0
    history.record(time_stamp=2, sheet=sheet)
    assert history.retrieve(1).face_df.loc[0, "area"] == 1.0
    assert history.retrieve(2).face_df.loc[0, "area"] == 12.0
    os.remove("out.hf5")
def test_historyHDF5_retrieve():
    """Each recorded stamp is retrieved with matching sizes and time."""
    sheet = Sheet("3", *three_faces_sheet())
    history = HistoryHdf5(sheet, extra_cols={"edge": ["dx"]})
    for element, dset in sheet.datasets.items():
        assert dset.shape[0] == history.datasets[element].shape[0]
    history.record(time_stamp=0)
    history.record(time_stamp=1)
    for stamp in (0, 1):
        sheet_ = history.retrieve(stamp)
        for elem, dset in sheet_.datasets.items():
            assert dset.shape[0] == sheet.datasets[elem].shape[0]
            assert dset.time.unique()[0] == stamp
    os.remove("out.hf5")
def test_historyHDF5_retrieve():
    """Each recorded stamp is retrieved with matching sizes and time."""
    sheet = Sheet("3", *three_faces_sheet())
    history = HistoryHdf5(sheet, hf5file="out.hf5")
    for element, dset in sheet.datasets.items():
        assert dset.shape[0] == history.datasets[element].shape[0]
    history.record(time_stamp=0)
    history.record(time_stamp=1)
    for stamp in (0, 1):
        sheet_ = history.retrieve(stamp)
        for elem, dset in sheet_.datasets.items():
            assert dset.shape[0] == sheet.datasets[elem].shape[0]
            assert dset.time.unique()[0] == stamp
    for leftover in Path(".").glob("out*.hf5"):
        leftover.unlink()
def run_sim(
    sim_save_dir,
    _sheet,
    polarity,
    perturbation=-1,
    stop=150.,
    iteration=0,
):
    """Run an invagination simulation and record it to an HDF5 history.

    Parameters
    ----------
    sim_save_dir : str or path-like
        Directory where the history archive and settings CSV are written;
        created if missing.
    _sheet : Sheet
        Template tissue; it is copied, never modified in place.
    polarity :
        Forwarded to ``define_polarity`` and used in the archive file name.
    perturbation : -1 or iterable of face indices, default -1
        Faces to flag as mesoderm (``is_mesoderm = 1``); -1 means none.
    stop : float, default 150.
        Simulation end time (unit time steps).
    iteration : int, default 0
        Unused here; kept for call-site compatibility.

    Returns
    -------
    Sheet
        The simulated tissue after the last time step.

    Raises
    ------
    RuntimeError
        If the quasi-static solver fails to converge at any time step.
    """
    solver = QSSolver(with_t1=False, with_t3=False, with_collisions=False)
    filename = '{}_polarity{}_perturbation.hf5'.format(polarity, perturbation)
    # Create the output directory if needed; an existing directory is fine.
    # (Previously `except IOError: pass` also hid e.g. permission errors.)
    os.makedirs(sim_save_dir, exist_ok=True)

    # Without copy, the source dataframe is on read only...
    sheet = _sheet.copy()
    sheet.face_df['is_mesoderm'] = 0
    if perturbation != -1:
        for p in perturbation:
            sheet.face_df.loc[int(p), 'is_mesoderm'] = 1

    define_polarity(sheet, 1, polarity)
    geom.normalize_weights(sheet)

    # Add some information to the sheet: stable per-face identifier.
    sheet.face_df['id'] = sheet.face_df.index.values

    # Initiate history, recording every column of every dataset.
    history = HistoryHdf5(
        sheet,
        extra_cols={
            "face": sheet.face_df.columns,
            "edge": list(sheet.edge_df.columns),
            "vert": list(sheet.vert_df.columns),
        },
        hf5file=os.path.join(sim_save_dir, filename),
    )

    # Initiate manager
    manager = EventManager('face')

    # Save settings next to the archive for reproducibility.
    pd.Series(sheet.settings).to_csv(
        os.path.join(sim_save_dir, (filename[:-4] + '_settings.csv')))

    manager.append(reconnect, **sheet.settings['rosette_kwargs'])
    manager.append(apoptosis_patterning,
                   **sheet.settings['apopto_pattern_kwargs'])

    t = 0.
    # BUGFIX: `stop` used to be reset to 150. here, silently ignoring the
    # `stop` argument; the parameter is now honoured (default unchanged).
    while t < stop:
        if t == 5:
            # Schedule delamination once, at t == 5, for every mesoderm face.
            for i in sheet.face_df[sheet.face_df.is_mesoderm == 1].index:
                delamination_kwargs = sheet.settings[
                    'delaminate_setting'].copy()
                delamination_kwargs.update({"face_id": i})
                manager.append(delamination, **delamination_kwargs)

        # Reset radial tension at each time step
        sheet.vert_df.radial_tension = 0.
        manager.execute(sheet)
        res = solver.find_energy_min(sheet, geom, model,
                                     options={"gtol": 1e-8})
        if not res.success:
            # BUGFIX: this used to `raise` a bare tuple, which is itself a
            # TypeError in Python 3; raise a real exception instead.
            raise RuntimeError(
                "Stop because solver didn't succeed at time t "
                "{}: {}".format(t, res))

        # add noise on vertex position to avoid local minimal.
        sheet.vert_df[['x', 'y']] += np.random.normal(scale=1e-3,
                                                      size=(sheet.Nv, 2))
        geom.update_all(sheet)
        history.record(time_stamp=float(t))
        manager.update()
        t += 1.
    return sheet
def run_sim(sim_save_dir,
            original_tissue,
            polarity,
            perturbation,
            ve,
            iteration=0):
    """Run an invagination simulation with a prescribed lumen volume and
    record it to an HDF5 history.

    Parameters
    ----------
    sim_save_dir : str or path-like
        Parent directory; a per-run subdirectory is created inside it.
    original_tissue : Sheet
        Template tissue; it is copied, never modified in place.
    polarity :
        Forwarded to ``define_polarity_old`` and used in the run name.
    perturbation : -1 or iterable of face indices
        Faces to flag as mesoderm (``is_mesoderm = 1``); -1 means none.
        NOTE(review): unlike the other ``run_sim`` variant, the
        ``is_mesoderm`` column is not initialised to 0 first — confirm the
        template tissue already carries it.
    ve :
        Stored as ``settings['lumen_prefered_vol']``.
    iteration : int, default 0
        Only used to disambiguate the output directory name.

    NOTE(review): ``solver``, ``geom`` and ``model`` are module-level
    globals here (no local ``QSSolver`` is built), and the solver result
    inside the loop is not checked for convergence, unlike the other
    ``run_sim`` variant.
    """
    # Random stagger so concurrent workers are less likely to collide.
    time.sleep(np.random.rand())

    # without copy, dataframe is on read only...
    sheet = original_tissue.copy()
    sheet.settings['lumen_prefered_vol'] = ve
    if perturbation != -1:
        for p in perturbation:
            sheet.face_df.loc[int(p), 'is_mesoderm'] = 1

    define_polarity_old(sheet, 1, polarity)
    geom.normalize_weights(sheet)
    # Relax the tissue once before the time loop starts.
    res = solver.find_energy_min(sheet, geom, model, options={"gtol": 1e-8})

    filename = '{}_polarity_{}_perturbation_{}_ve_{}'.format(
        polarity, perturbation, ve, iteration)
    dirname = os.path.join(sim_save_dir, filename)
    print('starting {}'.format(dirname))
    try:
        os.mkdir(dirname)
    except IOError:
        pass

    # Add some information to the sheet and copy initial sheet
    sheet.face_df['id'] = sheet.face_df.index.values

    # Initiate history, recording every column of every dataset.
    history = HistoryHdf5(sheet,
                          extra_cols={
                              "face": sheet.face_df.columns,
                              "edge": list(sheet.edge_df.columns),
                              "vert": list(sheet.vert_df.columns)
                          },
                          hf5file=os.path.join(dirname, filename + '.hf5'))

    # Initiate manager
    manager = EventManager('face')

    # Update kwargs...
    sheet.settings['apoptosis'].update({
        'contract_rate': 1.08,
        'radial_tension': 50,
    })

    # save settings
    pd.Series(sheet.settings).to_csv(os.path.join(dirname, 'settings.csv'))

    manager.append(reconnect, **sheet.settings['rosette_kwargs'])
    manager.append(apoptosis_patterning,
                   **sheet.settings['apopto_pattern_kwargs'])

    t = 0.
    stop = 150.
    # Run simulation
    while t < stop:
        if t == 5:
            # Schedule delamination once, at t == 5, for every mesoderm face.
            for i in sheet.face_df[sheet.face_df.is_mesoderm == 1].index:
                delamination_kwargs = sheet.settings[
                    'delaminate_setting'].copy()
                delamination_kwargs.update({
                    "face_id": i,
                    "radial_tension": 50,
                    "contract_rate": 1.08,
                    "max_traction": 90,
                    "current_traction": 0,
                })
                manager.append(delamination, **delamination_kwargs)

        # Reset radial tension at each time step
        sheet.vert_df.radial_tension = 0.
        manager.execute(sheet)
        res = solver.find_energy_min(sheet, geom, model,
                                     options={"gtol": 1e-8})

        # add noise on vertex position to avoid local minimal.
        sheet.vert_df[['x', 'y']] += np.random.normal(scale=1e-3,
                                                      size=(sheet.Nv, 2))
        geom.update_all(sheet)
        # (Removed a dead triple-quoted string that held commented-out
        # per-step figure/HDF5 saving code and was re-evaluated every
        # iteration.)
        history.record(time_stamp=float(t))
        manager.update()
        t += 1.

    print('{} done'.format(dirname))
    print('~~~~~~~~~~~~~~~~~~~~~\n')