def __init__(self, fixed_nucl_mc=None, matrix_element=None,
             cluster_elements=None, num_matrix_atoms_surface=1,
             traj_file="full_system_insertia.traj",
             traj_file_clst="clusters_inertial.traj", output_every=10,
             formula="I1/I3"):
    """Initialize the inertia-coordinate initializer.

    :param fixed_nucl_mc: Monte Carlo object used for the fixed
        nucleus simulation
    :param matrix_element: Element making up the matrix; must NOT be
        one of the clustering elements
    :param cluster_elements: List of elements considered part of the
        cluster (defaults to an empty list)
    :param num_matrix_atoms_surface: Number of matrix atoms required
        for a cluster atom to count as being on the surface
    :param traj_file: Trajectory file for the full system
    :param traj_file_clst: Trajectory file for the clusters only
    :param output_every: Interval between outputs
    :param formula: Inertia formula; one of "I1/I3", "2*I1/(I2+I3)"
        or "(I1+I2)/(2*I3)"

    :raises ValueError: If the matrix element is also a clustering
        element, or if formula is not one of the allowed types
    """
    # None-sentinel avoids the shared mutable-default-argument pitfall:
    # each instance gets its own list.
    if cluster_elements is None:
        cluster_elements = []
    if matrix_element in cluster_elements:
        raise ValueError("InertiaCrdInitializer works only when "
                         "the matrix element is not present in the "
                         "clustering element!")
    allowed_types = ["I1/I3", "2*I1/(I2+I3)", "(I1+I2)/(2*I3)"]
    if formula not in allowed_types:
        raise ValueError("formula has to be one of {}"
                         "".format(allowed_types))
    self.formula = formula
    self.matrix_element = matrix_element
    self.cluster_elements = cluster_elements
    self.fixed_nucl_mc = fixed_nucl_mc
    self.num_matrix_atoms_surface = num_matrix_atoms_surface
    self.output_every = output_every

    size = num_processors()
    rank = mpi_rank()
    if size > 1:
        # Rename the trajectory file writer one for each process, so
        # parallel runs do not write to the same file.
        fname_base = traj_file.rpartition(".")[0]
        traj_file = fname_base + str(rank) + ".traj"
        fname_base = traj_file_clst.rpartition(".")[0]
        traj_file_clst = fname_base + str(rank) + ".traj"
    self.traj_file = traj_file
    self.traj_file_clst = traj_file_clst
def _backup(self, data, dsetname="data"):
    """Store backup data to the HDF5 backup file (master rank only).

    :param data: Dictionary of data to be backed up
    :param dsetname: Basename for all datasets in the h5 file
    """
    if mpi_rank() == 0:
        with h5.File(self._backupfile, 'a') as hfile:
            group = hfile.create_group(
                dsetname + "{}".format(self._current_backup_indx))
            for key, value in data.items():
                if value is None:
                    continue
                if key != "images":
                    group.create_dataset(key, data=value)
                    continue
                # One dataset per frame, tagged with the attributes
                # the HDF5 image specification expects.
                for img_num, img in enumerate(value):
                    if img is None:
                        continue
                    dset = group.create_dataset(
                        "img_{}".format(img_num), data=img)
                    dset.attrs['CLASS'] = "IMAGE"
                    dset.attrs['IMAGE_VERSION'] = '1.2'
                    dset.attrs['IMAGE_SUBCLASS'] = 'IMAGE_INDEXED'
                    dset.attrs['IMAGE_MINMAXRANGE'] = np.array(
                        [0, 255], dtype=np.uint8)
    # All ranks advance the counter and meet at the barrier so the
    # backup index stays consistent across processes.
    self._current_backup_indx += 1
    mpi_barrier()
def _include_rank_in_filename(self, fname):
    """Return *fname*, tagged with the MPI rank when necessary.

    In a parallel run every process needs its own file to avoid
    clashing writes; with a single process the name is unchanged.
    """
    if num_processors() <= 1:
        return fname
    # Strip the extension and append the rank marker.
    stem = fname.rpartition(".")[0]
    return "{}_rank{}.pkl".format(stem, mpi_rank())
def _log(self, msg, mode="info"):
    """Forward *msg* to the logger on the master process only.

    :param msg: Message to log
    :param mode: Either "info" or "warning"; anything else is ignored
    """
    if mpi_rank() != 0:
        return
    if mode == "info":
        self._logger.info(msg)
    elif mode == "warning":
        self._logger.warning(msg)
def __call__(self, system_changes):
    """Write a copy of the Monte Carlo object to file.

    If a database name is configured, the current thermodynamic data
    is also inserted into (or updated in) the database table.

    :param system_changes: Proposed changes (required by the observer
        interface; not used here)
    """
    self.mc_obj.save(self.backup_file)
    if self.db_name != "":
        import dataset
        thermo = self.mc_obj.get_thermodynamic()
        db = dataset.connect("sqlite:///{}".format(self.db_name))
        tab = db[self.db_tab_name]
        if self.db_id is None:
            # This should be a new entry
            self.db_id = tab.insert(thermo)
        else:
            # Entry already exists. Update that one.
            thermo["id"] = self.db_id
            tab.update(thermo, ["id"])
def __call__(self, system_changes):
    """Check the system is in a valid state after the changes.

    :param list system_changes: Proposed changes

    :return: True/False, if True the system is still within the bounds
    :rtype: bool
    """
    value = self.get_new_value(system_changes)
    inside = self.range[0] <= value < self.range[1]
    if not inside and self.verbose:
        # The evaluation of this constraint can be time consuming
        # so let the user know at regular intervals
        rank = mpi_rank()
        if time.time() - self.last_print > 10:
            print("Move violates constraint on rank {}".format(rank))
            self.last_print = time.time()
    return inside
def predict_composition(comp, temperatures, target_temp, target_comp):
    """Predict the next composition value based on history.

    A weighted smoothing spline is fitted to the (temperature,
    composition) history and evaluated at the target temperature.

    :param comp: History of compositions
    :param temperatures: History of temperatures
    :param target_temp: Temperature where the composition should be
        predicted
    :param target_comp: Composition computed at the target temperature
        (fallback when there is no history; also shown in the plot)

    :return: Tuple of (predicted composition, convergence-plot RGB
        image broadcast to all ranks, or None when no spline was fit)
    """
    # With this backend one does not need a screen (useful for clusters)
    has_matplotlib = True
    try:
        from matplotlib import pyplot as plt
        plt.switch_backend("Agg")
    except ImportError as exc:
        has_matplotlib = False
        print(str(exc))
        print("Warning! Cannot produce convergence plots "
              "without matplotlib!")

    # With too little history a spline cannot be fitted; fall back to
    # the supplied/last value.
    if len(comp) == 0:
        return target_comp, None
    elif len(comp) <= 2:
        return comp[-1], None
    elif len(comp) == 3:
        k = 2
    else:
        k = 3

    # Weight the data such that the last point is more important than
    # the first. Impact of the first point is exp(-2) relative to the
    # impact of the last point.
    temp_lin_space = np.arange(0, len(temperatures))[::-1]
    weights = np.exp(-2.0 * temp_lin_space / len(temperatures))

    # Ad hoc smoothing parameter. This choice leads to the deviation
    # from the last point being maximum 0.05
    smoothing = 0.05 * np.sum(weights)

    sign = 1
    if temperatures[1] < temperatures[0]:
        # We have to revert the arrays (spline fitting expects the
        # x-values in increasing order)
        temperatures = temperatures[::-1]
        comp = comp[::-1]
        weights = weights[::-1]
        sign = -1
    spl = UnivariateSpline(temperatures, comp, k=k, w=weights, s=smoothing)
    predicted_comp = spl(target_temp)

    rgbimage = np.zeros(1)
    rank = mpi_rank()
    if rank == 0 and has_matplotlib:
        # Create a plot of how the spline performs
        fig = plt.figure()
        axis = fig.add_subplot(1, 1, 1)
        axis.plot(
            temperatures, comp, marker='^', color="black",
            label="History")
        temp_lin_space = np.linspace(np.min(temperatures),
                                     target_temp + sign * 40, 100)
        pred = spl(temp_lin_space)
        axis.plot(temp_lin_space, pred, "--", label="Spline")
        axis.plot([target_temp], [target_comp], 'x', label="Computed")
        axis.set_ylabel("Singlets")
        axis.set_xlabel("Temperature (K)")
        axis.legend()
        rgbimage = fig2rgb(fig)
        plt.close("all")
    rgbimage = mpi_bcast(rgbimage, root=0)
    return predicted_comp, rgbimage