def arrange_analysis_data(self, n_bias_step, n_ion_step, analysis_mode):
    """Re-collect the per-rank analysis scratch data of one bias step and
    pickle the merged result on the world master.

    Collective call: every rank reads back its own scratch record, the
    records are gathered over the k-point and domain communicators, and
    rank 0 writes 'analysis_data/ionic_step_<n>/[a]bias_step_<m>' and
    removes the per-rank 'AD' scratch files.

    Parameters:
        n_bias_step: index of the bias step whose data is re-arranged.
        n_ion_step: index of the ionic step (selects the output directory).
        analysis_mode: if true the file is named 'abias_step_*' instead of
            'bias_step_*'.
    """
    data = self.read_data(bias_step=n_bias_step, option='Analysis')
    domain_rank = data['domain_rank']
    kpt_rank = data['kpt_rank']
    # Sanity check: the scratch record must be read back on the same
    # ranks that originally wrote it.
    assert kpt_rank == self.kpt_comm.rank
    assert domain_rank == self.domain_comm.rank
    # Collect transmission, dos, current. Keys are prefixed with the
    # (k-point rank, domain rank) pair so gathered entries never collide.
    global_data = {}
    flag = 'K_' + str(kpt_rank) + 'D_' + str(domain_rank) + '_'
    global_data[flag + 'parpos'] = data['domain_parpos']
    global_data[flag + 'tc'] = data['tc']
    global_data[flag + 'dos'] = data['dos']
    global_data = gather_ndarray_dict(global_data, self.kpt_comm)
    # Density/potential slices are only domain-distributed, so they are
    # added after the k-point gather and collected over the domain comm.
    global_data[flag + 'vt'] = data['vt']
    global_data[flag + 'vtx'] = data['vtx']
    global_data[flag + 'vty'] = data['vty']
    global_data[flag + 'nt'] = data['nt']
    global_data[flag + 'ntx'] = data['ntx']
    global_data[flag + 'nty'] = data['nty']
    global_data = gather_ndarray_dict(global_data, self.domain_comm)
    # Rank-independent scalars/arrays are copied through unchanged.
    for name in ['lead_fermi', 'bias', 'gate', 'charge', 'magmom',
                 'local_magmom', 'domain_parsize', 'kpt_size',
                 'transmission_dimension', 'dos_dimension']:
        global_data[name] = data[name]
    global_data['newform'] = True
    world.barrier()
    if world.rank == 0:
        if analysis_mode:
            filename = '/abias_step_' + str(n_bias_step)
        else:
            filename = '/bias_step_' + str(n_bias_step)
        # open() + 'with' replaces the Python-2-only file() builtin and
        # guarantees the descriptor is closed even if dump() raises.
        with open('analysis_data/ionic_step_' + str(n_ion_step) + filename,
                  'wb') as fd:
            cPickle.dump(global_data, fd, 2)
        # Remove the per-rank scratch files. 'temperary_data' spelling is
        # kept: it must match the directory name used by the writer.
        for root, dirs, files in os.walk('temperary_data'):
            for name in files:
                if 'AD' in name:
                    os.remove(os.path.join(root, name))
def joint(self, zones):
    """Link node 7 of each (non-excluded) zone's path to the matching node
    of the neighbouring path, exchanging the node data over self.comm.

    Each rank handles a contiguous chunk of `zones`; the link records are
    broadcast-gathered so that the rank owning the linked node can copy
    its energy/function/self-energy into the destination path.
    """
    # Static partition of the zones over the communicator ranks.
    my_zones = np.array_split(zones, self.comm.size)[self.comm.rank]
    my_info_dict = {}
    num = 0
    for zone in my_zones:
        # Zones 3-6 are skipped — presumably interior zones with no
        # cross-path link; TODO confirm against the zone layout.
        if zone in [3, 4, 5, 6]:
            pass
        else:
            path_index = zone - 1
            # Node ids are encoded base-10 per refinement level, so one
            # zone spans 10**maxdepth consecutive ids.
            order = 10 ** self.maxdepth
            base = zone * order
            nid = base + 7
            # Linked node lives in the next zone's id block.
            link_nid = nid + order - 6
            link_path_index = link_nid // (10 ** self.maxdepth) - 1
            # Rank-unique key so the gather never collides.
            flag = str(self.comm.rank) + '_' + str(num)
            my_info_dict[flag] = np.array([nid, link_nid, path_index, link_path_index], int)
            num += 1
    info_dict = gather_ndarray_dict(my_info_dict, self.comm, broadcast=True)
    # NOTE(review): the sibling transfer() sorts the gathered keys because
    # dict ordering can differ between processors; this loop does not —
    # confirm that per-rank insertion order cannot matter here.
    for name in info_dict:
        nid, link_nid, path_index, link_path_index = info_dict[name]
        rank = self.get_rank(link_path_index, link_nid)
        # Only the rank that owns the linked node performs the copy.
        if self.comm.rank == rank:
            link_path = self.paths[link_path_index]
            index = link_path.nids.index(link_nid)
            function = link_path.functions[index]
            se = link_path.ses[index]
            energy = link_path.energies[index]
            self.paths[path_index].add_node(nid, energy, function, se)
def joint(self, zones):
    """Link node 7 of each (non-excluded) zone's path to the matching node
    of the neighbouring path, exchanging the node data over self.comm.
    """
    # Each rank works on its static share of the zones.
    local_zones = np.array_split(zones, self.comm.size)[self.comm.rank]
    local_info = {}
    count = 0
    # One zone spans 10**maxdepth consecutive node ids; invariant, hoisted.
    order = 10 ** self.maxdepth
    for zone in local_zones:
        if zone in [3, 4, 5, 6]:
            continue
        path_index = zone - 1
        nid = zone * order + 7
        link_nid = nid + order - 6
        link_path_index = link_nid // order - 1
        # Rank-unique key so the gathered dict never collides.
        key = str(self.comm.rank) + '_' + str(count)
        local_info[key] = np.array(
            [nid, link_nid, path_index, link_path_index], int)
        count += 1
    all_info = gather_ndarray_dict(local_info, self.comm, broadcast=True)
    for key in all_info:
        nid, link_nid, path_index, link_path_index = all_info[key]
        owner = self.get_rank(link_path_index, link_nid)
        if self.comm.rank != owner:
            continue
        # This rank owns the linked node: copy its data across.
        source = self.paths[link_path_index]
        pos = source.nids.index(link_nid)
        self.paths[path_index].add_node(nid,
                                        source.energies[pos],
                                        source.functions[pos],
                                        source.ses[pos])
def collect_contour(self, tp):
    """Gather the integration-contour energies of every local
    (spin, k-point) pair over the k-point communicator.

    Returns a dict with keys 'eq', 'ne' and 'loc' (equilibrium,
    non-equilibrium and, unless tp.ground, local contour); 'loc' is
    None for a ground-state calculation.
    """
    my_eq_contour = {}
    my_ne_contour = {}
    my_loc_contour = {}
    num = 0
    for s in range(tp.my_nspins):
        for q in range(tp.my_npk):
            # Key identifies the global (spin, k-point) pair of the
            # local (s, q) indices via the ks map row.
            flag = str(tp.my_ks_map[num, 0]) + str(tp.my_ks_map[num, 1])
            my_eq_contour[flag] = np.array(tp.eqpathinfo[s][q].energy)
            my_ne_contour[flag] = np.array(tp.nepathinfo[s][q].energy)
            if not tp.ground:
                my_loc_contour[flag] = np.array(tp.locpathinfo[s][q].energy)
            # BUG FIX: num was never advanced, so every (s, q) pair read
            # row 0 of my_ks_map and overwrote the same dict key.
            num += 1
    eq_contour = gather_ndarray_dict(my_eq_contour, tp.wfs.kpt_comm)
    ne_contour = gather_ndarray_dict(my_ne_contour, tp.wfs.kpt_comm)
    if not tp.ground:
        loc_contour = gather_ndarray_dict(my_loc_contour, tp.wfs.kpt_comm)
    else:
        loc_contour = None
    contour = {'eq': eq_contour, 'ne': ne_contour, 'loc': loc_contour}
    return contour
def collect_contour(self, tp):
    """Gather the integration-contour energies of every local
    (spin, k-point) pair over the k-point communicator.

    Returns a dict with keys 'eq', 'ne' and 'loc' (equilibrium,
    non-equilibrium and, unless tp.ground, local contour); 'loc' is
    None for a ground-state calculation.
    """
    my_eq_contour = {}
    my_ne_contour = {}
    my_loc_contour = {}
    num = 0
    for s in range(tp.my_nspins):
        for q in range(tp.my_npk):
            # Key identifies the global (spin, k-point) pair of the
            # local (s, q) indices via the ks map row.
            flag = str(tp.my_ks_map[num, 0]) + str(tp.my_ks_map[num, 1])
            my_eq_contour[flag] = np.array(tp.eqpathinfo[s][q].energy)
            my_ne_contour[flag] = np.array(tp.nepathinfo[s][q].energy)
            if not tp.ground:
                my_loc_contour[flag] = np.array(tp.locpathinfo[s][q].energy)
            # BUG FIX: num was never advanced, so every (s, q) pair read
            # row 0 of my_ks_map and overwrote the same dict key.
            num += 1
    eq_contour = gather_ndarray_dict(my_eq_contour, tp.wfs.kpt_comm)
    ne_contour = gather_ndarray_dict(my_ne_contour, tp.wfs.kpt_comm)
    if not tp.ground:
        loc_contour = gather_ndarray_dict(my_loc_contour, tp.wfs.kpt_comm)
    else:
        loc_contour = None
    contour = {'eq': eq_contour, 'ne': ne_contour, 'loc': loc_contour}
    return contour
def transfer(self, zones, depth):
    """Copy the two boundary nodes of every zone at refinement `depth`
    into their own path, fetching the data from the linked node over
    self.comm.

    Each rank prepares link records for its share of the zones; the
    records are broadcast-gathered and replayed in a globally sorted
    order so every rank applies the same sequence of add_node calls.
    """
    my_zones = np.array_split(zones, self.comm.size)[self.comm.rank]
    my_info_dict = {}
    num = 0
    # Stride separating the key blocks of the different ranks, so the
    # gathered keys are globally unique and numerically ordered by rank.
    maximum = 100000
    name_flags = []
    for zone in my_zones:
        path_index = zone // (10 ** depth) - 1
        # One zone at this depth spans this many consecutive node ids.
        order = 10 ** (self.maxdepth - depth)
        node_index = zone % 10
        # First boundary node of the zone.
        nid = zone * order + 1
        link_nid = (zone - node_index) * order + 2 * node_index - 1
        flag = str((self.comm.rank + 1) * maximum + num)
        my_info_dict[flag] = np.array([nid, link_nid, path_index], int)
        num += 1
        # Second boundary node of the zone.
        nid = zone * order + 7
        link_nid = (zone - node_index) * order + 2 * node_index + 1
        flag = str((self.comm.rank + 1) * maximum + num)
        my_info_dict[flag] = np.array([nid, link_nid, path_index], int)
        num += 1
    info_dict = gather_ndarray_dict(my_info_dict, self.comm, broadcast=True)
    for name in info_dict:
        # Keys are decimal-integer strings; int() replaces the original
        # eval(), which was both unsafe and slower.
        name_flags.append(int(name))
    name_flags = np.sort(name_flags)
    # The sort is necessary because the dict ordering can differ between
    # processors, and every rank must replay the records identically.
    for name_flag in name_flags:
        nid, link_nid, path_index = info_dict[str(name_flag)]
        rank = self.get_rank(path_index, link_nid)
        # Only the rank that owns the linked node performs the copy.
        if self.comm.rank == rank:
            path = self.paths[path_index]
            index = path.nids.index(link_nid)
            function = path.functions[index]
            energy = path.energies[index]
            se = path.ses[index]
            self.paths[path_index].add_node(nid, energy, function, se)
def transfer(self, zones, depth):
    """Copy the two boundary nodes of every zone at refinement `depth`
    into their own path, fetching the data from the linked node over
    self.comm.

    Each rank prepares link records for its share of the zones; the
    records are broadcast-gathered and replayed in a globally sorted
    order so every rank applies the same sequence of add_node calls.
    """
    my_zones = np.array_split(zones, self.comm.size)[self.comm.rank]
    my_info_dict = {}
    num = 0
    # Stride separating the key blocks of the different ranks, so the
    # gathered keys are globally unique and numerically ordered by rank.
    maximum = 100000
    name_flags = []
    for zone in my_zones:
        path_index = zone // (10 ** depth) - 1
        # One zone at this depth spans this many consecutive node ids.
        order = 10 ** (self.maxdepth - depth)
        node_index = zone % 10
        # First boundary node of the zone.
        nid = zone * order + 1
        link_nid = (zone - node_index) * order + 2 * node_index - 1
        flag = str((self.comm.rank + 1) * maximum + num)
        my_info_dict[flag] = np.array([nid, link_nid, path_index], int)
        num += 1
        # Second boundary node of the zone.
        nid = zone * order + 7
        link_nid = (zone - node_index) * order + 2 * node_index + 1
        flag = str((self.comm.rank + 1) * maximum + num)
        my_info_dict[flag] = np.array([nid, link_nid, path_index], int)
        num += 1
    info_dict = gather_ndarray_dict(my_info_dict, self.comm, broadcast=True)
    for name in info_dict:
        # Keys are decimal-integer strings; int() replaces the original
        # eval(), which was both unsafe and slower.
        name_flags.append(int(name))
    name_flags = np.sort(name_flags)
    # The sort is necessary because the dict ordering can differ between
    # processors, and every rank must replay the records identically.
    for name_flag in name_flags:
        nid, link_nid, path_index = info_dict[str(name_flag)]
        rank = self.get_rank(path_index, link_nid)
        # Only the rank that owns the linked node performs the copy.
        if self.comm.rank == rank:
            path = self.paths[path_index]
            index = path.nids.index(link_nid)
            function = path.functions[index]
            energy = path.energies[index]
            se = path.ses[index]
            self.paths[path_index].add_node(nid, energy, function, se)
def save_bias_step(self, tp):
    """Collect the analysis data of the current bias step on rank 0 and
    pickle it to 'analysis_data/ionic_step_<i>/[a]bias_step_<j>'.

    Collective call: must run on every rank, since the gathers and the
    charge/contour collection are communicator-wide operations. Resets
    self.data and advances the electronic/bias step counters at the end.
    """
    if not self.overhead_data_saved:
        self.save_overhead_data(tp)
        self.overhead_data_saved = True
    if 'tc' in tp.analysis_data_list:
        tc, dos = self.collect_transmission_and_dos(tp)
        if not tp.non_sc:
            current = self.calculate_current(tp, tc)
        else:
            current = np.array(0)
        # Prefix keys with the energy-contour rank so the later gather
        # over tp.contour.comm does not collide.
        flag = 'ER_' + str(tp.contour.comm.rank)
        if tp.wfs.kpt_comm.rank == 0:
            self.data[flag + '_tc'] = tc
            self.data[flag + '_dos'] = dos
        if tp.contour.comm.rank == 0:
            self.data['current'] = current
    nt, vt, ntx, vtx, nty, vty = self.abstract_d_and_v(tp)
    if world.rank == 0:
        # Explicit assignments replace the original eval(name) loop.
        self.data['nt'] = nt
        self.data['vt'] = vt
        self.data['ntx'] = ntx
        self.data['vtx'] = vtx
        self.data['nty'] = nty
        self.data['vty'] = vty
    if tp.non_sc or tp.analysis_mode:
        force = None
        contour = None
    else:
        if not tp.use_qzk_boundary and not tp.multi_leads:
            force = tp.calculate_force() * Hartree / Bohr
        else:
            force = tp.extended_calc.get_forces(
                tp.extended_atoms)[:len(tp.atoms)]
        tp.F_av = None
        contour = self.collect_contour(tp)
    # Collective calls: must run on every rank, before the rank-0 branch.
    charge = self.collect_charge(tp)
    magmom = tp.occupations.magmom
    local_magmom = tp.get_magnetic_moments()
    if world.rank == 0:
        # Explicit assignments replace the original eval(name) loop.
        self.data['lead_fermi'] = np.array(tp.lead_fermi)
        self.data['lead_pairs'] = np.array(self.lead_pairs)
        self.data['bias'] = np.array(tp.bias)
        self.data['gate'] = np.array(tp.gate)
        self.data['charge'] = charge
        self.data['magmom'] = magmom
        self.data['local_magmom'] = local_magmom
    # do not include contour now because it is a dict, not a array able to
    # collect, but will do it at last
    self.data = gather_ndarray_dict(self.data, tp.contour.comm)
    self.data['contour'] = contour
    self.data['force'] = force
    if tp.non_sc:
        self.data['total_energy'] = tp.guess_total_energy
    # NOTE(review): eval() on the strings in tp.special_datas is kept for
    # backward compatibility, but those strings must come from trusted
    # configuration only.
    for condition, obj, name in tp.special_datas:
        if eval(condition):
            self.data[name] = eval(obj)
    if world.rank == 0:
        if tp.analysis_mode:
            filename = '/abias_step_' + str(self.n_bias_step)
        else:
            filename = '/bias_step_' + str(self.n_bias_step)
        # open() + 'with' replaces the Python-2-only file() builtin and
        # guarantees the descriptor is closed even if dump() raises.
        with open('analysis_data/ionic_step_' + str(self.n_ion_step) +
                  filename, 'wb') as fd:
            cPickle.dump(self.data, fd, 2)
    self.data = {}
    self.n_ele_step = 0
    self.n_bias_step += 1
def save_bias_step(self, tp):
    """Collect the analysis data of the current bias step on rank 0 and
    pickle it to 'analysis_data/ionic_step_<i>/[a]bias_step_<j>'.

    Collective call: must run on every rank, since the gathers and the
    charge/contour collection are communicator-wide operations. Resets
    self.data and advances the electronic/bias step counters at the end.
    """
    if not self.overhead_data_saved:
        self.save_overhead_data(tp)
        self.overhead_data_saved = True
    if 'tc' in tp.analysis_data_list:
        tc, dos = self.collect_transmission_and_dos(tp)
        if not tp.non_sc:
            current = self.calculate_current(tp, tc)
        else:
            current = np.array(0)
        # Prefix keys with the energy-contour rank so the later gather
        # over tp.contour.comm does not collide.
        flag = 'ER_' + str(tp.contour.comm.rank)
        if tp.wfs.kpt_comm.rank == 0:
            self.data[flag + '_tc'] = tc
            self.data[flag + '_dos'] = dos
        if tp.contour.comm.rank == 0:
            self.data['current'] = current
    nt, vt, ntx, vtx, nty, vty = self.abstract_d_and_v(tp)
    if world.rank == 0:
        # Explicit assignments replace the original eval(name) loop.
        self.data['nt'] = nt
        self.data['vt'] = vt
        self.data['ntx'] = ntx
        self.data['vtx'] = vtx
        self.data['nty'] = nty
        self.data['vty'] = vty
    if tp.non_sc or tp.analysis_mode:
        force = None
        contour = None
    else:
        if not tp.use_qzk_boundary and not tp.multi_leads:
            force = tp.calculate_force() * Hartree / Bohr
        else:
            force = tp.extended_calc.get_forces(
                tp.extended_atoms)[:len(tp.atoms)]
        tp.F_av = None
        contour = self.collect_contour(tp)
    # Collective calls: must run on every rank, before the rank-0 branch.
    charge = self.collect_charge(tp)
    magmom = tp.occupations.magmom
    local_magmom = tp.get_magnetic_moments()
    if world.rank == 0:
        # Explicit assignments replace the original eval(name) loop.
        self.data['lead_fermi'] = np.array(tp.lead_fermi)
        self.data['lead_pairs'] = np.array(self.lead_pairs)
        self.data['bias'] = np.array(tp.bias)
        self.data['gate'] = np.array(tp.gate)
        self.data['charge'] = charge
        self.data['magmom'] = magmom
        self.data['local_magmom'] = local_magmom
    # do not include contour now because it is a dict, not a array able to
    # collect, but will do it at last
    self.data = gather_ndarray_dict(self.data, tp.contour.comm)
    self.data['contour'] = contour
    self.data['force'] = force
    if tp.non_sc:
        self.data['total_energy'] = tp.guess_total_energy
    # NOTE(review): eval() on the strings in tp.special_datas is kept for
    # backward compatibility, but those strings must come from trusted
    # configuration only.
    for condition, obj, name in tp.special_datas:
        if eval(condition):
            self.data[name] = eval(obj)
    if world.rank == 0:
        if tp.analysis_mode:
            filename = '/abias_step_' + str(self.n_bias_step)
        else:
            filename = '/bias_step_' + str(self.n_bias_step)
        # open() + 'with' replaces the Python-2-only file() builtin and
        # guarantees the descriptor is closed even if dump() raises.
        with open('analysis_data/ionic_step_' + str(self.n_ion_step) +
                  filename, 'wb') as fd:
            cPickle.dump(self.data, fd, 2)
    self.data = {}
    self.n_ele_step = 0
    self.n_bias_step += 1