def do_shift(resids, source_path, resid_thresh=1e-13):
    global n_sites, m_rad, n_ms, n_states
    results_dict = dict()
    with h5py.File(source_path, 'r', libver='latest') as f:
        print(os.path.basename(source_path))
        n_sites = f.attrs['n_sites']
        m_rad = f.attrs['floquet_m_radius']
        n_ms = 2*m_rad + 1
        n_states = n_sites * n_ms

        state_matrix = f['floquet_state']
        energy_matrix = f['floquet_energy']
        all_drive_freqs = f['critical_drive'][:]
        n_sets = all_drive_freqs.size // 2

        # NOTE: the loop deliberately starts at set 60
        for set_idx in progbar(np.arange(60, n_sets)):
            print(set_idx)
            below_thresh = np.where(resids[set_idx] < resid_thresh)[0]
            stable_states = state_matrix[set_idx*2, :, below_thresh]
            shifted_states = np.zeros_like(stable_states)
            stable_quasi_en = energy_matrix[2*set_idx, below_thresh]

            this_drive = all_drive_freqs[2*set_idx]
            half_drive = 0.5*this_drive
            # Split each quasienergy into a zone index m and a folded energy fbz_en
            # (note: this version uses a fixed offset of 100 rather than half_drive)
            m, fbz_en = np.divmod(stable_quasi_en + 100, this_drive)
            m = m.astype(int)
            fbz_en -= 100

            shifted_too_far_count = 0
            for state_idx in np.arange(len(below_thresh)):
                this_m = m[state_idx]
                m_x_n = this_m * n_sites
                if np.abs(this_m) > 2*m_rad:
                    # No pops can survive if it shifts this far
                    shifted_too_far_count += 1
                    continue
                dest_min_idx = 0 if this_m >= 0 else -m_x_n
                dest_max_idx = n_states if this_m <= 0 else n_states - m_x_n
                dest_slice = np.s_[dest_min_idx:dest_max_idx, state_idx]

                src_min_idx = 0 if this_m <= 0 else m_x_n
                src_max_idx = n_states if this_m >= 0 else n_states + m_x_n
                src_slice = np.s_[src_min_idx:src_max_idx, state_idx]

                shifted_states[dest_slice] = stable_states[src_slice]

            results_dict[set_idx] = {'fbz_en': fbz_en.copy(),
                                     'm': m.copy()*this_drive,
                                     'shifted_state': shifted_states.copy(),
                                     'too_far': shifted_too_far_count,
                                     'below_thresh_idx': below_thresh,
                                     'matching_resids': resids[set_idx, below_thresh]}
    return results_dict
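# Sketch of the block-shift bookkeeping in do_shift, with made-up toy sizes:
# shifting a state column by this_m Floquet blocks moves its entries by
# this_m * n_sites rows and zero-pads whatever falls off the edge.
_n_sites, _m_rad = 2, 1
_n_states = _n_sites * (2*_m_rad + 1)
_col = np.arange(_n_states, dtype=float).reshape(-1, 1)   # one fake state column
_this_m = 1
_m_x_n = _this_m * _n_sites
_shifted = np.zeros_like(_col)
_shifted[0:_n_states - _m_x_n, 0] = _col[_m_x_n:_n_states, 0]
print(_shifted.ravel())   # [2. 3. 4. 5. 0. 0.]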
this_path = largest_m_path

# Check quasienergy vs. residual, make sure we have enough unique states
with h5py.File(this_path, 'r', libver="latest") as f:
    print(os.path.basename(this_path))
    states_matrix = f['floquet_state']
    m_rad = ((states_matrix.shape[-1] // n_sites) - 1) // 2
    n_states = states_matrix.shape[-1]

    all_raw_residuals = np.empty((n_sets // set_stride, n_states))
    resid_pops = np.empty((n_states, n_time_samples))

    phasor = np.zeros((n_sites, (2 * m_rad + 1) * n_sites), dtype=complex)
    where_mat = np.tile(np.diag(np.full(n_sites, True)), 2 * m_rad + 1)
    m_block = np.kron(np.arange(-m_rad, m_rad + 1),
                      np.identity(n_sites)).astype(complex)

    for out_idx, this_set in progbar(enumerate(np.arange(0, n_sets, set_stride)),
                                     every=1, size=n_sets // set_stride):
        these_states = states_matrix[this_set, :]
        for idx, t in progbar(enumerate(np.linspace(0.0, 1.0, n_time_samples)),
                              size=n_time_samples, every=32):
            # Site amplitudes at time t: sum the m blocks with phases exp(-2*pi*i*m*t)
            np.exp(-two_pi_i * m_block * t, out=phasor, where=where_mat)
            now_amps = np.matmul(phasor, these_states)
            resid_pops[:, idx] = 1.0 - np.sum(
                np.abs(np.conjugate(now_amps) * now_amps), axis=0)
        all_raw_residuals[out_idx] = np.sum(np.abs(resid_pops), axis=1) / n_time_samples
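# Minimal sanity check of the residual measure above (a sketch with hypothetical
# toy sizes, assuming the same column layout: each Floquet state is a stack of
# 2*m_rad+1 blocks of length n_sites).  With a single block (m_rad = 0) the
# reconstructed site amplitudes equal the state itself, so a normalized column
# should give a residual of ~0.
_n = 4
_state = np.random.default_rng(0).normal(size=(_n, 1))
_state /= np.linalg.norm(_state, axis=0)
_phasor = np.zeros((_n, _n), dtype=complex)
_where = np.diag(np.full(_n, True))
_m_blk = np.kron(np.arange(0, 1), np.identity(_n)).astype(complex)   # m_rad = 0
np.exp(-two_pi_i * _m_blk * 0.3, out=_phasor, where=_where)
_amps = _phasor @ _state
print(1.0 - np.sum(np.abs(_amps)**2, axis=0))   # ~[0.]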
clear_first_last = True
if clear_first_last:
    last_set_residuals_dict.clear()
    first_set_residuals_dict.clear()
    del last_set_residuals_dict, first_set_residuals_dict, residuals


# In[42]:


with h5py.File(largest_m_path, 'r', libver="latest") as f:
    gamma_list = f['scan_values'][:]

# Thresholds 1e-6 ... 1e-2; count how many states fall below each one per set
thresh_list = np.power(10., np.arange(-6, -1))
count_below_thresh = np.empty((len(thresh_list), all_raw_residuals.shape[0]))
for idx, this_thresh in progbar(enumerate(thresh_list), every=1):
    count_below_thresh[idx] = np.sum(all_raw_residuals < this_thresh, axis=1)


# In[45]:


for idx, this_thresh in enumerate(thresh_list):
    plt.plot(gamma_list[::set_stride], count_below_thresh[idx], '.',
             label=this_thresh)
#plt.ylim(0, 2.5*n_sites)
plt.axhline(n_sites, c='k', ls=':')
plt.axhline(2*n_sites, c='k', ls=':')
#plt.legend(thresh_list)
plt.legend()
import numpy as np
from matplotlib import pyplot as plt
import h5py
import os
import glob
from zss_progbar import log_progress as progbar
import re
import time


# In[2]:


# Empty range: this delay loop is effectively a no-op as written
for t in progbar(np.arange(0)):
    time.sleep(60)


# In[3]:


# Pull the Floquet m radius out of a data file name of the form '..._m<number>_out.h5'
m_re = re.compile(r'.*_m(\d+)_out.h5')

def get_m(file_name):
    return int(m_re.search(file_name).group(1))

base_path = '/home/zachsmith/gitlab/breathing-floquet/notebooks/data/mscan/'
data_dirs = [os.path.basename(dirpath[:-1])
             for dirpath in glob.glob(base_path + "critical*/")]
data_paths = sorted([os.path.join(base_path, this_dir, this_dir + '_out.h5')
                     for this_dir in data_dirs], key=get_m)
for idx, path in enumerate(data_paths):
    print('{}:\t{}'.format(idx, os.path.basename(path)))
largest_m_path = data_paths[-1]
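# Quick check of the m-radius filename parser above on a hypothetical file name
# (the name itself is made up for illustration):
assert get_m('critical_scan_m12_out.h5') == 12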
with h5py.File(this_path, 'r', libver="latest") as f:
    print(os.path.basename(this_path))
    states_matrix = f['floquet_state']
    m_rad = ((states_matrix.shape[-1] // n_sites) - 1) // 2
    n_states = states_matrix.shape[-1]

    all_file_residuals = np.empty((n_sets // set_stride, n_states))
    resid_pops = np.empty((n_states, n_time_samples))

    phasor = np.zeros((n_sites, (2 * m_rad + 1) * n_sites), dtype=complex)
    where_mat = np.tile(np.diag(np.full(n_sites, True)), 2 * m_rad + 1)
    m_block = np.kron(np.arange(-m_rad, m_rad + 1),
                      np.diag(np.full(n_sites, -two_pi_i))).astype(complex)
    site_amps = np.empty((n_sites, n_states), dtype=complex)
    #one_site_m_block = (-two_pi_i * np.arange(-m_rad, m_rad+1)).reshape((1,-1,1))
    #one_site_phasor = np.empty_like(one_site_m_block)

    for out_idx, this_set in progbar(enumerate(np.arange(0, n_sets, set_stride)),
                                     every=1, size=n_sets // set_stride):
        these_states = states_matrix[this_set, :]
        for idx, t in enumerate(np.linspace(0.0, 1.0, n_time_samples)):
            np.exp(m_block * t, out=phasor, where=where_mat)
            #np.exp(one_site_m_block * t, out=one_site_phasor)
            np.matmul(phasor, these_states, out=site_amps)
            #%time np.sum(one_site_phasor * these_states.reshape(n_sites, -1, n_states), axis=1, out=site_amps)
            resid_pops[:, idx] = 1.0 - np.sum(
                np.real(np.conjugate(site_amps) * site_amps), axis=0)
        # Time-averaged missing population per state, sorted so the most stable
        # (smallest-residual) states come first
        all_file_residuals[out_idx] = np.sort(
            np.sum(np.abs(resid_pops), axis=1)) / n_time_samples


# In[48]:


result_file_path = os.path.join(base_path, 'residual_pops.h5')
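# Sketch of how the computed residuals could be written to the result file named
# above (assumed layout: one dataset per source file, keyed by its base name;
# the original saving cell is not shown in this section):
with h5py.File(result_file_path, 'a', libver='latest') as f_out:
    _ds_name = os.path.splitext(os.path.basename(this_path))[0]   # hypothetical key
    if _ds_name in f_out:
        del f_out[_ds_name]
    f_out.create_dataset(_ds_name, data=all_file_residuals)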
def find_fbz_qes(resids, source_path, resid_thresh=5e-14, diff_split=1e-6,
                 stride=1, offset=0):
    global n_sites, m_rad, n_ms, n_states
    with h5py.File(source_path, 'r', libver='latest') as f:
        print(os.path.basename(source_path))
        n_sites = f.attrs['n_sites']
        m_rad = f.attrs['floquet_m_radius']
        n_ms = 2 * m_rad + 1
        n_states = n_sites * n_ms

        state_matrix = f['floquet_state']
        energy_matrix = f['floquet_energy']
        try:
            all_drive_freqs = f['critical_drive'][:]
        except KeyError:
            # Fall back to the misspelled dataset name used in older output files
            all_drive_freqs = f['critcal_drive'][:]
        n_sets = all_drive_freqs[offset::stride].size

        group_idx = np.full((n_sets, n_states), -1, dtype=np.int8)
        all_fqe_means = np.empty((n_sets, n_sites))
        all_fqe_stdev = np.empty_like(all_fqe_means)
        all_fqe_count = np.empty_like(all_fqe_means)
        all_fqe_minmax = np.empty((n_sets, n_sites, 2))

        for set_idx in progbar(np.arange(0, n_sets)):
            below_thresh = np.where(resids[set_idx] < resid_thresh)[0]
            stable_quasi_en = energy_matrix[stride * set_idx + offset, below_thresh]

            this_drive = all_drive_freqs[stride * set_idx + offset]
            half_drive = 0.5 * this_drive
            # Fold each quasienergy into the first Floquet zone; m is the zone index
            m, fbz_en = np.divmod(stable_quasi_en + half_drive, this_drive)
            m = m.astype(int)
            fbz_en -= half_drive

            # Group nearly-equal folded energies by splitting at gaps > diff_split
            order = np.argsort(fbz_en)
            sorted_en = fbz_en[order]
            split_spots = np.argwhere(np.diff(sorted_en) > diff_split)[:, 0] + 1
            group_list = np.split(sorted_en, split_spots)
            order_lists = np.split(order, split_spots)

            if set_idx == 0:
                plt.semilogy(np.diff(sorted_en), '.r')
                plt.axhline(diff_split, c='grey', ls=':', lw=1)

            n_deg = 0
            for idx in np.arange(min(n_sites, len(order_lists))):
                try:
                    resorted_idx = np.sort(order_lists[idx])
                    reorder_idx = np.argsort(order_lists[idx])
                    out_idx = idx + n_deg
                    if (np.unique(m[order_lists[idx]]).size
                            != order_lists[idx].size):
                        # Likely degeneracy; assume only two states for now
                        stable_states = state_matrix[
                            stride * set_idx + offset, :,
                            below_thresh[resorted_idx]]
                        two_sets = check_overlaps(stable_states,
                                                  m[resorted_idx], n_sites)

                        # First set
                        deg_set = resorted_idx[two_sets]
                        deg_group = group_list[idx][reorder_idx][two_sets]
                        group_idx[set_idx, below_thresh[deg_set]] = out_idx
                        all_fqe_means[set_idx, out_idx] = deg_group.mean()
                        all_fqe_stdev[set_idx, out_idx] = deg_group.std()
                        all_fqe_minmax[set_idx, out_idx, 0] = deg_group.min()
                        all_fqe_minmax[set_idx, out_idx, 1] = deg_group.max()
                        all_fqe_count[set_idx, out_idx] = deg_group.shape[0]

                        # Second set
                        n_deg += 1
                        out_idx = idx + n_deg
                        deg_set = resorted_idx[np.logical_not(two_sets)]
                        deg_group = group_list[idx][reorder_idx][
                            np.logical_not(two_sets)]
                        group_idx[set_idx, below_thresh[deg_set]] = out_idx
                        all_fqe_means[set_idx, out_idx] = deg_group.mean()
                        all_fqe_stdev[set_idx, out_idx] = deg_group.std()
                        all_fqe_minmax[set_idx, out_idx, 0] = deg_group.min()
                        all_fqe_minmax[set_idx, out_idx, 1] = deg_group.max()
                        all_fqe_count[set_idx, out_idx] = deg_group.shape[0]
                    else:
                        group_idx[set_idx, below_thresh[order_lists[idx]]] = out_idx
                        all_fqe_means[set_idx, out_idx] = group_list[idx].mean()
                        all_fqe_stdev[set_idx, out_idx] = group_list[idx].std()
                        all_fqe_minmax[set_idx, out_idx, 0] = group_list[idx].min()
                        all_fqe_minmax[set_idx, out_idx, 1] = group_list[idx].max()
                        all_fqe_count[set_idx, out_idx] = group_list[idx].shape[0]
                except:
                    print(idx, set_idx, out_idx, n_deg)
                    raise

    print("Found {} states with {} pairs of degenerate states".format(
        out_idx + 1, n_deg))
    plt.semilogy(np.diff(sorted_en), '.k')

    return all_fqe_means, all_fqe_stdev, all_fqe_minmax, all_fqe_count, group_idx
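# Hypothetical call pattern for find_fbz_qes (a sketch only; check_overlaps is
# defined elsewhere in the notebook, and the stride is assumed to match the
# set_stride used when all_file_residuals was computed):
if 'check_overlaps' in globals():
    (fqe_means, fqe_stdev, fqe_minmax,
     fqe_count, fqe_group_idx) = find_fbz_qes(all_file_residuals, largest_m_path,
                                              resid_thresh=5e-14,
                                              stride=set_stride, offset=0)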