def init_task_list(self):
    '''init task list

    input maps [A1, A2, A3, ..., An] are paired with themselves,
    i.e. A1 x A1, A2 x A2, ..., An x An
    '''
    if self.mode_list is None:
        self.mode_list = self.params['mode_list']

    with h5.File(self.input_files[0], 'r') as f:
        map_tmp = al.make_vect(al.load_h5(f, 'clean_map'))
        self.map_info = map_tmp.info

    task_list = []
    for ii in range(self.input_files_num):
        input_file_name_ii = self.input_files[ii].split('/')[-1]
        input_file_name_ii = input_file_name_ii.replace('.h5', '')
        input_file_name_jj = input_file_name_ii
        if self.params['svd_key'] is not None:
            input_file_name_ii = self.params['svd_key'][0]
            input_file_name_jj = self.params['svd_key'][1]
        tind_l = (ii, )
        tind_r = (ii, )
        tind_o = [input_file_name_ii, input_file_name_jj]
        task_list.append([tind_l, tind_r, tind_o])

        for kk in self.mode_list:
            self.create_dataset(ii, 'cleaned_%02dmode/' % kk + input_file_name_ii,
                                dset_shp=map_tmp.shape, dset_info=map_tmp.info)
            if input_file_name_jj != input_file_name_ii:
                self.create_dataset(ii, 'cleaned_%02dmode/' % kk + input_file_name_jj,
                                    dset_shp=map_tmp.shape, dset_info=map_tmp.info)
            #print 'cleaned_%02dmode/Combined'%kk
            self.create_dataset(ii, 'cleaned_%02dmode/Combined' % kk,
                                dset_shp=map_tmp.shape, dset_info=map_tmp.info)

        self.create_dataset(ii, 'weight', dset_shp=map_tmp.shape,
                            dset_info=map_tmp.info)
        self.create_dataset(ii, 'mask', dset_shp=map_tmp.shape[:1])

        self.df_out[ii]['mode_list'] = self.mode_list

    self.task_list = task_list
    self.dset_shp = map_tmp.shape
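# Illustrative sketch (hedged, read off the code above, not from any source
# documentation): with two input files A.h5 and B.h5 and mode_list = [2, 5],
# each map is paired with itself,
#
#   task_list = [[(0,), (0,), ['A', 'A']],
#                [(1,), (1,), ['B', 'B']]]
#
# and each output file holds the datasets 'cleaned_02mode/A' (or B),
# 'cleaned_02mode/Combined', 'cleaned_05mode/...', plus 'weight' and 'mask'.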
def theory_power_spectrum(map_tmp, bin_centers, unitless=True, cross=False):
    r"""Simple caller to output a theory power spectrum."""
    with h5.File(map_tmp, 'r') as hf:
        zspace_cube = algebra.make_vect(algebra.load_h5(hf, 'clean_map'))
    simobj = Corr21cm.like_kiyo_map(zspace_cube)
    pwrspec_input = simobj.get_pwrspec(bin_centers, cross)
    if unitless:
        pwrspec_input *= bin_centers ** 3. / 2. / np.pi / np.pi
    return pwrspec_input
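# Hedged usage sketch (the file name and k bins are hypothetical): the map file
# must contain a 'clean_map' dataset; with unitless=True the returned quantity
# is the dimensionless power Delta^2(k) = k^3 P(k) / (2 pi^2) at `kbin`.
#
#   import numpy as np
#   kbin = np.logspace(-2, 0, 20)          # k-bin centres
#   delta2 = theory_power_spectrum('sim_map.h5', kbin, unitless=True)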
def load_transfer_func(self, transfer_func_path):
    if transfer_func_path is not None:
        with h5.File(transfer_func_path, 'r') as f:
            tf = al.make_vect(al.load_h5(f, 'ps3d'))
        # invert the 3D transfer function; empty bins are set to inf first
        # so that they map to zero after inversion
        tf[tf == 0] = np.inf
        tf = 1. / tf
    else:
        tf = None
    return tf
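# Hedged note (assumption, not confirmed by the source): the 'ps3d' dataset is
# taken to be the 3D transfer function itself, so the returned array is its
# element-wise reciprocal with empty bins mapped to zero.  A measured 3D
# spectrum would then be corrected by multiplication, e.g.
#
#   tf = self.load_transfer_func('transfer_func.h5')   # hypothetical path
#   if tf is not None:
#       ps3d_corrected = ps3d * tf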
def load_2dtr_from3d(ps_path, ps_name, ps_ref, kbin_x_edges=None, kbin_y_edges=None):
    with h5.File(ps_path + ps_ref, 'r') as f:
        ps3d_ref = al.make_vect(al.load_h5(f, 'ps3d'))

    with h5.File(ps_path + ps_name, 'r') as f:
        ps3d = al.make_vect(al.load_h5(f, 'ps3d'))
        #ps3d[np.abs(ps3d)<1.e-20] = np.inf
        #ps3d = ps3d * ps3d_ref.copy()

        if kbin_x_edges is None:
            x = f['kbin_x_edges'][:]
        else:
            x = kbin_x_edges

        if kbin_y_edges is None:
            y = f['kbin_y_edges'][:]
        else:
            y = kbin_y_edges

    # find k_perp by not including k_nu in the distance
    k_perp = binning.radius_array(ps3d, zero_axes=[0])

    # find k_para by not including k_RA,Dec in the distance
    k_para = binning.radius_array(ps3d, zero_axes=[1, 2])

    ps2d = binning.bin_an_array_2d(ps3d, k_perp, k_para, x, y)[1]
    ps2d_ref = binning.bin_an_array_2d(ps3d_ref, k_perp, k_para, x, y)[1]

    ps2d_ref[ps2d_ref == 0] = np.inf
    ps2d /= ps2d_ref
    ps2d = ps2d ** 0.5
    ps2d[ps2d == 0] = np.inf
    #ps2d = np.ma.masked_equal(ps2d, 0)
    ps2d = 1. / ps2d

    return ps2d, x, y
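# Hedged usage sketch (path and file names are hypothetical): the returned array
# is the reciprocal of sqrt(P_out / P_ref) on the (k_perp, k_para) grid, i.e. an
# inverse 2D transfer function that can be multiplied into a measured 2D power
# spectrum to compensate for signal loss; the exact convention (applied once or
# squared) depends on whether the spectrum is a cross- or auto-correlation.
#
#   tr2d, kx, ky = load_2dtr_from3d('ps/', 'ps_cleaned.h5', 'ps_input.h5')
#   ps2d_corrected = ps2d_measured * tr2d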
def init_task_list(self):
    with h5.File(self.input_files[0], 'r') as f:
        map_tmp = al.make_vect(al.load_h5(f, 'delta'))
        self.map_info = map_tmp.info

    xps_num = self.input_files_num

    task_list = []
    for ii in range(xps_num):
        tind_l = (ii, )
        tind_r = (ii, )
        tind_o = (ii, )
        task_list.append([tind_l, tind_r, tind_o])

    self.task_list = task_list
    self.dset_shp = (xps_num, )
def make_optical_sim(self):
    if self.params['selection'] is None:
        print('optical sim needs a selection function, pass')
        return
    else:
        with h5py.File(self.params['selection'], 'r') as f:
            _sel = al.load_h5(f, 'separable')
            _axis_names = _sel.info['axes']
            _sel = al.make_vect(_sel, axis_names=_axis_names)
        _sel_ra = _sel.get_axis('ra')
        _sel_dec = _sel.get_axis('dec')
        _sel = np.ma.masked_equal(_sel, 0)

    # the simulated cube may have a different shape than the selection
    # function, so we 2d-interpolate the (ra, dec) mean of the selection
    # onto the simulated map grid
    _sel_mean = np.ma.mean(_sel, axis=0)
    _sel_freq = np.ma.sum(_sel, axis=(1, 2))
    _sel_freq /= np.ma.sum(_sel_freq)
    #_cut = np.percentile(_sel_mean[~_sel_mean.mask], 60)
    #_sel_mean[_sel_mean>_cut] = _cut
    _sel_2dintp = interp2d(_sel_dec, _sel_ra, _sel_mean,
                           bounds_error=False, fill_value=0)
    _ra = self.map_tmp.get_axis('ra')
    _dec = self.map_tmp.get_axis('dec')
    _ra_s = np.argsort(_ra)
    _sel = _sel_2dintp(_dec, _ra[_ra_s])[_ra_s, ...]
    #_sel = _sel * al.ones_like(self.map_tmp)
    _sel = _sel * _sel_freq[:, None, None]

    if not hasattr(self, 'sim_map_delta'):
        self.make_delta_sim()

    poisson_vect = np.vectorize(np.random.poisson)
    mean_num_gal = (self.sim_map_delta + 1.) * _sel
    self.sim_map_optsim = poisson_vect(mean_num_gal)
    # NOTE: the line below overwrites the Poisson realization with its mean,
    # so as written the simulated optical map carries no shot noise
    self.sim_map_optsim = mean_num_gal

    _sel[_sel == 0] = np.inf
    self.sim_map_optsim = self.sim_map_optsim / _sel - 1.
    _sel[_sel == np.inf] = 0.
    self.sel = _sel
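# Worked relation (read off the code above): the expected galaxy count per voxel
# is N_mean = (1 + delta_sim) * W, with W the interpolated selection function,
# and the recovered overdensity is delta_opt = N / W - 1.  With the Poisson draw
# overwritten as noted, delta_opt reduces exactly to delta_sim wherever W != 0.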
def read_input(self):
    for input_file in self.input_files:
        print(input_file)
        self.open(input_file)

    self.map_tmp = al.make_vect(al.load_h5(self.df_in[0], 'dirty_map'))
    self.map_shp = self.map_tmp.shape

    for output_file in self.output_files:
        output_file = output_path(output_file,
                                  relative=not output_file.startswith('/'))
        self.allocate_output(output_file, 'w')
        self.create_dataset_like(-1, 'clean_map', self.map_tmp)
        self.create_dataset_like(-1, 'noise_diag', self.map_tmp)

    return 1
def init_task_list(self):
    with h5.File(self.input_files[0], 'r') as f:
        map_tmp = al.make_vect(al.load_h5(f, 'clean_map'))
        ant_n, pol_n = map_tmp.shape[:2]
        self.map_info = map_tmp.info

    task_list = []
    for ii in range(self.input_files_num):
        for jj in range(ant_n):
            for kk in range(pol_n):
                tind_l = (ii, jj, kk)
                tind_r = tind_l
                tind_o = tind_l
                task_list.append([tind_l, tind_r, tind_o])

    self.task_list = task_list
    self.dset_shp = (self.input_files_num, ant_n, pol_n)
def init_task_list(self):
    with h5.File(self.input_files[0], 'r') as f:
        map_tmp = al.make_vect(al.load_h5(f, self.params['map_key'][0]))
        ant_n, pol_n = map_tmp.shape[:2]
        self.map_info = map_tmp.info

    # each file in the first half of the input list is crossed with the
    # corresponding file in the second half
    xps_num = self.input_files_num // 2

    task_list = []
    for ii in range(xps_num):
        #for jj in range(ant_n):
        #    for kk in range(pol_n):
        tind_l = (ii, )
        tind_r = (ii + xps_num, )
        tind_o = tind_l
        task_list.append([tind_l, tind_r, tind_o])

    self.task_list = task_list
    self.dset_shp = (xps_num, )
def read_input(self):
    for input_file in self.input_files:
        if mpiutil.rank0:
            logger.info('%s' % input_file)
        self.open(input_file)

    map_tmp = al.load_h5(self.df_in[0], 'dirty_map')
    self.map_tmp = al.make_vect(map_tmp, axis_names=map_tmp.info['axes'])
    self.map_shp = self.map_tmp.shape

    for output_file in self.output_files:
        output_file = output_path(output_file,
                                  relative=not output_file.startswith('/'))
        self.allocate_output(output_file, 'w')
        self.create_dataset_like(-1, 'clean_map', self.map_tmp)
        self.create_dataset_like(-1, 'noise_diag', self.map_tmp)
        self.create_dataset_like(-1, 'dirty_map', self.map_tmp)

    return 1
def load_maps(dm_path, dm_file, name='clean_map'):
    with h5.File(dm_path + dm_file, 'r') as f:
        print(list(f.keys()))
        imap = al.load_h5(f, name)
        imap = al.make_vect(imap, axis_names=imap.info['axes'])
        #imap = al.make_vect(al.load_h5(f, name))
        freq = imap.get_axis('freq')
        #print(freq[1] - freq[0])
        #print(freq[0], freq[-1])
        ra = imap.get_axis('ra')
        dec = imap.get_axis('dec')
        ra_edges = imap.get_axis_edges('ra')
        dec_edges = imap.get_axis_edges('dec')
        #print(imap.get_axis('freq'))
        try:
            mask = f['mask'][:]
        except KeyError:
            mask = None

    return imap, ra, dec, ra_edges, dec_edges, freq, mask
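# Hedged usage sketch (path and file name are hypothetical): load a cleaned map
# together with its coordinate axes and frequency mask, then average over
# frequency.
#
#   imap, ra, dec, ra_edges, dec_edges, freq, mask = \
#       load_maps('./maps/', 'cleaned_map.h5', name='clean_map')
#   if mask is not None:
#       imap[mask.astype('bool')] = 0.
#   imap_2d = imap.mean(axis=0)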
def show_map(map_path, map_type, indx=(), figsize=(10, 4),
             xlim=None, ylim=None, logscale=False, vmin=None, vmax=None,
             sigma=2., inv=False, mK=True, title='', c_label=None,
             factorize=False, nvss_path=None, smoothing=False, opt=False,
             print_info=False, submean=False):

    ext = os.path.splitext(map_path)[-1]
    if ext == '.h5':
        with h5.File(map_path, 'r') as f:
            keys = tuple(f.keys())
            imap = al.load_h5(f, map_type)
            if print_info:
                logger.info(('%s ' * len(keys)) % keys)
                print(imap.info)
            try:
                mask = f['mask'][:].astype('bool')
            except KeyError:
                mask = None
    elif ext == '.npy':
        imap = al.load(map_path)
        mask = None
    else:
        raise IOError('%s does not exist' % map_path)

    imap = al.make_vect(imap, axis_names=imap.info['axes'])
    freq = imap.get_axis('freq')
    ra = imap.get_axis('ra')
    dec = imap.get_axis('dec')
    ra_edges = imap.get_axis_edges('ra')
    dec_edges = imap.get_axis_edges('dec')

    if map_type == 'noise_diag' and factorize:
        imap = fgrm.make_noise_factorizable(imap)
        #imap[np.abs(imap) < imap.max() * 1.e-4] = 0.

    imap = np.ma.masked_equal(imap, 0)
    imap = np.ma.masked_invalid(imap)
    if mask is not None:
        imap[mask] = np.ma.masked

    imap = imap[indx]
    freq = freq[indx[-1]]
    if isinstance(indx[-1], slice):
        freq = (freq[0], freq[-1])
        #print(imap.shape)
        imap = np.ma.mean(imap, axis=0)
    else:
        freq = (freq, )

    if not opt:
        if mK:
            if map_type == 'noise_diag':
                imap = imap * 1.e6
                unit = r'$[\rm mK]^2$'
            else:
                imap = imap * 1.e3
                unit = r'$[\rm mK]$'
        else:
            if map_type == 'noise_diag':
                unit = r'$[\rm K]^2$'
            else:
                unit = r'$[\rm K]$'
    else:
        unit = r'$\delta N$'
    if c_label is None:
        c_label = unit

    if inv:
        imap[imap == 0] = np.inf
        imap = 1. / imap

    if xlim is None:
        xlim = [ra_edges.min(), ra_edges.max()]
    if ylim is None:
        ylim = [dec_edges.min(), dec_edges.max()]

    #imap -= np.ma.mean(imap)
    if smoothing:
        _sig = 3. / (8. * np.log(2.))**0.5 / 1.
        imap = gf(imap, _sig)

    if submean:
        imap -= np.ma.mean(imap)

    if logscale:
        imap = np.ma.masked_less(imap, 0)
        if vmin is None: vmin = np.ma.min(imap)
        if vmax is None: vmax = np.ma.max(imap)
        norm = mpl.colors.LogNorm(vmin=vmin, vmax=vmax)
    else:
        if sigma is not None:
            if vmin is None: vmin = np.ma.mean(imap) - sigma * np.ma.std(imap)
            if vmax is None: vmax = np.ma.mean(imap) + sigma * np.ma.std(imap)
        else:
            if vmin is None: vmin = np.ma.min(imap)
            if vmax is None: vmax = np.ma.max(imap)
            #if vmax is None: vmax = np.ma.median(imap)
        norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)

    fig = plt.figure(figsize=figsize)
    l = 0.08 * 10. / figsize[0]
    b = 0.08 * 4. / figsize[1]
    w = 1 - 0.20 * 10. / figsize[0]
    h = 1 - 0.10 * 4. / figsize[1]
    ax = fig.add_axes([l, b, w, h])
    l = 1 - 0.11 * 10. / figsize[0]
    b = 0.20 * 4 / figsize[1]
    w = 1 - 0.10 * 10 / figsize[0] - l
    h = 1 - 0.34 * 4 / figsize[1]
    cax = fig.add_axes([l, b, w, h])
    ax.set_aspect('equal')

    #imap = np.sum(imap, axis=1)
    #imap = np.array(imap)
    cm = ax.pcolormesh(ra_edges, dec_edges, imap.T, norm=norm)
    if len(freq) == 1:
        ax.set_title(title + r'${\rm Frequency}\, %7.3f\,{\rm MHz}$' % freq)
    else:
        ax.set_title(title
            + r'${\rm Frequency}\, %7.3f\,{\rm MHz}$ - $%7.3f\,{\rm MHz}$' % freq)
    ax.set_xlim(xlim)
    ax.set_ylim(ylim)
    ax.set_xlabel(r'${\rm RA}\,[^\circ]$')
    ax.set_ylabel(r'${\rm Dec}\,[^\circ]$')

    nvss_range = [
        [ra_edges.min(), ra_edges.max(), dec_edges.min(), dec_edges.max()],
    ]
    if nvss_path is not None:
        nvss_cat = plot_waterfall.get_nvss_radec(nvss_path, nvss_range)
        nvss_sel = nvss_cat['FLUX_20_CM'] > 10.
        nvss_ra = nvss_cat['RA'][nvss_sel]
        nvss_dec = nvss_cat['DEC'][nvss_sel]
        ax.plot(nvss_ra, nvss_dec, 'ko', mec='k', mfc='none', ms=8, mew=1.5)

        _sel = nvss_cat['FLUX_20_CM'] > 100.
        _id = nvss_cat['NAME'][_sel]
        _ra = nvss_cat['RA'][_sel]
        _dec = nvss_cat['DEC'][_sel]
        _flx = nvss_cat['FLUX_20_CM'][_sel]
        for i in range(np.sum(_sel)):
            ra_idx = np.digitize(_ra[i], ra_edges) - 1
            dec_idx = np.digitize(_dec[i], dec_edges) - 1
            ax.plot(ra[ra_idx], dec[dec_idx], 'wx', ms=10, mew=2)
            _c = SkyCoord(_ra[i] * u.deg, _dec[i] * u.deg)
            print('%s [RA,Dec]:' % _id[i]
                  + '[%7.4fd' % _c.ra.deg
                  + '(%dh%dm%6.4f) ' % _c.ra.hms
                  + ': %7.4fd], FLUX %7.4f Jy' % (_c.dec.deg, _flx[i] / 1000.))

    if not logscale:
        ticks = list(np.linspace(vmin, vmax, 5))
        ticks_label = []
        for x in ticks:
            ticks_label.append(r"$%5.2f$" % x)
        fig.colorbar(cm, ax=ax, cax=cax, ticks=ticks)
        cax.set_yticklabels(ticks_label)
    else:
        fig.colorbar(cm, ax=ax, cax=cax)
        cax.minorticks_off()
    if c_label is None:
        c_label = r'$T\,$' + unit
    cax.set_ylabel(c_label)

    return xlim, ylim, (vmin, vmax), fig
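# Hedged usage sketch (file name, slice and title are hypothetical): plot the
# mean of the first 10 frequency channels of a cleaned map in mK and save it.
#
#   xlim, ylim, clim, fig = show_map('cleaned_map.h5', 'clean_map',
#                                    indx=(slice(0, 10), ), mK=True,
#                                    title='Cleaned map, ')
#   fig.savefig('cleaned_map.png')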
def setup(self):
    self.refinement = self.params['refinement']
    self.scenario = self.params['scenario']

    map_pad = self.params['map_pad']

    if self.params['map_tmp'] is None:
        freq = self.params['freq'] * 1.e6
        freq_d = freq[1] - freq[0]
        freq_n = freq.shape[0]
        freq_c = freq[freq_n // 2]

        field_centre = self.params['field_centre']
        spacing = self.params['pixel_spacing']
        dec_spacing = spacing
        ra_spacing = -spacing / np.cos(field_centre[1] * np.pi / 180.)

        axis_names = ['freq', 'ra', 'dec']
        map_shp = [x + map_pad for x in self.params['map_shape']]
        map_tmp = np.zeros([freq_n, ] + map_shp)
        map_tmp = al.make_vect(map_tmp, axis_names=axis_names)
        map_tmp.set_axis_info('freq', freq_c, freq_d)
        map_tmp.set_axis_info('ra', field_centre[0], ra_spacing)
        map_tmp.set_axis_info('dec', field_centre[1], dec_spacing)
        self.map_tmp = map_tmp
    else:
        pad_shp = ((0, 0), (map_pad, map_pad), (map_pad, map_pad))
        with h5py.File(self.params['map_tmp'], 'r') as f:
            _map_tmp = al.load_h5(f, self.params['map_tmp_key'])
            _axis_names = _map_tmp.info['axes']
            _info = _map_tmp.info
            _map_tmp = np.pad(_map_tmp, pad_shp, 'constant')
            _map_tmp = al.make_vect(_map_tmp, axis_names=_axis_names)
            _map_tmp.info.update(_info)

            _weight = al.load_h5(f, self.params['map_tmp_weight'])
            _weight = np.pad(_weight, pad_shp, 'constant')
            _weight = al.make_vect(_weight, axis_names=_axis_names)

        #self.map_tmp = al.zeros_like(_map_tmp)
        self.map_tmp = _map_tmp
        self.weight = _weight

    # here we use 300 h km/s from WiggleZ for the streaming dispersion
    self.streaming_dispersion = 300. * 0.72

    self.map_pad = map_pad

    #self.beam_data = np.array([1., 1., 1.])
    #self.beam_freq = np.array([900, 1100, 1400]) #* 1.e6
    if self.params['beam_file'] is not None:
        _bd = np.loadtxt(self.params['beam_file'])
        self.beam_freq = _bd[:, 0] * 1.e6
        self.beam_data = _bd[:, 1]
    else:
        fwhm1400 = 0.9
        self.beam_freq = np.linspace(800., 1600., 500).astype('float')
        self.beam_data = 1.2 * fwhm1400 * 1400. / self.beam_freq
        self.beam_freq *= 1.e6

    # fixed master seed so that each MPI rank gets a reproducible,
    # rank-dependent random seed
    random.seed(3936650408)
    seeds = random.random_integers(100000000, 1000000000, mpiutil.size)
    self.seed = seeds[mpiutil.rank]
    print("RANK: %02d with random seed [%d]" % (mpiutil.rank, self.seed))
    random.seed(self.seed)

    self.outfiles = self.params['outfiles']
    self.outfiles_split = self.params['outfiles_split']
    self.open_outputfiles()

    self.iter_list = mpiutil.mpirange(self.params['mock_n'])
    self.iter = 0
    self.iter_num = len(self.iter_list)
def process(self, input):
    task_list = self.task_list
    for task_ind in mpiutil.mpirange(len(task_list)):
        tind_l, tind_r, tind_o = task_list[task_ind]
        tind_l = tuple(tind_l)
        tind_r = tuple(tind_r)
        msg = ("RANK %03d fgrm.\n(" + "%03d," * len(tind_l) + ") x ("
               + "%03d," * len(tind_r) + ")\n")
        print(msg % ((mpiutil.rank, ) + tind_l + tind_r))
        tind_list = [tind_l, tind_r]

        maps = []
        weights = []

        freq_good = np.ones(self.dset_shp[0]).astype('bool')
        if len(self.params['freq_mask']) != 0:
            freq_good[self.params['freq_mask']] = False

        for i in range(2):
            tind = tind_list[i]

            map_key = self.params['map_key']  # 'clean_map'
            input_map = al.load_h5(input[tind[0]], map_key)
            input_map = al.make_vect(input_map,
                                     axis_names=['freq', 'ra', 'dec'])
            maps.append(input_map)

            weight_key = self.params['weight_key']  # 'noise_diag'
            if weight_key is not None:
                weight = al.load_h5(input[tind[0]], weight_key)
                if weight_key == 'noise_diag':
                    weight_prior = self.params['weight_prior']
                    logger.info('using wp %e' % weight_prior)
                    weight = make_noise_factorizable(weight, weight_prior)
            else:
                weight = np.ones_like(input_map)
            weight[input_map == 0] = 0.
            weight = al.make_vect(weight, axis_names=['freq', 'ra', 'dec'])
            weight.info = input_map.info

            try:
                freq_good *= ~(input[tind[0]]['mask'][:]).astype('bool')
            except KeyError:
                logger.info("mask doesn't exist")

            weights.append(weight)

        maps[0][~freq_good] = 0.
        maps[1][~freq_good] = 0.
        weights[0][~freq_good] = 0.
        weights[1][~freq_good] = 0.

        if self.params['conv_factor'] != 0:
            maps, weights = degrade_resolution(
                maps, weights,
                conv_factor=self.params['conv_factor'],
                mode='constant',
                beam_file=self.params['beam_file'],
                fwhm1400=self.params['fwhm1400'])
        else:
            logger.info('common reso. conv. ignored')

        if self.params['add_map'] is not None:
            _maps = self.params['add_map']
            _map_A_path, _map_A_name = os.path.split(
                os.path.splitext(_maps[0])[0])
            _map_B_path, _map_B_name = os.path.split(
                os.path.splitext(_maps[1])[0])
            logger.info('add real map pair (%s %s)' % (_map_A_name, _map_B_name))
            with h5.File(os.path.join(_map_A_path, _map_A_name + '.h5'), 'r') as f:
                maps[0][:] += al.load_h5(f, 'cleaned_00mode/%s' % _map_B_name)
            with h5.File(os.path.join(_map_B_path, _map_B_name + '.h5'), 'r') as f:
                maps[1][:] += al.load_h5(f, 'cleaned_00mode/%s' % _map_A_name)

        svd_info = self.svd_info
        if svd_info is None:
            freq_cov, counts = find_modes.freq_covariance(
                maps[0], maps[1], weights[0], weights[1],
                freq_good, freq_good)
            svd_info = find_modes.get_freq_svd_modes(freq_cov, np.sum(freq_good))

        mode_list = self.mode_list
        mode_list_ed = copy.deepcopy(mode_list)
        mode_list_st = copy.deepcopy(mode_list)
        mode_list_st[1:] = mode_list_st[:-1]

        dset_key = tind_o[0] + '_sigvalu'
        self.df_out[tind_l[0]][dset_key] = svd_info[0]
        dset_key = tind_o[0] + '_sigvect'
        self.df_out[tind_l[0]][dset_key] = svd_info[1]
        self.df_out[tind_l[0]]['weight'][:] = weights[0]
        self.df_out[tind_l[0]]['mask'][:] = (~freq_good).astype('int')
        if tind_o[1] != tind_o[0]:
            dset_key = tind_o[1] + '_sigvalu'
            self.df_out[tind_r[0]][dset_key] = svd_info[0]
            dset_key = tind_o[1] + '_sigvect'
            self.df_out[tind_r[0]][dset_key] = svd_info[2]
            self.df_out[tind_r[0]]['weight'][:] = weights[1]
            self.df_out[tind_r[0]]['mask'][:] = (~freq_good).astype('int')

        for (n_modes_st, n_modes_ed) in zip(mode_list_st, mode_list_ed):
            svd_modes = svd_info[1][n_modes_st:n_modes_ed]
            group_name = 'cleaned_%02dmode/' % n_modes_ed
            maps[0], amp = find_modes.subtract_frequency_modes(
                maps[0], svd_modes, weights[0], freq_good)
            dset_key = group_name + tind_o[0]
            self.df_out[tind_l[0]][dset_key][:] = copy.deepcopy(maps[0])

            if tind_o[0] != tind_o[1]:
                svd_modes = svd_info[2][n_modes_st:n_modes_ed]
                maps[1], amp = find_modes.subtract_frequency_modes(
                    maps[1], svd_modes, weights[1], freq_good)
                dset_key = group_name + tind_o[1]
                self.df_out[tind_r[0]][dset_key][:] = copy.deepcopy(maps[1])

            # for the case of auto-correlation with different svd modes
            if 'Combined' in self.df_out[tind_r[0]][group_name].keys():
                dset_key = group_name + 'Combined'
                _map = maps[0].copy() * weights[0].copy() \
                     + maps[1].copy() * weights[1].copy()
                _wet = weights[0].copy() + weights[1].copy()
                _wet[_wet == 0] = np.inf
                _map /= _wet
                self.df_out[tind_r[0]][dset_key][:] = copy.deepcopy(_map)

    if self.params['output_combined'] is not None:
        self.combine_results()

    for ii in range(self.input_files_num):
        input[ii].close()
def setup(self):
    ant_file = self.params['ant_file']
    #ant_dat = np.genfromtxt(ant_file,
    #        dtype=[('name', 'S4'), ('X', 'f8'), ('Y', 'f8'), ('Z', 'f8')])
    ant_dat = pd.read_fwf(ant_file, header=None,
                          names=['name', 'X', 'Y', 'Z', 'px', 'py'])

    self.ants = np.array(ant_dat['name'], dtype='str')

    ants_pos = [np.array(ant_dat['X'])[:, None],
                np.array(ant_dat['Y'])[:, None],
                np.array(ant_dat['Z'])[:, None]]
    self.ants_pos = np.concatenate(ants_pos, axis=1)

    freq = self.params['freq']
    dfreq = freq[1] - freq[0]
    freq_n = freq.shape[0]

    self.SM = globals()[self.params['survey_mode']](self.params['schedule_file'])
    #self.SM.generate_altaz(startalt, startaz, starttime, obs_len, obs_speed, obs_int)
    self.SM.generate_altaz()
    self.SM.radec_list([ant_dat['px'], ant_dat['py']])

    #starttime = Time(self.params['starttime'])
    #startalt, startaz = self.params['startpointing']
    #startalt *= u.deg
    #startaz *= u.deg
    #obs_speed = self.params['obs_speed']
    obs_int = self.SM.obs_int  #self.params['obs_int']
    self.obs_int = obs_int
    samplerate = ((1. / obs_int).to(u.Hz)).value

    #obs_tot = self.SM.obs_tot  #self.params['obs_tot']
    #obs_len = int((obs_tot / obs_int).decompose().value)

    #self.block_time = self.SM.sche['block_time']  #self.params['block_time']
    self.block_time = np.array(self.SM.sche['block_time'])
    #self.block_len = int((block_time / obs_int).decompose().value)
    block_num = self.block_time.shape[0]

    # white-noise rms per sample from the radiometer equation
    _obs_int = (obs_int.to(u.second)).value
    self._RMS = self.params['T_rec'] / np.sqrt(_obs_int * dfreq * 1.e6)

    if self.params['fg_syn_model'] is not None:
        self.syn_model = hp.read_map(self.params['fg_syn_model'],
                                     range(freq.shape[0]))
        self.syn_model = self.syn_model.T

    if self.params['HI_model'] is not None:
        with h5py.File(self.params['HI_model'], 'r') as fhi:
            #_HI_model = al.make_vect(
            _HI_model = al.load_h5(fhi, self.params['HI_model_type'])
            logger.info('HI bias %3.2f' % self.params['HI_bias'])
            _HI_model *= self.params['HI_bias']
            if self.params['HI_mock_ids'] is not None:
                self.HI_mock_ids = list(self.params['HI_mock_ids'])
                _HI_model = _HI_model[self.HI_mock_ids]
            else:
                self.HI_mock_ids = range(_HI_model.shape[0])

            self.mock_n = _HI_model.shape[0]
            self.HI_model = al.make_vect(_HI_model)
    else:
        self.mock_n = self.params['mock_n']

    if self.params['fnoise']:
        self.FN = fnoise.FNoise(dtime=obs_int.value,
                                dfreq=dfreq,
                                alpha=self.params['alpha'],
                                f0=self.params['f0'],
                                beta=self.params['beta'])

    self.get_blorder()

    #self.iter_list = mpiutil.mpirange(0, obs_len, self.block_len)
    self.iter_list = mpiutil.mpirange(0, block_num)
    self.iter = 0
    self.iter_num = len(self.iter_list)