def _partition_io(self, skip=False, threshold=1.99):
    """Split IO of this array into local sections under `threshold`.

    Parameters
    ----------
    skip : bool, optional
        Don't partition, just find and return a full axis.
    threshold : float, optional
        Maximum size of IO (in GB). Default 1.99 GB, i.e. just under the
        2 GB limit common to many IO layers.

    Returns
    -------
    split_axis : int
        Which axis are we going to split along.
    partitions : list of slice objects
        List of slices.

    Raises
    ------
    RuntimeError
        If the array is 1-d, or no non-distributed axis is long enough to
        be split into the required number of parts.
    """
    from mpi4py import MPI

    threshold_bytes = threshold * 2**30
    # Use the largest local section across all ranks so every rank agrees
    # on the same number of splits (allreduce is collective).
    largest_size = self.comm.allreduce(self.nbytes, op=MPI.MAX)
    num_split = int(np.ceil(largest_size / threshold_bytes))

    # Return early if we can
    if skip or num_split == 1:
        return 0, [slice(0, self.local_shape[0])]

    if self.ndim == 1:
        # Fixed typo in message: "parition" -> "partition"
        raise RuntimeError(
            "To partition an array we must have multiple axes.")

    # Try and find the axis to split over; the distributed axis cannot be
    # used, and the chosen axis must have at least `num_split` elements.
    for split_axis in range(self.ndim):
        if split_axis != self.axis and self.global_shape[
                split_axis] >= num_split:
            break
    else:
        raise RuntimeError(
            "Can't identify an IO partition less than %.2f GB in size: "
            "shape=%s, distributed axis=%i" %
            (threshold, self.global_shape, self.axis))

    logger.debug("Splitting along axis %i, %i ways", split_axis, num_split)

    # Figure out the start and end of the splits and return
    nums, starts, ends = mpiutil.split_m(self.global_shape[split_axis],
                                         num_split)

    slices = [slice(start, end) for start, end in zip(starts, ends)]

    return split_axis, slices
def mapmake_full(self, nside, mapname, nbin=None, dirty=False, method='svd', normalize=True, threshold=1.0e3):
    """Make a full sky map from the m-modes and save it to `mapname`.

    Parameters
    ----------
    nside : int
        Healpix resolution of the output map.
    mapname : string
        Output file name, created under ``self.output_directory``.
    nbin : int, optional
        Number of frequency bins; ``None`` (or an out-of-range value)
        means one bin per frequency.
    dirty : bool, optional
        If True use the dirty (backward) projection instead of `method`.
    method : string, optional
        Map-making method, 'svd' or 'tk'.
    normalize, threshold :
        Passed through to the dirty projection.
        # NOTE(review): semantics defined by
        # beamtransfer.project_vector_backward_dirty — confirm there.
    """

    nfreq = self.telescope.nfreq
    if nbin is not None and (nbin <= 0 or nbin >= nfreq): # invalid nbin
        nbin = None

    def _make_alm(mi):
        # Worker: project one m-mode from telescope basis to sky alm.
        print "Making %i" % mi

        mmode = self.mmode(mi)

        if dirty:
            sphmode = self.beamtransfer.project_vector_backward_dirty(mi, mmode, nbin, normalize, threshold)
        else:
            if method == 'svd':
                sphmode = self.beamtransfer.project_vector_telescope_to_sky(mi, mmode, nbin)
            elif method == 'tk':
                sphmode = self.beamtransfer.project_vector_telescope_to_sky_tk(mi, mmode, nbin)
            else:
                raise ValueError('Unknown map-making method %s' % method)

        return sphmode

    # Distribute the m-modes over MPI processes; results gathered on all.
    alm_list = mpiutil.parallel_map(_make_alm, range(self.telescope.mmax + 1))

    if mpiutil.rank0:
        # get center freq of each bin
        if nbin is not None:
            n, s, e = mpiutil.split_m(nfreq, nbin)
            # Python-2 integer division picks the middle frequency index.
            cfreqs = np.array([ self.beamtransfer.telescope.frequencies[(s[i]+e[i])/2] for i in range(nbin) ])
        else:
            nbin = nfreq
            cfreqs = self.beamtransfer.telescope.frequencies

        # Assemble the full alm array (m is the last axis) and transform.
        alm = np.zeros((nbin, self.telescope.num_pol_sky, self.telescope.lmax + 1, self.telescope.lmax + 1), dtype=np.complex128)
        for mi in range(self.telescope.mmax + 1):
            alm[..., mi] = alm_list[mi]
        skymap = hputil.sphtrans_inv_sky(alm, nside)

        with h5py.File(self.output_directory + '/' + mapname, 'w') as f:
            f.create_dataset('/map', data=skymap)
            f.attrs['frequency'] = cfreqs
            f.attrs['polarization'] = np.array(['I', 'Q', 'U', 'V'])[:self.beamtransfer.telescope.num_pol_sky]

    # Keep all ranks in step with the rank-0 writer.
    mpiutil.barrier()
def process(self, ts):
    """Rebin the timestream `ts` to `bin_number` frequency bins.

    Performs a weighted, mask-aware average over frequency, rewrites the
    'vis', 'vis_mask' and 'freq' datasets in place, and updates the
    'freqstep' attribute. If `bin_number` is not smaller than the current
    number of frequencies a warning is issued and nothing is changed.
    """
    assert isinstance(ts, Timestream), '%s only works for Timestream object' % self.__class__.__name__

    bin_number = self.params['bin_number']

    ts.redistribute('baseline')

    nt = len(ts.time)
    nfreq = len(ts.freq)
    if bin_number >= nfreq:
        warnings.warn('The number of bins can not exceed the number of frequencies, do nothing')
    else:
        # Map each of the nfreq channels onto bin_number bins via repeat +
        # split_m, so bins get (nearly) equal numbers of channels.
        repeat_inds = np.repeat(np.arange(nfreq), bin_number)
        num, start, end = mpiutil.split_m(nfreq*bin_number, bin_number)
        freq = np.zeros(bin_number, dtype=ts.freq.dtype)
        vis = np.zeros((nt, bin_number)+ts.local_vis.shape[2:], dtype=ts.vis.dtype)
        vis_mask= np.zeros((nt, bin_number)+ts.local_vis.shape[2:], dtype=ts.vis_mask.dtype) # all False

        # average over frequency
        for idx in xrange(bin_number):
            inds, weight = unique(repeat_inds[start[idx]:end[idx]], return_counts=True)
            # rebin freq
            freq[idx] = average(ts.freq[inds], axis=0, weights=weight)
            # rebin vis
            masked_vis = np.ma.array(ts.local_vis[:, inds], mask=ts.local_vis_mask[:, inds])
            vis[:, idx] = average(masked_vis, axis=1, weights=weight) # freq mean
            # rebin vis_mask: a rebinned sample is masked only if every
            # contributing channel was masked (valid count is zero).
            valid_cnt = np.sum(np.logical_not(ts.local_vis_mask[:, inds]).astype(np.int16) * weight[:, np.newaxis, np.newaxis].astype(np.int16), axis=1) # use int16 to save memory
            vis_mask[:, idx] = np.where(valid_cnt==0, True, False)
            del valid_cnt

        # create rebinned datasets
        vis = mpiarray.MPIArray.wrap(vis, axis=3)
        vis_mask= mpiarray.MPIArray.wrap(vis_mask, axis=3)
        ts.create_main_data(vis, recreate=True, copy_attrs=True)
        axis_order = ts.main_axes_ordered_datasets['vis']
        ts.create_main_axis_ordered_dataset(axis_order, 'vis_mask', vis_mask, axis_order, recreate=True, copy_attrs=True)
        ts.create_freq_ordered_dataset('freq', freq, recreate=True, copy_attrs=True, check_align=True)
        # for other freq_axis datasets
        for name in ts.freq_ordered_datasets.keys():
            if name in ts.iterkeys() and not name in ('freq', 'vis', 'vis_mask'): # exclude already rebinned datasets
                raise RuntimeError('Should not have other freq_ordered_datasets %s' % name)

        # update freqstep attr
        ts.attrs['freqstep'] = nfreq * ts.attrs['freqstep'] / bin_number

    return super(Rebin, self).process(ts)
def _work_fisher_bias_m(self, mi):
    """Worker routine calculating the Fisher matrix and bias for one m.

    Both quantities are estimated by Monte-Carlo, using the fact that
    Cov(q_a, q_b) = F_ab.

    Parameters
    ----------
    mi : integer
        m-mode to calculate.

    Returns
    -------
    fisher : np.ndarray[nbands, nbands]
        Fisher matrix.
    bias : np.ndarray[nbands]
        Bias vector.
    """
    q_store = np.zeros((self.nbands + 1, self.nsamples))

    # Work through the samples in bounded-size chunks to keep the peak
    # memory usage down.
    chunk_sizes, chunk_starts, chunk_ends = mpiutil.split_m(
        self.nsamples, (self.nsamples / 1000) + 1)

    for csize, cstart, cend in zip(chunk_sizes, chunk_starts, chunk_ends):
        sample_a = self.gen_sample(mi, csize)
        sample_b = self.gen_sample(mi, csize)
        q_store[:, cstart:cend] = self.q_estimator(
            mi, sample_a, sample_b, noise=True)

    # Sample covariance of the q estimates: the leading nbands x nbands
    # sub-block is the Fisher matrix, the final row gives the bias.
    covariance = np.cov(q_store)

    return (covariance[:self.nbands, :self.nbands],
            covariance[-1, :self.nbands])
def _work_fisher_bias_m(self, mi):
    """Worker routine for calculating the Fisher and bias for a given m.

    This method estimates both quantities using Monte-Carlo estimation,
    and the fact that Cov(q_a, q_b) = F_ab. The bias is estimated as the
    sample mean of the q estimator.

    Parameters
    ----------
    mi : integer
        m-mode to calculate.

    Returns
    -------
    fisher : np.ndarray[nbands, nbands]
        Fisher matrix.
    bias : np.ndarray[nbands]
        Bias vector.
    """
    qa = np.zeros((self.nbands, self.nsamples))

    # Split calculation into subranges to save on memory usage
    num, starts, ends = mpiutil.split_m(self.nsamples,
                                        (self.nsamples / 1000) + 1)

    for n, s, e in zip(num, starts, ends):
        x = self.gen_sample(mi, n)
        qa[:, s:e] = self.q_estimator(mi, x)

    # Sample covariance of the q estimates gives the Fisher matrix.
    # (Previously np.cov(qa) was computed a second time here; reuse the
    # already-computed result instead.)
    ft = np.cov(qa)
    fisher = ft
    bias = qa.mean(axis=1)

    return fisher, bias
def _generate_mfiles(self, regen=False):
    """Generate the beam transfer m-files on disk.

    Creates one HDF5 file per m holding the 'beam_m' dataset of shape
    (nfreq, nbase, npol, ntheta), filling it in baseline chunks sized so
    each process uses at most ~1 GB at a time. Skipped if a COMPLETED_BEAM
    marker file already exists, unless `regen` is True. Collective over
    all MPI processes.
    """

    completed_file = self._mdir + 'COMPLETED_BEAM'
    if os.path.exists(completed_file) and not regen:
        if mpiutil.rank0:
            print
            print '=' * 80
            print "******* Beam transfer m-files already generated ********"
        mpiutil.barrier()
        return

    if mpiutil.rank0:
        print
        print '=' * 80
        print 'Create beam transfer m-files...'

    st = time.time()

    # Calculate the Beam Transfer Matrices
    nfreq = self.telescope.nfreq
    nbl = self.telescope.nbase
    npol = self.telescope.num_pol_sky
    ntheta = self.telescope.theta_size
    nphi = self.telescope.phi_size

    # create file to save beam transfer matrices
    dsize = (nfreq, nbl, npol, ntheta)
    csize = (nfreq, 1, npol, ntheta)
    mmax = self.telescope.mmax
    # m ordering: 0..mmax followed by -mmax..-1
    ms = np.concatenate([np.arange(0, mmax+1), np.arange(-mmax, 0)])
    # get local section of m'th
    for ind, mi in enumerate(mpiutil.mpilist(ms, method='con')):
        with h5py.File(self._mfile(mi), 'w') as f:
            f.create_dataset('beam_m', dsize, chunks=csize, compression='lzf', dtype=np.complex128)
            f.attrs['m'] = mi

    # calculate the total memory needed for the transfer matrix
    total_memory = nfreq * nbl * npol * ntheta * nphi * 16.0 # Bytes, 16 for complex128
    limit = 1.0 # GB, memory limit for each process
    # make each process have maximum `limit` GB
    sigle_memory = limit * 2**30 # Bytes
    # how many chunks
    num_chunks = np.int(np.ceil(total_memory / (mpiutil.size * sigle_memory)))
    # split bls to num_chunks sections
    if nbl < num_chunks:
        warnings.warn('Could not split to %d chunks for %d baselines' % (num_chunks, nbl))
    num_chunks = min(num_chunks, nbl)
    num, start, end = mpiutil.split_m(nbl, num_chunks)

    for ci in range(num_chunks):
        if mpiutil.rank0:
            print "Starting chunk %i of %i" % (ci+1, num_chunks)

        # Compute transfer matrices for this chunk of baselines, then
        # move from baseline-distribution to m-distribution so each rank
        # can write its own m-files.
        tarray = self.telescope.transfer_matrices(np.arange(start[ci], end[ci]), np.arange(nfreq))
        tarray = MPIArray.wrap(tarray, axis=0)
        # redistribute along different m
        tarray = tarray.redistribute(axis=3)

        # save beam transfer matrices to file
        for ind, mi in enumerate(mpiutil.mpilist(ms, method='con')):
            with h5py.File(self._mfile(mi), 'r+') as f:
                f['beam_m'][:, start[ci]:end[ci]] = tarray[..., ind].view(np.ndarray).reshape(nfreq, num[ci], npol, ntheta)

    mpiutil.barrier()
    et = time.time()

    if mpiutil.rank0:
        # Make file marker that the m's have been correctly generated:
        open(completed_file, 'a').close()

        # Print out timing
        print "=== Create beam transfer m-files took %f s ===" % (et - st)
def mapmake_full(self, nside, mapname, nbin=None, dirty=False, method='svd', normalize=True, threshold=1.0e3):
    """Make a full sky map from the m-modes and save it to `mapname`.

    Parameters
    ----------
    nside : int
        Healpix resolution of the output map.
    mapname : string
        Output file name, created under ``self.output_directory``.
    nbin : int, optional
        Number of frequency bins; ``None`` (or an out-of-range value)
        means one bin per frequency.
    dirty : bool, optional
        If True use the dirty (backward) projection instead of `method`.
    method : string, optional
        Map-making method, 'svd' or 'tk'.
    normalize, threshold :
        Passed through to the dirty projection.
        # NOTE(review): semantics defined by
        # beamtransfer.project_vector_backward_dirty — confirm there.
    """

    nfreq = self.telescope.nfreq
    if nbin is not None and (nbin <= 0 or nbin >= nfreq):  # invalid nbin
        nbin = None

    def _make_alm(mi):
        # Worker: project one m-mode from telescope basis to sky alm.
        print "Making %i" % mi

        mmode = self.mmode(mi)

        if dirty:
            sphmode = self.beamtransfer.project_vector_backward_dirty(
                mi, mmode, nbin, normalize, threshold)
        else:
            if method == 'svd':
                sphmode = self.beamtransfer.project_vector_telescope_to_sky(
                    mi, mmode, nbin)
            elif method == 'tk':
                sphmode = self.beamtransfer.project_vector_telescope_to_sky_tk(
                    mi, mmode, nbin)
            else:
                raise ValueError('Unknown map-making method %s' % method)

        return sphmode

    # Distribute the m-modes over MPI processes; results gathered on all.
    alm_list = mpiutil.parallel_map(_make_alm,
                                    range(self.telescope.mmax + 1))

    if mpiutil.rank0:
        # get center freq of each bin
        if nbin is not None:
            n, s, e = mpiutil.split_m(nfreq, nbin)
            # Python-2 integer division picks the middle frequency index.
            cfreqs = np.array([
                self.beamtransfer.telescope.frequencies[(s[i] + e[i]) / 2]
                for i in range(nbin)
            ])
        else:
            nbin = nfreq
            cfreqs = self.beamtransfer.telescope.frequencies

        # Assemble the full alm array (m is the last axis) and transform.
        alm = np.zeros(
            (nbin, self.telescope.num_pol_sky, self.telescope.lmax + 1,
             self.telescope.lmax + 1),
            dtype=np.complex128)
        for mi in range(self.telescope.mmax + 1):
            alm[..., mi] = alm_list[mi]
        skymap = hputil.sphtrans_inv_sky(alm, nside)

        with h5py.File(self.output_directory + '/' + mapname, 'w') as f:
            f.create_dataset('/map', data=skymap)
            f.attrs['frequency'] = cfreqs
            f.attrs['polarization'] = np.array(
                ['I', 'Q', 'U', 'V'])[:self.beamtransfer.telescope.num_pol_sky]

    # Keep all ranks in step with the rank-0 writer.
    mpiutil.barrier()
def process(self, ts): mask_daytime = self.params['mask_daytime'] mask_time_range = self.params['mask_time_range'] tsys = self.params['tsys'] accuracy_boost = self.params['accuracy_boost'] l_boost = self.params['l_boost'] bl_range = self.params['bl_range'] auto_correlations = self.params['auto_correlations'] time_avg = self.params['time_avg'] pol = self.params['pol'] interp = self.params['interp'] beam_dir = output_path(self.params['beam_dir']) use_existed_beam = self.params['use_existed_beam'] gen_inv = self.params['gen_invbeam'] noise_weight = self.params['noise_weight'] ts_dir = output_path(self.params['ts_dir']) ts_name = self.params['ts_name'] no_m_zero = self.params['no_m_zero'] simulate = self.params['simulate'] input_maps = self.params['input_maps'] prior_map = self.params['prior_map'] add_noise = self.params['add_noise'] dirty_map = self.params['dirty_map'] nbin = self.params['nbin'] method = self.params['method'] normalize = self.params['normalize'] threshold = self.params['threshold'] eps = self.params['epsilon'] correct_order = self.params['correct_order'] if use_existed_beam: # load the saved telescope from disk tel = None else: assert isinstance(ts, Timestream), '%s only works for Timestream object' % self.__class__.__name__ ts.redistribute('baseline') lat = ts.attrs['sitelat'] # lon = ts.attrs['sitelon'] lon = 0.0 # lon = np.degrees(ts['ra_dec'][0, 0]) # the first ra local_origin = False freqs = ts.freq[:] # MHz nfreq = freqs.shape[0] band_width = ts.attrs['freqstep'] # MHz try: ndays = ts.attrs['ndays'] except KeyError: ndays = 1 feeds = ts['feedno'][:] bl_order = mpiutil.gather_array(ts.local_bl, axis=0, root=None, comm=ts.comm) bls = [ tuple(bl) for bl in bl_order ] az, alt = ts['az_alt'][0] az = np.degrees(az) alt = np.degrees(alt) pointing = [az, alt, 0.0] feedpos = ts['feedpos'][:] if ts.is_dish: from tlpipe.map.drift.telescope import tl_dish dish_width = ts.attrs['dishdiam'] tel = tl_dish.TlUnpolarisedDishArray(lat, lon, freqs, band_width, 
tsys, ndays, accuracy_boost, l_boost, bl_range, auto_correlations, local_origin, dish_width, feedpos, pointing) elif ts.is_cylinder: from tlpipe.map.drift.telescope import tl_cylinder # factor = 1.2 # suppose an illumination efficiency, keep same with that in timestream_common factor = 0.79 # for xx # factor = 0.88 # for yy cyl_width = factor * ts.attrs['cywid'] tel = tl_cylinder.TlUnpolarisedCylinder(lat, lon, freqs, band_width, tsys, ndays, accuracy_boost, l_boost, bl_range, auto_correlations, local_origin, cyl_width, feedpos) else: raise RuntimeError('Unknown array type %s' % ts.attrs['telescope']) if not simulate: # select the corresponding vis and vis_mask if pol == 'xx': local_vis = ts.local_vis[:, :, 0, :] local_vis_mask = ts.local_vis_mask[:, :, 0, :] elif pol == 'yy': local_vis = ts.local_vis[:, :, 1, :] local_vis_mask = ts.local_vis_mask[:, :, 1, :] elif pol == 'I': xx_vis = ts.local_vis[:, :, 0, :] xx_vis_mask = ts.local_vis_mask[:, :, 0, :] yy_vis = ts.local_vis[:, :, 1, :] yy_vis_mask = ts.local_vis_mask[:, :, 1, :] local_vis = np.zeros_like(xx_vis) for ti in xrange(local_vis.shape[0]): for fi in xrange(local_vis.shape[1]): for bi in xrange(local_vis.shape[2]): if xx_vis_mask[ti, fi, bi] != yy_vis_mask[ti, fi, bi]: if xx_vis_mask[ti, fi, bi]: local_vis[ti, fi, bi] = yy_vis[ti, fi, bi] else: local_vis[ti, fi, bi] = xx_vis[ti, fi, bi] else: local_vis[ti, fi, bi] = 0.5 * (xx_vis[ti, fi, bi] + yy_vis[ti, fi, bi]) local_vis_mask = xx_vis_mask | yy_vis_mask else: raise ValueError('Invalid pol: %s' % pol) if interp != 'none': for fi in xrange(local_vis.shape[1]): for bi in xrange(local_vis.shape[2]): # interpolate for local_vis true_inds = np.where(local_vis_mask[:, fi, bi])[0] # masked inds if len(true_inds) > 0: false_inds = np.where(~local_vis_mask[:, fi, bi])[0] # un-masked inds if len(false_inds) > 0.1 * local_vis.shape[0]: # nearest interpolate for local_vis if interp in ('linear', 'nearest'): itp_real = interp1d(false_inds, local_vis[false_inds, fi, 
bi].real, kind=interp, fill_value='extrapolate', assume_sorted=True) itp_imag = interp1d(false_inds, local_vis[false_inds, fi, bi].imag, kind=interp, fill_value='extrapolate', assume_sorted=True) elif interp == 'rbf': itp_real = Rbf(false_inds, local_vis[false_inds, fi, bi].real, smooth=10) itp_imag = Rbf(false_inds, local_vis[false_inds, fi, bi].imag, smooth=10) else: raise ValueError('Unknown interpolation method: %s' % interp) local_vis[true_inds, fi, bi] = itp_real(true_inds) + 1.0J * itp_imag(true_inds) # the interpolated vis else: local_vis[:, fi, bi] = 0 # TODO: may need to take special care # average data nt = ts['sec1970'].shape[0] phi_size = 2*tel.mmax + 1 # phi = np.zeros((phi_size,), dtype=ts['ra_dec'].dtype) phi = np.linspace(0, 2*np.pi, phi_size, endpoint=False) vis = np.zeros((phi_size,)+local_vis.shape[1:], dtype=local_vis.dtype) if time_avg == 'avg': nt_m = float(nt) / phi_size # roll data to have phi=0 near the first roll_len = np.int(np.around(0.5*nt_m)) local_vis[:] = np.roll(local_vis[:], roll_len, axis=0) if interp == 'none': local_vis_mask[:] = np.roll(local_vis_mask[:], roll_len, axis=0) # ts['ra_dec'][:] = np.roll(ts['ra_dec'][:], roll_len, axis=0) repeat_inds = np.repeat(np.arange(nt), phi_size) num, start, end = mpiutil.split_m(nt*phi_size, phi_size) # average over time for idx in xrange(phi_size): inds, weight = unique(repeat_inds[start[idx]:end[idx]], return_counts=True) if interp == 'none': vis[idx] = average(np.ma.array(local_vis[inds], mask=local_vis_mask[inds]), axis=0, weights=weight) # time mean else: vis[idx] = average(local_vis[inds], axis=0, weights=weight) # time mean # phi[idx] = np.average(ts['ra_dec'][:, 0][inds], axis=0, weights=weight) elif time_avg == 'fft': if interp == 'none': raise ValueError('Can not do fft average without first interpolation') Vm = np.fft.fftshift(np.fft.fft(local_vis, axis=0), axes=0) vis[:] = np.fft.ifft(np.fft.ifftshift(Vm[nt/2-tel.mmax:nt/2+tel.mmax+1], axes=0), axis=0) / (1.0 * nt / phi_size) # 
for fi in xrange(vis.shape[1]): # for bi in xrange(vis.shape[2]): # # plot local_vis and vis # import matplotlib # matplotlib.use('Agg') # import matplotlib.pyplot as plt # phi0 = np.linspace(0, 2*np.pi, nt, endpoint=False) # phi1 = np.linspace(0, 2*np.pi, phi_size, endpoint=False) # plt.figure() # plt.subplot(211) # plt.plot(phi0, local_vis[:, fi, bi].real, label='v0.real') # plt.plot(phi1, vis[:, fi, bi].real, label='v1.real') # plt.legend() # plt.subplot(212) # plt.plot(phi0, local_vis[:, fi, bi].imag, label='v0.imag') # plt.plot(phi1, vis[:, fi, bi].imag, label='v1.imag') # plt.legend() # plt.savefig('vis_fft/vis_%d_%d.png' % (fi, bi)) # plt.close() else: raise ValueError('Unknown time_avg: %s' % time_avg) del local_vis del local_vis_mask # mask daytime data if mask_daytime: day_or_night = np.where(ts['local_hour'][:]>=mask_time_range[0] & ts['local_hour'][:]<=mask_time_range[1], True, False) day_inds = np.where(np.repeat(day_or_night, phi_size).reshape(nt, phi_size).astype(np.int).sum(axis=1).astype(bool))[0] vis[day_inds] = 0 del ts # no longer need ts # redistribute vis to time axis vis = mpiarray.MPIArray.wrap(vis, axis=2).redistribute(0).local_array allpairs = tel.allpairs redundancy = tel.redundancy nrd = len(redundancy) # reorder bls according to allpairs vis_tmp = np.zeros_like(vis) for ind, (a1, a2) in enumerate(allpairs): try: b_ind = bls.index((feeds[a1], feeds[a2])) vis_tmp[:, :, ind] = vis[:, :, b_ind] except ValueError: b_ind = bls.index((feeds[a2], feeds[a1])) vis_tmp[:, :, ind] = vis[:, :, b_ind].conj() del vis # average over redundancy vis_stream = np.zeros(vis_tmp.shape[:-1]+(nrd,), dtype=vis_tmp.dtype) red_bin = np.cumsum(np.insert(redundancy, 0, 0)) # redundancy bin # average over redundancy for ind in xrange(nrd): vis_stream[:, :, ind] = np.sum(vis_tmp[:, :, red_bin[ind]:red_bin[ind+1]], axis=2) / redundancy[ind] del vis_tmp # beamtransfer bt = beamtransfer.BeamTransfer(beam_dir, tel, noise_weight, True) if not use_existed_beam: 
bt.generate() if tel is None: tel = bt.telescope if simulate: ndays = 733 tstream = timestream.simulate(bt, ts_dir, ts_name, input_maps, ndays, add_noise=add_noise) else: # timestream and map-making tstream = timestream.Timestream(ts_dir, ts_name, bt, no_m_zero) parent_path = os.path.dirname(tstream._fdir(0)) if os.path.exists(parent_path + '/COMPLETED'): if mpiutil.rank0: print 'Use existed timestream_f files in %s' % parent_path else: for fi in mpiutil.mpirange(nfreq): # Make directory if required if not os.path.exists(tstream._fdir(fi)): os.makedirs(tstream._fdir(fi)) # create memh5 object and write data to temporary file vis_h5 = memh5.MemGroup(distributed=True) vis_h5.create_dataset('/timestream', data=mpiarray.MPIArray.wrap(vis_stream, axis=0)) tmp_file = parent_path +'/vis_stream_temp.hdf5' vis_h5.to_hdf5(tmp_file, hints=False) del vis_h5 # re-organize data as need for tstream # make load even among nodes for fi in mpiutil.mpirange(nfreq, method='rand'): # read the needed data from the temporary file with h5py.File(tmp_file, 'r') as f: vis_fi = f['/timestream'][:, fi, :] # Write file contents with h5py.File(tstream._ffile(fi), 'w') as f: # Timestream data # allocate space for vis_stream shp = (nrd, phi_size) f.create_dataset('/timestream', data=vis_fi.T) f.create_dataset('/phi', data=phi) # Telescope layout data f.create_dataset('/feedmap', data=tel.feedmap) f.create_dataset('/feedconj', data=tel.feedconj) f.create_dataset('/feedmask', data=tel.feedmask) f.create_dataset('/uniquepairs', data=tel.uniquepairs) f.create_dataset('/baselines', data=tel.baselines) # Telescope frequencies f.create_dataset('/frequencies', data=freqs) # Write metadata f.attrs['beamtransfer_path'] = os.path.abspath(bt.directory) f.attrs['ntime'] = phi_size mpiutil.barrier() # remove temp file if mpiutil.rank0: os.remove(tmp_file) # mark all frequencies tstream files are saved correctly open(parent_path + '/COMPLETED', 'a').close() tstream.generate_mmodes() nside = 
hputil.nside_for_lmax(tel.lmax, accuracy_boost=tel.accuracy_boost) if dirty_map: tstream.mapmake_full(nside, 'map_full_dirty.hdf5', nbin, dirty=True, method=method, normalize=normalize, threshold=threshold) else: tstream.mapmake_full(nside, 'map_full.hdf5', nbin, dirty=False, method=method, normalize=normalize, threshold=threshold, eps=eps, correct_order=correct_order, prior_map_file=prior_map) # ts.add_history(self.history) return tstream
def mapmake_full(self, nside, mapname, nbin=None, dirty=False, method='svd', normalize=True, threshold=1.0e3, eps=0.01, correct_order=0, prior_map_file=None):
    """Make a full sky map from the m-modes and save it to `mapname`.

    Parameters
    ----------
    nside : int
        Healpix resolution of the output map.
    mapname : string
        Output file name, created under ``self.output_directory``.
    nbin : int, optional
        Number of frequency bins; ``None`` or an out-of-range value maps
        to one bin per frequency.
    dirty : bool, optional
        If True use the dirty (backward) projection instead of `method`.
    method : string, optional
        Map-making method, 'svd' or 'tk'.
    normalize, threshold, eps, correct_order :
        Passed through to the beamtransfer projection routines.
        # NOTE(review): semantics defined in beamtransfer — confirm there.
    prior_map_file : string, optional
        HDF5 file holding a prior sky map whose alm is fed to the 'tk'
        projection as mmode0.
    """

    nfreq = self.telescope.nfreq
    if nbin is None:
        nbin = nfreq
    else:
        if (nbin < 1 or nbin > nfreq): # invalid nbin
            nbin = nfreq
        else:
            nbin = int(nbin)

    if prior_map_file is not None:
        # read in the prior sky map
        with h5py.File(prior_map_file, 'r') as f:
            prior_map = f['map'][:] # shape (nbin, npol, npix)

        # alm of the prior map
        alm0 = hputil.sphtrans_sky(prior_map, lmax=self.telescope.lmax).reshape(nbin, self.telescope.num_pol_sky, self.telescope.lmax+1, self.telescope.lmax+1) # shape (nbin, npol, lmax+1, lmax+1)
    else:
        alm0 = None

    def _make_alm(mi):
        # Worker: project one m-mode from telescope basis to sky alm.
        print "Making %i" % mi

        mmode = self.mmode(mi)

        if dirty:
            sphmode = self.beamtransfer.project_vector_backward_dirty(mi, mmode, nbin, normalize, threshold)
        else:
            if method == 'svd':
                sphmode = self.beamtransfer.project_vector_telescope_to_sky(mi, mmode, nbin)
            elif method == 'tk':
                # sphmode = self.beamtransfer.project_vector_telescope_to_sky_tk(mi, mmode, nbin, eps=eps)
                mmode0 = alm0[:, :, :, mi] if alm0 is not None else None
                sphmode = self.beamtransfer.project_vector_telescope_to_sky_tk(mi, mmode, nbin, eps=eps, correct_order=correct_order, mmode0=mmode0)
            else:
                raise ValueError('Unknown map-making method %s' % method)

        return sphmode

    # Distribute m-modes over processes; results gathered on root only.
    alm_list = mpiutil.parallel_map(_make_alm, range(self.telescope.mmax + 1), root=0, method='rand')

    if mpiutil.rank0:
        # get center freq of each bin
        n, s, e = mpiutil.split_m(nfreq, nbin)
        cfreqs = np.array([ self.beamtransfer.telescope.frequencies[(s[i]+e[i])/2] for i in range(nbin) ])

        # Assemble the full alm array; m = 0 is skipped when no_m_zero.
        alm = np.zeros((nbin, self.telescope.num_pol_sky, self.telescope.lmax + 1, self.telescope.lmax + 1), dtype=np.complex128)
        mlist = range(1 if self.no_m_zero else 0, self.telescope.mmax + 1)
        for mi in mlist:
            alm[..., mi] = alm_list[mi]
        # NOTE(review): hard-coded zeroing of l >= 100 for m == 1 — looks
        # like an ad-hoc cleanup of contaminated modes; confirm intent.
        alm[:, :, 100:, 1] = 0
        skymap = hputil.sphtrans_inv_sky(alm, nside)

        with h5py.File(self.output_directory + '/' + mapname, 'w') as f:
            f.create_dataset('/map', data=skymap)
            f.attrs['frequency'] = cfreqs
            f.attrs['polarization'] = np.array(['I', 'Q', 'U', 'V'])[:self.beamtransfer.telescope.num_pol_sky]

    # Keep all ranks in step with the rank-0 writer.
    mpiutil.barrier()
def process(self, ts):
    """Map-making task: average `ts` onto a phi grid, average over
    redundant baselines, write per-frequency timestream files, then
    generate m-modes and make the full sky map.

    Returns
    -------
    tstream : Timestream
        The generated (or simulated) timestream object.
    """
    assert isinstance(
        ts, Timestream
    ), '%s only works for Timestream object' % self.__class__.__name__

    mask_daytime = self.params['mask_daytime']
    mask_time_range = self.params['mask_time_range']
    beam_theta_range = self.params['beam_theta_range']
    tsys = self.params['tsys']
    accuracy_boost = self.params['accuracy_boost']
    l_boost = self.params['l_boost']
    bl_range = self.params['bl_range']
    auto_correlations = self.params['auto_correlations']
    pol = self.params['pol']
    beam_dir = output_path(self.params['beam_dir'])
    gen_inv = self.params['gen_invbeam']
    noise_weight = self.params['noise_weight']
    ts_dir = output_path(self.params['ts_dir'])
    ts_name = self.params['ts_name']
    simulate = self.params['simulate']
    input_maps = self.params['input_maps']
    add_noise = self.params['add_noise']
    dirty_map = self.params['dirty_map']
    nbin = self.params['nbin']
    method = self.params['method']
    normalize = self.params['normalize']
    threshold = self.params['threshold']

    ts.redistribute('frequency')

    lat = ts.attrs['sitelat']
    # lon = ts.attrs['sitelon']
    lon = 0.0
    # lon = np.degrees(ts['ra_dec'][0, 0]) # the first ra
    local_origin = False
    freq = ts.freq
    freqs = ts.freq.data.to_numpy_array(root=None) # MHz
    band_width = ts.attrs['freqstep'] # MHz
    ndays = ts.attrs['ndays']
    feeds = ts['feedno'][:]
    az, alt = ts['az_alt'][0]
    az = np.degrees(az)
    alt = np.degrees(alt)
    pointing = [az, alt, 0.0]
    feedpos = ts['feedpos'][:]

    # Build the telescope model matching the observed array.
    if ts.is_dish:
        dish_width = ts.attrs['dishdiam']
        tel = tl_dish.TlUnpolarisedDishArray(lat, lon, freqs, band_width,
                                             tsys, ndays, accuracy_boost,
                                             l_boost, bl_range,
                                             auto_correlations, local_origin,
                                             dish_width, feedpos, pointing)
    elif ts.is_cylinder:
        # factor = 1.2 # suppose an illumination efficiency, keep same with that in timestream_common
        factor = 0.79 # for xx
        # factor = 0.88 # for yy
        cyl_width = factor * ts.attrs['cywid']
        tel = tl_cylinder.TlUnpolarisedCylinder(
            lat, lon, freqs, band_width, tsys, ndays, accuracy_boost,
            l_boost, bl_range, auto_correlations, local_origin,
            cyl_width, feedpos)
    else:
        raise RuntimeError('Unknown array type %s' % ts.attrs['telescope'])

    if not simulate:
        # mask daytime data
        if mask_daytime:
            day_inds = np.where(
                np.logical_and(
                    ts['local_hour'][:] >= mask_time_range[0],
                    ts['local_hour'][:] <= mask_time_range[1]))[0]
            ts.local_vis_mask[
                day_inds] = True # do not change vis directly

        # average data
        nt = ts['sec1970'].shape[0]
        phi_size = 2 * tel.mmax + 1
        nt_m = float(nt) / phi_size

        # roll data to have phi=0 near the first
        roll_len = np.int(np.around(0.5 * nt_m))
        ts.local_vis[:] = np.roll(ts.local_vis[:], roll_len, axis=0)
        ts.local_vis_mask[:] = np.roll(ts.local_vis_mask[:], roll_len, axis=0)
        ts['ra_dec'][:] = np.roll(ts['ra_dec'][:], roll_len, axis=0)

        # Each of the phi_size bins receives (nearly) equal numbers of
        # the nt time samples via repeat + split_m.
        repeat_inds = np.repeat(np.arange(nt), phi_size)
        num, start, end = mpiutil.split_m(nt * phi_size, phi_size)

        # phi = np.zeros((phi_size,), dtype=ts['ra_dec'].dtype)
        phi = np.linspace(0, 2 * np.pi, phi_size, endpoint=False)
        vis = np.zeros((phi_size, ) + ts.local_vis.shape[1:],
                       dtype=ts.vis.dtype)

        # average over time
        for idx in xrange(phi_size):
            inds, weight = unique(repeat_inds[start[idx]:end[idx]],
                                  return_counts=True)
            vis[idx] = average(np.ma.array(ts.local_vis[inds],
                                           mask=ts.local_vis_mask[inds]),
                               axis=0,
                               weights=weight) # time mean
            # phi[idx] = np.average(ts['ra_dec'][:, 0][inds], axis=0, weights=weight)

        # Select / combine polarisations.
        if pol == 'xx':
            vis = vis[:, :, 0, :]
        elif pol == 'yy':
            vis = vis[:, :, 1, :]
        elif pol == 'I':
            vis = 0.5 * (vis[:, :, 0, :] + vis[:, :, 1, :])
        elif pol == 'all':
            vis = np.sum(vis, axis=2) # sum over all pol
        else:
            raise ValueError('Invalid pol: %s' % pol)

        allpairs = tel.allpairs
        redundancy = tel.redundancy

        # reorder bls according to allpairs; conjugate when only the
        # reversed pair is present in the recorded baseline order.
        vis_tmp = np.zeros_like(vis)
        bls = [tuple(bl) for bl in ts['blorder'][:]]
        for ind, (a1, a2) in enumerate(allpairs):
            try:
                b_ind = bls.index((feeds[a1], feeds[a2]))
                vis_tmp[:, :, ind] = vis[:, :, b_ind]
            except ValueError:
                b_ind = bls.index((feeds[a2], feeds[a1]))
                vis_tmp[:, :, ind] = vis[:, :, b_ind].conj()

        # average over redundancy
        vis_stream = np.zeros(vis.shape[:-1] + (len(redundancy), ),
                              dtype=vis_tmp.dtype)
        red_bin = np.cumsum(np.insert(redundancy, 0, 0)) # redundancy bin
        # average over redundancy
        for ind in xrange(len(redundancy)):
            vis_stream[:, :, ind] = np.sum(
                vis_tmp[:, :, red_bin[ind]:red_bin[ind + 1]],
                axis=2) / redundancy[ind]

        del vis
        del vis_tmp

    # beamtransfer
    bt = beamtransfer.BeamTransfer(beam_dir, tel, noise_weight, True)
    bt.generate()

    if simulate:
        ndays = 733
        print ndays
        tstream = timestream.simulate(bt, ts_dir, ts_name, input_maps,
                                      ndays, add_noise=add_noise)
    else:
        # timestream and map-making
        tstream = timestream.Timestream(ts_dir, ts_name, bt)

        for lfi, fi in freq.data.enumerate(axis=0):
            # Make directory if required
            if not os.path.exists(tstream._fdir(fi)):
                os.makedirs(tstream._fdir(fi))

            # Write file contents
            with h5py.File(tstream._ffile(fi), 'w') as f:
                # Timestream data
                f.create_dataset('/timestream', data=vis_stream[:, lfi].T)
                f.create_dataset('/phi', data=phi)

                # Telescope layout data
                f.create_dataset('/feedmap', data=tel.feedmap)
                f.create_dataset('/feedconj', data=tel.feedconj)
                f.create_dataset('/feedmask', data=tel.feedmask)
                f.create_dataset('/uniquepairs', data=tel.uniquepairs)
                f.create_dataset('/baselines', data=tel.baselines)

                # Telescope frequencies
                f.create_dataset('/frequencies', data=freqs)

                # Write metadata
                f.attrs['beamtransfer_path'] = os.path.abspath(
                    bt.directory)
                f.attrs['ntime'] = phi_size

        mpiutil.barrier()

    tstream.generate_mmodes()
    nside = hputil.nside_for_lmax(tel.lmax,
                                  accuracy_boost=tel.accuracy_boost)
    if dirty_map:
        tstream.mapmake_full(nside, 'map_full_dirty.hdf5', nbin,
                             dirty=True, method=method,
                             normalize=normalize, threshold=threshold)
    else:
        tstream.mapmake_full(nside, 'map_full.hdf5', nbin, dirty=False,
                             method=method, normalize=normalize,
                             threshold=threshold)

    # ts.add_history(self.history)

    return tstream
def process(self, ts):
    """Map-making task (memh5 variant): average `ts` onto the telescope's
    phi grid, average over redundant baselines, write the combined
    timestream to a single HDF5 file via memh5, then generate m-modes
    and make the full sky map.

    Returns
    -------
    ts : Timestream
        The generated (or simulated) timestream object (rebinds the
        `ts` name).
    """

    mask_daytime = self.params['mask_daytime']
    mask_time_range = self.params['mask_time_range']
    beam_theta_range = self.params['beam_theta_range']
    tsys = self.params['tsys']
    accuracy_boost = self.params['accuracy_boost']
    l_boost = self.params['l_boost']
    bl_range = self.params['bl_range']
    auto_correlations = self.params['auto_correlations']
    pol = self.params['pol']
    beam_dir = output_path(self.params['beam_dir'])
    gen_inv = self.params['gen_invbeam']
    noise_weight = self.params['noise_weight']
    ts_dir = output_path(self.params['ts_dir'])
    ts_name = self.params['ts_name']
    simulate = self.params['simulate']
    input_maps = self.params['input_maps']
    add_noise = self.params['add_noise']

    ts.redistribute('frequency')

    lat = ts.attrs['sitelat']
    # lon = ts.attrs['sitelon']
    lon = 0.0
    # lon = np.degrees(ts['ra_dec'][0, 0]) # the first ra
    freqs = ts.freq.data.to_numpy_array(root=None)
    band_width = ts.attrs['freqstep'] # MHz
    ndays = ts.attrs['ndays']
    feeds = ts['feedno'][:]
    az, alt = ts['az_alt'][0]
    az = np.degrees(az)
    alt = np.degrees(alt)
    pointing = [az, alt, 0.0]
    feedpos = ts['feedpos'][:]

    # Build the telescope model matching the observed array.
    if ts.is_dish:
        dish_width = ts.attrs['dishdiam']
        tel = tl_dish.TlUnpolarisedDishArray(lat, lon, freqs,
                                             beam_theta_range, tsys,
                                             ndays, accuracy_boost,
                                             l_boost, bl_range,
                                             auto_correlations,
                                             dish_width, feedpos, pointing)
    elif ts.is_cylinder:
        # factor = 1.2 # suppose an illumination efficiency, keep same with that in timestream_common
        factor = 0.79 # for xx
        # factor = 0.88 # for yy
        cyl_width = factor * ts.attrs['cywid']
        tel = tl_cylinder.TlUnpolarisedCylinder(
            lat, lon, freqs, beam_theta_range, tsys, ndays,
            accuracy_boost, l_boost, bl_range, auto_correlations,
            cyl_width, feedpos)
    else:
        raise RuntimeError('Unknown array type %s' % ts.attrs['telescope'])

    # import matplotlib
    # matplotlib.use('Agg')
    # import matplotlib.pyplot as plt
    # plt.figure()
    # plt.plot(ts['ra_dec'][:])
    # # plt.plot(ts['az_alt'][:])
    # plt.savefig('ra_dec1.png')

    if not simulate:
        # mask daytime data
        if mask_daytime:
            day_inds = np.where(
                np.logical_and(
                    ts['local_hour'][:] >= mask_time_range[0],
                    ts['local_hour'][:] <= mask_time_range[1]))[0]
            ts.local_vis_mask[
                day_inds] = True # do not change vis directly

        # average data
        nt = ts['sec1970'].shape[0]
        phi_size = tel.phi_size
        nt_m = float(nt) / phi_size

        # roll data to have phi=0 near the first
        roll_len = np.int(np.around(0.5 * nt_m))
        ts.local_vis[:] = np.roll(ts.local_vis[:], roll_len, axis=0)
        ts.local_vis_mask[:] = np.roll(ts.local_vis_mask[:], roll_len, axis=0)
        ts['ra_dec'][:] = np.roll(ts['ra_dec'][:], roll_len, axis=0)

        # Each of the phi_size bins receives (nearly) equal numbers of
        # the nt time samples via repeat + split_m.
        repeat_inds = np.repeat(np.arange(nt), phi_size)
        num, start, end = mpiutil.split_m(nt * phi_size, phi_size)

        # phi = np.zeros((phi_size,), dtype=ts['ra_dec'].dtype)
        phi = np.linspace(0, 2 * np.pi, phi_size, endpoint=False)
        vis = np.zeros((phi_size, ) + ts.local_vis.shape[1:],
                       dtype=ts.vis.dtype)

        # average over time
        for idx in xrange(phi_size):
            inds, weight = unique(repeat_inds[start[idx]:end[idx]],
                                  return_counts=True)
            vis[idx] = average(np.ma.array(ts.local_vis[inds],
                                           mask=ts.local_vis_mask[inds]),
                               axis=0,
                               weights=weight) # time mean
            # phi[idx] = np.average(ts['ra_dec'][:, 0][inds], axis=0, weights=weight)

        # Select / combine polarisations.
        if pol == 'xx':
            vis = vis[:, :, 0, :]
        elif pol == 'yy':
            vis = vis[:, :, 1, :]
        elif pol == 'I':
            vis = 0.5 * (vis[:, :, 0, :] + vis[:, :, 1, :])
        elif pol == 'all':
            vis = np.sum(vis, axis=2) # sum over all pol
        else:
            raise ValueError('Invalid pol: %s' % pol)

        allpairs = tel.allpairs
        redundancy = tel.redundancy

        # reorder bls according to allpairs; conjugate when only the
        # reversed pair is present in the recorded baseline order.
        vis_tmp = np.zeros_like(vis)
        bls = [tuple(bl) for bl in ts['blorder'][:]]
        for ind, (a1, a2) in enumerate(allpairs):
            try:
                b_ind = bls.index((feeds[a1], feeds[a2]))
                vis_tmp[:, :, ind] = vis[:, :, b_ind]
            except ValueError:
                b_ind = bls.index((feeds[a2], feeds[a1]))
                vis_tmp[:, :, ind] = vis[:, :, b_ind].conj()

        # average over redundancy
        vis_stream = np.zeros(vis.shape[:-1] + (len(redundancy), ),
                              dtype=vis_tmp.dtype)
        red_bin = np.cumsum(np.insert(redundancy, 0, 0)) # redundancy bin
        # average over redundancy
        for ind in xrange(len(redundancy)):
            vis_stream[:, :, ind] = np.sum(
                vis_tmp[:, :, red_bin[ind]:red_bin[ind + 1]],
                axis=2) / redundancy[ind]

        del vis
        del vis_tmp

        # Assemble everything into one distributed memh5 group.
        vis_stream = mpiarray.MPIArray.wrap(vis_stream, axis=1)
        vis_h5 = memh5.MemGroup(distributed=True)
        vis_h5.create_dataset('/timestream', data=vis_stream)
        vis_h5.create_dataset('/phi', data=phi)

        # Telescope layout data
        vis_h5.create_dataset('/feedmap', data=tel.feedmap)
        vis_h5.create_dataset('/feedconj', data=tel.feedconj)
        vis_h5.create_dataset('/feedmask', data=tel.feedmask)
        vis_h5.create_dataset('/uniquepairs', data=tel.uniquepairs)
        vis_h5.create_dataset('/baselines', data=tel.baselines)

        # Telescope frequencies
        vis_h5.create_dataset('/frequencies', data=freqs)

        # Write metadata
        # vis_h5.attrs['beamtransfer_path'] = os.path.abspath(bt.directory)
        vis_h5.attrs['ntime'] = phi_size

    # beamtransfer
    bt = beamtransfer.BeamTransfer(beam_dir, tel, gen_inv, noise_weight)
    bt.generate()

    if simulate:
        ndays = 733
        print ndays
        ts = timestream.simulate(bt, ts_dir, ts_name, input_maps, ndays,
                                 add_noise=add_noise)
    else:
        # timestream and map-making
        ts = timestream.Timestream(ts_dir, ts_name, bt)
        # Make directory if required
        try:
            os.makedirs(ts._tsdir)
        except OSError:
            # directory exists
            pass
        vis_h5.to_hdf5(ts._tsfile)
    # ts.generate_mmodes(vis_stream.to_numpy_array(root=None))
    ts.generate_mmodes()
    nside = hputil.nside_for_lmax(tel.lmax,
                                  accuracy_boost=tel.accuracy_boost)
    ts.mapmake_full(nside, 'full')

    # ts.add_history(self.history)

    return ts
def process(self, ts):
    """Grid a timestream onto uniform phi bins and run m-mode map-making.

    The input visibilities are (optionally) daytime-masked, rolled so
    phi=0 lies near the first sample, averaged into ``tel.phi_size``
    phi bins, polarisation-selected, reordered to the telescope's
    baseline ordering and averaged over redundant baselines.  The
    binned stream is staged in a distributed memh5 group and written
    to HDF5, beam transfer matrices are generated, m-modes computed
    and a full map made.

    Parameters
    ----------
    ts : timestream container (project type)
        Must provide the datasets/attrs accessed below (``feedno``,
        ``az_alt``, ``feedpos``, ``sec1970``, ``blorder``, ...).

    Returns
    -------
    ts : timestream.Timestream
        The map-making timestream (a simulated one when the
        ``simulate`` parameter is set).
    """
    # Unpack task parameters.
    mask_daytime = self.params["mask_daytime"]
    mask_time_range = self.params["mask_time_range"]
    beam_theta_range = self.params["beam_theta_range"]
    tsys = self.params["tsys"]
    accuracy_boost = self.params["accuracy_boost"]
    l_boost = self.params["l_boost"]
    bl_range = self.params["bl_range"]
    auto_correlations = self.params["auto_correlations"]
    pol = self.params["pol"]
    beam_dir = output_path(self.params["beam_dir"])
    gen_inv = self.params["gen_invbeam"]
    noise_weight = self.params["noise_weight"]
    ts_dir = output_path(self.params["ts_dir"])
    ts_name = self.params["ts_name"]
    simulate = self.params["simulate"]
    input_maps = self.params["input_maps"]
    add_noise = self.params["add_noise"]

    ts.redistribute("frequency")

    lat = ts.attrs["sitelat"]
    # lon = ts.attrs['sitelon']
    lon = 0.0
    # lon = np.degrees(ts['ra_dec'][0, 0]) # the first ra
    freqs = ts.freq.data.to_numpy_array(root=None)
    ndays = 1
    feeds = ts["feedno"][:]
    az, alt = ts["az_alt"][0]
    az = np.degrees(az)
    alt = np.degrees(alt)
    pointing = [az, alt, 0.0]
    feedpos = ts["feedpos"][:]

    # Build the telescope model matching the array type.
    if ts.is_dish:
        dish_width = ts.attrs["dishdiam"]
        tel = tl_dish.TlUnpolarisedDishArray(
            lat,
            lon,
            freqs,
            beam_theta_range,
            tsys,
            ndays,
            accuracy_boost,
            l_boost,
            bl_range,
            auto_correlations,
            dish_width,
            feedpos,
            pointing,
        )
    elif ts.is_cylinder:
        cyl_width = ts.attrs["cywid"]
        tel = tl_cylinder.TlUnpolarisedCylinder(
            lat,
            lon,
            freqs,
            beam_theta_range,
            tsys,
            ndays,
            accuracy_boost,
            l_boost,
            bl_range,
            auto_correlations,
            cyl_width,
            feedpos,
        )
    else:
        raise RuntimeError("Unknown array type %s" % ts.attrs["telescope"])

    if not simulate:
        # mask daytime data
        if mask_daytime:
            day_inds = np.where(
                np.logical_and(ts["local_hour"][:] >= mask_time_range[0], ts["local_hour"][:] <= mask_time_range[1])
            )[0]
            ts.local_vis_mask[day_inds] = True  # do not change vis directly

        # average data
        nt = ts["sec1970"].shape[0]
        phi_size = tel.phi_size
        nt_m = float(nt) / phi_size  # time samples per phi bin (may be fractional)

        # roll data to have phi=0 near the first sample
        # NOTE: builtin int, not the deprecated (and since NumPy 1.20
        # removed) np.int alias -- they are the same type.
        roll_len = int(np.around(0.5 * nt_m))
        ts.local_vis[:] = np.roll(ts.local_vis[:], roll_len, axis=0)
        ts.local_vis_mask[:] = np.roll(ts.local_vis_mask[:], roll_len, axis=0)
        ts["ra_dec"][:] = np.roll(ts["ra_dec"][:], roll_len, axis=0)

        repeat_inds = np.repeat(np.arange(nt), phi_size)
        num, start, end = mpiutil.split_m(nt * phi_size, phi_size)

        # phi = np.zeros((phi_size,), dtype=ts['ra_dec'].dtype)
        phi = np.linspace(0, 2 * np.pi, phi_size, endpoint=False)
        vis = np.zeros((phi_size,) + ts.local_vis.shape[1:], dtype=ts.vis.dtype)

        # average over time: masked, weighted mean of the samples that
        # fall into each phi bin
        for idx in xrange(phi_size):
            inds, weight = unique(repeat_inds[start[idx]:end[idx]], return_counts=True)
            vis[idx] = average(
                np.ma.array(ts.local_vis[inds], mask=ts.local_vis_mask[inds]), axis=0, weights=weight
            )  # time mean
            # phi[idx] = np.average(ts['ra_dec'][:, 0][inds], axis=0, weights=weight)

        # select / combine polarisation (axis 2 of vis)
        if pol == "xx":
            vis = vis[:, :, 0, :]
        elif pol == "yy":
            vis = vis[:, :, 1, :]
        elif pol == "I":
            vis = 0.5 * (vis[:, :, 0, :] + vis[:, :, 1, :])
        elif pol == "all":
            vis = np.sum(vis, axis=2)  # sum over all pol
        else:
            raise ValueError("Invalid pol: %s" % pol)

        allpairs = tel.allpairs
        redundancy = tel.redundancy

        # reorder bls according to allpairs; conjugate when the recorded
        # baseline lists the feeds in the opposite order
        vis_tmp = np.zeros_like(vis)
        bls = [tuple(bl) for bl in ts["blorder"][:]]
        for ind, (a1, a2) in enumerate(allpairs):
            try:
                b_ind = bls.index((feeds[a1], feeds[a2]))
                vis_tmp[:, :, ind] = vis[:, :, b_ind]
            except ValueError:
                b_ind = bls.index((feeds[a2], feeds[a1]))
                vis_tmp[:, :, ind] = vis[:, :, b_ind].conj()

        # average over redundancy
        vis_stream = np.zeros(vis.shape[:-1] + (len(redundancy),), dtype=vis_tmp.dtype)
        red_bin = np.cumsum(np.insert(redundancy, 0, 0))  # redundancy bin edges
        for ind in xrange(len(redundancy)):
            vis_stream[:, :, ind] = np.sum(vis_tmp[:, :, red_bin[ind]:red_bin[ind + 1]], axis=2) / redundancy[ind]

        del vis
        del vis_tmp

        # wrap as an MPI array distributed over axis 1 and stage the
        # datasets that map-making needs in a distributed memh5 group
        vis_stream = mpiarray.MPIArray.wrap(vis_stream, axis=1)
        vis_h5 = memh5.MemGroup(distributed=True)
        vis_h5.create_dataset("/timestream", data=vis_stream)
        vis_h5.create_dataset("/phi", data=phi)

        # Telescope layout data
        vis_h5.create_dataset("/feedmap", data=tel.feedmap)
        vis_h5.create_dataset("/feedconj", data=tel.feedconj)
        vis_h5.create_dataset("/feedmask", data=tel.feedmask)
        vis_h5.create_dataset("/uniquepairs", data=tel.uniquepairs)
        vis_h5.create_dataset("/baselines", data=tel.baselines)

        # Telescope frequencies
        vis_h5.create_dataset("/frequencies", data=freqs)

        # Write metadata
        # vis_h5.attrs['beamtransfer_path'] = os.path.abspath(bt.directory)
        vis_h5.attrs["ntime"] = phi_size

    # beamtransfer
    bt = beamtransfer.BeamTransfer(beam_dir, tel, gen_inv, noise_weight)
    bt.generate()

    if simulate:
        ndays = 733
        print(ndays)
        ts = timestream.simulate(bt, ts_dir, ts_name, input_maps, ndays, add_noise=add_noise)
    else:
        # timestream and map-making
        ts = timestream.Timestream(ts_dir, ts_name, bt)

        # Make directory if required
        try:
            os.makedirs(ts._tsdir)
        except OSError:
            # directory exists (possibly created by another MPI rank)
            pass
        vis_h5.to_hdf5(ts._tsfile)

    # ts.generate_mmodes(vis_stream.to_numpy_array(root=None))
    ts.generate_mmodes()
    nside = hputil.nside_for_lmax(tel.lmax, accuracy_boost=tel.accuracy_boost)
    ts.mapmake_full(nside, "full")

    # ts.add_history(self.history)

    return ts
def process(self, ts):
    """Grid a Timestream onto ``2*mmax + 1`` phi bins and make maps.

    Same phi-binning / baseline-reordering / redundancy-averaging
    pipeline as the sibling ``process`` above, but the binned stream
    is written to one HDF5 file per frequency, and map-making supports
    frequency binning, a dirty-map mode and selectable inversion
    method via the ``dirty_map``/``nbin``/``method``/``normalize``/
    ``threshold`` parameters.

    Parameters
    ----------
    ts : Timestream
        Input data; asserted to be a ``Timestream`` instance.

    Returns
    -------
    tstream : timestream.Timestream
        The map-making timestream (a simulated one when ``simulate``
        is set).
    """
    assert isinstance(ts, Timestream), '%s only works for Timestream object' % self.__class__.__name__

    # Unpack task parameters.
    mask_daytime = self.params['mask_daytime']
    mask_time_range = self.params['mask_time_range']
    beam_theta_range = self.params['beam_theta_range']
    tsys = self.params['tsys']
    accuracy_boost = self.params['accuracy_boost']
    l_boost = self.params['l_boost']
    bl_range = self.params['bl_range']
    auto_correlations = self.params['auto_correlations']
    pol = self.params['pol']
    beam_dir = output_path(self.params['beam_dir'])
    # NOTE(review): gen_inv is read but unused below -- BeamTransfer is
    # constructed with a hard-coded True instead; confirm intent.
    gen_inv = self.params['gen_invbeam']
    noise_weight = self.params['noise_weight']
    ts_dir = output_path(self.params['ts_dir'])
    ts_name = self.params['ts_name']
    simulate = self.params['simulate']
    input_maps = self.params['input_maps']
    add_noise = self.params['add_noise']
    dirty_map = self.params['dirty_map']
    nbin = self.params['nbin']
    method = self.params['method']
    normalize = self.params['normalize']
    threshold = self.params['threshold']

    ts.redistribute('frequency')

    lat = ts.attrs['sitelat']
    # lon = ts.attrs['sitelon']
    lon = 0.0
    # lon = np.degrees(ts['ra_dec'][0, 0]) # the first ra
    local_origin = False
    freq = ts.freq
    freqs = ts.freq.data.to_numpy_array(root=None)  # MHz
    band_width = ts.attrs['freqstep']  # MHz
    ndays = 1
    feeds = ts['feedno'][:]
    az, alt = ts['az_alt'][0]
    az = np.degrees(az)
    alt = np.degrees(alt)
    pointing = [az, alt, 0.0]
    feedpos = ts['feedpos'][:]

    # Build the telescope model matching the array type.
    if ts.is_dish:
        dish_width = ts.attrs['dishdiam']
        tel = tl_dish.TlUnpolarisedDishArray(
            lat,
            lon,
            freqs,
            band_width,
            tsys,
            ndays,
            accuracy_boost,
            l_boost,
            bl_range,
            auto_correlations,
            local_origin,
            dish_width,
            feedpos,
            pointing,
        )
    elif ts.is_cylinder:
        factor = 1.2  # suppose an illumination efficiency, keep same with that in timestream_common
        cyl_width = factor * ts.attrs['cywid']
        tel = tl_cylinder.TlUnpolarisedCylinder(
            lat,
            lon,
            freqs,
            band_width,
            tsys,
            ndays,
            accuracy_boost,
            l_boost,
            bl_range,
            auto_correlations,
            local_origin,
            cyl_width,
            feedpos,
        )
    else:
        raise RuntimeError('Unknown array type %s' % ts.attrs['telescope'])

    if not simulate:
        # mask daytime data
        if mask_daytime:
            day_inds = np.where(
                np.logical_and(ts['local_hour'][:] >= mask_time_range[0], ts['local_hour'][:] <= mask_time_range[1])
            )[0]
            ts.local_vis_mask[day_inds] = True  # do not change vis directly

        # average data
        nt = ts['sec1970'].shape[0]
        phi_size = 2 * tel.mmax + 1  # Nyquist sampling for m-modes up to mmax
        nt_m = float(nt) / phi_size  # time samples per phi bin (may be fractional)

        # roll data to have phi=0 near the first sample
        # NOTE: builtin int, not the deprecated (and since NumPy 1.20
        # removed) np.int alias -- they are the same type.
        roll_len = int(np.around(0.5 * nt_m))
        ts.local_vis[:] = np.roll(ts.local_vis[:], roll_len, axis=0)
        ts.local_vis_mask[:] = np.roll(ts.local_vis_mask[:], roll_len, axis=0)
        ts['ra_dec'][:] = np.roll(ts['ra_dec'][:], roll_len, axis=0)

        repeat_inds = np.repeat(np.arange(nt), phi_size)
        num, start, end = mpiutil.split_m(nt * phi_size, phi_size)

        # phi = np.zeros((phi_size,), dtype=ts['ra_dec'].dtype)
        phi = np.linspace(0, 2 * np.pi, phi_size, endpoint=False)
        vis = np.zeros((phi_size,) + ts.local_vis.shape[1:], dtype=ts.vis.dtype)

        # average over time: masked, weighted mean of the samples that
        # fall into each phi bin
        for idx in xrange(phi_size):
            inds, weight = unique(repeat_inds[start[idx]:end[idx]], return_counts=True)
            vis[idx] = average(
                np.ma.array(ts.local_vis[inds], mask=ts.local_vis_mask[inds]), axis=0, weights=weight
            )  # time mean
            # phi[idx] = np.average(ts['ra_dec'][:, 0][inds], axis=0, weights=weight)

        # select / combine polarisation (axis 2 of vis)
        if pol == 'xx':
            vis = vis[:, :, 0, :]
        elif pol == 'yy':
            vis = vis[:, :, 1, :]
        elif pol == 'I':
            vis = 0.5 * (vis[:, :, 0, :] + vis[:, :, 1, :])
        elif pol == 'all':
            vis = np.sum(vis, axis=2)  # sum over all pol
        else:
            raise ValueError('Invalid pol: %s' % pol)

        allpairs = tel.allpairs
        redundancy = tel.redundancy

        # reorder bls according to allpairs; conjugate when the recorded
        # baseline lists the feeds in the opposite order
        vis_tmp = np.zeros_like(vis)
        bls = [tuple(bl) for bl in ts['blorder'][:]]
        for ind, (a1, a2) in enumerate(allpairs):
            try:
                b_ind = bls.index((feeds[a1], feeds[a2]))
                vis_tmp[:, :, ind] = vis[:, :, b_ind]
            except ValueError:
                b_ind = bls.index((feeds[a2], feeds[a1]))
                vis_tmp[:, :, ind] = vis[:, :, b_ind].conj()

        # average over redundancy
        vis_stream = np.zeros(vis.shape[:-1] + (len(redundancy),), dtype=vis_tmp.dtype)
        red_bin = np.cumsum(np.insert(redundancy, 0, 0))  # redundancy bin edges
        for ind in xrange(len(redundancy)):
            vis_stream[:, :, ind] = np.sum(vis_tmp[:, :, red_bin[ind]:red_bin[ind + 1]], axis=2) / redundancy[ind]

        del vis
        del vis_tmp

    # beamtransfer
    bt = beamtransfer.BeamTransfer(beam_dir, tel, noise_weight, True)
    bt.generate()

    if simulate:
        ndays = 733
        print(ndays)
        tstream = timestream.simulate(bt, ts_dir, ts_name, input_maps, ndays, add_noise=add_noise)
    else:
        # timestream and map-making: write one HDF5 file per local frequency
        tstream = timestream.Timestream(ts_dir, ts_name, bt)

        for lfi, fi in freq.data.enumerate(axis=0):
            # Make directory if required.  EAFP instead of an
            # exists()/makedirs() pair: several MPI ranks may race to
            # create the same directory (matches the sibling process
            # method's handling).
            try:
                os.makedirs(tstream._fdir(fi))
            except OSError:
                # directory already exists
                pass

            # Write file contents
            with h5py.File(tstream._ffile(fi), 'w') as f:
                # Timestream data
                f.create_dataset('/timestream', data=vis_stream[:, lfi].T)
                f.create_dataset('/phi', data=phi)

                # Telescope layout data
                f.create_dataset('/feedmap', data=tel.feedmap)
                f.create_dataset('/feedconj', data=tel.feedconj)
                f.create_dataset('/feedmask', data=tel.feedmask)
                f.create_dataset('/uniquepairs', data=tel.uniquepairs)
                f.create_dataset('/baselines', data=tel.baselines)

                # Telescope frequencies
                f.create_dataset('/frequencies', data=freqs)

                # Write metadata
                f.attrs['beamtransfer_path'] = os.path.abspath(bt.directory)
                f.attrs['ntime'] = phi_size

        mpiutil.barrier()

    tstream.generate_mmodes()
    nside = hputil.nside_for_lmax(tel.lmax, accuracy_boost=tel.accuracy_boost)
    if dirty_map:
        tstream.mapmake_full(nside, 'map_full_dirty.hdf5', nbin, dirty=True, method=method, normalize=normalize, threshold=threshold)
    else:
        tstream.mapmake_full(nside, 'map_full.hdf5', nbin, dirty=False, method=method, normalize=normalize, threshold=threshold)

    # ts.add_history(self.history)

    return tstream