Example #1
    def action(self, Data):
        # Figure out what map band this corresponds to.
        Data.calc_freq()
        freq = Data.freq
        # We are going to look for an exact match to the map frequencies.
        # This could be made more general since the sub_map function can handle
        # partial overlap, but this will be fine for now.
        for band_maps in self.maps:
            maps_freq = band_maps[0].get_axis('freq')
            if sp.allclose(maps_freq, freq):
                maps = band_maps
                break
        else:
            raise NotImplementedError('No maps with frequency axis exactly'
                                      ' matching data.')
        # Now make sure we have the polarizations in the right order.
        data_pols = Data.field['CRVAL4'].copy()
        for ii in range(len(data_pols)):
            if (misc.polint2str(data_pols[ii])
                != self.params['map_polarizations'][ii]):
                raise NotImplementedError('Map polarizations not in same order'
                                          ' as data polarizations.')
        if (not self.params['solve_for_gain'] or
            self.params['gain_output_end'] == ''):
            sub_map(Data, maps, self.params['solve_for_gain'],
                    interpolation=self.params['interpolation'])
        else:
            block_gain = {}
            Data.calc_freq()
            block_gain['freq'] = sp.copy(Data.freq)
            block_gain['time'] = Data.field['DATE-OBS'][0]
            block_gain['scan'] = Data.field['SCAN']
            block_gain['gain'] = sub_map(Data, maps, True, 
                                interpolation=self.params['interpolation'])
            self.gain_list.append(block_gain)

        Data.add_history('Subtracted map from data.', 
                         ('Map root: ' + ku.abbreviate_file_path(
                         self.params['map_input_root']),))
        return Data
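
The band matching above relies on Python's for/else clause: the else branch runs only when the loop finishes without hitting break, i.e. when no band's frequency axis matched the data. Below is a minimal, self-contained sketch of that pattern; the helper name match_band and the frequency axes are purely illustrative and not part of the original module.

import numpy as np

def match_band(map_freq_axes, data_freq):
    # The else on the for loop fires only if the loop never hit break,
    # i.e. no frequency axis matched the data exactly.
    for band_index, maps_freq in enumerate(map_freq_axes):
        if np.allclose(maps_freq, data_freq):
            break
    else:
        raise NotImplementedError('No maps with frequency axis exactly'
                                  ' matching data.')
    return band_index

axes = [np.linspace(7e8, 9e8, 4096), np.linspace(9e8, 11e8, 4096)]
assert match_band(axes, np.linspace(9e8, 11e8, 4096)) == 1
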
Example #2
def process_moonscan(moon_dataobj, outfile, avg_width=30):
    (ndata, npol, ntoggle, nfreq) = moon_dataobj.dims

    moon_dataobj.calc_freq()
    moon_dataobj.calc_pointing()
    moon_dataobj.calc_time()
    freq = moon_dataobj.freq
    ra = moon_dataobj.ra
    dec = moon_dataobj.dec
    date_time = moon_dataobj.field['DATE-OBS']
    time = moon_dataobj.time
    az = moon_dataobj.field['CRVAL2']
    el = moon_dataobj.field['CRVAL3']

    print moon_dataobj.field.keys()
    print moon_dataobj.field['CAL']

    pols = list(moon_dataobj.field['CRVAL4'])
    #print moon_dataobj.field['CRVAL4']
    pol_names = {}
    for pol_idx in range(npol):
        pol_names[utils.polint2str(pols[pol_idx])] = pol_idx

    print pol_names

    # find the peak in XX, YY
    maxlist = np.argmax(moon_dataobj.data[:, 0, 0, :], axis=0)
    maxlist = maxlist[np.where(maxlist > 0)]
    max_xx = np.mean(maxlist)
    print "max_xx", max_xx, np.std(maxlist)

    maxlist = np.argmax(moon_dataobj.data[:, 3, 0, :], axis=0)
    maxlist = maxlist[np.where(maxlist > 0)]
    max_yy = np.mean(maxlist)
    print "max_yy", max_yy, np.std(maxlist)

    time_max = int((max_xx + max_yy) / 2.)
    print time_max

    # find the max/min in YX (well-defined because of dipole leakage)
    maxlist = np.argmax(moon_dataobj.data[:, 2, 0, :], axis=0)
    maxlist = maxlist[np.where(maxlist > 0)]
    max_yx = int(np.mean(maxlist))
    print "max_yx", max_yx, np.std(maxlist)

    minlist = np.argmin(moon_dataobj.data[:, 2, 0, :], axis=0)
    minlist = minlist[np.where(minlist > 0)]
    min_yx = int(np.mean(minlist))
    print "min_yx", min_yx, np.std(minlist)

    before = np.mean(moon_dataobj.data[0:(2 * avg_width), :, 0, :], axis=0)
    during = np.mean(moon_dataobj.data[(time_max - avg_width): \
                                       (time_max + avg_width), :, 0, :], axis=0)
    during_u = np.mean(moon_dataobj.data[(max_yx - avg_width): \
                                       (max_yx + avg_width), :, 0, :], axis=0)
    during_d = np.mean(moon_dataobj.data[(min_yx - avg_width): \
                                       (min_yx + avg_width), :, 0, :], axis=0)
    after = np.mean(moon_dataobj.data[-(2 * avg_width):-1, :, 0, :], axis=0)

    offsrc = (before + after) / 2.
    during -= offsrc
    during_u -= offsrc
    during_d -= offsrc

    outfp = open(outfile, "w")

    for freq_idx in range(nfreq):
        compilation = [freq_idx]
        #compilation.extend(before[:, freq_idx])
        compilation.extend(during[:, freq_idx])
        compilation.extend(during_u[:, freq_idx])
        compilation.extend(during_d[:, freq_idx])
        #compilation.extend(after[:, freq_idx])
        outfp.write(("%.5g " * 13) % tuple(compilation) + "\n")

    outfp.close()
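
Each row written above has 13 whitespace-separated columns: the channel index followed by the four polarizations of during, during_u, and during_d, all with the off-source average already subtracted. The fixed "%.5g " * 13 format therefore assumes npol == 4. The following is a sketch of reading the file back with numpy; the file name is only an example.

import numpy as np

# Columns: chan, during (4 pols), during_u (4 pols), during_d (4 pols)
table = np.loadtxt('moonscan_output.txt')
chan = table[:, 0].astype(int)
during = table[:, 1:5]     # averaged around the XX/YY peak
during_u = table[:, 5:9]   # averaged around the YX maximum
during_d = table[:, 9:13]  # averaged around the YX minimum
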
Example #3
 def calibrate_file(self, middle, gain, freq):
     # This function is largely cut and pasted from process_file. I should
     # really combine the code into an iterator, but that's a lot of work.
     # Alternatively, I could make a meta function and pass a function to it.
     params = self.params
     file_name = (params['input_root'] + middle
                  + params['input_end'])
     # Output parameters.
     Writer = core.fitsGBT.Writer(feedback=self.feedback)
     out_filename = (params['output_root'] + middle
                     + params['output_end'])
     band_inds = params["IFs"]
     Reader = core.fitsGBT.Reader(file_name, feedback=self.feedback)
     n_bands = len(Reader.IF_set)
     if not band_inds:
         band_inds = range(n_bands)
     # Number of bands we actually process.
     n_bands_proc = len(band_inds)
     # Get the key that will group this file with other files.
     key = get_key(middle)
     # Read one block to figure out how many polarizations and channels
     # there are.
     Data = Reader.read(0,0)
     n_pol = Data.dims[1]
     n_cal = Data.dims[2]
     n_chan = Data.dims[3]
     # Allocate memory for the outputs.
     corr = np.zeros((n_bands_proc, n_pol, n_cal, n_chan),
                      dtype=float)
     norm = np.zeros(corr.shape, dtype=int)
     freq = np.empty((n_bands_proc, n_chan))
     for ii in range(n_bands_proc):
         Blocks = Reader.read((), ii)
         Blocks[0].calc_freq()
         freq[ii,:] = Blocks[0].freq
         # We are going to look for an exact match to the map
         # frequencies. This could be made more general since the sub_map
         # function can handle partial overlap, but this will be fine for
         # now.
         for band_maps in self.maps:
             maps_freq = band_maps[0].get_axis('freq')
             if np.allclose(maps_freq, freq[ii,:]):
                 maps = band_maps
                 break
         else:
             raise NotImplementedError('No maps with frequency axis exactly'
                                       ' matching data.')
         # Check the polarization axis. If the same number of maps were
         # passed, check that the polarizations are in order.  If only one
         # map was passed, correlate all data polarizations against it.
         data_pols = Blocks[0].field['CRVAL4'].copy()
         if len(band_maps) == 1:
             maps_to_correlate = band_maps * len(data_pols)
         else:
             for jj in range(len(data_pols)):
                 if (misc.polint2str(data_pols[jj])
                     != self.params['map_polarizations'][jj]):
                     msg = ('Map polarizations not in same order'
                            ' as data polarizations.')
                     raise NotImplementedError(msg)
             maps_to_correlate = band_maps
         # Now process each block.
         for Data in Blocks:
             if params['diff_gain_cal_only']:
                 if tuple(Data.field['CRVAL4']) != (-5, -7, -8, -6):
                     msg = ("Expected polarizations to be ordered "
                            "(XX, XY, YX, YY).")
                     raise NotImplementedError(msg)
                 Data.data[:,0,:,:] /= gain[ii,0,:,:]
                 Data.data[:,3,:,:] /= gain[ii,3,:,:]
                 cross_gain = np.sqrt(gain[ii,0,:,:] * gain[ii,3,:,:])
                 Data.data[:,1,:,:] /= cross_gain
                 Data.data[:,2,:,:] /= cross_gain
             else:
                 Data.data /= gain[ii,...]
             Writer.add_data(Data)
     # Write the data back out.
     utils.mkparents(out_filename)
     Writer.write(out_filename)
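
In the diff_gain_cal_only branch above, XX and YY are divided by their own band gains while the cross-polarizations XY and YX are divided by the geometric mean of the two. The toy illustration below shows only that arithmetic; the array shapes and gain values are made up, and the real gain array comes from the correlation step, not from this sketch.

import numpy as np

n_time, n_cal, n_chan = 10, 2, 8
# Data axes: (time, pol, cal, chan), with polarizations ordered XX, XY, YX, YY.
data = np.ones((n_time, 4, n_cal, n_chan))
gain = np.ones((4, n_cal, n_chan))
gain[0] = 2.0    # XX gain
gain[3] = 0.5    # YY gain

cross_gain = np.sqrt(gain[0] * gain[3])   # geometric mean for XY and YX
data[:, 0] /= gain[0]
data[:, 3] /= gain[3]
data[:, 1] /= cross_gain
data[:, 2] /= cross_gain
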
Example #4
 def process_file(self, middle, Pipe=None):
     params = self.params
     file_name = (params['input_root'] + middle
                  + params['input_end'])
     band_inds = params["IFs"]
     Reader = core.fitsGBT.Reader(file_name, feedback=self.feedback)
     n_bands = len(Reader.IF_set)
     if not band_inds:
         band_inds = range(n_bands)
     # Number of bands we actually process.
     n_bands_proc = len(band_inds)
     # Get the key that will group this file with other files.
     key = get_key(middle)
     # Read one block to figure out how many polarizations and channels
     # there are.
     Data = Reader.read(0,0)
     n_pol = Data.dims[1]
     n_cal = Data.dims[2]
     n_chan = Data.dims[3]
     # Allocate memory for the outputs.
     corr = np.zeros((n_bands_proc, n_pol, n_cal, n_chan),
                      dtype=float)
     norm = np.zeros(corr.shape, dtype=float)
     freq = np.empty((n_bands_proc, n_chan))
     for ii in range(n_bands_proc):
         Blocks = Reader.read((), ii)
         Blocks[0].calc_freq()
         freq[ii,:] = Blocks[0].freq
         # We are going to look for an exact match to the map
         # frequencies. This could be made more general since the sub_map
         # function can handle partial overlap, but this will be fine for
         # now.
         for band_maps in self.maps:
             maps_freq = band_maps[0].get_axis('freq')
             if np.allclose(maps_freq, freq[ii,:]):
                 maps = band_maps
                 break
         else:
             raise NotImplementedError('No maps with frequency axis exactly'
                                       ' matching data.')
         # Check the polarization axis. If the same number of maps were
         # passed, check that the polarizations are in order.  If only one
         # map was passed, correlate all data polarizations against it.
         data_pols = Blocks[0].field['CRVAL4'].copy()
         if len(band_maps) == 1:
             maps_to_correlate = band_maps * len(data_pols)
         else:
             for jj in range(len(data_pols)):
                 if (misc.polint2str(data_pols[jj])
                     != self.params['map_polarizations'][jj]):
                     msg = ('Map polarizations not in same order'
                            ' as data polarizations.')
                     raise NotImplementedError(msg)
             maps_to_correlate = band_maps
         # Now process each block.
         for Data in Blocks:
             this_corr, this_norm = get_correlation(Data,
                             maps_to_correlate,
                             interpolation=params['interpolation'],
                             modes_subtract=params['smooth_modes_subtract'],
                             filter_type=params['filter_type'])
             # Check that the answer we got is sane; if not, throw away
             # this set.
             tmp_corr = this_corr.copy()
             tmp_norm = this_norm.copy()
             tmp_corr[tmp_norm == 0] = 1.
             tmp_norm[tmp_norm == 0] = 1.
             tmp_gains = tmp_corr / tmp_norm
             if np.all(tmp_gains < 2) and np.all(tmp_gains > 0.5):
                 corr[ii,...] += this_corr
                 norm[ii,...] += this_norm
             else:
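                 # Gain estimate falls outside the accepted range; skip this block.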
                 pass
     if Pipe is None:
         return key, corr, norm, freq
     else:
         Pipe.send((key, corr, norm, freq))
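
process_file returns (or sends over Pipe) one (key, corr, norm, freq) tuple per file, with key coming from get_key(middle) so that results from related files can be summed. How the caller combines those tuples is not shown in this excerpt; the sketch below is an assumed reduction, not taken from the original code, that accumulates per key and forms a gain as corr / norm while guarding against channels that never contributed.

import numpy as np

def accumulate_gains(results):
    # results: iterable of (key, corr, norm, freq) tuples from process_file.
    corr_sum, norm_sum = {}, {}
    for key, corr, norm, freq in results:
        if key not in corr_sum:
            corr_sum[key] = np.zeros_like(corr)
            norm_sum[key] = np.zeros_like(norm)
        corr_sum[key] += corr
        norm_sum[key] += norm
    gains = {}
    for key in corr_sum:
        norm = norm_sum[key].copy()
        norm[norm == 0] = 1.   # avoid dividing by zero in empty channels
        gains[key] = corr_sum[key] / norm
    return gains
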