Example 1
def load_cal(filename, baseline, nd_models, freq_channel=None, channel_bw=10.0, channel_mask='', n_chan=4096, channel_range=None, band_input=None):
    """ Load the dataset into memory """
    print('Loading noise diode models')

    try:
        d = scape.DataSet(filename, baseline=baseline, nd_models=nd_models, band=band_input)
    except IOError:
        nd = scape.gaincal.NoiseDiodeModel(freq=[1,2000],temp=[20,20])
        warnings.warn('Failed to load/find noise diode models, setting models to 20 K')
        print('Warning: Failed to load/find noise diode models, setting models to 20 K')
        d = scape.DataSet(filename, baseline=baseline, nd_h_model=nd, nd_v_model=nd, band=band_input)


    if channel_range is not None:
        start_freq_channel = int(channel_range.split(',')[0])
        end_freq_channel = int(channel_range.split(',')[1])
        edge = np.tile(True, n_chan)
        edge[slice(start_freq_channel, end_freq_channel)] = False
    else:
        edge = np.tile(False, n_chan)
    #load static flags if pickle file is given
    if len(channel_mask) > 0:
        with open(channel_mask, mode='rb') as pickle_file:
            rfi_static_flags = pickle.load(pickle_file)
    else:
        rfi_static_flags = np.tile(False, n_chan)

    static_flags = np.logical_or(edge,rfi_static_flags)

    #d = d.select(freqkeep=~static_flags)
    freq_channel_flagged = []
    for band in freq_channel:
        tmp_band = []
        for channel in band :
            if not static_flags[channel] : # if not flagged
                tmp_band.append(channel)
        #if len(tmp_band) > 0 :
        freq_channel_flagged.append(tmp_band)

    #if not freq_channel is None :
    #    d = d.select(freqkeep=freq_channel)
    #print "Flagging RFI"
    #sd = remove_rfi(d,width=7,sigma=5)  # rfi flaging Needed ?
    print("Converting to Temperature")
    print("Plotting the number of channels in each band of the list of lists freq_channel_flagged will be useful")
    d = d.convert_power_to_temperature(freq_width=0.0)
    if d is not None:
        d = d.select(flagkeep='~nd_on')
        d = d.select(labelkeep='track', copy=False)
        d.average(channels_per_band=freq_channel_flagged)
    return d
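
A hedged usage sketch of the function above (the file name, model directory, channel list, mask file and channel range are all hypothetical placeholders):

d = load_cal('1380928963.h5', baseline='A1A1', nd_models='/var/kat/nd_models',
             freq_channel=[list(range(200, 400)), list(range(2000, 2200))],
             channel_mask='rfi_mask.pickle', channel_range='100,3896')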
Example 2
def read_and_select_file(file, bline=None, channels=None, rfi_mask=None, nd_models=None, **kwargs):
    """
    Read in the input h5 file using scape and make a selection.

    file:   {string} filename of h5 file to open in katdal

    Returns:
        the visibility data to plot, the frequency array to plot, the flags to plot
    """

    data = scape.DataSet(file, baseline=bline, nd_models=nd_models, katfile=True)
    # Select desired channel range and tracks
    # Select frequency channels and set up defaults if not specified
    num_channels = len(data.channel_select)
    if channels is None:
        # Default is to drop the first and last 25% of the bandpass
        start_chan = num_channels // 4
        end_chan   = start_chan * 3
    else:
        start_chan = int(channels.split(',')[0])
        end_chan = int(channels.split(',')[1])
    chan_select = list(range(start_chan,end_chan+1))
    if rfi_mask:
        # Pickled masks are binary files, so open in 'rb' mode
        with open(rfi_mask, 'rb') as mask_file:
            chan_select = ~(pickle.load(mask_file))
        if len(chan_select) != num_channels:
            raise ValueError('Number of channels in provided mask does not match number of channels in data')
        chan_select[:start_chan] = False
        chan_select[end_chan:] = False

    #return the selected data
    return data.select(freqkeep=chan_select,labelkeep='track')
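
The rfi_mask argument is expected to unpickle to a boolean per-channel flag array (True = flagged), which the code above inverts into a keep mask. A minimal sketch of writing such a file, assuming 4096 channels and a hypothetical flagged range:

import pickle
import numpy as np

n_chan = 4096                      # assumed; must match the number of channels in the data
flags = np.zeros(n_chan, dtype=bool)
flags[1200:1350] = True            # hypothetical RFI-affected channel range
with open('rfi_mask.pickle', 'wb') as f:
    pickle.dump(flags, f)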
Example 3
def read_and_select_file(file, bline=None, channels=None, **kwargs):
    """
    Read in the input h5 file using scape and make a selection.

    file:   {string} filename of h5 file to open in katdal

    Returns:
        the visibility data to plot, the frequency array to plot, the flags to plot
    """

    data = scape.DataSet(file, baseline=bline, katfile=True)
    compscan_labels = []
    # Get compscan names (workaround for broken labelling after selection in scape)
    for compscan in data.compscans:
        compscan_labels.append(compscan.label)
    # Select desired channel range and tracks
    # Select frequency channels and set up defaults if not specified
    num_channels = len(data.channel_select)
    if channels is None:
        # Default is to drop the first and last 25% of the bandpass
        start_chan = num_channels // 4
        end_chan = start_chan * 3
    else:
        start_chan = int(channels.split(',')[0])
        end_chan = int(channels.split(',')[1])
    chan_range = range(start_chan, end_chan + 1)
    data = data.select(freqkeep=chan_range, labelkeep='track')

    #return the selected data
    return data, compscan_labels
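
A hedged usage sketch (the file name and channel range are hypothetical placeholders):

data, compscan_labels = read_and_select_file('1380928963.h5', bline='A1A1', channels='200,800')
print(compscan_labels)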
Example 4
def LoadHDF5(HDF5Filename, header=False):
    try:
        d = scape.DataSet(HDF5Filename, baseline=opts.baseline)
    except ValueError:
        print "WARNING:THIS FILE", HDF5Filename.split(
            '/'
        )[-1], "IS CORRUPTED AND SCAPE WILL NOT PROCESS IT, YOU MAY NEED TO REAUGMENT IT,BUT ITS AN EXPENSIVE TASK..!!"
    else:
        print "SUCCESSFULLY LOADED: Wellcome to scape Library and scape is busy processing your request"

        lo_freq = 4200.0 + d.freqs[len(d.freqs) / 2.0]

        # Identify the RFI channels across the whole band
        rfi_chan_across_all = d.identify_rfi_channels()

        d = d.select(freqkeep=range(100, 420))
        # RFI channels across Fringe Finder channels (i.e. frequency range around channels 100 to 420)
        rfi_channels = d.identify_rfi_channels()
        freqs = d.freqs
        sky_frequency = d.freqs[rfi_channels]
        ant = d.antenna.name
        data_filename = os.path.splitext(
            os.path.basename(HDF5Filename))[0] + '.h5'
        # obs_date = os.path.splitext(os.path.basename(HDF5Filename))[0]
        #date = time.ctime(float(obs_date))

        for compscan in d.compscans:
            azimuth = np.hstack(
                [scan.pointing['az'] for scan in compscan.scans])
            elevation = np.hstack(
                [scan.pointing['el'] for scan in compscan.scans])
            compscan_times = np.hstack(
                [scan.timestamps for scan in compscan.scans])
            compscan_start_time = np.hstack(
                [scan.timestamps[0] for scan in compscan.scans])
            compscan_end_time = np.hstack(
                [scan.timestamps[-1] for scan in compscan.scans])
            middle_time = np.median(compscan_times, axis=None)
            obs_date = katpoint.Timestamp(middle_time)
            middle_start_time = np.median(compscan_start_time)
            middle_end_time = np.median(compscan_end_time)
            end_time = katpoint.Timestamp(middle_end_time)
            min_compscan_az = katpoint.rad2deg(azimuth.min())
            max_compscan_az = katpoint.rad2deg(azimuth.max())
            min_compscan_el = katpoint.rad2deg(elevation.min())
            max_compscan_el = katpoint.rad2deg(elevation.max())
            start_time = katpoint.Timestamp(middle_start_time)
            requested_azel = compscan.target.azel(middle_time)
            #ant_az = katpoint.rad2deg(np.array(requested_azel[0]))
            #ant_el = katpoint.rad2deg(np.array(requested_azel[1]))
            target = compscan.target.name

            f = open(opts.outfilebase + '.csv', 'a')
            for index in range(0, len(rfi_channels)):
                rfi_chan = rfi_channels[index] + 100
                rfi_freq = freqs[rfi_channels[index]]
                f.write('%s, %s, %s, %s, %s,%f, %f,%f,%f, %f, %d, %f\n' % (data_filename,start_time, end_time, ant,target,min_compscan_az,max_compscan_az,\
                min_compscan_el, max_compscan_el,lo_freq, rfi_chan, rfi_freq))
            f.close()
Example 5
def load_cal(filename, baseline, start_freq_channel, end_freq_channel, nd_models):
    """ Load the dataset into memory """
    d = scape.DataSet(filename, baseline=baseline, nd_models=nd_models)
    d = d.select(freqkeep=range(start_freq_channel, end_freq_channel + 1))
    d = d.convert_power_to_temperature(min_duration=opts.min_nd, jump_significance=10.0)
    d = d.select(flagkeep='~nd_on')
    d = d.select(labelkeep='track', copy=False)
    d.average()
    return d
Example 6
def extract_cal_dataset(dataset):
    """Build data set from scans in original dataset containing noise diode firings."""
    compscanlist = []
    for compscan in dataset.compscans:
        # Extract scans containing noise diode firings (make copy, as this will be modified by gain cal)
        # Don't rely on 'cal' labels, as the KAT-7 system does not produce it anymore
        scanlist = [scan.select(copy=True) for scan in compscan.scans
                    if 'nd_on' in scan.flags.dtype.fields and scan.flags['nd_on'].any()]
        if scanlist:
            compscanlist.append(scape.CompoundScan(scanlist, compscan.target))
    return scape.DataSet(None, compscanlist, dataset.experiment_id, dataset.observer,
                         dataset.description, dataset.data_unit, dataset.corrconf.select(copy=True),
                         dataset.antenna, dataset.antenna2, dataset.nd_h_model, dataset.nd_v_model, dataset.enviro)
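
extract_cal_dataset is typically chained after loading a full dataset; a minimal sketch (the file name and baseline are hypothetical, and scape is assumed to be importable):

d = scape.DataSet('1380928963.h5', baseline='A1A1')
cal = extract_cal_dataset(d)   # keeps only scans containing noise diode firings
print(len(cal.compscans), 'compound scans contain noise diode events')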
Example 7
def reduce_compscan_with_uncertainty(dataset, compscan_index=0, mc_iterations=1, batch=True,
                                     keep_all=True, num_compscans=0, **kwargs):
    """Do complete point source reduction on a compound scan, with uncertainty."""
    dataset = scape.DataSet(None, [dataset.compscans[compscan_index]], dataset.experiment_id, dataset.observer,
                            dataset.description, dataset.data_unit, dataset.corrconf,
                            dataset.antenna, dataset.antenna2, dataset.nd_h_model, dataset.nd_v_model, dataset.enviro)
    scan_dataset = dataset.select(labelkeep='scan', copy=False)
    compscan = scan_dataset.compscans[0]
    if 'logger' in kwargs:
        kwargs['logger'].info("==== Processing compound scan %d of %d: '%s' ====",
                              compscan_index + 1, num_compscans, ' '.join(compscan_key(compscan)))
    # Build data set containing a single compound scan at a time (make copy, as reduction modifies it)
    scan_dataset.compscans = [compscan]
    # If there are no noise diode models assume that there are no noise diodes
    if dataset.nd_h_model is not None and dataset.nd_v_model is not None:
        compscan_dataset = scan_dataset.select(flagkeep='~nd_on', copy=True)
    else:
        compscan_dataset = dataset.select(labelkeep='scan', copy=True)
    cal_dataset = extract_cal_dataset(dataset)
    # Do first reduction run
    main_compscan = compscan_dataset.compscans[0]
    fixed, variable = reduce_compscan(main_compscan, cal_dataset, **kwargs)
    # Produce data set that has counts converted to Kelvin, but no averaging (for spectral plots)
    unavg_compscan_dataset = scan_dataset.select(flagkeep='~nd_on', copy=True)
    unavg_compscan_dataset.nd_gain = cal_dataset.nd_gain
    unavg_compscan_dataset.convert_power_to_temperature()
    # Add data from Monte Carlo perturbations
    iter_outputs = [np.rec.fromrecords([tuple(variable.values())], names=list(variable.keys()))]
    for m in range(mc_iterations - 1):
        if 'logger' in kwargs:
            kwargs['logger'].info("---- Monte Carlo iteration %d of %d ----",
                                  m + 2, mc_iterations)
        compscan_dataset = scan_dataset.select(flagkeep='~nd_on', copy=True).perturb()
        cal_dataset = extract_cal_dataset(dataset).perturb()
        fixed, variable = reduce_compscan(compscan_dataset.compscans[0], cal_dataset, **kwargs)
        iter_outputs.append(np.rec.fromrecords([tuple(variable.values())], names=list(variable.keys())))
    # Get mean and uncertainty of variable part of output data (assumed to be floats)
    var_output = np.concatenate(iter_outputs).view(np.float64).reshape(mc_iterations, -1)
    var_mean = dict(zip(variable.keys(), var_output.mean(axis=0)))
    var_std = dict(zip([name + '_std' for name in variable], var_output.std(axis=0)))
    # Keep scan only with a valid beam in batch mode (otherwise keep button has to do it explicitly)
    keep = batch and main_compscan.beam is not None and (keep_all or main_compscan.beam.is_valid)
    if 'logger' in kwargs:
        kwargs['logger'].debug("keep_all=%s, main_compscan.beam.is_valid=%s, keep=%s",
                               keep_all, main_compscan.beam is not None and main_compscan.beam.is_valid, keep)
    output_dict = {'keep': keep, 'compscan': main_compscan, 'unavg_dataset': unavg_compscan_dataset}
    output_dict.update(fixed)
    output_dict.update(var_mean)
    output_dict.update(var_std)
    return output_dict
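
The Monte Carlo bookkeeping above packs each iteration's scalar outputs into a record array so that the mean and standard deviation can be taken field-wise at the end. A stripped-down illustration of the same trick, with made-up values:

import numpy as np

variable = {'beam_height': 10.2, 'beam_width': 0.9}   # one iteration's outputs (hypothetical)
iter_outputs = [np.rec.fromrecords([tuple(variable.values())], names=list(variable.keys()))
                for _ in range(5)]                    # pretend five identical MC iterations
var_output = np.concatenate(iter_outputs).view(np.float64).reshape(5, -1)
var_mean = dict(zip(variable.keys(), var_output.mean(axis=0)))
var_std = dict(zip([name + '_std' for name in variable], var_output.std(axis=0)))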
Example 8
def plot_ts(h5, on_ts=None):
    import scape
    fig = plt.figure(figsize=(20, 5))
    a = h5.ants[0]
    nd = scape.gaincal.NoiseDiodeModel(freq=[856, 1712], temp=[20, 20])
    d = scape.DataSet(h5, nd_h_model=nd, nd_v_model=nd)
    scape.plot_xyz(d, 'time', 'amp', label='Average of the data')
    if on_ts is not None:
        on = on_ts
    else:
        on = h5.sensor['Antennas/' + a.name + '/nd_coupler']
    ts = h5.timestamps - h5.timestamps[0]
    plt.plot(ts,
             np.array(on).astype(float) * 4000,
             'g',
             label='katdal ND sensor')
    plt.title("Timeseries for antenna %s - %s" % (a.name, git_info()))
    plt.legend()
    return fig
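
plot_ts expects an already-open katdal dataset; it also assumes matplotlib.pyplot (plt), numpy (np) and a git_info() helper are available in the enclosing module. A hedged usage sketch with a hypothetical file name:

import katdal

h5 = katdal.open('1380928963.h5')
fig = plot_ts(h5)
fig.savefig('nd_timeseries.png')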
Example 9
def main():
    # Parse command-line options and arguments
    parser = optparse.OptionParser(
        usage='%prog [options] <data file> [<data file> ...]',
        description='Display a horizon mask from a set of data files.')
    parser.add_option(
        '-a',
        '--baseline',
        dest='baseline',
        type="string",
        metavar='BASELINE',
        default='A1A1',
        help=
        "Baseline to load (e.g. 'A1A1' for antenna 1), default is first single-dish baseline in file"
    )
    parser.add_option('-o',
                      '--output',
                      dest='output',
                      type="string",
                      metavar='OUTPUTFILE',
                      default=None,
                      help="Write out intermediate h5 file")
    parser.add_option('-s',
                      '--split',
                      dest='split',
                      action="store_true",
                      metavar='SPLIT',
                      default=False,
                      help="Whether to split each horizon plot in half")
    parser.add_option('-z',
                      '--azshift',
                      dest='azshift',
                      type='float',
                      metavar='AZIMUTH_SHIFT',
                      default=45.0,
                      help="Degrees to rotate azimuth window by.")
    parser.add_option(
        '--temp-limit',
        dest='temp_limit',
        type='float',
        default=40.0,
        help=
        "The Tempreture Limit to make the cut-off for the mask. This is calculated "
        "as the T_sys at zenith plus the atmospheric noise contrabution at 10 degrees"
        "elevation as per R.T. 199  .")
    parser.add_option(
        "-n",
        "--nd-models",
        help="Name of optional directory containing noise diode model files")

    (opts, args) = parser.parse_args()

    # Check arguments
    if len(args) < 1:
        raise RuntimeError('Please specify the data file to reduce')

    # Load data set
    gridtemp = []
    for filename in args:
        print('Loading baseline', opts.baseline, 'from data file', filename)
        d = scape.DataSet(filename,
                          baseline=opts.baseline,
                          nd_models=opts.nd_models)
        if len(d.freqs) > 1:
            # Only keep main scans (discard slew and cal scans)
            d = d.select(freqkeep=range(200, 800))
            d = remove_rfi(d, width=7, sigma=5)
            d = d.convert_power_to_temperature(min_duration=3,
                                               jump_significance=4.0)
            d = d.select(flagkeep='~nd_on')
            d = d.select(labelkeep='scan', copy=False)
            # Average all frequency channels into one band
            d.average()

        # Extract azimuth and elevation angle from (azel) target associated with scan, in degrees
        azimuth, elevation, temp = [], [], []
        for s in d.scans:
            azimuth.extend(rad2deg(s.pointing['az']))
            elevation.extend(rad2deg(s.pointing['el']))
            temp.extend(tuple(np.sqrt(s.pol('HH')[:, 0] * s.pol('VV')[:, 0])))
        assert len(azimuth) == len(elevation) == len(temp), "sizes don't match"

        data = (azimuth, elevation, temp)
        print("Gridding the data")
        print("data shape =", np.shape(data[0] + (
            np.array(azimuth)[np.array(azimuth) < -89] + 360.0).tolist()))
        print(np.shape(data[1] +
                       np.array(elevation)[np.array(azimuth) < -89].tolist()))
        print(np.shape(data[2] +
                       np.array(temp)[np.array(azimuth) < -89].tolist()))
        gridtemp.append(
            mlab.griddata(
                data[0] +
                (np.array(azimuth)[np.array(azimuth) < -89] + 360.0).tolist(),
                data[1] +
                np.array(elevation)[np.array(azimuth) < -89].tolist(),
                data[2] + np.array(temp)[np.array(azimuth) < -89].tolist(),
                np.arange(-90, 271, 1), np.arange(4, 16, 0.1)))
        # The +360 is to ensure that the points are well spaced;
        # this offset is not a problem as it is just for sorting out a boundary condition
        print("Completed gridding the data")

    print "Making the mask"
    mask = gridtemp[0] >= opts.temp_limit
    for grid in gridtemp:
        mask = mask * (grid >= opts.temp_limit)
    maskr = np.zeros((len(np.arange(-90, 271, 1)), 2))
    for i, az in enumerate(np.arange(-90, 271, 1)):
        print('at az %f' % (az,))
        maskr[i] = az, np.max(elevation)
        for j, el in enumerate(np.arange(4, 16, 0.1)):
            if ~mask.data[j, i] and ~mask.mask[j, i]:
                maskr[i] = az, el
                break
    np.savetxt('horizon_mask_%s.dat' % (opts.baseline), maskr[1:, :])
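
Note that matplotlib.mlab.griddata used above was removed in Matplotlib 3.1; on current installs the same gridding can be done with scipy. A hedged sketch with made-up scattered samples:

import numpy as np
from scipy.interpolate import griddata

az = np.random.uniform(-90, 270, 1000)     # azimuth samples [deg] (hypothetical)
el = np.random.uniform(4, 16, 1000)        # elevation samples [deg]
temp = 20 + 5 * np.random.rand(1000)       # temperature samples [K]

grid_az, grid_el = np.meshgrid(np.arange(-90, 271, 1), np.arange(4, 16, 0.1))
# Unlike mlab.griddata (which returned a masked array), scipy returns NaN outside the convex hull
gridtemp = griddata((az, el), temp, (grid_az, grid_el), method='linear')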
Example 10
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt

import scape
import katpoint

# Load temporary noise diode models
a1h = np.loadtxt('noise_diode_models/T_nd_A1H_coupler.txt', delimiter=',')
a1v = np.loadtxt('noise_diode_models/T_nd_A1V_coupler.txt', delimiter=',')
a2h = np.loadtxt('noise_diode_models/T_nd_A2H_coupler.txt', delimiter=',')
a2v = np.loadtxt('noise_diode_models/T_nd_A2V_coupler.txt', delimiter=',')

# Load data set and do standard continuum reduction
d = scape.DataSet('1268855687.h5', baseline='A1A1')
d.nd_model = scape.gaincal.NoiseDiodeModel(a1h, a1v, std_temp=0.04)
d = d.select(freqkeep=range(95, 380))
d.convert_power_to_temperature()
d = d.select(labelkeep='scan', copy=False)
d.average()

# Edit out some RFI
d.scans = d.compscans[0].scans
d.scans[37] = d.scans[37].select(timekeep=range(76), copy=True)
d.scans[38] = d.scans[38].select(timekeep=range(12,
                                                len(d.scans[38].timestamps)),
                                 copy=True)
d.scans[72] = d.scans[72].select(timekeep=range(2,
                                                len(d.scans[72].timestamps)),
                                 copy=True)
Example 11
        user_logger.info("Waiting for HDF5 file '%s' to appear in archive" % (session.output_file,))
        h5file = session.get_archived_product(download_dir=os.path.abspath(os.path.curdir))
        if not os.path.isfile(h5file):
            raise RuntimeError("Could not download '%s' to %d" % (h5file, os.path.abspath(download_dir)))

if not kat.dry_run:
    cfg = kat.system

    # Obtain list of antennas and polarisations present in data set
    user_logger.info('Loading HDF5 file into scape and reducing the data')
    h5 = katfile.open(h5file)
    # Iterate through antennas
    for ant in h5.ants:
        ant_num = int(ant.name[3:])
        # Load file and do standard processing
        d = scape.DataSet(h5file, baseline='A%dA%d' % (ant_num, ant_num))
        d = d.select(freqkeep=range(start_freq_channel, end_freq_channel + 1))
        channel_freqs = d.freqs
        d.convert_power_to_temperature()
        d = d.select(labelkeep='scan', copy=False)
        d.average()
        # Only use the first compound scan for fitting beam and baseline
        compscan = d.compscans[0]
        # Calculate average target flux over entire band
        flux_spectrum = [compscan.target.flux_density(freq) for freq in channel_freqs]
        average_flux = np.mean([flux for flux in flux_spectrum if flux])
        # Fit individual polarisation beams first, to get gains and system temperatures
        gain_hh, gain_vv = None, None
        baseline_hh, baseline_vv = None, None
        if (ant.name + 'H') in h5.inputs:
            d.fit_beams_and_baselines(pol='HH', circular_beam=False)
Example 12
def analyse_point_source_scans(filename, opts):
    dataset_name = os.path.splitext(os.path.basename(filename))[0]
    # Default output file names are based on input file name
    if opts.outfilebase is None:
        opts.outfilebase = dataset_name + '_point_source_scans'

    # Set up logging: logging everything (DEBUG & above), both to console and file
    logger = logging.root
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(opts.outfilebase + '.log', 'w')
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
    logger.addHandler(fh)

    # Produce canonical version of baseline string (remove duplicate antennas)
    baseline_ants = opts.baseline.split(',')
    if len(baseline_ants) == 2 and baseline_ants[0] == baseline_ants[1]:
        opts.baseline = baseline_ants[0]

    # Load old CSV file used to select compound scans from dataset
    keep_scans = keep_datasets = None
    if opts.keepfilename:
        ant_name = katpoint.Antenna(
            open(opts.keepfilename).readline().strip().partition('=')[2]).name
        try:
            data = np.loadtxt(opts.keepfilename,
                              dtype=str,
                              comments='#',
                              delimiter=', ')
        except ValueError:
            raise ValueError(
                "CSV file '%s' contains rows with a different number of columns/commas"
                % opts.keepfilename)
        try:
            fields = data[0].tolist()
            id_fields = [
                fields.index('dataset'),
                fields.index('target'),
                fields.index('timestamp_ut')
            ]
        except (IndexError, ValueError):
            raise ValueError("CSV file '%s' do not have the expected columns" %
                             opts.keepfilename)
        keep_scans = set(
            [ant_name + ' ' + ' '.join(line) for line in data[1:, id_fields]])
        keep_datasets = set(data[1:, id_fields[0]])
        # Switch to batch mode if CSV file is given
        opts.batch = True
        logger.debug(
            "Loaded CSV file '%s' containing %d dataset(s) and %d compscan(s) for antenna '%s'"
            %
            (opts.keepfilename, len(keep_datasets), len(keep_scans), ant_name))
        # Ensure we are using antenna found in CSV file (this assumes single dish setup for now)
        csv_baseline = ant_name
        if opts.baseline != 'sd' and opts.baseline != csv_baseline:
            logger.warning(
                "Requested baseline '%s' does not match baseline '%s' in CSV file '%s'"
                % (opts.baseline, csv_baseline, opts.keepfilename))
        logger.warning("Using baseline '%s' found in CSV file '%s'" %
                       (csv_baseline, opts.keepfilename))
        opts.baseline = csv_baseline

    # Avoid loading the data set if it does not appear in specified CSV file
    if keep_datasets and dataset_name not in keep_datasets:
        raise RuntimeError("Skipping dataset '%s' (based on CSV file)" %
                           (filename, ))

    # Load data set
    logger.info("Loading dataset '%s'" % (filename, ))
    dataset = scape.DataSet(filename,
                            baseline=opts.baseline,
                            nd_models=opts.nd_models,
                            time_offset=opts.time_offset,
                            katfile=not opts.old_loader)

    # Select frequency channels and setup defaults if not specified
    num_channels = len(dataset.channel_select)
    if opts.freq_chans is None:
        # Default is drop first and last 25% of the bandpass
        start_chan = num_channels // 4
        end_chan = start_chan * 3
    else:
        start_chan = int(opts.freq_chans.split(',')[0])
        end_chan = int(opts.freq_chans.split(',')[1])
    chan_range = range(start_chan, end_chan + 1)
    dataset = dataset.select(freqkeep=chan_range)

    # Check scan count
    if len(dataset.compscans) == 0 or len(dataset.scans) == 0:
        raise RuntimeError('No scans found in file, skipping data set')
    scan_dataset = dataset.select(labelkeep='scan', copy=False)
    if len(scan_dataset.compscans) == 0 or len(scan_dataset.scans) == 0:
        raise RuntimeError(
            'No scans left after standard reduction, skipping data set (no scans labelled "scan", perhaps?)'
        )
    # Override pointing model if it is specified (useful if it is not in data file, like on early KAT-7)
    if opts.pointing_model:
        pm = open(opts.pointing_model).readline().strip()
        logger.debug("Loaded %d-parameter pointing model from '%s'" %
                     (len(pm.split(',')), opts.pointing_model))
        dataset.antenna.pointing_model = katpoint.PointingModel(pm,
                                                                strict=False)

    # Initialise the output data cache (None indicates the compscan has not been processed yet)
    reduced_data = [{} for n in range(len(scan_dataset.compscans))]

    ### BATCH MODE ###

    # This will cycle through all data sets and stop when done
    if opts.batch:
        # Go one past the end of compscan list to write the output data out to CSV file
        for current_compscan in range(len(scan_dataset.compscans) + 1):
            # Look up compscan key in list of compscans to keep (if provided, only applicable to batch mode anyway)
            if keep_scans and (current_compscan < len(scan_dataset.compscans)):
                cs_key = ' '.join(
                    compscan_key(scan_dataset.compscans[current_compscan]))
                if cs_key not in keep_scans:
                    logger.info(
                        "==== Skipping compound scan '%s' (based on CSV file) ===="
                        % (cs_key, ))
                    continue
            output = reduce_and_plot(dataset,
                                     current_compscan,
                                     reduced_data,
                                     opts,
                                     logger=logger)
        return output

    ### INTERACTIVE MODE ###
    else:
        if not plt:
            raise ImportError(
                'Interactive use of this script requires matplotlib - please install it or run in batch mode'
            )
        # Set up figure with buttons
        plt.ion()
        fig = plt.figure(1)
        plt.clf()
        if opts.plot_spectrum:
            plt.subplot(311)
            plt.subplot(312)
            plt.subplot(313)
        else:
            plt.subplot(211)
            plt.subplot(212)
        plt.subplots_adjust(bottom=0.2, hspace=0.25)
        plt.figtext(0.05, 0.05, '', va='bottom', ha='left')
        plt.figtext(0.05, 0.945, '', va='bottom', ha='left')

        # Make button context manager that disables buttons during processing and re-enables it afterwards
        class DisableButtons(object):
            def __init__(self):
                """Start with empty button list."""
                self.buttons = []

            def append(self, button):
                """Add button to list."""
                self.buttons.append(button)

            def __enter__(self):
                """Disable buttons on entry."""
                if plt.fignum_exists(1):
                    for button in self.buttons:
                        button.eventson = False
                        button.hovercolor = '0.85'
                        button.label.set_color('gray')
                    plt.draw()

            def __exit__(self, exc_type, exc_value, traceback):
                """Re-enable buttons on exit."""
                if plt.fignum_exists(1):
                    for button in self.buttons:
                        button.eventson = True
                        button.hovercolor = '0.95'
                        button.label.set_color('k')
                    plt.draw()

        all_buttons = DisableButtons()

        # Create buttons and their callbacks
        spectrogram_button = widgets.Button(plt.axes([0.37, 0.05, 0.1, 0.075]),
                                            'Spectrogram')

        def spectrogram_callback(event):
            with all_buttons:
                plt.figure(2)
                plt.clf()
                out = reduced_data[fig.current_compscan]
                ax = scape.plot_xyz(out['unavg_dataset'],
                                    'time',
                                    'freq',
                                    'amp',
                                    power_in_dB=True)
                ax.set_title(out['target'], size='medium')

        spectrogram_button.on_clicked(spectrogram_callback)
        all_buttons.append(spectrogram_button)

        keep_button = widgets.Button(plt.axes([0.48, 0.05, 0.1, 0.075]),
                                     'Keep')

        def keep_callback(event):
            with all_buttons:
                reduced_data[fig.current_compscan]['keep'] = True
                fig.current_compscan += 1
                reduce_and_plot(dataset,
                                fig.current_compscan,
                                reduced_data,
                                opts,
                                fig,
                                logger=logger)

        keep_button.on_clicked(keep_callback)
        all_buttons.append(keep_button)

        discard_button = widgets.Button(plt.axes([0.59, 0.05, 0.1, 0.075]),
                                        'Discard')

        def discard_callback(event):
            with all_buttons:
                reduced_data[fig.current_compscan]['keep'] = False
                fig.current_compscan += 1
                reduce_and_plot(dataset,
                                fig.current_compscan,
                                reduced_data,
                                opts,
                                fig,
                                logger=logger)

        discard_button.on_clicked(discard_callback)
        all_buttons.append(discard_button)

        back_button = widgets.Button(plt.axes([0.7, 0.05, 0.1, 0.075]), 'Back')

        def back_callback(event):
            with all_buttons:
                if fig.current_compscan > 0:
                    fig.current_compscan -= 1
                    reduce_and_plot(dataset,
                                    fig.current_compscan,
                                    reduced_data,
                                    opts,
                                    fig,
                                    logger=logger)

        back_button.on_clicked(back_callback)
        all_buttons.append(back_button)

        done_button = widgets.Button(plt.axes([0.81, 0.05, 0.1, 0.075]),
                                     'Done')

        def done_callback(event):
            with all_buttons:
                fig.current_compscan = len(reduced_data)
                reduce_and_plot(dataset,
                                fig.current_compscan,
                                reduced_data,
                                opts,
                                fig,
                                logger=logger)

        done_button.on_clicked(done_callback)
        all_buttons.append(done_button)

        # Start off the processing on the first compound scan
        fig.current_compscan = 0
        reduce_and_plot(dataset,
                        fig.current_compscan,
                        reduced_data,
                        opts,
                        fig,
                        logger=logger)
        # Display plots - this should be called ONLY ONCE, at the VERY END of the script
        # The script stops here until you close the plots...
        plt.show()
Example 13
# Only import matplotlib if not in batch mode
if not opts.batch:
    import matplotlib.pyplot as plt
    import matplotlib.widgets as widgets

# Avoid loading the data set if it does not appear in specified CSV file
if keep_datasets and dataset_name not in keep_datasets:
    raise RuntimeError("Skipping dataset '%s' (based on CSV file)" %
                       (filename, ))

# Load data set
logger.info("Loading dataset '%s'" % (filename, ))
dataset = scape.DataSet(filename,
                        baseline=opts.baseline,
                        nd_models=opts.nd_models,
                        time_offset=opts.time_offset,
                        katfile=not opts.old_loader)

# Select frequency channels and setup defaults if not specified
num_channels = len(dataset.channel_select)
if opts.freq_chans is None:
    # Default is drop first and last 25% of the bandpass
    start_chan = num_channels // 4
    end_chan = start_chan * 3
else:
    start_chan = int(opts.freq_chans.split(',')[0])
    end_chan = int(opts.freq_chans.split(',')[1])
chan_range = range(start_chan, end_chan + 1)
dataset = dataset.select(freqkeep=chan_range)
Example 14
def analyse_point_source_scans(filename, h5file, opts):
    # Default output file names are based on input file name
    dataset_name = os.path.splitext(os.path.basename(filename))[0]
    if opts.outfilebase is None:
        opts.outfilebase = dataset_name + '_point_source_scans'

    kwargs = {}

    # Force centre frequency if Ku-band option is set
    if opts.ku_band:
        kwargs['centre_freq'] = 12.5005e9

    # Produce canonical version of baseline string (remove duplicate antennas)
    baseline_ants = opts.baseline.split(',')
    if len(baseline_ants) == 2 and baseline_ants[0] == baseline_ants[1]:
        opts.baseline = baseline_ants[0]

    # Load data set
    if opts.baseline not in [ant.name for ant in h5file.ants]:
        raise RuntimeError('Cannot find antenna %s in dataset' % opts.baseline)
    # dataset = scape.DataSet(h5file, baseline=opts.baseline, nd_models=opts.nd_models,
    #                         time_offset=opts.time_offset, **kwargs)
    dataset = scape.DataSet(filename,
                            baseline=opts.baseline,
                            nd_models=opts.nd_models,
                            time_offset=opts.time_offset,
                            **kwargs)

    # Select frequency channels and setup defaults if not specified
    num_channels = len(dataset.channel_select)
    if opts.freq_chans is None:
        # Default is drop first and last 25% of the bandpass
        start_chan = num_channels // 4
        end_chan = start_chan * 3
    else:
        start_chan = int(opts.freq_chans.split(',')[0])
        end_chan = int(opts.freq_chans.split(',')[1])
    chan_select = list(range(start_chan, end_chan + 1))

    # Check if a channel mask is specified and apply
    if opts.channel_mask:
        mask_file = open(opts.channel_mask, mode='rb')
        chan_select = ~(pickle.load(mask_file))
        mask_file.close()
        if len(chan_select) != num_channels:
            raise ValueError(
                'Number of channels in provided mask does not match number of channels in data'
            )
        chan_select[:start_chan] = False
        chan_select[end_chan:] = False
    dataset = dataset.select(freqkeep=chan_select)

    # Check scan count
    if len(dataset.compscans) == 0 or len(dataset.scans) == 0:
        raise RuntimeError('No scans found in file, skipping data set')
    scan_dataset = dataset.select(labelkeep='scan', copy=False)
    if len(scan_dataset.compscans) == 0 or len(scan_dataset.scans) == 0:
        raise RuntimeError(
            'No scans left after standard reduction, skipping data set (no scans labelled "scan", perhaps?)'
        )

    # Override pointing model if it is specified (useful if it is not in data file, like on early KAT-7)
    if opts.pointing_model:
        if opts.pointing_model.split('/')[-2] == 'mkat':
            band = 'ku' if opts.ku_band else 'l'
            pt_file = os.path.join(opts.pointing_model,
                                   '%s.%s.pm.csv' % (opts.baseline, band))
        else:
            pt_file = os.path.join(opts.pointing_model,
                                   '%s.pm.csv' % (opts.baseline))
        if not os.path.isfile(pt_file):
            raise RuntimeError('Cannot find file %s' % (pt_file))
        pm = open(pt_file).readline().strip()
        dataset.antenna.pointing_model = katpoint.PointingModel(pm)

    # Remove any noise diode models if the ku band option is set and flag for spikes
    if opts.ku_band:
        dataset.nd_h_model = None
        dataset.nd_v_model = None
        for i in range(len(dataset.scans)):
            dataset.scans[i].data = scape.stats.remove_spikes(
                dataset.scans[i].data, axis=1, spike_width=3, outlier_sigma=5.)

    # Initialise the output data cache (None indicates the compscan has not been processed yet)
    reduced_data = [{} for n in range(len(scan_dataset.compscans))]

    # Go one past the end of compscan list to write the output data out to CSV file
    for current_compscan in range(len(scan_dataset.compscans) + 1):
        # make things play nice
        opts.batch = True
        try:
            the_compscan = scan_dataset.compscans[current_compscan]
        except IndexError:
            the_compscan = None
        fig = plt.figure(1, figsize=(8, 8))
        plt.clf()
        if opts.plot_spectrum:
            plt.subplot(311)
            plt.subplot(312)
            plt.subplot(313)
        else:
            plt.subplot(211)
            plt.subplot(212)
        plt.subplots_adjust(bottom=0.2, hspace=0.25)
        plt.figtext(0.05, 0.05, '', va='bottom', ha='left')
        plt.figtext(0.05, 0.945, '', va='bottom', ha='left')
        # Start off the processing on the first compound scan
        logger = logging.root
        fig.current_compscan = 0
        reduce_and_plot(dataset,
                        fig.current_compscan,
                        reduced_data,
                        opts,
                        fig,
                        logger=logger)

    # Initialise the output data cache (None indicates the compscan has not been processed yet)
    reduced_data = [{} for n in range(len(scan_dataset.compscans))]
    # Go one past the end of compscan list to write the output data out to CSV file
    for current_compscan in range(len(scan_dataset.compscans) + 1):
        # make things play nice
        opts.batch = True
        try:
            the_compscan = scan_dataset.compscans[current_compscan]
        except IndexError:
            the_compscan = None
        logger = logging.root
        output = local_reduce_and_plot(dataset,
                                       current_compscan,
                                       reduced_data,
                                       opts,
                                       logger=logger)
    offsetdata = output[1]
    from katpoint import deg2rad

    def angle_wrap(angle, period=2.0 * np.pi):
        """wrap angle into the interval -*period* / 2 ... *period* / 2."""
        return (angle + 0.5 * period) % period - 0.5 * period
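    # For example (hedged): angle_wrap(deg2rad(350.)) -> deg2rad(-10.),
    # and angle_wrap(deg2rad(-190.)) -> deg2rad(170.)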

    az, el = angle_wrap(deg2rad(offsetdata['azimuth'])), deg2rad(
        offsetdata['elevation'])
    model_delta_az, model_delta_el = ant.pointing_model.offset(az, el)
    measured_delta_az = offsetdata[
        'delta_azimuth'] - model_delta_az  # pointing model correction
    measured_delta_el = offsetdata[
        'delta_elevation'] - model_delta_el  # pointing model correction
    """determine new residuals from current pointing model"""
    residual_az = measured_delta_az - model_delta_az
    residual_el = measured_delta_el - model_delta_el
    residual_xel = residual_az * np.cos(el)
    # Initialise new pointing model and set default enabled parameters
    keep = np.ones((len(offsetdata)), dtype=bool)
    min_rms = np.sqrt(2) * 60. * 1e-12
    use_stats = True
    new_model = katpoint.PointingModel()
    num_params = len(new_model)
    default_enabled = np.array([1, 3, 4, 5, 6, 7]) - 1
    enabled_params = np.tile(False, num_params)
    enabled_params[default_enabled] = True
    enabled_params = enabled_params.tolist()
    # Fit new pointing model
    az, el = angle_wrap(deg2rad(offsetdata['azimuth'])), deg2rad(
        offsetdata['elevation'])
    measured_delta_az, measured_delta_el = deg2rad(
        offsetdata['delta_azimuth']), deg2rad(offsetdata['delta_elevation'])
    # Uncertainties are optional
    min_std = deg2rad(min_rms / 60. / np.sqrt(2))
    std_delta_az = np.clip(deg2rad(offsetdata['delta_azimuth_std']), min_std, np.inf) \
    if 'delta_azimuth_std' in offsetdata.dtype.fields and use_stats else np.tile(min_std, len(az))
    std_delta_el = np.clip(deg2rad(offsetdata['delta_elevation_std']), min_std, np.inf) \
    if 'delta_elevation_std' in offsetdata.dtype.fields and use_stats else np.tile(min_std, len(el))

    params, sigma_params = new_model.fit(az[keep], el[keep],
                                         measured_delta_az[keep],
                                         measured_delta_el[keep],
                                         std_delta_az[keep],
                                         std_delta_el[keep], enabled_params)
    """Determine new residuals from new fit"""
    newmodel_delta_az, newmodel_delta_el = new_model.offset(az, el)
    residual_az = measured_delta_az - newmodel_delta_az
    residual_el = measured_delta_el - newmodel_delta_el
    residual_xel = residual_az * np.cos(el)

    # Show actual scans
    h5file.select(scans='scan')
    fig1 = plt.figure(2, figsize=(8, 8))
    plt.scatter(h5file.ra,
                h5file.dec,
                s=np.mean(np.abs(h5file.vis[:, 2200:2400, 1]), axis=1))
    plt.title('Raster scan over target')
    plt.ylabel('Dec [deg]')
    plt.xlabel('RA [deg]')

    # Try to fit beam
    for c in h5file.compscans():
        if dataset is not None:
            dataset = dataset.select(flagkeep='~nd_on')
        dataset.average()
        dataset.fit_beams_and_baselines()

    # Generate output report
    with PdfPages(opts.outfilebase + '_' + opts.baseline + '.pdf') as pdf:
        out = reduced_data[0]
        offset_az, offset_el = "%.1f" % (
            60. * out['delta_azimuth'], ), "%.1f" % (60. *
                                                     out['delta_elevation'], )
        beam_width, beam_height = "%.1f" % (
            60. * out['beam_width_I'], ), "%.2f" % (out['beam_height_I'], )
        baseline_height = "%.1f" % (out['baseline_height_I'], )
        pagetext = "\nCheck Point Source Scan"
        pagetext += "\n\nDescription: %s\nName: %s\nExperiment ID: %s" % (
            h5file.description, h5file.name, h5file.experiment_id)
        pagetext = pagetext + "\n"
        pagetext += "\n\nTest Setup:"
        pagetext += "\nRaster Scan across bright source"
        pagetext += "\n\nAntenna %(antenna)s" % out
        pagetext += "\n------------"
        pagetext += ("\nTarget = '%(target)s', azel=(%(azimuth).1f, %(elevation).1f) deg, " % out) +\
                    ("offset=(%s, %s) arcmin" % (offset_az, offset_el))
        pagetext += ("\nBeam height = %s %s") % (beam_height, out['data_unit'])
        pagetext += ("\nBeamwidth = %s' (expected %.1f')") % (
            beam_width, 60. * out['beam_expected_width_I'])
        pagetext += ("\nHH gain = %.3f Jy/%s") % (
            out['flux'] / out['beam_height_HH'], out['data_unit'])
        pagetext += ("\nVV gain = %.3f Jy/%s") % (
            out['flux'] / out['beam_height_VV'], out['data_unit'])
        pagetext += ("\nBaseline height = %s %s") % (baseline_height,
                                                     out['data_unit'])
        pagetext = pagetext + "\n"
        pagetext += ("\nCurrent model AzEl=(%.3f, %.3f) deg" %
                     (model_delta_az[0], model_delta_el[0]))
        pagetext += ("\nMeasured coordinates using rough fit")
        pagetext += ("\nMeasured AzEl=(%.3f, %.3f) deg" %
                     (measured_delta_az[0], measured_delta_el[0]))
        pagetext = pagetext + "\n"
        pagetext += ("\nDetermine residuals from current pointing model")
        residual_az = measured_delta_az - model_delta_az
        residual_el = measured_delta_el - model_delta_el
        pagetext += ("\nResidual AzEl=(%.3f, %.3f) deg" %
                     (residual_az[0], residual_el[0]))
        if dataset.compscans[0].beam is not None:
            if not dataset.compscans[0].beam.is_valid:
                pagetext += ("\nPossible bad fit!")
        if (residual_az[0] < 1.) and (residual_el[0] < 1.):
            pagetext += ("\nResiduals within L-band beam")
        else:
            pagetext += ("\nMaximum residual, %.2f, larger than L-band beam" %
                         (max(residual_az[0], residual_el[0])))
        pagetext = pagetext + "\n"
        pagetext += ("\nFitted parameters \n%s" % str(params[:5]))

        plt.figure(None, figsize=(16, 8))
        plt.axes(frame_on=False)
        plt.xticks([])
        plt.yticks([])
        plt.title("AR1 Report %s" % opts.outfilebase,
                  fontsize=14,
                  fontweight="bold")
        plt.text(0, 0, pagetext, fontsize=12)
        pdf.savefig()
        plt.close()
        pdf.savefig(fig)
        pdf.savefig(fig1)

        d = pdf.infodict()
        import datetime
        d['Title'] = h5file.description
        d['Author'] = 'AR1'
        d['Subject'] = 'AR1 check point source scan'
        d['CreationDate'] = datetime.datetime(2015, 8, 13)
        d['ModDate'] = datetime.datetime.today()
Example 15
gain_hh = np.array(())
gain_vv = np.array(())
timestamps = np.array(())
filename = args[0]
nice_filename = filename.split('/')[-1] + '_' + opts.ant + '_gain_stability'
pp = PdfPages(nice_filename + '.pdf')

for filename in args:
    h5 = katdal.open(filename)
    if opts.ant == '':
        ant = h5.ants[0].name
    else:
        ant = opts.ant
    #h5.select(ants=ant)
    d = scape.DataSet(filename, baseline="%s,%s" % (ant, ant))
    d = d.select(freqkeep=range(start_freq_channel, end_freq_channel + 1))
    d = remove_rfi(d, width=21, sigma=5)  # RFI flagging
    #Leave the d dataset unchanged after this so that it can be examined interactively if necessary
    antenna = d.antenna
    d_uncal = d.select(copy=True)
    d_uncal.average()
    d_cal = d.select(copy=True)
    print("do selects")
    #extract timestamps from data
    timestampfile = np.hstack([scan.timestamps for scan in d_cal.scans])
    #get a user-friendly time axis that will plot in the same way as plot_xyz
    time = scape.extract_scan_data(d_cal.scans, 'time')
    tmin = np.min(np.hstack(time.data))
    tmax = np.max(np.hstack(time.data))
    #Get the gain from the noise diodes
Example 16
                  help="Baseline to calibrate (e.g. 'A1A2'), default is first interferometric baseline in file")
parser.add_option('-p', '--pol', metavar='POL', default='HH',
                  help="Polarisation term to use ('HH' or 'VV'), default is %default")
parser.add_option('-s', '--max-sigma', type='float', default=0.05,
                  help="Threshold on std deviation of normalised group delay, default is %default")
(opts, args) = parser.parse_args()

if len(args) < 1:
    print('Please specify the data file to reduce')
    sys.exit(1)

# Load data sets
datasets, scans = [], []
for filename in args:
    print('Loading baseline', opts.baseline, 'from data file', filename)
    d = scape.DataSet(filename, baseline=opts.baseline)
    # Discard 'slew' scans and channels outside the Fringe Finder band
    d = d.select(labelkeep='scan', freqkeep=range(100, 420), copy=True)
    channel_freqs, channel_bw = d.freqs, d.bandwidths[0]
    antenna, antenna2 = d.antenna, d.antenna2
    datasets.append(d)
    scans.extend(d.scans)
time_origin = np.min([scan.timestamps.min() for scan in scans])
# Since the phase slope is sampled, the derived delay exhibits aliasing with a period of 1 / channel_bandwidth
# The maximum delay that can be reliably represented is therefore +- 0.5 / channel_bandwidth
max_delay = 1e-6 / channel_bw / 2
# Maximum standard deviation of delay occurs when delay samples are uniformly distributed between +- max_delay
# In this case, delay is completely random / unknown, and its estimate cannot be trusted
# The division by sqrt(N-1) converts the per-channel standard deviation to a per-snapshot deviation
max_sigma_delay = np.abs(2 * max_delay) / np.sqrt(12) / np.sqrt(len(channel_freqs) - 1)
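
As a worked example (hedged, using typical Fringe Finder numbers: a per-channel bandwidth of 0.390625 MHz and the 320 channels kept above):

import numpy as np

channel_bw = 0.390625                               # MHz per channel (assumed)
n_chan = 320                                        # channels 100-419 kept above
max_delay = 1e-6 / channel_bw / 2                   # ~1.28e-6 s
max_sigma_delay = 2 * max_delay / np.sqrt(12) / np.sqrt(n_chan - 1)
print(max_delay, max_sigma_delay)                   # ~1.28e-06 s and ~4.1e-08 s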