Example #1
    def is_daytime(self, key, csd):
        """Check whether a source transit on the given CSD overlaps daytime.

        Returns 0 if the transit window falls entirely at night, 1 if it
        overlaps daytime, and 2 if the solar transit also occurs within
        the window.
        """

        src = self[key] if isinstance(key, str) else key

        is_daytime = 0

        src_ra, src_dec = ephemeris.object_coords(src.body, date=ephemeris.csd_to_unix(csd), deg=True)

        transit_start = ephemeris.csd_to_unix(csd + (src_ra - src.window) / 360.0)
        transit_end = ephemeris.csd_to_unix(csd + (src_ra + src.window) / 360.0)

        solar_rise = ephemeris.solar_rising(transit_start - 24.0*3600.0, end_time=transit_end)

        for rr in solar_rise:

            ss = ephemeris.solar_setting(rr)[0]

            rrex = rr + self._extend_night
            ssex = ss - self._extend_night

            if ((transit_start <= ssex) and (rrex <= transit_end)):

                is_daytime += 1

                tt = ephemeris.solar_transit(rr)[0]
                if (transit_start <= tt) and (tt <= transit_end):
                    is_daytime += 1

                break

        return is_daytime
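
A minimal sketch of the transit-window arithmetic above, with illustrative values (the RA, half-window, and CSD below are made up; it assumes ch_util provides the ephemeris module used in the example):

from ch_util import ephemeris  # assumed home of csd_to_unix

src_ra, window = 350.85, 5.0  # illustrative source RA and half-window, degrees
csd = 1800                    # illustrative CHIME sidereal day number

# A source transits when the local sidereal angle equals its RA, i.e. at a
# fractional day of src_ra / 360; the window widens this to an interval.
transit_start = ephemeris.csd_to_unix(csd + (src_ra - window) / 360.0)
transit_end = ephemeris.csd_to_unix(csd + (src_ra + window) / 360.0)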
Example #2
def daytime_flag(time):
    """Return a boolean array that is True for samples between sunrise and sunset."""

    flag = np.zeros(time.size, dtype=bool)
    rise = ephemeris.solar_rising(time[0] - 24.0 * 3600.0, end_time=time[-1])
    for rr in rise:
        ss = ephemeris.solar_setting(rr)[0]
        flag |= ((time >= rr) & (time <= ss))

    return flag
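
For reference, a usage sketch, assuming ch_util is installed and daytime_flag is in scope (the start time below is arbitrary):

import numpy as np
from ch_util import ephemeris  # assumed import behind the solar_rising/setting calls

# One day of timestamps at 10 s cadence from an arbitrary Unix start time
time = np.arange(1.5e9, 1.5e9 + 86400.0, 10.0)
flag = daytime_flag(time)
print("daytime fraction: %0.2f" % flag.mean())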
Example #3
def main(config_file=None, logging_params=DEFAULT_LOGGING):

    # Setup logging
    log.setup_logging(logging_params)
    mlog = log.get_logger(__name__)

    # Set config
    config = DEFAULTS.deepcopy()
    if config_file is not None:
        config.merge(NameSpace(load_yaml_config(config_file)))

    # Set niceness
    current_niceness = os.nice(0)
    os.nice(config.niceness - current_niceness)
    mlog.info('Changing process niceness from %d to %d.  Confirm:  %d' %
              (current_niceness, config.niceness, os.nice(0)))

    # Find acquisition files
    acq_files = sorted(glob(os.path.join(config.data_dir, config.acq, "*.h5")))
    nfiles = len(acq_files)

    # Determine time range of each file
    findex = []
    tindex = []
    for ii, filename in enumerate(acq_files):
        subdata = andata.CorrData.from_acq_h5(filename, datasets=())

        findex += [ii] * subdata.ntime
        tindex += range(subdata.ntime)

    findex = np.array(findex)
    tindex = np.array(tindex)

    # Determine transits within these files
    transits = []

    data = andata.CorrData.from_acq_h5(acq_files, datasets=())

    solar_rise = ephemeris.solar_rising(data.time[0] - 24.0 * 3600.0,
                                        end_time=data.time[-1])

    for rr in solar_rise:

        ss = ephemeris.solar_setting(rr)[0]

        solar_flag = np.flatnonzero((data.time >= rr) & (data.time <= ss))

        if solar_flag.size > 0:

            solar_flag = solar_flag[::config.downsample]

            tval = data.time[solar_flag]

            this_findex = findex[solar_flag]
            this_tindex = tindex[solar_flag]

            file_list, tindices = [], []

            for ii in range(nfiles):

                this_file = np.flatnonzero(this_findex == ii)

                if this_file.size > 0:

                    file_list.append(acq_files[ii])
                    tindices.append(this_tindex[this_file])

            date = ephemeris.unix_to_datetime(rr).strftime('%Y%m%dT%H%M%SZ')
            transits.append((date, tval, file_list, tindices))

    # Specify some parameters for algorithm
    N = 2048  # total number of correlator inputs

    noffset = len(config.offsets)

    if config.sep_pol:
        rank = 1
        cross_pol = False
        pol = np.array(['S', 'E'])
        pol_s = np.array(
            [rr + 256 * xx for xx in range(0, 8, 2) for rr in range(256)])
        pol_e = np.array(
            [rr + 256 * xx for xx in range(1, 8, 2) for rr in range(256)])
        prod_ss = []
        prod_ee = []
    else:
        rank = 8
        cross_pol = config.cross_pol
        pol = np.array(['all'])

    npol = pol.size

    # Create file prefix and suffix
    prefix = []

    prefix.append("gain_solutions")

    if config.output_prefix is not None:
        prefix.append(config.output_prefix)

    prefix = '_'.join(prefix)

    suffix = []

    suffix.append("pol_%s" % '_'.join(pol))

    suffix.append("niter_%d" % config.niter)

    if cross_pol:
        suffix.append("zerocross")
    else:
        suffix.append("keepcross")

    if config.normalize:
        suffix.append("normed")
    else:
        suffix.append("notnormed")

    suffix = '_'.join(suffix)

    # Loop over solar transits
    for date, timestamps, files, time_indices in transits:

        nfiles = len(files)

        mlog.info("%s (%d files) " % (date, nfiles))

        output_file = os.path.join(
            config.output_dir, "%s_SUN_%s_%s.pickle" % (prefix, date, suffix))

        mlog.info("Saving to:  %s" % output_file)

        # Get info about this set of files
        data = andata.CorrData.from_acq_h5(files, datasets=['flags/inputs'])

        prod = data.prod

        coord = sun_coord(timestamps, deg=True)

        fstart = config.freq_start if config.freq_start is not None else 0
        fstop = config.freq_stop if config.freq_stop is not None else data.freq.size
        freq_index = range(fstart, fstop)

        freq = data.freq[freq_index]

        ntime = timestamps.size
        nfreq = freq.size

        # Determine bad inputs
        if config.bad_input_file is None or not os.path.isfile(
                config.bad_input_file):
            bad_input = np.flatnonzero(
                ~np.all(data.flags['inputs'][:], axis=-1))
        else:
            with open(config.bad_input_file, 'rb') as handler:
                bad_input = pickle.load(handler)

        mlog.info("%d inputs flagged as bad." % bad_input.size)
        bad_prod = np.array([
            ii for ii, pp in enumerate(prod)
            if (pp[0] in bad_input) or (pp[1] in bad_input)
        ])

        # Create arrays to hold the results
        ores = {}
        ores['date'] = date
        ores['coord'] = coord
        ores['time'] = timestamps
        ores['freq'] = freq
        ores['offsets'] = config.offsets
        ores['pol'] = pol

        ores['evalue'] = np.zeros((noffset, nfreq, ntime, N), dtype=np.float32)
        ores['resp'] = np.zeros((noffset, nfreq, ntime, N, config.neigen),
                                dtype=np.complex64)
        ores['resp_err'] = np.zeros((noffset, nfreq, ntime, N, config.neigen),
                                    dtype=np.float32)

        # Loop over frequencies
        for ff, find in enumerate(freq_index):

            mlog.info("Freq %d of %d.  %0.2f MHz." % (ff + 1, nfreq, freq[ff]))

            cnt = 0

            # Loop over files
            for ii, (filename, tind) in enumerate(zip(files, time_indices)):

                ntind = len(tind)
                mlog.info("Processing file %s (%d time samples)" %
                          (filename, ntind))

                # Loop over times
                for tt in tind:

                    t0 = time.time()

                    mlog.info("Time %d of %d.  %d index of current file." %
                              (cnt + 1, ntime, tt))

                    # Load visibilities
                    with h5py.File(filename, 'r') as hf:

                        vis = hf['vis'][find, :, tt]

                    # Set bad products equal to zero
                    vis[bad_prod] = 0.0

                    # Different code if we are separating polarisations
                    if config.sep_pol:

                        if len(prod_ss) == 0:

                            for pind, pp in enumerate(prod):
                                if (pp[0] in pol_s) and (pp[1] in pol_s):
                                    prod_ss.append(pind)

                                elif (pp[0] in pol_e) and (pp[1] in pol_e):
                                    prod_ee.append(pind)

                            prod_ss = np.array(prod_ss)
                            prod_ee = np.array(prod_ee)

                            mlog.info("Product sizes: %d, %d" %
                                      (prod_ss.size, prod_ee.size))

                        # Loop over polarisations
                        for pp, (input_pol,
                                 prod_pol) in enumerate([(pol_s, prod_ss),
                                                         (pol_e, prod_ee)]):

                            visp = vis[prod_pol]

                            mlog.info("pol %s, visibility size:  %d" %
                                      (pol[pp], visp.size))

                            # Loop over offsets
                            for oo, off in enumerate(config.offsets):

                                mlog.info(
                                    "pol %s, rank %d, niter %d, offset %d, cross_pol %s, neigen %d"
                                    % (pol[pp], rank, config.niter, off,
                                       cross_pol, config.neigen))

                                ev, rr, rre = solve_gain(
                                    visp,
                                    cutoff=off,
                                    cross_pol=cross_pol,
                                    normalize=config.normalize,
                                    rank=rank,
                                    niter=config.niter,
                                    neigen=config.neigen)

                                ores['evalue'][oo, ff, cnt, input_pol] = ev
                                ores['resp'][oo, ff, cnt, input_pol, :] = rr
                                ores['resp_err'][oo, ff, cnt,
                                                 input_pol, :] = rre

                    else:

                        # Loop over offsets
                        for oo, off in enumerate(config.offsets):

                            mlog.info(
                                "rank %d, niter %d, offset %d, cross_pol %s, neigen %d"
                                % (rank, config.niter, off, cross_pol,
                                   config.neigen))

                            ev, rr, rre = solve_gain(
                                vis,
                                cutoff=off,
                                cross_pol=cross_pol,
                                normalize=config.normalize,
                                rank=rank,
                                niter=config.niter,
                                neigen=config.neigen)

                            ores['evalue'][oo, ff, cnt, :] = ev
                            ores['resp'][oo, ff, cnt, :, :] = rr
                            ores['resp_err'][oo, ff, cnt, :, :] = rre

                    # Increment time counter
                    cnt += 1

                    # Print time elapsed
                    mlog.info("Took %0.1f seconds." % (time.time() - t0, ))

        # Save to pickle file
        with open(output_file, 'wb') as handle:

            pickle.dump(ores, handle)
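
The findex/tindex bookkeeping above is the piece most easily lifted out: it maps a flat sample index across the concatenated acquisition back to (file, within-file index) pairs. A self-contained toy with made-up file lengths:

import numpy as np

# Three files holding 4, 3, and 5 time samples respectively
ntimes = [4, 3, 5]
findex = np.concatenate([np.full(n, ii) for ii, n in enumerate(ntimes)])
tindex = np.concatenate([np.arange(n) for n in ntimes])

# A downsampled selection of flat sample indices...
solar_flag = np.array([1, 5, 9, 11])

# ...maps back to per-file time indices exactly as in the loop over nfiles
for ii in range(len(ntimes)):
    this_file = np.flatnonzero(findex[solar_flag] == ii)
    if this_file.size > 0:
        print("file %d -> time indices %s" % (ii, tindex[solar_flag][this_file]))
# file 0 -> time indices [1]
# file 1 -> time indices [1]
# file 2 -> time indices [2 4]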
Example #4
def main(config_file=None, logging_params=DEFAULT_LOGGING):

    # Setup logging
    log.setup_logging(logging_params)
    mlog = log.get_logger(__name__)

    # Set config
    config = DEFAULTS.deepcopy()
    if config_file is not None:
        config.merge(NameSpace(load_yaml_config(config_file)))

    # Set niceness
    current_niceness = os.nice(0)
    os.nice(config.niceness - current_niceness)
    mlog.info('Changing process niceness from %d to %d.  Confirm:  %d' %
              (current_niceness, config.niceness, os.nice(0)))

    # Find acquisition files
    acq_files = sorted(glob(os.path.join(config.data_dir, config.acq, "*.h5")))
    nfiles = len(acq_files)

    # Determine time range of each file
    findex = []
    tindex = []
    for ii, filename in enumerate(acq_files):
        subdata = andata.CorrData.from_acq_h5(filename, datasets=())

        findex += [ii] * subdata.ntime
        tindex += range(subdata.ntime)

    findex = np.array(findex)
    tindex = np.array(tindex)

    # Determine transits within these files
    transits = []

    data = andata.CorrData.from_acq_h5(acq_files, datasets=())

    solar_rise = ephemeris.solar_rising(data.time[0] - 24.0 * 3600.0,
                                        end_time=data.time[-1])

    for rr in solar_rise:

        ss = ephemeris.solar_setting(rr)[0]

        solar_flag = np.flatnonzero((data.time >= rr) & (data.time <= ss))

        if solar_flag.size > 0:

            solar_flag = solar_flag[::config.downsample]

            tval = data.time[solar_flag]

            this_findex = findex[solar_flag]
            this_tindex = tindex[solar_flag]

            file_list, tindices = [], []

            for ii in range(nfiles):

                this_file = np.flatnonzero(this_findex == ii)

                if this_file.size > 0:

                    file_list.append(acq_files[ii])
                    tindices.append(this_tindex[this_file])

            date = ephemeris.unix_to_datetime(rr).strftime('%Y%m%dT%H%M%SZ')
            transits.append((date, tval, file_list, tindices))

    # Create file prefix and suffix
    prefix = []

    prefix.append("redundant_calibration")

    if config.output_prefix is not None:
        prefix.append(config.output_prefix)

    prefix = '_'.join(prefix)

    suffix = []

    if config.include_auto:
        suffix.append("wauto")
    else:
        suffix.append("noauto")

    if config.include_intracyl:
        suffix.append("wintra")
    else:
        suffix.append("nointra")

    if config.fix_degen:
        suffix.append("fixed_degen")
    else:
        suffix.append("degen")

    suffix = '_'.join(suffix)

    # Loop over solar transits
    for date, timestamps, files, time_indices in transits:

        nfiles = len(files)

        mlog.info("%s (%d files) " % (date, nfiles))

        output_file = os.path.join(config.output_dir,
                                   "%s_SUN_%s_%s.h5" % (prefix, date, suffix))

        mlog.info("Saving to:  %s" % output_file)

        # Get info about this set of files
        data = andata.CorrData.from_acq_h5(files,
                                           datasets=['flags/inputs'],
                                           apply_gain=False,
                                           renormalize=False)

        coord = sun_coord(timestamps, deg=True)

        fstart = config.freq_start if config.freq_start is not None else 0
        fstop = config.freq_stop if config.freq_stop is not None else data.freq.size
        freq_index = range(fstart, fstop)

        freq = data.freq[freq_index]

        ntime = timestamps.size
        nfreq = freq.size

        # Determine bad inputs
        if config.bad_input_file is None or not os.path.isfile(
                config.bad_input_file):
            bad_input = np.flatnonzero(
                ~np.all(data.flags['inputs'][:], axis=-1))
        else:
            with open(config.bad_input_file, 'rb') as handler:
                bad_input = pickle.load(handler)

        mlog.info("%d inputs flagged as bad." % bad_input.size)

        nant = data.ninput

        # Determine polarization product maps
        dbinputs = tools.get_correlator_inputs(
            ephemeris.unix_to_datetime(timestamps[0]), correlator='chime')

        dbinputs = tools.reorder_correlator_inputs(data.input, dbinputs)

        feedpos = tools.get_feed_positions(dbinputs)

        prod = defaultdict(list)
        dist = defaultdict(list)

        for pp, this_prod in enumerate(data.prod):

            aa, bb = this_prod
            inp_aa = dbinputs[aa]
            inp_bb = dbinputs[bb]

            if (aa in bad_input) or (bb in bad_input):
                continue

            if not tools.is_chime(inp_aa) or not tools.is_chime(inp_bb):
                continue

            if not config.include_intracyl and (inp_aa.cyl == inp_bb.cyl):
                continue

            if not config.include_auto and (aa == bb):
                continue

            this_dist = list(feedpos[aa, :] - feedpos[bb, :])

            if tools.is_array_x(inp_aa) and tools.is_array_x(inp_bb):
                key = 'XX'

            elif tools.is_array_y(inp_aa) and tools.is_array_y(inp_bb):
                key = 'YY'

            elif not config.include_crosspol:
                continue

            elif tools.is_array_x(inp_aa) and tools.is_array_y(inp_bb):
                key = 'XY'

            elif tools.is_array_y(inp_aa) and tools.is_array_x(inp_bb):
                key = 'YX'

            else:
                raise RuntimeError("CHIME feeds not polarized.")

            prod[key].append(pp)
            dist[key].append(this_dist)

        polstr = sorted(prod.keys())
        polcnt = 0
        pol_sky_id = []
        bmap = {}
        for key in polstr:
            prod[key] = np.array(prod[key])
            dist[key] = np.array(dist[key])

            p_bmap, p_ubaseline = generate_mapping(dist[key])
            nubase = p_ubaseline.shape[0]

            bmap[key] = p_bmap + polcnt

            if polcnt > 0:

                ubaseline = np.concatenate((ubaseline, p_ubaseline), axis=0)
                pol_sky_id += [key] * nubase

            else:

                ubaseline = p_ubaseline.copy()
                pol_sky_id = [key] * nubase

            polcnt += nubase
            mlog.info("%d unique baselines" % polcnt)

        nsky = ubaseline.shape[0]

        # Create arrays to hold the results
        ores = {}
        ores['freq'] = freq
        ores['input'] = data.input
        ores['time'] = timestamps
        ores['coord'] = coord
        ores['pol'] = np.array(pol_sky_id)
        ores['baseline'] = ubaseline

        # Create array to hold gain results
        ores['gain'] = np.zeros((nfreq, nant, ntime), dtype=np.complex128)
        ores['sky'] = np.zeros((nfreq, nsky, ntime), dtype=np.complex128)
        ores['err'] = np.zeros((nfreq, nant + nsky, ntime, 2), dtype=np.float64)

        # Loop over polarisations
        for key in polstr:

            reverse_map = bmap[key]
            p_prod = prod[key]

            isort = np.argsort(reverse_map)

            p_prod = p_prod[isort]

            p_ant1 = data.prod['input_a'][p_prod]
            p_ant2 = data.prod['input_b'][p_prod]
            p_vismap = reverse_map[isort]

            # Find the redundant groups
            tmp = np.where(np.diff(p_vismap) != 0)[0]
            edges = np.zeros(2 + tmp.size, dtype='int')
            edges[0] = 0
            edges[1:-1] = tmp + 1
            edges[-1] = p_vismap.size

            kept_base = np.unique(p_vismap)

            # Determine the unique antennas
            kept_ants = np.unique(np.concatenate([p_ant1, p_ant2]))
            antmap = np.zeros(kept_ants.max() + 1, dtype='int') - 1

            p_nant = kept_ants.size
            for i in range(p_nant):
                antmap[kept_ants[i]] = i

            p_ant1_use = antmap[p_ant1].copy()
            p_ant2_use = antmap[p_ant2].copy()

            # Create matrix
            p_nvis = p_prod.size
            nred = edges.size - 1

            npar = p_nant + nred

            A = np.zeros((p_nvis, npar), dtype=np.float32)
            B = np.zeros((p_nvis, npar), dtype=np.float32)

            for kk in range(p_nant):

                flag_ant1 = p_ant1_use == kk
                if np.any(flag_ant1):
                    A[flag_ant1, kk] = 1.0
                    B[flag_ant1, kk] = 1.0

                flag_ant2 = p_ant2_use == kk
                if np.any(flag_ant2):
                    A[flag_ant2, kk] = 1.0
                    B[flag_ant2, kk] = -1.0

            for ee in range(nred):

                A[edges[ee]:edges[ee + 1], p_nant + ee] = 1.0

                B[edges[ee]:edges[ee + 1], p_nant + ee] = 1.0

            # Add equations to break degeneracy
            if config.fix_degen:
                A = np.concatenate((A, np.zeros((1, npar), dtype=np.float32)))
                A[-1, 0:p_nant] = 1.0

                B = np.concatenate((B, np.zeros((3, npar), dtype=np.float32)))
                B[-3, 0:p_nant] = 1.0
                B[-2, 0:p_nant] = feedpos[kept_ants, 0]
                B[-1, 0:p_nant] = feedpos[kept_ants, 1]

            # Loop over frequencies
            for ff, find in enumerate(freq_index):

                mlog.info("Freq %d of %d.  %0.2f MHz." %
                          (ff + 1, nfreq, freq[ff]))

                cnt = 0

                # Loop over files
                for ii, (filename, tind) in enumerate(zip(files,
                                                          time_indices)):

                    ntind = len(tind)
                    mlog.info("Processing file %s (%d time samples)" %
                              (filename, ntind))

                    # Compute noise weight
                    with h5py.File(filename, 'r') as hf:
                        wnoise = np.median(hf['flags/vis_weight'][find, :, :],
                                           axis=-1)

                    # Loop over times
                    for tt in tind:

                        t0 = time.time()

                        mlog.info("Time %d of %d.  %d index of current file." %
                                  (cnt + 1, ntime, tt))

                        # Load visibilities
                        with h5py.File(filename, 'r') as hf:

                            snap = hf['vis'][find, :, tt]
                            wsnap = wnoise * (
                                (hf['flags/vis_weight'][find, :, tt] > 0.0) &
                                (np.abs(snap) > 0.0)).astype(np.float32)

                        # Extract relevant products for this polarization
                        snap = snap[p_prod]
                        wsnap = wsnap[p_prod]

                        # Turn into amplitude and phase, avoiding NaN
                        mask = (wsnap > 0.0)

                        amp = np.where(mask, np.log(np.abs(snap)), 0.0)
                        phi = np.where(mask, np.angle(snap), 0.0)

                        # Deal with phase wrapping: re-centre each redundant
                        # group on its median phase
                        for aa, bb in zip(edges[:-1], edges[1:]):
                            med = np.sort(phi[aa:bb])[int((bb - aa) / 2)]
                            dphi = phi[aa:bb] - med
                            phi[aa:bb] += (2.0 * np.pi * (dphi < -np.pi) -
                                           2.0 * np.pi * (dphi > np.pi))

                        # Add elements to fix degeneracy
                        if config.fix_degen:
                            amp = np.concatenate((amp, np.zeros(1)))
                            phi = np.concatenate((phi, np.zeros(3)))

                        # Determine noise matrix
                        inv_diagC = wsnap * np.abs(snap)**2 * 2.0

                        if config.fix_degen:
                            inv_diagC = np.concatenate((inv_diagC, np.ones(1)))

                        # Amplitude estimate and covariance
                        amp_param_cov = np.linalg.inv(
                            np.dot(A.T, inv_diagC[:, np.newaxis] * A))
                        amp_param = np.dot(amp_param_cov,
                                           np.dot(A.T, inv_diagC * amp))

                        # Phase estimate and covariance
                        if config.fix_degen:
                            inv_diagC = np.concatenate((inv_diagC, np.ones(2)))

                        phi_param_cov = np.linalg.inv(
                            np.dot(B.T, inv_diagC[:, np.newaxis] * B))
                        phi_param = np.dot(phi_param_cov,
                                           np.dot(B.T, inv_diagC * phi))

                        # Save to large array
                        ores['gain'][ff, kept_ants,
                                     cnt] = np.exp(amp_param[0:p_nant] +
                                                   1.0J * phi_param[0:p_nant])

                        ores['sky'][ff, kept_base,
                                    cnt] = np.exp(amp_param[p_nant:] +
                                                  1.0J * phi_param[p_nant:])

                        ores['err'][ff, kept_ants, cnt,
                                    0] = np.diag(amp_param_cov[0:p_nant,
                                                               0:p_nant])
                        ores['err'][ff, nant + kept_base, cnt,
                                    0] = np.diag(amp_param_cov[p_nant:,
                                                               p_nant:])

                        ores['err'][ff, kept_ants, cnt,
                                    1] = np.diag(phi_param_cov[0:p_nant,
                                                               0:p_nant])
                        ores['err'][ff, nant + kept_base, cnt,
                                    1] = np.diag(phi_param_cov[p_nant:,
                                                               p_nant:])

                        # Increment time counter
                        cnt += 1

                        # Print time elapsed
                        mlog.info("Took %0.1f seconds." % (time.time() - t0, ))

        # Save to HDF5 file
        with h5py.File(output_file, 'w') as handler:

            handler.attrs['date'] = date

            for key, val in ores.items():
                handler.create_dataset(key, data=val)
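
The heart of the per-timestamp solve is an ordinary weighted least-squares step on the log-linearized measurement equation log|V_ij| = g_i + g_j + s_k, with one sky parameter per redundant-baseline group and an extra row (the fix_degen constraint) pinning the mean gain. A self-contained toy with four antennas on a line, all numbers made up, reproduces the amp_param / amp_param_cov arithmetic above:

import numpy as np

# Columns: g0 g1 g2 g3 | s0 s1 s2.  Baselines (0,1) (1,2) (2,3) share
# redundant group 0, (0,2) (1,3) group 1, (0,3) group 2.
A = np.array([
    [1, 1, 0, 0, 1, 0, 0],
    [0, 1, 1, 0, 1, 0, 0],
    [0, 0, 1, 1, 1, 0, 0],
    [1, 0, 1, 0, 0, 1, 0],
    [0, 1, 0, 1, 0, 1, 0],
    [1, 0, 0, 1, 0, 0, 1],
    [1, 1, 1, 1, 0, 0, 0],  # degeneracy-breaking row: gains sum to zero
], dtype=np.float32)

g_true = np.array([0.10, -0.05, 0.02, -0.07])  # sums to zero by construction
s_true = np.array([1.0, 0.8, 0.5])

amp = np.dot(A[:-1], np.concatenate([g_true, s_true]))  # noiseless log-amplitudes
amp = np.concatenate([amp, [0.0]])  # target for the constraint row

inv_diagC = np.ones(A.shape[0])  # uniform inverse noise variances
amp_param_cov = np.linalg.inv(np.dot(A.T, inv_diagC[:, np.newaxis] * A))
amp_param = np.dot(amp_param_cov, np.dot(A.T, inv_diagC * amp))

print(np.allclose(amp_param, np.concatenate([g_true, s_true])))  # True

The phase solve is the same arithmetic with B, a sign flip on the second antenna, and three constraint rows (mean phase plus the two feed-position gradients).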
Example #5
def _cut_daytime(visi, tmstp):
    """Return visibilities restricted to night time.

    Returns a single array if only one night is present, and a list of
    arrays if multiple nights are present. Returns (None, None) if all
    of the data fall in daytime.
    """

    tstp = tmstp[1] - tmstp[0]  # Get time step

    risings = ch_eph.solar_rising(tmstp[0], tmstp[-1])
    settings = ch_eph.solar_setting(tmstp[0], tmstp[-1])

    if len(risings) == 0 and len(settings) == 0:
        next_rising = ch_eph.solar_rising(tmstp[-1])
        next_setting = ch_eph.solar_setting(tmstp[-1])

        if next_setting < next_rising:
            # All data is in daylight time
            cut_vis = None
            cut_tmstp = None
        else:
            # All data is in night time
            cut_vis = np.copy(visi)
            cut_tmstp = tmstp

    elif len(settings) == 0:  # Only one rising:
        sr = risings[0]
        # Find time bin index closest to solar rising:
        idx = np.argmin(np.abs(tmstp - sr))

        # Determine time limits to cut:
        # (20 min after setting and before rising, if within range)
        cut_low = max(0, idx - int(20.0 * 60.0 / tstp))  # lower limit of time cut

        # Cut daylight times:
        cut_vis = np.copy(visi[:, :cut_low])
        cut_tmstp = tmstp[:cut_low]

    elif len(risings) == 0:  # Only one setting:
        ss = settings[0]
        # Find time bin index closest to solar setting:
        idx = np.argmin(np.abs(tmstp - ss))

        # Determine time limits to cut:
        # (20 min after setting and before rising, if within range)
        cut_up = min(len(tmstp), idx + int(20.0 * 60.0 / tstp))  # upper limit of time cut

        # Cut daylight times:
        cut_vis = np.copy(visi[:, cut_up:])
        cut_tmstp = tmstp[cut_up:]

    else:
        cut_pairs = []
        if risings[0] > settings[0]:
            cut_pairs.append([tmstp[0], settings[0]])
            for ii in range(1, len(settings)):
                cut_pairs.append([risings[ii - 1], settings[ii]])
            if len(risings) == len(settings):
                cut_pairs.append([risings[-1], tmstp[-1]])
        else:
            for ii in range(len(settings)):
                cut_pairs.append([risings[ii], settings[ii]])
            if len(risings) > len(settings):
                cut_pairs.append([risings[-1], tmstp[-1]])

        cut_tmstp = []
        cut_vis = []
        tmstp_remain = tmstp
        vis_remain = np.copy(visi)

        for cp in cut_pairs:
            # Find time bin index closest to cuts:
            idx_low = np.argmin(np.abs(tmstp_remain - cp[0]))
            idx_up = np.argmin(np.abs(tmstp_remain - cp[1]))

            # Determine time limits to cut:
            # (20 min after setting and before rising, if within range)
            cut_low = max(0, idx_low - int(20.0 * 60.0 / tstp))  # lower limit of time cut
            cut_up = min(len(tmstp_remain), idx_up + int(20.0 * 60.0 / tstp))  # upper limit of time cut

            if len(tmstp_remain[:cut_low]) > 0:
                # Append times before rising to cut_tmstp
                cut_vis.append(vis_remain[:, :cut_low])
                cut_tmstp.append(tmstp_remain[:cut_low])

            # Use times after setting for further cuts
            vis_remain = vis_remain[:, cut_up:]
            tmstp_remain = tmstp_remain[cut_up:]
        if len(tmstp_remain) > 0:
            # If there is a bit of night data in the end, append it:
            cut_tmstp.append(tmstp_remain)
            cut_vis.append(vis_remain)

    return cut_vis, cut_tmstp
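
A hypothetical call, assuming ch_eph is ch_util.ephemeris and visi is an (nprod, ntime) array sampled at the Unix times in tmstp:

cut_vis, cut_tmstp = _cut_daytime(visi, tmstp)

if cut_vis is None:
    print("all samples fall in daytime")
elif isinstance(cut_vis, list):
    print("%d separate nights survive the cut" % len(cut_vis))
else:
    print("%d night-time samples survive the cut" % len(cut_tmstp))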
Example #6
    def process(self, sstream, inputmap, inputmask):
        """Determine calibration from a timestream.

        Parameters
        ----------
        sstream : andata.CorrData or containers.SiderealStream
            Timestream collected during the day.
        inputmap : list of :class:`CorrInput`
            A list describing the inputs as they are in the file.
        inputmask : containers.CorrInputMask
            Mask indicating which correlator inputs to use in the
            eigenvalue decomposition.

        Returns
        -------
        suntrans : containers.SunTransit
            Response to the sun.
        """

        from operator import itemgetter
        from itertools import groupby
        from .calibration import _extract_diagonal, solve_gain

        # Ensure that we are distributed over frequency
        sstream.redistribute("freq")

        # Find the local frequencies
        nfreq = sstream.vis.local_shape[0]
        sfreq = sstream.vis.local_offset[0]
        efreq = sfreq + nfreq

        # Get the local frequency axis
        freq = sstream.freq["centre"][sfreq:efreq]
        wv = 3e2 / freq  # wavelength in metres (freq is in MHz)

        # Get times
        if hasattr(sstream, "time"):
            time = sstream.time
            ra = ephemeris.transit_RA(time)
        else:
            ra = sstream.index_map["ra"][:]
            csd = (sstream.attrs["lsd"]
                   if "lsd" in sstream.attrs else sstream.attrs["csd"])
            csd = csd + ra / 360.0
            time = ephemeris.csd_to_unix(csd)

        # Only examine data between sunrise and sunset
        time_flag = np.zeros(len(time), dtype=bool)
        rise = ephemeris.solar_rising(time[0] - 24.0 * 3600.0,
                                      end_time=time[-1])
        for rr in rise:
            ss = ephemeris.solar_setting(rr)[0]
            time_flag |= (time >= rr) & (time <= ss)

        if not np.any(time_flag):
            self.log.debug(
                "No daytime data between %s and %s.",
                ephemeris.unix_to_datetime(time[0]).strftime("%b %d %H:%M"),
                ephemeris.unix_to_datetime(time[-1]).strftime("%b %d %H:%M"),
            )
            return None

        # Convert boolean flag to slices
        time_index = np.where(time_flag)[0]

        time_slice = []
        ntime = 0
        for key, group in groupby(
                enumerate(time_index),
                lambda index_item: index_item[0] - index_item[1]):
            group = list(map(itemgetter(1), group))
            ngroup = len(group)
            time_slice.append(
                (slice(group[0], group[-1] + 1), slice(ntime, ntime + ngroup)))
            ntime += ngroup

        time = np.concatenate([time[slc[0]] for slc in time_slice])
        ra = np.concatenate([ra[slc[0]] for slc in time_slice])

        # Get ra, dec, alt of sun
        sun_pos = np.array([
            ra_dec_of(ephemeris.skyfield_wrapper.ephemeris["sun"], t)
            for t in time
        ])

        # Convert from ra to hour angle
        sun_pos[:, 0] = np.radians(ra) - sun_pos[:, 0]

        # Determine good inputs
        nfeed = len(inputmap)
        good_input = np.arange(nfeed, dtype=int)[inputmask.datasets["input_mask"][:]]

        # Use input map to figure out which are the X and Y feeds
        xfeeds = np.array([
            idx for idx, inp in enumerate(inputmap)
            if tools.is_chime_x(inp) and (idx in good_input)
        ])
        yfeeds = np.array([
            idx for idx, inp in enumerate(inputmap)
            if tools.is_chime_y(inp) and (idx in good_input)
        ])

        self.log.debug(
            "Performing sun calibration with %d/%d good feeds (%d xpol, %d ypol).",
            len(good_input),
            nfeed,
            len(xfeeds),
            len(yfeeds),
        )

        # Construct baseline vector for each visibility
        feed_pos = tools.get_feed_positions(inputmap)
        vis_pos = np.array([
            feed_pos[ii] - feed_pos[ij]
            for ii, ij in sstream.index_map["prod"][:]
        ])
        vis_pos = np.where(np.isnan(vis_pos), np.zeros_like(vis_pos), vis_pos)

        u = (vis_pos[np.newaxis, :, 0] / wv[:, np.newaxis])[:, :, np.newaxis]
        v = (vis_pos[np.newaxis, :, 1] / wv[:, np.newaxis])[:, :, np.newaxis]

        # Create container to hold results of fit
        suntrans = containers.SunTransit(time=time,
                                         pol_x=xfeeds,
                                         pol_y=yfeeds,
                                         axes_from=sstream)
        for key in suntrans.datasets.keys():
            suntrans.datasets[key][:] = 0.0

        # Set coordinates
        suntrans.coord[:] = sun_pos

        # Loop over time slices
        for slc_in, slc_out in time_slice:

            # Extract visibility slice
            vis_slice = sstream.vis[..., slc_in].copy()

            ha = (sun_pos[slc_out, 0])[np.newaxis, np.newaxis, :]
            dec = (sun_pos[slc_out, 1])[np.newaxis, np.newaxis, :]

            # Extract the diagonal (to be used for weighting)
            norm = (_extract_diagonal(vis_slice, axis=1).real)**0.5
            norm = tools.invert_no_zero(norm)

            # Fringestop
            if self.fringestop:
                vis_slice *= tools.fringestop_phase(
                    ha, np.radians(ephemeris.CHIMELATITUDE), dec, u, v)

            # Solve for the point source response of each set of polarisations
            ev_x, resp_x, err_resp_x = solve_gain(vis_slice,
                                                  feeds=xfeeds,
                                                  norm=norm[:, xfeeds])
            ev_y, resp_y, err_resp_y = solve_gain(vis_slice,
                                                  feeds=yfeeds,
                                                  norm=norm[:, yfeeds])

            # Save to container
            suntrans.evalue_x[..., slc_out] = ev_x
            suntrans.evalue_y[..., slc_out] = ev_y

            suntrans.response[:, xfeeds, slc_out] = resp_x
            suntrans.response[:, yfeeds, slc_out] = resp_y

            suntrans.response_error[:, xfeeds, slc_out] = err_resp_x
            suntrans.response_error[:, yfeeds, slc_out] = err_resp_y

        # If requested, fit a model to the primary beam of the sun transit
        if self.model_fit:

            # Estimate peak RA
            i_transit = np.argmin(np.abs(sun_pos[:, 0]))

            body = ephemeris.skyfield_wrapper.ephemeris["sun"]
            obs = ephemeris._get_chime()
            obs.date = ephemeris.unix_to_ephem_time(time[i_transit])
            body.compute(obs)

            peak_ra = ephemeris.peak_RA(body)
            dra = ra - peak_ra
            dra = np.abs(dra - (dra > np.pi) * 2.0 * np.pi)[np.newaxis,
                                                            np.newaxis, :]

            # Estimate FWHM
            sig_x = cal_utils.guess_fwhm(freq,
                                         pol="X",
                                         dec=body.dec,
                                         sigma=True)[:, np.newaxis, np.newaxis]
            sig_y = cal_utils.guess_fwhm(freq,
                                         pol="Y",
                                         dec=body.dec,
                                         sigma=True)[:, np.newaxis, np.newaxis]

            # Only fit ra values above the specified dynamic range threshold
            fit_flag = np.zeros([nfreq, nfeed, ntime], dtype=bool)
            fit_flag[:, xfeeds, :] = dra < (self.nsig * sig_x)
            fit_flag[:, yfeeds, :] = dra < (self.nsig * sig_y)

            # Fit model for the complex response of each feed to the point source
            param, param_cov = cal_utils.fit_point_source_transit(
                ra,
                suntrans.response[:],
                suntrans.response_error[:],
                flag=fit_flag)

            # Save to container
            suntrans.add_dataset("flag")
            suntrans.flag[:] = fit_flag

            suntrans.add_dataset("parameter")
            suntrans.parameter[:] = param

            suntrans.add_dataset("parameter_cov")
            suntrans.parameter_cov[:] = param_cov

        # Update attributes
        units = "sqrt(" + sstream.vis.attrs.get("units",
                                                "correlator-units") + ")"
        suntrans.response.attrs["units"] = units
        suntrans.response_error.attrs["units"] = units

        suntrans.attrs["source"] = "Sun"

        # Return sun transit
        return suntrans
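
One idiom worth noting from process() above: the groupby trick that converts the monotonically increasing time_index into contiguous slices. Consecutive indices share a constant (position - value) offset, so grouping on that offset splits the flat index list into runs. A standalone sketch with made-up indices:

from itertools import groupby
from operator import itemgetter

time_index = [2, 3, 4, 8, 9, 15]

# (position - value) is constant exactly within a run of consecutive indices
runs = [
    list(map(itemgetter(1), group))
    for _, group in groupby(enumerate(time_index), lambda iv: iv[0] - iv[1])
]
print(runs)  # [[2, 3, 4], [8, 9], [15]]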