Example #1
0
    def generateUVW(self, src='ZEN', update_src=True, conjugate=False, use_stored=False):
        """ Generate UVW coordinates based on timestamps and array geometry

        Updates UVW coordinates to phase to a given source. Uses pyEphem observer
        along with methods in lib.uvw for computations.

        src (str): Source to phase to. Sources are three capital letters:
            ZEN: Zenith (RA will be computed from timestamps)
            CYG: Cygnus A
            CAS: Cassiopeia A
            TAU: Taurus A
            VIR: Virgo A

        use_stored (bool): If True, uses stored UVW coordinates (does not recompute).
                           This is faster than recomputing.
        update_src (bool): Default True, update the SOURCE table.
        conjugate (bool): Conjugate UVW coordinates? Do this if things are flipped in map.

        Raises:
            ValueError: if the computed hour angle or declination exceed 2*pi
                        radians (i.e. they look like degrees, not radians).
        """

        self.pp.h1("Generating UVW coordinates")
        ra_deg, dec_deg, lst_deg, ha_deg = self._compute_lst_ha(src)
        H = np.deg2rad(ha_deg)
        d = np.deg2rad(dec_deg)

        self.pp.pp("LST:        %2.3f deg" % lst_deg)
        self.pp.pp("Source RA:  %2.3f deg" % ra_deg)
        self.pp.pp("Source DEC: %2.3f deg" % dec_deg)
        self.pp.pp("HA:         %2.3f deg" % np.rad2deg(H))

        # Explicit check instead of assert: asserts are stripped under
        # `python -O`, which would silently disable this validation.
        if not (H < 2 * np.pi and d < 2 * np.pi):
            raise ValueError("HA and DEC are too large (may not be in radians).")

        # Recreate list of baselines. Baseline ID 257 encodes (ant 1, ant 1),
        # the first autocorrelation, so its presence means autocorrs are
        # included in the UV data.
        self.pp.h2("Computing UVW coordinates for %s" % src)
        xyz = self.d_array_geometry['STABXYZ']
        if 257 in set(self.d_uv_data["BASELINE"]):
            bl_ids, ant_arr = coords.generateBaselineIds(self.n_ant)
            bl_vecs = coords.computeBaselineVectors(xyz)
        else:
            bl_ids, ant_arr = coords.generateBaselineIds(self.n_ant, autocorrs=False)
            bl_vecs = coords.computeBaselineVectors(xyz, autocorrs=False)

        # Number of integrations = total rows / rows per integration
        n_iters = int(len(self.d_uv_data["BASELINE"]) / len(bl_ids))

        self.pp.h2("Generating timestamps")
        # Julian date/time of the first sample is loop-invariant: compute once
        # and offset by the integration time for each subsequent dump.
        jd, jt = coords.convertToJulianTuple(self.date_obs)
        dd, tt = [], []
        for ii in range(n_iters):
            tdelta = self.t_int * ii / 86400.0  # In days
            dd.append([jd] * len(ant_arr))
            tt.append([jt + tdelta] * len(ant_arr))

        self.d_uv_data["DATE"] = np.array(dd, dtype='float64').ravel()
        self.d_uv_data["TIME"] = np.array(tt, dtype='float64').ravel()

        if use_stored:
            self.pp.h2("Loading stored values")
            self.loadUVW()
        else:
            uvw = coords.computeUVW(bl_vecs, H, d, conjugate=conjugate)

            # Fill with data: the same UVW set is repeated for every integration
            # TODO: update this so that it can lock to zenith or phase to src
            uu, vv, ww = [], [], []
            for ii in range(n_iters):
                uu.append(uvw[:, 0])
                vv.append(uvw[:, 1])
                ww.append(uvw[:, 2])

            self.d_uv_data["UU"] = np.array(uu).ravel()
            self.d_uv_data["VV"] = np.array(vv).ravel()
            self.d_uv_data["WW"] = np.array(ww).ravel()

        if update_src:
            self.pp.h2("Updating SOURCE table")
            self.d_source["SOURCE"] = self.s2arr(src)
            self.d_source["RAEPO"] = self.s2arr(ra_deg)
            self.d_source["DECEPO"] = self.s2arr(dec_deg)
            self.source = src
Example #2
0
    def readDada(self, n_int=None, xmlbase=None, header_dict=None, data_arr=None, inspectOnly=False):
        """ Read a LEDA DADA file.

            n_int (int): number of integrations to read. Defaults to None.
            xmlbase (str): path to FITS-IDI XML schema. Defaults to the
                           bundled config/config.xml next to this module.
            header_dict (dict): psrdada header. Defaults to None. If a dict is passed, then instead of
                                loading data from file, data will be loaded from data_arr
            data_arr (np.ndarray): data array. This should be a preformatted FLUX data array.
            inspectOnly (bool): passed through to dada.DadaReader.

            Raises:
                RuntimeError: if NCHAN / NPOL / NSTATION cannot be read from the
                              dada file header.
            """

        self.pp.h1("Loading DADA data")
        if isinstance(header_dict, dict):
            # Data supplied directly (e.g. from shared memory) — no file read.
            self.pp.h2("Loading from shared memory")
            d = HeaderDataUnit(header_dict, data_arr)
            flux = data_arr
            self.pp.h2("Generating baseline IDs")
            bls, ant_arr = coords.generateBaselineIds(d.n_ant)
            bl_lower = []
            while len(bl_lower) < len(flux):
                bl_lower += bls
        else:
            self.pp.h2("Loading visibility data")
            d = dada.DadaReader(self.filename, n_int, inspectOnly=inspectOnly)
            vis = d.data
            self.dada_header = d.header
            try:
                n_ant = d.n_ant
                n_int = d.n_int
                self.n_ant = n_ant
            except ValueError:
                raise RuntimeError("Cannot load NCHAN / NPOL / NSTATION from dada file")

        if not header_dict:
            self.pp.h2("Converting visibilities to FLUX columns")
            # NOTE(review): remapping is currently disabled for every telescope
            # (both branches set False) — confirm whether non-LEDA data should
            # set do_remap = True here.
            do_remap = False
            if d.header["TELESCOPE"] in ('LEDA', 'LWAOVRO', 'LWA-OVRO', 'LEDAOVRO', 'LEDA512', 'LEDA-OVRO'):
                do_remap = False
            flux = self._vis_matrix_to_flux(vis, remap=do_remap)
            bls, ant_arr = coords.generateBaselineIds(n_ant)
            bl_lower = []
            # Integer division: `/` yields a float in Python 3 and range()
            # would raise TypeError.
            for dd in range(vis.shape[0] // n_int):
                bl_lower += bls

        self.d_uv_data["BASELINE"] = np.array([bl_lower for ii in range(n_int)]).flatten()
        self.d_uv_data["FLUX"] = flux

        self.pp.h1("Generating FITS-IDI schema from XML")
        if xmlbase is None:
            dirname, filename = os.path.split(os.path.abspath(__file__))
            xmlbase = os.path.join(dirname, 'config/config.xml')
        self.xmlData = etree.parse(xmlbase)

        hdu_primary = make_primary(config=self.xmlData)
        tbl_array_geometry = make_array_geometry(config=self.xmlData, num_rows=n_ant)
        tbl_antenna = make_antenna(config=self.xmlData, num_rows=n_ant)
        tbl_frequency = make_frequency(config=self.xmlData, num_rows=1)
        tbl_source = make_source(config=self.xmlData, num_rows=1)

        # Assemble the HDU list (UV data table is populated separately below)
        hdulist = pf.HDUList(
            [hdu_primary,
             tbl_array_geometry,
             tbl_frequency,
             tbl_antenna,
             tbl_source])

        self.fits = hdulist

        self.stokes_vals = [-5, -6, -7, -8]
        self.readFitsidi(from_file=False, load_uv_data=False)

        self.pp.h2("Populating interfits dictionaries")
        # len(bl_lower) * n_int: compute the row count arithmetically rather
        # than building a throwaway list of that length just to measure it.
        self.setDefaults(n_uv_rows=len(bl_lower) * n_int)
        self.obs_code = ''
        self.correlator = d.header["INSTRUMENT"]
        self.instrument = d.header["INSTRUMENT"]
        self.telescope = d.header["TELESCOPE"]

        # Compute the integration time
        tsamp = float(d.header["TSAMP"]) * 1e-6  # Sampling time per channel, in microseconds
        navg = int(d.header["NAVG"])  # Number of averages per integration
        int_tim = tsamp * navg  # Integration time is tsamp * navg
        self.t_int = d.t_int

        # Compute time offset since the observation began and shift DATE-OBS
        self.pp.h2("Computing UTC offsets")
        dt_obj = datetime.strptime(d.header["UTC_START"], "%Y-%m-%d-%H:%M:%S")
        time_offset = d.t_offset  # Time offset since observation began
        dt_obj = dt_obj + timedelta(seconds=time_offset)
        date_obs = dt_obj.strftime("%Y-%m-%dT%H:%M:%S")
        dd_obs = dt_obj.strftime("%Y-%m-%d")

        self.pp.pp("UTC START:   %s" % d.header["UTC_START"])
        self.pp.pp("TIME OFFSET: %s" % timedelta(seconds=time_offset))
        self.pp.pp("NEW START:   %s" % date_obs)

        self.date_obs = date_obs
        self.h_params["NSTOKES"] = 4
        self.h_params["NBAND"] = 1
        self.h_params["NCHAN"] = d.n_chans
        self.h_common["NO_CHAN"] = d.n_chans
        self.h_common["REF_FREQ"] = d.c_freq_mhz * 1e6
        self.h_common["CHAN_BW"] = d.chan_bw_mhz * 1e6
        self.h_common["REF_PIXL"] = d.n_chans / 2 + 1
        self.h_common["RDATE"] = dd_obs  # Ignore time component
        self.h_common["STK_1"] = -5

        self.d_frequency["CH_WIDTH"] = d.chan_bw_mhz * 1e6
        self.d_frequency["TOTAL_BANDWIDTH"] = d.bandwidth_mhz * 1e6
        self.stokes_axis = ['XX', 'YY', 'XY', 'YX']
        self.stokes_vals = [-5, -6, -7, -8]

        self.d_array_geometry["ANNAME"] = ["Stand%03d" % i for i in range(len(self.d_array_geometry["ANNAME"]))]
        self.d_array_geometry["NOSTA"] = [i for i in range(len(self.d_array_geometry["NOSTA"]))]

        self.d_uv_data["INTTIM"] = np.ones_like(self.d_uv_data["INTTIM"]) * d.t_int

        # Recreate list of baselines (with autocorrelations)
        bl_ids, ant_arr = coords.generateBaselineIds(self.n_ant, autocorrs=True)
        n_iters = int(len(self.d_uv_data["BASELINE"]) / len(bl_ids))

        self.pp.h2("Generating timestamps")
        # Julian date/time of the first sample is loop-invariant: compute once
        # and offset by the integration time for each subsequent dump.
        jd, jt = coords.convertToJulianTuple(self.date_obs)
        dd, tt = [], []
        for ii in range(n_iters):
            tdelta = int_tim * ii / 86400.0  # In days
            dd.append([jd] * len(ant_arr))
            tt.append([jt + tdelta] * len(ant_arr))

        self.d_uv_data["DATE"] = np.array(dd, dtype='float64').ravel()
        self.d_uv_data["TIME"] = np.array(tt, dtype='float64').ravel()

        # Load array geometry from file, based on TELESCOP name
        self.loadAntArr()