Example 1
def calc_uvw(phase_centre,
             timestamps,
             antlist,
             ant1,
             ant2,
             ant_descriptions,
             refant_ind=0):
    """
    Calculate uvw coordinates

    Parameters
    ----------
    phase_centre
        katpoint target for phase centre position
    timestamps
        times, array of floats, shape(nrows)
    antlist
        list of antenna names - used for associating antenna descriptions with
        ant1 and ant2 indices, shape(nant)
    ant1, ant2
        array of antenna indices, shape(nrows)
    ant_descriptions
        description strings for the antennas, same order as antlist, list of string
    refant_ind
        index of reference antenna in antlist, integer

    Returns
    -------
    uvw
        uvw coordinates numpy array, shape (3, nbl x ntimes)
    """
    # use the lat-long-alt values of one of the antennas as the array reference position
    refant = katpoint.Antenna(ant_descriptions[antlist[refant_ind]])
    array_reference_position = katpoint.Antenna('array_position',
                                                *refant.ref_position_wgs84)
    # use the array reference position for the basis
    basis = phase_centre.uvw_basis(timestamp=to_ut(timestamps),
                                   antenna=array_reference_position)
    # get enu vector for each row in MS, for each antenna in the baseline pair for that row
    antenna1_uvw = np.empty([3, len(timestamps)])
    antenna2_uvw = np.empty([3, len(timestamps)])
    for i, [a1, a2] in enumerate(zip(ant1, ant2)):
        antenna1 = katpoint.Antenna(ant_descriptions[antlist[a1]])
        enu1 = np.array(antenna1.baseline_toward(array_reference_position))
        antenna1_uvw[..., i] = np.tensordot(basis[..., i], enu1, ([1], [0]))

        antenna2 = katpoint.Antenna(ant_descriptions[antlist[a2]])
        enu2 = np.array(antenna2.baseline_toward(array_reference_position))
        antenna2_uvw[..., i] = np.tensordot(basis[..., i], enu2, ([1], [0]))

    # then subtract the vectors for each antenna to get the baseline vectors
    baseline_uvw = np.empty([3, len(timestamps)])
    for i, [a1, a2] in enumerate(zip(ant1, ant2)):
        baseline_uvw[..., i] = -antenna1_uvw[..., i] + antenna2_uvw[..., i]

    return baseline_uvw
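A minimal usage sketch for calc_uvw, assuming the surrounding module's to_ut() time-conversion helper is available; the target, antenna descriptions and timestamps below are illustrative values, not real telescope data.

import numpy as np
import katpoint

phase_centre = katpoint.Target('PKS 1934-63, radec, 19:39:25.03, -63:42:45.6')
# Hypothetical antenna descriptions keyed by name (name, lat, lon, alt, diameter, ENU offsets)
ant_descriptions = {
    'm000': 'm000, -30:42:39.8, 21:26:38.0, 1086.6, 13.5, -8.3 -207.3 8.6',
    'm001': 'm001, -30:42:39.8, 21:26:38.0, 1086.6, 13.5, 1.1 -171.8 8.5',
}
antlist = ['m000', 'm001']
timestamps = np.array([1234567890.0, 1234567898.0])  # one entry per MS row
ant1 = np.array([0, 0])
ant2 = np.array([1, 1])
uvw = calc_uvw(phase_centre, timestamps, antlist, ant1, ant2, ant_descriptions)
print(uvw.shape)  # (3, nrows)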
Example 2
 def setUp(self):
     self.flux_target = katpoint.Target(
         'flux, radec, 0.0, 0.0, (1.0 2.0 2.0 0.0 0.0)')
     self.antenna = katpoint.Antenna(
         'XDM, -25:53:23.05075, 27:41:03.36453, 1406.1086, 15.0')
     self.antenna2 = katpoint.Antenna(
         'XDM2, -25:53:23.05075, 27:41:03.36453, 1406.1086, 15.0, 100.0 0.0 0.0'
     )
     self.timestamp = time.mktime(
         time.strptime('2009/06/14 12:34:56', '%Y/%m/%d %H:%M:%S'))
Example 3
 def setUp(self):
     self.target1 = katpoint.construct_azel_target('45:00:00.0',
                                                   '75:00:00.0')
     self.target2 = katpoint.Target('Sun, special')
     self.ant1 = katpoint.Antenna('A1, -31.0, 18.0, 0.0, 12.0, 0.0 0.0 0.0')
     self.ant2 = katpoint.Antenna(
         'A2, -31.0, 18.0, 0.0, 12.0, 10.0 -10.0 0.0')
     self.ant3 = katpoint.Antenna(
         'A3, -31.0, 18.0, 0.0, 12.0, 5.0 10.0 3.0')
     self.ts = katpoint.Timestamp('2013-08-14 08:25')
     self.delays = katpoint.DelayCorrection([self.ant2, self.ant3],
                                            self.ant1, 1.285e9)
Example 4
def load_antennas():
    """Return an array of katpoint antenna objects"""
    antennas = []
    with pkg_resources.resource_stream(__name__, 'meerkat_antennas.txt') as f:
        for line in f:
            antennas.append(katpoint.Antenna(line))
    return antennas
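The resource file is assumed to hold one katpoint antenna description string per line; a hypothetical call then looks like this.

# meerkat_antennas.txt is assumed to contain lines such as (illustrative):
#   m000, -30:42:39.8, 21:26:38.0, 1086.6, 13.5
antennas = load_antennas()
print(len(antennas), antennas[0].name)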
Example 5
    def _get_ants(filename):
        """Quick look function to get the list of antennas in a data file.

        This is intended to be called without creating a full katdal object.

        Parameters
        ----------
        filename : string
            Data file name

        Returns
        -------
        antennas : list of :class:`katpoint.Antenna` objects

        """
        f, version = H5DataV2._open(filename)
        config_group = f['MetaData/Configuration']
        all_ants = [ant for ant in config_group['Antennas']]
        script_ants = config_group['Observation'].attrs.get('script_ants')
        script_ants = script_ants.split(',') if script_ants else all_ants
        return [
            katpoint.Antenna(
                config_group['Antennas'][ant].attrs['description'])
            for ant in script_ants if ant in all_ants
        ]
Example 6
def fringe_correction(h5):
    h5.select(corrprods='cross')
    center_freqs = h5.channel_freqs
    #print center_freqs.mean()
    wavelengths = 3.0e8 / center_freqs
    vis_set = None
    # Number of turns of phase that signal B is behind signal A due to cable / receiver delay
    tar = h5.catalogue.targets[0]
    antlook = {ant.name: ant for ant in h5.ants}  # lookup dict of antennas by name
    tar.antenna = antlook['m024']
    anttmp = antlook['m025'].description.split(',')
    #anttmp[5] =  "1257.713 2728.218 -7.917" #old
    #anttmp[5] =  "1254.75111151  2725.01575851    -9.93164 "
    #anttmp[5] = "1258.828 2728.943 -7.283" # only strong sources
    #anttmp[5] = "1258.862 2729.194 -7.234" # pointy sources 
    anttmp[5] = "1258.921 2729.124 -6.825" #best
    anttmp[5] = "1258.892 2729.159 -7.029" #???
    cable_delay_turns = 0.0  # add cable delay correction here if needed
    for compscan_no,compscan_label,target in h5.compscans():
        #print compscan_no,target.description
        #print "loop",compscan_no,compscan_label,target,h5.shape
        vis = h5.vis[:]
        # Number of turns of phase that signal B is behind signal A due to geometric delay
        target.antenna = antlook['m024']
        new_w = np.array(target.uvw(antenna2=katpoint.Antenna(','.join(anttmp)), timestamp=h5.timestamps))[2, :]
        w = new_w - h5.w[:, 0]
        geom_delay_turns = w[:, np.newaxis, np.newaxis] / wavelengths[:, np.newaxis]
        # Visibility <A, B*> has phase (A - B), therefore add (B - A) phase to stop fringes (i.e. do delay tracking)
        vis *= np.exp(2j * np.pi * (geom_delay_turns + cable_delay_turns))
        if vis_set is None:
            vis_set = vis.copy()
        else:
            vis_set = np.append(vis_set, vis, axis=0)
    return vis_set
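The core fringe-stopping step above multiplies the visibilities by exp(2j*pi*w/lambda) per channel; a minimal standalone sketch of that broadcasting (array shapes are illustrative):

import numpy as np

w = np.array([1.2, 3.4])                          # delta w per dump, in metres
wavelengths = 3.0e8 / np.array([1.4e9, 1.5e9])    # wavelength per channel
# (ndumps, 1, 1) / (nchans, 1) broadcasts to (ndumps, nchans, 1)
geom_delay_turns = w[:, np.newaxis, np.newaxis] / wavelengths[:, np.newaxis]
vis = np.ones((2, 2, 1), dtype=complex)           # (dumps, channels, baselines)
vis *= np.exp(2j * np.pi * geom_delay_turns)      # stop the fringes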
Example 7
def parse_csv(filename):
    """ Make an antenna object and a data array from the input csv file
    update the data array with the desired flux for the give polarisation

    Parameters
    ----------
    filename : string
        Filename containing the result of analyse_point_source_scans.py
        first line will contain the info to construct the antenna object

    Return
    ------
    :class: katpoint Antenna object
    data : heterogeneous record array
    """
    antenna = katpoint.Antenna(
        open(filename).readline().strip().partition('=')[2])
    #Open the csv file as an array of strings without comment fields (antenna fields are comments)
    data = np.loadtxt(filename, dtype='string', comments='#', delimiter=', ')
    #First non-comment line is the header with fieldnames
    fieldnames = data[0].tolist()
    #Setup all fields as float32
    formats = np.tile('float32', len(fieldnames))
    #Label the string fields as input datatype
    formats[[
        fieldnames.index(name) for name in STRING_FIELDS if name in fieldnames
    ]] = data.dtype
    #Save the data as a heterogeneous record array
    data = np.rec.fromarrays(data[1:].transpose(),
                             dtype=list(zip(fieldnames, formats)))
    return data, antenna
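A hypothetical call (the filename is illustrative); note that the record array is returned first and the antenna second.

data, antenna = parse_csv('point_source_scans.csv')
print(antenna.name, data.dtype.names[:5])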
Example 8
def makeKatPointAntenna(antennaString):
    antennaKat = []

    for antenna in antennaString:
        antkat = katpoint.Antenna(antenna)
        antennaKat.append(antkat)
    return antennaKat
Example 9
    def _get_ants(filename):
        """Quick look function to get the list of antennas in a data file.

        This is intended to be called without creating a complete katdal object.

        Parameters
        ----------
        filename : string
            Data file name

        Returns
        -------
        antennas : list of :class:`katpoint.Antenna` objects

        """
        f, version = H5DataV3._open(filename)
        obs_params = {}
        tm_group = f['TelescopeModel']
        all_ants = [
            ant for ant in tm_group
            if tm_group[ant].attrs.get('class') == 'AntennaPositioner'
        ]
        tm_params = tm_group['obs/params']
        for obs_param in tm_params['value']:
            key, val = obs_param.split(' ', 1)
            obs_params[key] = np.lib.utils.safe_eval(val)
        obs_ants = obs_params.get('ants')
        # By default, only pick antennas that were in use by the script
        obs_ants = obs_ants.split(',') if obs_ants else all_ants
        return [
            katpoint.Antenna(tm_group[ant].attrs['description'])
            for ant in obs_ants if ant in all_ants
        ]
Example 10
def lst2utc(req_lst, ref_location, date=None):
    def get_lst_range(date):
        date_timestamp = time.mktime(
            date.timetuple())  # this will be local time
        time_range = katpoint.Timestamp(date_timestamp).secs + \
            numpy.arange(0, 24.*3600., 60)
        lst_range = numpy.degrees(
            target.antenna.local_sidereal_time(time_range)) / 15.
        return time_range, lst_range

    req_lst = float(req_lst)
    cat = katpoint.Catalogue(add_specials=True)
    cat.antenna = katpoint.Antenna(ref_location)
    target = cat['Zenith']
    if date is None:  # find the best UTC for today
        date = datetime.date.today()
    else:
        date = date.replace(hour=0, minute=0, second=0, microsecond=0)
    [time_range, lst_range] = get_lst_range(date)
    lst_idx = numpy.abs(lst_range - req_lst).argmin()
    if lst_range[lst_idx] < req_lst:
        x = lst_range[lst_idx:lst_idx + 2]
        y = time_range[lst_idx:lst_idx + 2]
    else:
        x = lst_range[lst_idx - 1:lst_idx + 1]
        y = time_range[lst_idx - 1:lst_idx + 1]
    linefit = numpy.poly1d(numpy.polyfit(x, y, 1))
    return datetime.datetime.utcfromtimestamp(linefit(req_lst))
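A usage sketch: ref_location can be anything katpoint.Antenna accepts (e.g. a description string) and req_lst is the requested LST in hours; the values are illustrative.

import datetime

ref_location = 'ref, -30:42:39.8, 21:26:38.0, 1035.0'
utc_today = lst2utc(4.5, ref_location)                                  # best UTC for today
utc_fixed = lst2utc(4.5, ref_location, date=datetime.datetime(2021, 5, 4))
print(utc_today, utc_fixed)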
Example 11
    def get_location(self):
        """Get the default reference location.

        Constructs a katpoint.Antenna object for the default reference
        location (katpoint wraps a PyEphem Observer underneath).

        """
        return katpoint.Antenna(self.location)
Example 12
def collect_targets(kat, args):
    """Collect targets into katpoint catalogue.

    Parameters
    ----------
    kat: session kat container-like object
        Provides the standard source catalogue via kat.sources
    args: list of string
        Catalogue file names, target names or target description strings

    """
    from_names = from_strings = from_catalogues = num_catalogues = 0
    catalogue = katpoint.Catalogue()
    catalogue.antenna = katpoint.Antenna(_ref_location)

    setobserver(catalogue.antenna.observer)

    for arg in args:
        try:
            # First assume the string is a catalogue file name
            count_before_add = len(catalogue)
            try:
                catalogue.add(open(arg))
            except ValueError:
                msg = "Catalogue {} contains bad targets".format(arg)
                user_logger.warning(msg)
            from_catalogues += len(catalogue) - count_before_add
            num_catalogues += 1
        except IOError:
            # If the file failed to load,
            # assume it is a name or description string
            # With no comma in target string,
            # assume it's the name of a target
            # to be looked up in standard catalogue
            if arg.find(",") < 0:
                target = kat.sources[arg]
                if target is None:
                    msg = "Unknown target or catalogue {}, skipping it".format(
                        arg)
                    user_logger.warning(msg)
                else:
                    catalogue.add(target)
                    from_names += 1
            else:
                # Assume the argument is a target description string
                try:
                    catalogue.add(arg)
                    from_strings += 1
                except ValueError as err:
                    msg = "Invalid target {}, skipping it [{}]".format(
                        arg, err)
                    user_logger.warning(msg)
    if len(catalogue) == 0:
        raise ValueError("No known targets found in argument list")
    msg = (
        "Found {} target(s): {} from {} catalogue(s), {} from default catalogue and "
        "{} as target string(s)".format(len(catalogue), from_catalogues,
                                        num_catalogues, from_names,
                                        from_strings))
    user_logger.info(msg)
    return catalogue
Example 13
    def _create_subarrays(self, subarray_defs):
        """
        Create subarrays, setting default subarray properties
        for this dataset to the first subarray.

        Parameters
        ----------
        subarray_defs : list of dicts
            List of subarray definition dictionaries
            { 'antenna' : list, 'corr_products' : list}
        """

        subarrays = []

        for subarray_def in subarray_defs:
            try:
                ants = subarray_def['antenna']
            except KeyError as e:
                raise KeyError("Subarray definition '%s' "
                               "missing '%s'" % (subarray_def, str(e)))

            ants = [
                a if isinstance(a, katpoint.Antenna) else katpoint.Antenna(a)
                for a in ants
            ]

            try:
                corr_products = subarray_def['corr_products']
            except KeyError:
                # Generate correlation products for all antenna pairs
                # including auto-correlations
                corr_products = np.array([(a1.name + c1, a2.name + c2)
                                          for i, a1 in enumerate(ants)
                                          for a2 in ants[i:]
                                          for c1 in ('h', 'v')
                                          for c2 in ('h', 'v')])

            subarrays.append(Subarray(ants, corr_products))

        try:
            subarray = subarrays[0]
        except IndexError:
            raise ValueError("No subarrays were defined in '%s'" %
                             subarray_defs)

        self.subarrays = subarrays
        self.subarray = 0
        self.inputs = subarray.inputs
        self.ants = subarray.ants
        self.corr_products = subarray.corr_products

        subarray_catdata = CategoricalData(self.subarrays, [0, self._ndumps])
        subarray_index_catdata = CategoricalData([self.subarray],
                                                 [0, self._ndumps])
        self.sensor['Observation/subarray'] = subarray_catdata
        self.sensor['Observation/subarray_index'] = subarray_index_catdata
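A sketch of the subarray_defs structure this method consumes (antenna descriptions are illustrative); when 'corr_products' is omitted, all h/v auto- and cross-correlations are generated.

subarray_defs = [{
    'antenna': ['m000, -30:42:39.8, 21:26:38.0, 1086.6, 13.5',
                'm001, -30:42:39.8, 21:26:38.0, 1086.6, 13.5'],
    # 'corr_products' omitted on purpose
}]
dataset._create_subarrays(subarray_defs)  # dataset: an instance of the enclosing class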
Example 14
 def setUp(self):
     start_time = datetime.strptime("2018-12-07 05:00:00",
                                    "%Y-%m-%d %H:%M:%S")
     observer = ephem.Observer()
     observer.date = ephem.Date(start_time)
     simulate.setobserver(observer)
     self.antenna = katpoint.Antenna(observatory._ref_location)
     self.mock_kat = mock.Mock()
     self.mock_kat.obs_params = {"durations": {"start_time": start_time}}
     self.DUT = simulate.SimSession(self.mock_kat)
Example 15
def makeKatPointAntenna(antennaString):
	'''
	Takes a list of katpoint antenna description strings and returns a list of katpoint Antenna objects.
	'''

	antennaKat = []

	for antenna in antennaString:
		antkat = katpoint.Antenna(antenna)
		antennaKat.append(antkat)

	return antennaKat
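Usage sketch: the argument is an iterable of katpoint description strings (illustrative values below), and a list of katpoint.Antenna objects comes back.

descriptions = [
    'm000, -30:42:39.8, 21:26:38.0, 1086.6, 13.5',
    'm001, -30:42:39.8, 21:26:38.0, 1086.6, 13.5',
]
ants = makeKatPointAntenna(descriptions)
print([ant.name for ant in ants])  # ['m000', 'm001']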
Example 16
 def setUp(self):
     self.tle_lines = [
         'GPS BIIA-21 (PRN 09)    \n',
         '1 22700U 93042A   %02d266.32333151  .00000012  00000-0  10000-3 0  805%1d\n'
         % (YY, (YY // 10 + YY - 7 + 4) % 10),
         '2 22700  55.4408  61.3790 0191986  78.1802 283.9935  2.00561720104282\n'
     ]
     self.edb_lines = [
         'HIC 13847,f|S|A4,2:58:16.03,-40:18:17.1,2.906,2000,\n'
     ]
     self.antenna = katpoint.Antenna(
         'XDM, -25:53:23.05075, 27:41:03.36453, 1406.1086, 15.0')
Example 17
 def get_scan_area_extents(self, test_date):
     # Test Antenna: 0-m dish at lat 0:00:00.0, long 0:00:00.0, alt 0.0 m
     antenna = katpoint.Antenna("Test Antenna", 0, 0, 0)
     target_list = [
         katpoint.Target("t1, radec, 05:16:00.0, -25:42:00.0", antenna=antenna),
         katpoint.Target("t2, radec, 05:16:00.0, -35:36:00.0", antenna=antenna),
         katpoint.Target("t3, radec, 06:44:00.0, -35:36:00.0", antenna=antenna),
         katpoint.Target("t4, radec, 06:44:00.0, -25:42:00.0", antenna=antenna),
     ]
     el, az_min, az_max, t_start, t_end = scans._get_scan_area_extents(target_list,
                                                                       antenna,
                                                                       test_date)
     return el, az_min, az_max, t_start, t_end
Example 18
 def test_local_sidereal_time(self):
     """Test sidereal time and the use of date/time strings vs floats as timestamps."""
     ant = katpoint.Antenna(self.valid_antennas[0])
     utc_secs = time.mktime(
         time.strptime(self.timestamp, '%Y/%m/%d %H:%M:%S')) - time.timezone
     sid1 = ant.local_sidereal_time(self.timestamp)
     sid2 = ant.local_sidereal_time(utc_secs)
     self.assertAlmostEqual(
         sid1,
         sid2,
         places=10,
         msg='Sidereal time differs for float and date/time string')
     sid3 = ant.local_sidereal_time([self.timestamp, self.timestamp])
     sid4 = ant.local_sidereal_time([utc_secs, utc_secs])
     assert_angles_almost_equal(sid3, sid4, decimal=12)
Example 19
def lst2utc(req_lst, ref_location, date=None):
    """Find LST for given date else for Today.

    Parameters
    ----------
    req_lst: datetime
        Request LST
    ref_location: `EarthLocation()`
        Location on earth where LST is being measured
    date: datetime
        Date when LST is being measured

    Returns
    -------
        time_range: katpoint.Timestamp
            UTC date and time
        lst_range: float
            LST range

    """
    def get_lst_range(date):
        date_timestamp = time.mktime(
            date.timetuple())  # this will be local time
        time_range = katpoint.Timestamp(date_timestamp).secs + numpy.arange(
            0, 24.0 * 3600.0, 60)
        lst_range = numpy.degrees(
            target.antenna.local_sidereal_time(time_range)) / 15.0
        return time_range, lst_range

    req_lst = float(req_lst)
    cat = katpoint.Catalogue(add_specials=True)
    cat.antenna = katpoint.Antenna(ref_location)
    target = cat["Zenith"]
    if date is None:  # find the best UTC for today
        date = datetime.date.today()
    else:
        date = date.replace(hour=0, minute=0, second=0, microsecond=0)
    [time_range, lst_range] = get_lst_range(date)
    lst_idx = numpy.abs(lst_range - req_lst).argmin()
    if lst_range[lst_idx] < req_lst:
        x = lst_range[lst_idx:lst_idx + 2]
        y = time_range[lst_idx:lst_idx + 2]
    else:
        x = lst_range[lst_idx - 1:lst_idx + 1]
        y = time_range[lst_idx - 1:lst_idx + 1]
    linefit = numpy.poly1d(numpy.polyfit(x, y, 1))
    return datetime.datetime.utcfromtimestamp(linefit(req_lst))
Example 20
def read_offsetfile(filename):
    # Load data file in one shot as an array of strings
    string_fields = ['dataset', 'target', 'timestamp_ut', 'data_unit']
    data = np.loadtxt(filename, dtype='string', comments='#', delimiter=', ')
    # Interpret first non-comment line as header
    fields = data[0].tolist()
    # By default, all fields are assumed to contain floats
    formats = np.tile(float, len(fields))
    # The string_fields are assumed to contain strings - use data's string type, as it is of sufficient length
    formats[[fields.index(name) for name in string_fields if name in fields]] = data.dtype
    # Convert to heterogeneous record array
    data = np.rec.fromarrays(data[1:].transpose(), dtype=list(zip(fields, formats)))
    # Load antenna description string from first line of file and construct antenna object from it
    antenna = katpoint.Antenna(open(filename).readline().strip().partition('=')[2])
    # Use the pointing model contained in antenna object as the old model (if not overridden by file)
    # If the antenna has no model specified, a default null model will be used
    return data,antenna
Example 21
    def _get_ants(filename):
        """Quick look function to get the list of antennas in a data file.

        This is intended to be called without creating a complete katdal object.

        Parameters
        ----------
        filename : string
            Data file name

        Returns
        -------
        antennas : list of :class:`katpoint.Antenna` objects

        """
        f, version = H5DataV3._open(filename)
        obs_params = {}
        tm_group = f['TelescopeModel']
        ants = []
        for name in tm_group:
            if tm_group[name].attrs.get('class') != 'AntennaPositioner':
                continue
            try:
                ant_description = tm_group[name]['observer']['value'][0]
            except KeyError:
                try:
                    ant_description = tm_group[name].attrs['observer']
                except KeyError:
                    ant_description = tm_group[name].attrs['description']
            ants.append(katpoint.Antenna(ant_description))
        cam_ants = set(ant.name for ant in ants)
        # Original list of correlation products as pairs of input labels
        corrprods = H5DataV3._get_corrprods(f)
        # Find names of all antennas with associated correlator data
        cbf_ants = set([cp[0][:-1] for cp in corrprods] + [cp[1][:-1] for cp in corrprods])
        # By default, only pick antennas that were in use by the script
        tm_params = tm_group['obs/params']
        for obs_param in tm_params['value']:
            if obs_param:
                key, val = obs_param.split(' ', 1)
                obs_params[key] = np.lib.utils.safe_eval(val)
        obs_ants = obs_params.get('ants')
        # Otherwise fall back to the list of antennas common to CAM and CBF
        obs_ants = obs_ants.split(',') if obs_ants else list(cam_ants & cbf_ants)
        return [ant for ant in ants if ant.name in obs_ants]
Example 22
    def _create_antenna_sensors(self, antenna):
        """
        Create antenna sensors.

        Parameters
        ----------
        antenna : list of :class:`katpoint.Antenna`
            Antenna objects
        """
        for ant in antenna:
            ant_catdata = CategoricalData([ant], [0, self._ndumps])
            self.sensor['Antennas/%s/antenna' % (ant.name, )] = ant_catdata

        # Extract array reference from first antenna (first 5 fields of description)
        array_ant_fields = ['array'] + antenna[0].description.split(',')[1:5]
        array_ant = katpoint.Antenna(','.join(array_ant_fields))
        self.sensor['Antennas/array/antenna'] = CategoricalData(
            [array_ant], [0, self._ndumps])
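The array-reference trick above can be reproduced on its own; a sketch with an illustrative antenna description:

import katpoint

ant = katpoint.Antenna('m000, -30:42:39.8, 21:26:38.0, 1086.6, 13.5, -8.3 -207.3 8.6')
# keep the latitude, longitude, altitude and diameter fields, rename to 'array'
array_ant = katpoint.Antenna(','.join(['array'] + ant.description.split(',')[1:5]))
print(array_ant.description)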
Example 23
    def check_antennas(antennas):
        """
        Check the type of the inputs. If they are katpoint objects,
        extract the latitude, longitude and elevation information.

        arguments:
        antennas -- a list of katpoint antenna objects, a list of katpoint
                    antenna description strings, or a list of
                    [latitude, longitude, elevation] coordinates

        return:
        a list of coord.Antenna objects built from the antenna names and
        their geographic coordinates ([latitude, longitude, elevation])

        """
        def from_katpoint_list(antennas):
            antenna_list = []
            for antenna in antennas:
                antenna_list.append([
                    np.rad2deg(antenna.observer.lat),
                    np.rad2deg(antenna.observer.lon), antenna.observer.elev
                ])
            return np.array(antenna_list)

        antennas = np.array(antennas)
        if isinstance(antennas[0], np.ndarray):
            antenna_coordinates = antennas
            names = ["%03d" % i for i in range(len(antennas))]
        elif isinstance(antennas[0], katpoint.Antenna):
            antenna_coordinates = from_katpoint_list(antennas)
            names = [ant.name for ant in antennas]
        elif isinstance(antennas[0], str):
            katpoint_antennas = [katpoint.Antenna(i) for i in antennas]
            antenna_coordinates = from_katpoint_list(katpoint_antennas)
            names = [ant.name for ant in katpoint_antennas]
        else:
            raise Exception("Antennas are passed in unknown format")

        antenna_objects = [
            coord.Antenna(names[idx], coordinate)
            for idx, coordinate in enumerate(antenna_coordinates)
        ]

        return antenna_objects
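The three accepted input forms, as a hypothetical call sketch (coordinates and descriptions are illustrative, and check_antennas is assumed to be reachable as a static method):

import katpoint

# 1. Raw [latitude, longitude, elevation] triples
check_antennas([[-30.711, 21.444, 1035.0], [-30.712, 21.443, 1040.0]])
# 2. katpoint.Antenna objects
check_antennas([katpoint.Antenna('m000, -30:42:39.8, 21:26:38.0, 1086.6, 13.5')])
# 3. katpoint description strings
check_antennas(['m000, -30:42:39.8, 21:26:38.0, 1086.6, 13.5'])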
Example 24
    def _get_ants(filename):
        """Quick look function to get the list of antennas in a data file.

        This is intended to be called without creating a full katdal object.

        Parameters
        ----------
        filename : string
            Data file name

        Returns
        -------
        antennas : list of :class:`katpoint.Antenna` objects

        """
        f, version = H5DataV1._open(filename)
        ants_group = f['Antennas']
        antennas = [katpoint.Antenna(to_str(ants_group[group].attrs['description']))
                    for group in ants_group]
        return antennas
Example 25
def make_uvw(args, n_time):
    start = 946728000.0  # J2000, in UNIX time
    dec = -np.pi / 4
    target = katpoint.construct_radec_target(0, dec)
    timestamps = np.arange(n_time) * args.int_time + start
    ref = katpoint.Antenna('', *args.antennas[0].ref_position_wgs84)
    basis = target.uvw_basis(timestamp=timestamps, antenna=ref)
    antenna_uvw = []
    rows = [[], [], []]
    for antenna in args.antennas:
        enu = np.array(ref.baseline_toward(antenna))
        antenna_uvw.append(np.tensordot(basis, enu, ([1], [0])))
    for i in range(len(args.antennas)):
        for j in range(i):
            u, v, w = antenna_uvw[j] - antenna_uvw[i]
            rows[0].append(u)
            rows[1].append(v)
            rows[2].append(w)
    uvw = np.array(rows, dtype=np.float32)
    return uvw.reshape(3, uvw.shape[1] * n_time).T.copy()
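A hypothetical driver for make_uvw; args only needs int_time and a list of katpoint antennas, mocked up here with argparse.Namespace and illustrative descriptions.

import argparse
import katpoint

args = argparse.Namespace(
    int_time=8.0,
    antennas=[
        katpoint.Antenna('m000, -30:42:39.8, 21:26:38.0, 1086.6, 13.5, -8.3 -207.3 8.6'),
        katpoint.Antenna('m001, -30:42:39.8, 21:26:38.0, 1086.6, 13.5, 1.1 -171.8 8.5'),
    ])
uvw = make_uvw(args, n_time=4)
print(uvw.shape)  # (n_baselines * n_time, 3)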
Example 26
    def update_config_params(self):
        """
        Update the parameters related to the assigned resources every time the
        assign, release or configure commands are executed.

        :return: None

        """
        assigned_receptors_dict = {}
        assigned_receptors = []
        self.fsids_list = []

        self.logger.info("Updating config parameters.")

        # Load a set of antenna descriptions and construct Antenna objects from them
        with importlib.resources.open_text("cspsubarrayleafnode",
                                           "ska_antennas.txt") as f:
            descriptions = f.readlines()
        antennas = [katpoint.Antenna(line) for line in descriptions]
        # Create a dictionary including antenna objects
        antennas_dict = {ant.name: ant for ant in antennas}
        antenna_keys_list = antennas_dict.keys()
        for receptor in self.device_data.receptorIDList_str:
            if receptor in antenna_keys_list:
                assigned_receptors.append(antennas_dict[receptor])
                # Create a dictionary including antennas (objects) assigned to the Subarray
                assigned_receptors_dict[receptor] = antennas_dict[receptor]
        # The antenna with key 'ref_ant' in antennas_dict is used as the reference antenna.
        ref_ant = antennas_dict["ref_ant"]
        # Create DelayCorrection Object
        self.delay_correction_object = katpoint.DelayCorrection(
            assigned_receptors, ref_ant)
        self.antenna_names = list(
            self.delay_correction_object.ant_models.keys())
        # list of frequency slice ids
        for fsp_entry in self.device_data.fsp_ids_object:
            self.fsids_list.append(fsp_entry["fspID"])
        self.logger.info("Completed updating config parameters.")
Example 27
 def test_construct_antenna(self):
     """Test construction of antennas from strings and vice versa."""
     valid_antennas = [
         katpoint.Antenna(descr) for descr in self.valid_antennas
     ]
     valid_strings = [a.description for a in valid_antennas]
     for descr in valid_strings:
         ant = katpoint.Antenna(descr)
         print('%s %s' % (str(ant), repr(ant)))
         self.assertEqual(
             descr, ant.description,
             'Antenna description differs from original string')
         self.assertEqual(ant.description, ant.format_katcp(),
                          'Antenna description differs from KATCP format')
     for descr in self.invalid_antennas:
         self.assertRaises(ValueError, katpoint.Antenna, descr)
     descr = valid_antennas[0].description
     self.assertEqual(descr,
                      katpoint.Antenna(*descr.split(', ')).description)
     self.assertRaises(ValueError, katpoint.Antenna, descr,
                       *descr.split(', ')[1:])
     # Check that description string updates when object is updated
     a1 = katpoint.Antenna(
         'FF1, -30:43:17.3, 21:24:38.5, 1038.0, 12.0, 18.4 -8.7 0.0')
     a2 = katpoint.Antenna(
         'FF2, -30:43:17.3, 21:24:38.5, 1038.0, 13.0, 18.4 -8.7 0.0, 0.1, 1.22'
     )
     self.assertNotEqual(a1, a2, 'Antennas should be unequal')
     a1.name = 'FF2'
     a1.diameter = 13.0
     a1.pointing_model = katpoint.PointingModel('0.1')
     a1.beamwidth = 1.22
     self.assertEqual(a1.description, a2.description,
                      'Antenna description string not updated')
     self.assertEqual(a1, a2.description,
                      'Antenna not equal to description string')
     self.assertEqual(a1, a2, 'Antennas not equal')
     self.assertEqual(a1, katpoint.Antenna(a2),
                      'Construction with antenna object failed')
     self.assertEqual(a1, pickle.loads(pickle.dumps(a1)), 'Pickling failed')
     try:
         self.assertEqual(hash(a1), hash(a2), 'Antenna hashes not equal')
     except TypeError:
         self.fail('Antenna object not hashable')
Example 28
 def download_IERS_file(self):
     """ This method performs one delay calculation with dummy values to download the IERS file in advanced 
     to the delay calcualtions at the initialization of the device ."""
     # Create an example radec target
     ra = '21:08:47.92'
     dec = '-88:57:22.9'
     target = katpoint.Target.from_radec(ra, dec)
     descriptions = '''
     ref_ant, -30:42:39.8, 21:26:38.0, 1086, 13.5, 0 0 0 0 0 0,0, 0
     ref_ant, -30:42:39.8, 21:26:38.0, 1086, 13.5, 0 0 0 0 0 0,0, 0
     '''.strip().split('\n')
     antennas = [katpoint.Antenna(line) for line in descriptions]
     ref_ant = antennas[0]
     ants = antennas[1:]
     try:
         delay_correction = katpoint.DelayCorrection(ants, ref_ant)
     except Exception as delay_exception:
         log_msg = f"Exception in DelayCorrection Katpoint API {delay_exception}"
         self.logger.exception(log_msg)
     # Get delays towards target for example timestamp
     exa_time_t0 = '2021-05-04 12:54:09.686556'
     time_t0_obj = datetime.strptime(exa_time_t0, '%Y-%m-%d %H:%M:%S.%f')
     delays = delay_correction.delays(target, time_t0_obj)
     self.logger.info("delays are: '%s'", delays)
Example 29
    def __init__(self, source, ref_ant='', time_offset=0.0, **kwargs):
        DataSet.__init__(self, source.name, ref_ant, time_offset)
        attrs = source.metadata.attrs

        # ------ Extract timestamps ------

        self.source = source
        self.file = {}
        self.version = '4.0'
        self.dump_period = attrs['int_time']
        num_dumps = len(source.timestamps)
        source.timestamps += self.time_offset
        if source.timestamps[0] < 1e9:
            logger.warning(
                "Data set has invalid first correlator timestamp "
                "(%f)", source.timestamps[0])
        half_dump = 0.5 * self.dump_period
        self.start_time = katpoint.Timestamp(source.timestamps[0] - half_dump)
        self.end_time = katpoint.Timestamp(source.timestamps[-1] + half_dump)
        self._time_keep = np.full(num_dumps, True, dtype=np.bool_)
        all_dumps = [0, num_dumps]

        # Assemble sensor cache
        self.sensor = SensorCache(source.metadata.sensors, source.timestamps,
                                  self.dump_period, self._time_keep,
                                  SENSOR_PROPS, VIRTUAL_SENSORS,
                                  SENSOR_ALIASES)

        # ------ Extract flags ------

        # Internal flag mask overridden whenever _flags_keep is set via select()
        self._flags_select = np.array([255], dtype=np.uint8)
        self._flags_keep = 'all'

        # ------ Extract observation parameters and script log ------

        self.obs_params = attrs['obs_params']
        # Get observation script parameters, with defaults
        self.observer = self.obs_params.get('observer', '')
        self.description = self.obs_params.get('description', '')
        self.experiment_id = self.obs_params.get('experiment_id', '')
        # Extract script log data verbatim (it is not a standard sensor anyway)
        try:
            self.obs_script_log = self.sensor.get(
                'obs_script_log', extract=False)['value'].tolist()
        except KeyError:
            self.obs_script_log = []

        # ------ Extract subarrays ------

        # List of correlation products as pairs of input labels
        corrprods = attrs['bls_ordering']
        # Crash if there is mismatch between labels and data shape (bad labels?)
        if source.data and (len(corrprods) != source.data.shape[2]):
            raise BrokenFile('Number of baseline labels (containing expected '
                             'antenna names) received from correlator (%d) '
                             'differs from number of baselines in data (%d)' %
                             (len(corrprods), source.data.shape[2]))
        # Find all antennas in subarray with valid katpoint Antenna objects
        ants = []
        for resource in attrs['sub_pool_resources'].split(','):
            try:
                ant_description = attrs[resource + '_observer']
                ants.append(katpoint.Antenna(ant_description))
            except (KeyError, ValueError):
                continue
        # Keep the basic list sorted as far as possible
        ants = sorted(ants)
        cam_ants = set(ant.name for ant in ants)
        # Find names of all antennas with associated correlator data
        sdp_ants = set([cp[0][:-1] for cp in corrprods] +
                       [cp[1][:-1] for cp in corrprods])
        # By default, only pick antennas that were in use by the script
        obs_ants = self.obs_params.get('ants')
        # Otherwise fall back to the list of antennas common to CAM and SDP / CBF
        obs_ants = obs_ants.split(',') if obs_ants else list(cam_ants
                                                             & sdp_ants)
        self.ref_ant = obs_ants[0] if not ref_ant else ref_ant

        self.subarrays = subs = [Subarray(ants, corrprods)]
        self.sensor['Observation/subarray'] = CategoricalData(subs, all_dumps)
        self.sensor['Observation/subarray_index'] = CategoricalData([0],
                                                                    all_dumps)
        # Store antenna objects in sensor cache too, for use in virtual sensors
        for ant in ants:
            sensor_name = 'Antennas/%s/antenna' % (ant.name, )
            self.sensor[sensor_name] = CategoricalData([ant], all_dumps)

        # ------ Extract spectral windows / frequencies ------

        # Get the receiver band identity ('l', 's', 'u', 'x')
        band = attrs['sub_band']
        # Populate antenna -> receiver mapping and figure out noise diode
        for ant in cam_ants:
            # Try sanitised version of RX serial number first
            rx_serial = attrs.get('%s_rsc_rx%s_serial_number' % (ant, band), 0)
            self.receivers[ant] = '%s.%d' % (band, rx_serial)
            nd_sensor = '%s_dig_%s_band_noise_diode' % (ant, band)
            if nd_sensor in self.sensor:
                # A sensor alias would be ideal for this but it only deals with suffixes ATM
                new_nd_sensor = 'Antennas/%s/nd_coupler' % (ant, )
                self.sensor[new_nd_sensor] = self.sensor.get(nd_sensor,
                                                             extract=False)
        num_chans = attrs['n_chans']
        bandwidth = attrs['bandwidth']
        centre_freq = attrs['center_freq']
        channel_width = bandwidth / num_chans
        # Continue with different channel count, but invalidate centre freq
        # (keep channel width though)
        if source.data and (num_chans != source.data.shape[1]):
            logger.warning(
                'Number of channels reported in metadata (%d) differs'
                ' from actual number of channels in data (%d) - '
                'trusting the latter', num_chans, source.data.shape[1])
            num_chans = source.data.shape[1]
            centre_freq = 0.0
        product = attrs.get('sub_product', '')
        sideband = 1
        band_map = dict(l='L', s='S', u='UHF', x='X')
        spw_params = (centre_freq, channel_width, num_chans, product, sideband,
                      band_map[band])
        # We only expect a single spectral window within a single v4 data set
        self.spectral_windows = spws = [SpectralWindow(*spw_params)]
        self.sensor['Observation/spw'] = CategoricalData(spws, all_dumps)
        self.sensor['Observation/spw_index'] = CategoricalData([0], all_dumps)

        # ------ Extract scans / compound scans / targets ------

        # Use the activity sensor of reference antenna to partition the data
        # set into scans (and to set their states)
        scan = self.sensor.get(self.ref_ant + '_activity')
        # If the antenna starts slewing on the second dump, incorporate the
        # first dump into the slew too. This scenario typically occurs when the
        # first target is only set after the first dump is received.
        # The workaround avoids putting the first dump in a scan by itself,
        # typically with an irrelevant target.
        if len(scan) > 1 and scan.events[1] == 1 and scan[1] == 'slew':
            scan.events, scan.indices = scan.events[1:], scan.indices[1:]
            scan.events[0] = 0
        # Use labels to partition the data set into compound scans
        try:
            label = self.sensor.get('obs_label')
        except KeyError:
            label = CategoricalData([''], all_dumps)
        # Discard empty labels (typically found in raster scans, where first
        # scan has proper label and rest are empty) However, if all labels are
        # empty, keep them, otherwise whole data set will be one pathological
        # compscan...
        if len(label.unique_values) > 1:
            label.remove('')
        # Create duplicate scan events where labels are set during a scan
        # (i.e. not at start of scan)
        # ASSUMPTION: Number of scans >= number of labels
        # (i.e. each label should introduce a new scan)
        scan.add_unmatched(label.events)
        self.sensor['Observation/scan_state'] = scan
        self.sensor['Observation/scan_index'] = CategoricalData(
            range(len(scan)), scan.events)
        # Move proper label events onto the nearest scan start
        # ASSUMPTION: Number of labels <= number of scans
        # (i.e. only a single label allowed per scan)
        label.align(scan.events)
        # If one or more scans at start of data set have no corresponding label,
        # add a default label for them
        if label.events[0] > 0:
            label.add(0, '')
        self.sensor['Observation/label'] = label
        self.sensor['Observation/compscan_index'] = CategoricalData(
            range(len(label)), label.events)
        # Use the target sensor of reference antenna to set target for each scan
        target = self.sensor.get(self.ref_ant + '_target')
        # Remove initial blank target (typically because antenna starts stopped)
        if len(target) > 1 and target[0] == 'Nothing, special':
            target.events, target.indices = target.events[1:], target.indices[
                1:]
            target.events[0] = 0
        # Move target events onto the nearest scan start
        # ASSUMPTION: Number of targets <= number of scans
        # (i.e. only a single target allowed per scan)
        target.align(scan.events)
        self.sensor['Observation/target'] = target
        self.sensor['Observation/target_index'] = CategoricalData(
            target.indices, target.events)
        # Set up catalogue containing all targets in file, with reference
        # antenna as default antenna
        self.catalogue.add(target.unique_values)
        ref_sensor = 'Antennas/%s/antenna' % (self.ref_ant, )
        self.catalogue.antenna = self.sensor.get(ref_sensor)[0]
        # Ensure that each target flux model spans all frequencies
        # in data set if possible
        self._fix_flux_freq_range()

        # Apply default selection and initialise all members that depend
        # on selection in the process
        self.select(spw=0, subarray=0, ants=obs_ants)
Example 30
    def __init__(self,
                 filename,
                 ref_ant='',
                 time_offset=0.0,
                 mode='r',
                 **kwargs):
        DataSet.__init__(self, filename, ref_ant, time_offset)

        # Load file
        self.file, self.version = H5DataV1._open(filename, mode)
        f = self.file

        # Load main HDF5 groups
        ants_group, corr_group, data_group = f['Antennas'], f['Correlator'], f[
            'Scans']
        # Get observation script parameters, with defaults
        self.observer = self.obs_params['observer'] = f.attrs.get(
            'observer', '')
        self.description = self.obs_params['description'] = f.attrs.get(
            'description', '')
        self.experiment_id = self.obs_params['experiment_id'] = f.attrs.get(
            'experiment_id', '')

        # Collect all groups below data group that fit the description of a scan group
        scan_groups = []

        def register_scan_group(name, obj):
            """A scan group is defined as a group named 'Scan*' with non-empty timestamps and data."""
            if isinstance(obj, h5py.Group) and name.split('/')[-1].startswith('Scan') and \
               'data' in obj and 'timestamps' in obj and len(obj['timestamps']) > 0:
                scan_groups.append(obj)

        data_group.visititems(register_scan_group)
        # Sort scan groups in chronological order via 'decorate-sort-undecorate' (DSU) idiom
        decorated_scan_groups = [(s['timestamps'][0], s) for s in scan_groups]
        decorated_scan_groups.sort()
        self._scan_groups = [s[-1] for s in decorated_scan_groups]

        # ------ Extract timestamps ------

        self.dump_period = 1.0 / corr_group.attrs['dump_rate_hz']
        self._segments = np.cumsum(
            [0] + [len(s['timestamps']) for s in self._scan_groups])
        num_dumps = self._segments[-1]
        self._time_keep = np.ones(num_dumps, dtype=bool)
        data_timestamps = self.timestamps
        if data_timestamps[0] < 1e9:
            logger.warning(
                "File '%s' has invalid first correlator timestamp (%f)" % (
                    filename,
                    data_timestamps[0],
                ))
        # Estimate timestamps by assuming they are uniformly spaced (much quicker than loading them from file).
        # This is useful for the purpose of segmenting data set, where accurate timestamps are not that crucial.
        # The real timestamps are still loaded when the user explicitly asks for them.
        # Do quick test for uniform spacing of timestamps (necessary but not sufficient).
        if abs((data_timestamps[-1] - data_timestamps[0]) / self.dump_period +
               1 - num_dumps) < 0.01:
            # Estimate the timestamps as being uniformly spaced
            data_timestamps = data_timestamps[
                0] + self.dump_period * np.arange(num_dumps)
        else:
            # Load the real timestamps instead and warn the user, as this is anomalous
            data_timestamps = data_timestamps[:]
            expected_dumps = (data_timestamps[-1] -
                              data_timestamps[0]) / self.dump_period + 1
            logger.warning((
                "Irregular timestamps detected in file '%s':"
                "expected %.3f dumps based on dump period and start/end times, got %d instead"
            ) % (filename, expected_dumps, num_dumps))
        self.start_time = katpoint.Timestamp(data_timestamps[0] -
                                             0.5 * self.dump_period)
        self.end_time = katpoint.Timestamp(data_timestamps[-1] +
                                           0.5 * self.dump_period)

        # ------ Extract sensors ------

        # Populate sensor cache with all HDF5 datasets below antennas group that fit the description of a sensor
        cache = {}

        def register_sensor(name, obj):
            if isinstance(obj, h5py.Dataset) and obj.shape != (
            ) and obj.dtype.names == ('timestamp', 'value', 'status'):
                # Assume sensor dataset name is AntennaN/Sensors/dataset and rename it to Antennas/{ant}/dataset
                ant_name = obj.parent.parent.attrs['description'].split(',')[0]
                standardised_name = 'Antennas/%s/%s' % (ant_name,
                                                        name.split('/')[-1])
                cache[standardised_name] = SensorData(obj, standardised_name)

        ants_group.visititems(register_sensor)
        # Use estimated data timestamps for now, to speed up data segmentation
        # This will linearly interpolate pointing coordinates to correlator data timestamps (on access)
        # As long as azimuth is in natural antenna coordinates, no special angle interpolation required
        self.sensor = SensorCache(cache,
                                  data_timestamps,
                                  self.dump_period,
                                  keep=self._time_keep,
                                  props=SENSOR_PROPS,
                                  virtual=VIRTUAL_SENSORS,
                                  aliases=SENSOR_ALIASES)

        # ------ Extract subarrays ------

        ants = [
            katpoint.Antenna(ants_group[group].attrs['description'])
            for group in ants_group
        ]
        self.ref_ant = ants[0].name if not ref_ant else ref_ant
        # Map from (old-style) DBE input label (e.g. '0x') to the new antenna-based input label (e.g. 'ant1h')
        input_label = dict([(ants_group[group]['H'].attrs['dbe_input'],
                             ant.name + 'h')
                            for ant, group in zip(ants, ants_group.keys())
                            if 'H' in ants_group[group]])
        input_label.update(
            dict([(ants_group[group]['V'].attrs['dbe_input'], ant.name + 'v')
                  for ant, group in zip(ants, ants_group.keys())
                  if 'V' in ants_group[group]]))
        # Split DBE input product string into its separate inputs
        split_product = re.compile(r'(\d+[xy])(\d+[xy])')
        # Iterate over map from correlation product index to DBE input product string and convert
        # the latter to pairs of input labels (this assumes that the corrprod indices are sorted)
        corrprods = []
        for corrind, product in corr_group['input_map']:
            match = split_product.match(product)
            if match is None:
                raise BrokenFile(
                    "Unknown DBE input product '%s' in input map (expected e.g. '0x1y')"
                    % (product, ))
            corrprods.append(
                tuple([input_label[inp] for inp in match.groups()]))
        data_cp_len = len(self._scan_groups[0]['data'].dtype)
        if len(corrprods) != data_cp_len:
            raise BrokenFile(
                'Number of baseline labels received from correlator '
                '(%d) differs from number of baselines in data (%d)' %
                (len(corrprods), data_cp_len))
        self.subarrays = [Subarray(ants, corrprods)]
        self.sensor['Observation/subarray'] = CategoricalData(
            self.subarrays, [0, len(data_timestamps)])
        self.sensor['Observation/subarray_index'] = CategoricalData(
            [0], [0, len(data_timestamps)])
        # Store antenna objects in sensor cache too, for use in virtual sensor calculations
        for ant in ants:
            self.sensor['Antennas/%s/antenna' %
                        (ant.name, )] = CategoricalData(
                            [ant], [0, len(data_timestamps)])

        # ------ Extract spectral windows / frequencies ------

        centre_freq = corr_group.attrs['center_frequency_hz']
        num_chans = corr_group.attrs['num_freq_channels']
        data_num_chans = self._scan_groups[0]['data'].shape[1]
        if num_chans != data_num_chans:
            raise BrokenFile(
                'Number of channels received from correlator '
                '(%d) differs from number of channels in data (%d)' %
                (num_chans, data_num_chans))
        channel_width = corr_group.attrs['channel_bandwidth_hz']
        self.spectral_windows = [
            SpectralWindow(centre_freq, channel_width, num_chans, 'poco')
        ]
        self.sensor['Observation/spw'] = CategoricalData(
            self.spectral_windows, [0, len(data_timestamps)])
        self.sensor['Observation/spw_index'] = CategoricalData(
            [0], [0, len(data_timestamps)])

        # ------ Extract scans / compound scans / targets ------

        # Fringe Finder augment does not store antenna activity sensors - use scan + compscan labels as a guess
        scan_labels = [s.attrs.get('label', '') for s in self._scan_groups]
        compscan_labels = [
            s.parent.attrs.get('label', '') for s in self._scan_groups
        ]
        scan_states = [
            _labels_to_state(s, cs)
            for s, cs in zip(scan_labels, compscan_labels)
        ]
        # The scans are already partitioned into groups - use corresponding segments as start events
        self.sensor['Observation/scan_state'] = CategoricalData(
            scan_states, self._segments)
        self.sensor['Observation/scan_index'] = CategoricalData(
            range(len(scan_states)), self._segments)
        # Group scans together based on compscan group name and have one label per compound scan
        compscan = CategoricalData([s.parent.name for s in self._scan_groups],
                                   self._segments)
        compscan.remove_repeats()
        label = CategoricalData(compscan_labels, self._segments)
        label.align(compscan.events)
        self.sensor['Observation/label'] = label
        self.sensor['Observation/compscan_index'] = CategoricalData(
            range(len(label)), label.events)
        # Extract targets from compscan groups, replacing empty or bad descriptions with dummy target
        target = CategoricalData([
            _robust_target(s.parent.attrs.get('target', ''))
            for s in self._scan_groups
        ], self._segments)
        target.align(compscan.events)
        self.sensor['Observation/target'] = target
        self.sensor['Observation/target_index'] = CategoricalData(
            target.indices, target.events)
        # Set up catalogue containing all targets in file, with reference antenna as default antenna
        self.catalogue.add(target.unique_values)
        self.catalogue.antenna = self.sensor['Antennas/%s/antenna' %
                                             (self.ref_ant, )][0]
        # Ensure that each target flux model spans all frequencies in data set if possible
        self._fix_flux_freq_range()

        # Restore original (slow) timestamps so that subsequent sensors (e.g. pointing) will have accurate values
        self.sensor.timestamps = self.timestamps
        # Apply default selection and initialise all members that depend on selection in the process
        self.select(spw=0, subarray=0)