def __init__(self, delay_client, ordered_beams, ordered_antennas, nreaders):
    """
    @brief    Controls shared memory delay buffers that are accessed by one or
              more beamformer instances.

    @params   delay_client      A KATCPResourceClient connected to an FBFUSE delay engine server
    @params   ordered_beams     A list of beam IDs in the order that they should be generated by the beamformer
    @params   ordered_antennas  A list of antenna IDs in the order in which they should be captured by the beamformer
    @params   nreaders          The number of POSIX shared memory readers that will access the memory
                                buffers that are managed by this instance.
    """
    self._nreaders = nreaders
    self._delay_client = delay_client
    self._ordered_antennas = ordered_antennas
    self._ordered_beams = ordered_beams
    self.shared_buffer_key = "delay_buffer"
    self.mutex_semaphore_key = "delay_buffer_mutex"
    self.counting_semaphore_key = "delay_buffer_count"
    self._nbeams = len(self._ordered_beams)
    self._nantennas = len(self._ordered_antennas)
    self._delays_array = self._delays = np.rec.recarray(
        (self._nbeams, self._nantennas),
        dtype=[("delay_rate", "float32"), ("delay_offset", "float32")])
    self._targets = OrderedDict()
    for beam in self._ordered_beams:
        self._targets[beam] = Target(DEFAULT_TARGET)
    self._phase_reference = Target(DEFAULT_TARGET)
    self._update_rate = DEFAULT_UPDATE_RATE
    self._delay_span = DEFAULT_DELAY_SPAN
    self._update_callback = None
    self._beam_callbacks = {}
def test_startup(self):
    bm = BeamManager(4, KATPOINT_ANTENNAS)
    de = DelayConfigurationServer("127.0.0.1", 0, bm)
    de.start()
    bm.add_beam(Target('test_target0,radec,12:00:00,01:00:00'))
    bm.add_beam(Target('test_target1,radec,12:00:00,01:00:00'))
    bm.add_beam(Target('test_target2,radec,12:00:00,01:00:00'))
    bm.add_beam(Target('test_target3,radec,12:00:00,01:00:00'))
def __init__(self, dataset: katdal.DataSet, common: CommonStats, target: katpoint.Target) -> None:
    self.common = common
    self.target = target
    # Status string for each channel
    self.status: List[str] = ['masked'] * common.channels
    for channel in common.output_channels:
        self.status[channel] = 'failed'
    # Peak per channel (NaN where missing)
    self.peak: u.Quantity = [math.nan] * common.channels * (u.Jy / u.beam)
    # Total flux density per (polarization, channel) (NaN where missing)
    self.totals: Dict[str, u.Quantity] = {
        pol: [math.nan] * common.channels * u.Jy
        for pol in 'IQUV'
    }
    # Noise per channel (NaN where missing)
    self.noise: u.Quantity = [math.nan] * common.channels * (u.Jy / u.beam)
    # Noise estimated from weights (NaN where missing)
    self.weights_noise: u.Quantity = [math.nan] * common.channels * (u.Jy / u.beam)
    # Increase in noise due to imaging weights
    self.normalized_noise = [math.nan] * common.channels
    self.plots: Dict[str, str] = {}   # Divs to insert for plots returned by make_plots
    self.uv_coverage = ''
    self.frequency_range = bokeh.models.Range1d(
        self.common.frequencies[0].to_value(FREQUENCY_PLOT_UNIT),
        self.common.frequencies[-1].to_value(FREQUENCY_PLOT_UNIT),
        bounds='auto'
    )
    self.channel_range = bokeh.models.Range1d(0, self.common.channels - 1, bounds='auto')
    self.time_on_target = katsdpimager.metadata.time_on_target(dataset, target)
    self.model_natural_noise: Optional[u.Quantity] = None
    if self.common.sefd is not None and len(self.common.antennas) > 1:
        n = len(self.common.antennas)
        # Correlator efficiency is already folded into self.common.sefd
        denom = math.sqrt(2 * n * (n - 1) * self.time_on_target * self.common.channel_width)
        self.model_natural_noise = self.common.sefd / denom / u.beam
    mask = katsdpimager.metadata.target_mask(dataset, target)
    self.timestamps = dataset.timestamps[mask]
    self.time_range = bokeh.models.Range1d(
        datetime.fromtimestamp(self.timestamps[0] - 0.5 * dataset.dump_period, timezone.utc),
        datetime.fromtimestamp(self.timestamps[-1] + 0.5 * dataset.dump_period, timezone.utc)
    )
    # Find contiguous time intervals on target
    delta = np.diff(mask, prepend=0, append=0)
    starts = np.nonzero(delta == 1)[0]
    ends = np.nonzero(delta == -1)[0] - 1
    self.time_intervals = list(zip(dataset.timestamps[starts] - 0.5 * dataset.dump_period,
                                   dataset.timestamps[ends] + 0.5 * dataset.dump_period))
    self.array_ant = dataset.sensor['Antennas/array/antenna'][0]
    self.ants = dataset.ants
    self.elevation = target.azel(timestamp=self.timestamps, antenna=self.array_ant)[1] << u.rad
    self.parallactic_angle = target.parallactic_angle(
        timestamp=self.timestamps, antenna=self.array_ant) << u.rad
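# Illustrative sketch only (not from the original module): the expected natural
# image noise computed above follows
#     sigma = SEFD / sqrt(2 * n * (n - 1) * t_on_target * channel_width),
# with the correlator efficiency already folded into the SEFD. All input values
# in this example are assumptions chosen purely for demonstration.
import math

import astropy.units as u


def example_model_natural_noise(sefd=450 * u.Jy, n=60,
                                time_on_target=600 * u.s,
                                channel_width=208.984e3 * u.Hz):
    """Return the expected per-channel thermal noise per beam."""
    samples = (time_on_target * channel_width).to_value(u.dimensionless_unscaled)
    denom = math.sqrt(2 * n * (n - 1) * samples)
    return sefd / denom / u.beam


# example_model_natural_noise() -> roughly 0.5 mJy / beam for these assumed values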
def check_target(OVST, target, tmstmp=None, check=True):
    '''
    Compute the (az, el) position of a target for all active antennas and
    optionally check that it is within the telescope limits.

    :param OVST:   observatory object providing 'active_antennas', 'Catalogue'
                   and the azimuth/elevation limits
    :param target: str or object of class Target
                   if string: either a target description (e.g. 'azel, 30, 60')
                   or a name to look up in the catalogue
    :param tmstmp: Timestamp (or str/int/float convertible to one)
    :param check:  if True, raise LookupError when the target is outside the limits
    :return: list with position tuples [(az1, el1), (az2, el2), ...]
    '''
    antennas = OVST.active_antennas
    catalogue = OVST.Catalogue

    azel = []

    if isinstance(target, Target):
        pass  # already a Target object
    elif isinstance(target, str) and ',' in target:
        # Target has a description format, e.g. 'azel, 30, 60'
        target = Target(target)
    elif isinstance(target, str):
        target = catalogue[target]
        if not target:
            raise ValueError("Target not in Catalogue")

    if isinstance(tmstmp, str):
        if tmstmp and len(tmstmp) == 5:
            tmstmp += ':00'
        if tmstmp and len(tmstmp) == 8:
            tmstmp = str(datetime.now().date()) + ' ' + tmstmp

    if isinstance(tmstmp, (int, float)):
        tmstmp = Timestamp(tmstmp)

    if not tmstmp:
        tmstmp = Timestamp()

    for antenna in antennas:
        ae = target.azel(timestamp=tmstmp, antenna=antenna)
        azel.append([rad2deg(ae[0]), rad2deg(ae[1])])

    az = [item[0] for item in azel]
    el = [item[1] for item in azel]
    if check:
        if all(OVST.az_limit[1] - 2 < i < OVST.az_limit[0] + 2 for i in az) or all(i < OVST.el_limit[0] for i in el):
            raise LookupError(
                'Target cannot be focused at %s (target at azimuth %.2f and elevation %.2f).\n'
                'Allowed limits: az not in range of 150-173 and elevation > 25'
                % (tmstmp.local()[11:19], azel[0][0], azel[0][1]))
    return azel  # format: [(az1, el1), (az2, el2), ...]
def ca_target_update_callback(received_timestamp, timestamp, status, value):
    # TODO: should we really reset all the beams or should we have
    # a mechanism to only update changed beams?
    config_dict = json.loads(value)
    self.reset_beams()
    for target_string in config_dict.get('beams', []):
        target = Target(target_string)
        self.add_beam(target)
    for tiling in config_dict.get('tilings', []):
        target = Target(tiling['target'])  # required
        freq = float(tiling.get('reference_frequency', self._cfreq_sensor.value()))
        nbeams = int(tiling['nbeams'])
        overlap = float(tiling.get('overlap', 0.5))
        epoch = float(tiling.get('epoch', time.time()))
        self.add_tiling(target, nbeams, freq, overlap, epoch)
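# Illustrative sketch only: a configuration-authority payload of the shape that
# ca_target_update_callback above parses. The field names match the keys read in
# the callback ('beams', 'tilings', 'target', 'reference_frequency', 'nbeams',
# 'overlap', 'epoch'); the target strings and numeric values are assumptions.
import json

example_target_config = json.dumps({
    "beams": [
        "test_target0,radec,12:00:00,01:00:00",
        "test_target1,radec,13:00:00,02:00:00"
    ],
    "tilings": [
        {
            "target": "test_target2,radec,14:00:00,03:00:00",  # required
            "reference_frequency": 1.4e9,  # optional, defaults to the centre-frequency sensor
            "nbeams": 9,                   # required
            "overlap": 0.5,                # optional, defaults to 0.5
            "epoch": 1554051922.0          # optional, defaults to time.time()
        }
    ]
})
# ca_target_update_callback(received_timestamp, timestamp, 'nominal', example_target_config)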
def request_add_beam(self, req, product_id, target):
    """
    @brief      Configure the parameters of one beam

    @note       This call may only be made AFTER a successful call to start-beams.
                Before this point no beams are allocated to the instance. If all
                beams are currently allocated an exception will be raised.

    @param      req             A katcp request object

    @param      product_id      This is a name for the data product, used to track which subarray is being deconfigured.
                                For example "array_1_bc856M4k".

    @param      target          A KATPOINT target string

    @return     katcp reply object [[[ !add-beam ok | (fail [error description]) ]]]
    """
    try:
        product = self._get_product(product_id)
    except ProductLookupError as error:
        return ("fail", str(error))
    try:
        target = Target(target)
    except Exception as error:
        return ("fail", str(error))
    beam = product.add_beam(target)
    return ("ok", beam.idx)
def request_set_default_target_configuration(self, req, product_id, target):
    """
    @brief      Set the configuration of FBFUSE from the FBFUSE configuration server

    @param      product_id      This is a name for the data product, used to track which subarray is being deconfigured.
                                For example "array_1_bc856M4k".

    @param      target          A KATPOINT target string
    """
    try:
        product = self._get_product(product_id)
    except ProductLookupError as error:
        return ("fail", str(error))
    try:
        target = Target(target)
    except Exception as error:
        return ("fail", str(error))
    if not product.capturing:
        return ("fail", "Product must be capturing before a target configuration can be set.")
    product.reset_beams()
    # TBD: Here we connect to some database and request the default configurations.
    # For example this may return secondary targets in the FoV.
    #
    # As a default the current system will put one beam directly on target and
    # the rest of the beams in a static tiling pattern around this target.
    now = time.time()
    nbeams = product._beam_manager.nbeams
    product.add_tiling(target, nbeams - 1, 1.4e9, 0.5, now)
    product.add_beam(target)
    return ("ok",)
def req_target(self, target):
    self.target = target
    self._target = Target(target) if target else None
    self.lock = False
    self.scan_status = 'none'
    if not self._target and self.mode in ('POINT', 'SCAN'):
        self.req_mode('STOP')
def request_target_start(self, req, product_id, target):
    """
    @brief      Notify FBFUSE that a new target is being observed

    @param      product_id      This is a name for the data product, used to track which subarray is being deconfigured.
                                For example "array_1_bc856M4k".

    @param      target          A KATPOINT target string

    @return     katcp reply object [[[ !target-start ok | (fail [error description]) ]]]
    """
    log.info("Received target-start request for target: {}".format(target))
    try:
        product = self._get_product(product_id)
    except ProductLookupError as error:
        log.error("target-start request failed with error: {}".format(str(error)))
        raise Return(("fail", str(error)))
    try:
        target = Target(target)
    except Exception as error:
        raise Return(("fail", str(error)))
    yield product.target_start(target)
    yield product.rescale()
    raise Return(("ok", ))
def generate(self, epoch):
    """
    @brief   Calculate and update the RA and Dec positions of all beams in the tiling object.

    @param   epoch   The epoch of tiling (unix time)

    @note    The beam shape is calculated using the antennas stored on this tiling
             instance (katpoint CSV format).
    """
    log.debug("Creating PSF simulator at reference frequency {} Hz".format(
        self.reference_frequency))
    psfsim = mosaic.PsfSim(self._antennas, self.reference_frequency)
    log.debug(("Generating beam shape for target position {} "
               "at epoch {}").format(self.target, epoch))
    self.beam_shape = psfsim.get_beam_shape(self.target, epoch)
    log.debug("Generating tiling of {} beams with an overlap of {}".format(
        self.nbeams, self.overlap))
    tiling = mosaic.generate_nbeams_tiling(
        self.beam_shape, self.nbeams, self.overlap)
    coordinates = tiling.get_equatorial_coordinates()
    for ii in range(min(tiling.beam_num, self.nbeams)):
        ra, dec = coordinates[ii]
        self._beams[ii].target = Target('{},radec,{},{}'.format(
            self.target.name, ra, dec))
    return tiling
def callback(rt, t, status, value, beam):
    log.debug("Received target update for beam {}: {}".format(beam, value))
    if status == 'nominal':
        try:
            self._targets[beam] = Target(value)
        except Exception as error:
            log.exception("Error when updating target for beam {}".format(beam))
def __init__(self, delay_client, ordered_beams, ordered_antennas, nreaders,
             offline=False):
    """
    @brief    Controls shared memory delay buffers that are accessed by one or
              more beamformer instances.

    @params   delay_client      A KATCPResourceClient connected to an FBFUSE delay engine server
    @params   ordered_beams     A list of beam IDs in the order that they should be generated by the beamformer
    @params   ordered_antennas  A list of antenna IDs in the order in which they should be captured by the beamformer
    @params   nreaders          The number of POSIX shared memory readers that will access the memory
                                buffers that are managed by this instance.

    @note     The delay model definition in FBFUSE looks like:

              @code
                  struct DelayModel
                  {
                      double epoch;
                      double duration;
                      float2 delays[FBFUSE_CB_NBEAMS * FBFUSE_CB_NANTENNAS];
                  };
              @endcode
    """
    self._nreaders = nreaders
    self._delay_client = delay_client
    self._ordered_antennas = ordered_antennas
    self._ordered_beams = ordered_beams
    self.shared_buffer_key = "delay_buffer"
    self.mutex_semaphore_key = "delay_buffer_mutex"
    self.counting_semaphore_key = "delay_buffer_count"
    self._nbeams = len(self._ordered_beams)
    self._nantennas = len(self._ordered_antennas)
    self._delay_model = delay_model_type(self._nbeams, self._nantennas)()
    self._targets = OrderedDict()
    for beam in self._ordered_beams:
        self._targets[beam] = Target(DEFAULT_TARGET)
    self._phase_reference = Target(DEFAULT_TARGET)
    self._update_rate = DEFAULT_UPDATE_RATE
    self._delay_span = DEFAULT_DELAY_SPAN
    self._update_callback = None
    self._beam_callbacks = {}
    self._offline = offline
    self._offline_control_thread = None
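# Illustrative sketch only: one way to build a ctypes structure matching the
# DelayModel layout documented above (double epoch, double duration, and a
# float2 array with one [rate, offset] pair per beam/antenna). This is an
# assumption about how delay_model_type could be implemented, not the actual
# FBFUSE code.
import ctypes


def example_delay_model_type(nbeams, nantennas):
    class DelayModel(ctypes.LittleEndianStructure):
        _fields_ = [
            ("epoch", ctypes.c_double),
            ("duration", ctypes.c_double),
            # float2 per (beam, antenna), stored here as 2 floats per element
            ("delays", ctypes.c_float * (2 * nbeams * nantennas)),
        ]
    return DelayModel


# model = example_delay_model_type(32, 4)()
# ctypes.sizeof(model) == 8 + 8 + 4 * 2 * 32 * 4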
def test_dryrun(self):
    nbeams = 32
    antennas = ["m%03d" % ii for ii in range(16)]
    feng_antenna_map = {antenna: ii for ii, antenna in enumerate(antennas)}
    coherent_beam_antennas = antennas
    incoherent_beam_antennas = antennas
    nantennas = len(antennas)
    beam_manager = BeamManager(
        nbeams, [Antenna(ANTENNAS[i]) for i in coherent_beam_antennas])
    delay_config_server = DelayConfigurationServer(
        "127.0.0.1", 0, beam_manager)
    delay_config_server.start()
    dc_ip, dc_port = delay_config_server.bind_address
    for _ in range(nbeams):
        beam_manager.add_beam(Target("source0, radec, 123.1, -30.3"))
    coherent_beams_csv = ",".join(
        [beam.idx for beam in beam_manager.get_beams()])
    tot_nchans = 4096
    feng_groups = "spead://239.11.1.150+3:7147"
    nchans_per_group = tot_nchans // nantennas // 4  # integer number of channels per group
    chan0_idx = 0
    chan0_freq = 1240e6
    chan_bw = 856e6 / tot_nchans
    mcast_to_beam_map = {
        "spead://239.11.2.150:7147": coherent_beams_csv,
        "spead://239.11.2.151:7147": "ifbf00001"
    }
    feng_config = {
        "bandwidth": 856e6,
        "centre-frequency": 1200e6,
        "sideband": "upper",
        "feng-antenna-map": feng_antenna_map,
        "sync-epoch": 12353524243.0,
        "nchans": 4096
    }
    coherent_beam_config = {
        "tscrunch": 16,
        "fscrunch": 1,
        "antennas": ",".join(coherent_beam_antennas)
    }
    incoherent_beam_config = {
        "tscrunch": 16,
        "fscrunch": 1,
        "antennas": ",".join(incoherent_beam_antennas)
    }
    yield self._send_request_expect_ok(
        'prepare', feng_groups, nchans_per_group, chan0_idx, chan0_freq,
        chan_bw, nbeams, json.dumps(mcast_to_beam_map),
        json.dumps(feng_config), json.dumps(coherent_beam_config),
        json.dumps(incoherent_beam_config), dc_ip, dc_port)
    yield self._check_sensor_value('device-status', 'ok')
    yield self._send_request_expect_ok('capture-start')
    yield sleep(10)
    yield self._send_request_expect_ok('capture-stop')
    self.server._delay_buf_ctrl.stop()
class Beam(object):
    """Wrapper class for a single beam to be produced by FBFUSE"""

    def __init__(self, idx, target=DEFAULT_KATPOINT_TARGET):
        """
        @brief   Create a new Beam object

        @params  idx      a unique identifier for this beam.

        @param   target   A KATPOINT target object
        """
        self.idx = idx
        self._target = target
        self._observers = set()

    @property
    def target(self):
        return self._target

    @target.setter
    def target(self, new_target):
        self._target = new_target
        self.notify()

    def notify(self):
        """
        @brief   Notify all observers of a change to the beam parameters
        """
        for observer in self._observers:
            observer(self)

    def register_observer(self, func):
        """
        @brief   Register an observer to be called on a notify

        @params  func   Any function that takes a Beam object as its only argument
        """
        self._observers.add(func)

    def deregister_observer(self, func):
        """
        @brief   Deregister an observer to be called on a notify

        @params  func   Any function that takes a Beam object as its only argument
        """
        self._observers.remove(func)

    def reset(self):
        """
        @brief   Reset the beam to default parameters
        """
        self.target = Target(DEFAULT_KATPOINT_TARGET)

    def __repr__(self):
        return "{}, {}".format(
            self.idx, self.target.format_katcp())
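# Illustrative usage sketch of the Beam observer pattern above: an observer is
# any callable taking the Beam as its only argument; assigning to beam.target
# triggers notify(), which calls every registered observer. The observer
# function below is hypothetical.
def example_observer(beam):
    # e.g. push the new target into a sensor or delay engine
    print("Beam {} retargeted to {}".format(beam.idx, beam.target))


beam = Beam("cfbf00000", Target(DEFAULT_KATPOINT_TARGET))
beam.register_observer(example_observer)
beam.target = Target("test_target0,radec,12:00:00,01:00:00")  # observers are notified here
beam.deregister_observer(example_observer)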
def move_to_gal_pos(self, long, lat, name='galactic target'):
    """
    Moves the telescopes to the position of the given galactic coordinates.

    :param long: galactic longitude
    :param lat:  galactic latitude
    :param name: optional target name
    """
    target = Target('%s, gal, %s, %s' % (name, long, lat))
    message = self.move_to_pos(target)
    return message
def test_online_mode(self):
    beam_ids = ["cfbf{:05d}".format(i) for i in range(32)]
    for beam_id in beam_ids:
        self._beam_manager.add_beam(
            Target('{},radec,12:00:00,01:00:00'.format(beam_id)))
    antenna_ids = ["m{:03d}".format(i) for i in range(7, 7 + 4)]
    controller = DelayBufferController(
        self._delay_client, beam_ids, antenna_ids, 1, offline=False)
    yield controller.start()
    yield sleep(5)
    controller.stop()
def setup(self):
    self.target = Target('PKS1934-638, radec, 19:39, -63:42')
    self.antennas = [
        Antenna('m000, -30:42:39.8, 21:26:38.0, 1086.6, 13.5, '
                '-8.264 -207.29 8.5965'),
        Antenna('m063, -30:42:39.8, 21:26:38.0, 1086.6, 13.5, '
                '-3419.5845 -1840.48 16.3825')
    ]
    corrprods = [('m000h', 'm000h'), ('m000v', 'm000v'),
                 ('m063h', 'm063h'), ('m063v', 'm063v'),
                 ('m000h', 'm063h'), ('m000v', 'm063v')]
    subarray = Subarray(self.antennas, corrprods)
    spw = SpectralWindow(centre_freq=1284e6, channel_width=0, num_chans=16,
                         sideband=1, bandwidth=856e6)
    # Pick a time when the source is up as that seems more realistic
    self.timestamps = 1234667890.0 + 1.0 * np.arange(10)
    self.dataset = MinimalDataSet(self.target, subarray, spw, self.timestamps)
    self.array_ant = self.dataset.sensor.get('Antennas/array/antenna')[0]
def request_add_tiling(self, req, product_id, target, nbeams, reference_frequency, overlap, epoch):
    """
    @brief      Configure the parameters of a static beam tiling

    @note       This call may only be made AFTER a successful call to start-beams.
                Before this point no beams are allocated to the instance. If there
                are not enough free beams to satisfy the request an exception will
                be raised.

    @note       Beam shapes calculated for tiling are always assumed to be 2D
                elliptical Gaussians.

    @param      req             A katcp request object

    @param      product_id      This is a name for the data product, used to track which subarray is being deconfigured.
                                For example "array_1_bc856M4k".

    @param      target          A KATPOINT target string

    @param      nbeams          The number of beams in this tiling pattern.

    @param      reference_frequency     The reference frequency at which to calculate the synthesised beam shape,
                                        and thus the tiling pattern. Typically this would be chosen to be the
                                        centre frequency of the current observation.

    @param      overlap         The desired overlap point between beams in the pattern. The overlap defines
                                at what power point neighbouring beams in the tiling pattern will meet. For
                                example an overlap point of 0.1 corresponds to beams overlapping only at
                                their 10%-power points. Similarly an overlap of 0.5 corresponds to beams
                                overlapping at their half-power points. [Note: This is currently a tricky
                                parameter to use when values are close to zero. In future this may be defined
                                in sigma units or in multiples of the FWHM of the beam.]

    @param      epoch           The desired epoch for the tiling pattern as a unix time. A typical usage would
                                be to set the epoch to halfway into the coming observation in order to minimise
                                the effect of parallactic angle and array projection changes altering the shape
                                and position of the beams and thus changing the efficiency of the tiling pattern.

    @return     katcp reply object [[[ !add-tiling ok | (fail [error description]) ]]]
    """
    try:
        product = self._get_product(product_id)
    except ProductLookupError as error:
        return ("fail", str(error))
    try:
        target = Target(target)
    except Exception as error:
        return ("fail", str(error))
    tiling = product.add_tiling(target, nbeams, reference_frequency,
                                overlap, epoch)
    return ("ok", tiling.idxs())
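# Illustrative sketch only: what an add-tiling request might look like from a
# katcp client, based on the handler signature above (product_id, target,
# nbeams, reference_frequency, overlap, epoch). The product name, target and
# numeric values are assumptions chosen for demonstration.
#
#   ?add-tiling array_1_bc856M4k test_target0,radec,12:00:00,01:00:00 9 1.284e9 0.5 1554051922.0
#
# An overlap of 0.5 asks for neighbouring beams to meet at their half-power
# points; the "ok" reply carries the indices of the beams allocated to the
# tiling (via tiling.idxs()).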
def generate(self, antennas, epoch):
    """
    @brief   Calculate and update the RA and Dec positions of all beams in the tiling object.

    @param   epoch      The epoch of tiling (unix time)

    @param   antennas   The antennas to use when calculating the beam shape.
                        Note these are the antennas in katpoint CSV format.
    """
    psfsim = mosaic.PsfSim(antennas, self.reference_frequency)
    beam_shape = psfsim.get_beam_shape(self.target, epoch)
    tiling = mosaic.generate_nbeams_tiling(beam_shape, self.nbeams, self.overlap)
    for ii in range(tiling.beam_num):
        ra, dec = tiling.coordinates[ii]
        self._beams[ii].target = Target('{},radec,{},{}'.format(
            self.target.name, ra, dec))
def test_offline_mode(self):
    def update_delay_via_socket():
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.connect(CONTROL_SOCKET_ADDR)
        sock.sendall(struct.pack("d", 1554051922.649372))
        response = struct.unpack("b", sock.recv(1))[0]
        if response == 0:
            self.fail("Could not update delays")

    beam_ids = ["cfbf{:05d}".format(i) for i in range(32)]
    for beam_id in beam_ids:
        self._beam_manager.add_beam(
            Target('{},radec,12:00:00,01:00:00'.format(beam_id)))
    antenna_ids = ["m{:03d}".format(i) for i in range(7, 7 + 4)]
    controller = DelayBufferController(
        self._delay_client, beam_ids, antenna_ids, 1, offline=True)
    yield controller.start()
    update_delay_via_socket()
    controller.stop()
def set_target_config_return_value(self, proxy_id, target_string, return_value):
    target = Target(target_string)
    self.target_return_values[(proxy_id, target.name)] = return_value
def reset(self):
    """
    @brief   Reset the beam to default parameters
    """
    self.target = Target(DEFAULT_KATPOINT_TARGET)
def target_start(self, beam_info, output_dir):
    # Send target information to the apsuse pipeline
    # and trigger file writing.
    #
    # First build a message containing beam information
    # in JSON form:
    #
    # {
    #   "command": "start",
    #   "directory": <output directory>,
    #   "beam_parameters": [
    #     {"idx": 0, "name": "cfbf00000", "source": "PSRJ1823+3410", "ra": "00:00:00.00", "dec": "00:00:00.00"},
    #     {"idx": 1, "name": "cfbf00002", "source": "SBGS0000", "ra": "00:00:00.00", "dec": "00:00:00.00"},
    #     {"idx": 2, "name": "cfbf00006", "source": "SBGS0000", "ra": "00:00:00.00", "dec": "00:00:00.00"},
    #     {"idx": 3, "name": "cfbf00008", "source": "SBGS0000", "ra": "00:00:00.00", "dec": "00:00:00.00"},
    #     {"idx": 4, "name": "cfbf00010", "source": "SBGS0000", "ra": "00:00:00.00", "dec": "00:00:00.00"}
    #   ]
    # }
    #
    # Here the "idx" parameter refers to the internal index of the beam, e.g. if the
    # apsuse executable is handling 6 beams these are numbered 0-5 regardless of their
    # global index. It is thus necessary to track the mapping between internal and
    # external indices for these beams.
    log.info("Target start on capture instance")
    beam_params = []
    message_dict = {
        "command": "start",
        "directory": output_dir,
        "beam_parameters": beam_params
    }
    log.info("Parsing beam information")
    for beam, target_str in beam_info.items():
        if beam in self._internal_beam_mapping:
            idx = self._internal_beam_mapping[beam]
            target = Target(target_str)
            ra, dec = map(str, target.radec())
            log.info(
                "IDX: {}, name: {}, ra: {}, dec: {}, source: {}".format(
                    idx, beam, ra, dec, target.name))
            beam_params.append({
                "idx": idx,
                "name": beam,
                "source": target.name,
                "ra": ra,
                "dec": dec
            })
    log.debug("Connecting to apsuse instance via socket")
    client = UDSClient(self._control_socket)
    log.debug("Sending message: {}".format(json.dumps(message_dict)))
    client.send(json.dumps(message_dict))
    response_str = client.recv(timeout=3)
    try:
        response = json.loads(response_str)["response"]
    except Exception:
        log.exception("Unable to parse JSON returned from apsuse application")
    else:
        log.debug("Response: {}".format(response_str))
        if response != "success":
            raise Exception("Failed to start APSUSE recording")
    finally:
        client.close()
        log.debug("Closed socket connection")
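# Illustrative usage sketch for target_start above: beam_info maps beam IDs to
# KATPOINT target strings, and only beams present in the internal beam mapping
# are forwarded to the apsuse pipeline. The beam IDs, target strings, output
# directory and the 'capture_instance' name are assumptions for demonstration.
beam_info = {
    "cfbf00000": "PSRJ1823+3410,radec,18:23:00,34:10:00",
    "cfbf00002": "SBGS0000,radec,12:00:00,01:00:00",
}
# capture_instance.target_start(beam_info, "/path/to/output")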
def get_target_config(self, proxy_id, target_string):
    target = Target(target_string)
    raise Return(self.target_return_values[(proxy_id, target.name)])
def _update_phase_reference(self, rt, t, status, value):
    if status != "nominal":
        return
    log.debug("Received update to phase-reference: {}, {}, {}, {}".format(
        rt, t, status, value))
    self._phase_reference = Target(value)
def req_target(self, target):
    self.target = target
    self._target = Target(target)
    self._target.antenna = self.ref_ant
def test_get_target_configuration_from_ca(self):
    product_name = 'test_product'
    proxy_name = 'FBFUSE_test'
    hostname = "127.0.0.1"
    sb_id = "default_subarray"  # TODO replace this when the sb_id is actually provided to FBF
    targets = [
        'test_target0,radec,12:00:00,01:00:00',
        'test_target1,radec,13:00:00,02:00:00'
    ]
    ca_server = MockFbfConfigurationAuthority(hostname, 0)
    ca_server.start()
    ca_server.set_sb_config_return_value(proxy_name, sb_id, {})
    ca_server.set_target_config_return_value(proxy_name, targets[0], {'beams': targets})
    port = ca_server.bind_address[1]
    self._add_n_servers(64)
    yield self._send_request_expect_ok('configure', product_name, self.DEFAULT_ANTENNAS,
                                       self.DEFAULT_NCHANS, self.DEFAULT_STREAMS, proxy_name)
    yield self._send_request_expect_ok('set-configuration-authority', product_name,
                                       hostname, port)
    yield self._send_request_expect_ok('provision-beams', product_name,
                                       'random_schedule_block_id')
    product = self.server._products[product_name]
    while True:
        yield sleep(0.5)
        if product.ready:
            break
    yield self._send_request_expect_ok('capture-start', product_name)
    yield self._send_request_expect_ok('target-start', product_name, targets[0])
    yield self._check_sensor_value(
        '{}.coherent-beam-cfbf00000'.format(product_name),
        Target(targets[0]).format_katcp())
    yield self._check_sensor_value(
        '{}.coherent-beam-cfbf00001'.format(product_name),
        Target(targets[1]).format_katcp())
    new_targets = [
        'test_target2,radec,14:00:00,03:00:00',
        'test_target3,radec,15:00:00,04:00:00'
    ]
    ca_server.update_target_config(proxy_name, {'beams': new_targets})
    # Need to give some time for the update callback to hit the top of the
    # event loop and change the beam configuration sensors.
    yield sleep(1)
    yield self._check_sensor_value(
        '{}.coherent-beam-cfbf00000'.format(product_name),
        Target(new_targets[0]).format_katcp())
    yield self._check_sensor_value(
        '{}.coherent-beam-cfbf00001'.format(product_name),
        Target(new_targets[1]).format_katcp())
    yield self._send_request_expect_ok('target-stop', product_name)
    # Put beam configuration back to original:
    ca_server.update_target_config(proxy_name, {'beams': targets})
    yield sleep(1)
    # At this point the sensor values should NOT have updated
    yield self._check_sensor_value(
        '{}.coherent-beam-cfbf00000'.format(product_name),
        Target(new_targets[0]).format_katcp())
    yield self._check_sensor_value(
        '{}.coherent-beam-cfbf00001'.format(product_name),
        Target(new_targets[1]).format_katcp())
    # Now start a new target-start
    yield self._send_request_expect_ok('target-start', product_name, targets[0])
    yield self._check_sensor_value(
        '{}.coherent-beam-cfbf00000'.format(product_name),
        Target(targets[0]).format_katcp())
    yield self._check_sensor_value(
        '{}.coherent-beam-cfbf00001'.format(product_name),
        Target(targets[1]).format_katcp())
class TestVirtualSensors:
    def setup(self):
        self.target = Target('PKS1934-638, radec, 19:39, -63:42')
        self.antennas = [
            Antenna('m000, -30:42:39.8, 21:26:38.0, 1086.6, 13.5, '
                    '-8.264 -207.29 8.5965'),
            Antenna('m063, -30:42:39.8, 21:26:38.0, 1086.6, 13.5, '
                    '-3419.5845 -1840.48 16.3825')
        ]
        corrprods = [('m000h', 'm000h'), ('m000v', 'm000v'),
                     ('m063h', 'm063h'), ('m063v', 'm063v'),
                     ('m000h', 'm063h'), ('m000v', 'm063v')]
        subarray = Subarray(self.antennas, corrprods)
        spw = SpectralWindow(centre_freq=1284e6, channel_width=0, num_chans=16,
                             sideband=1, bandwidth=856e6)
        # Pick a time when the source is up as that seems more realistic
        self.timestamps = 1234667890.0 + 1.0 * np.arange(10)
        self.dataset = MinimalDataSet(self.target, subarray, spw, self.timestamps)
        self.array_ant = self.dataset.sensor.get('Antennas/array/antenna')[0]

    def test_timestamps(self):
        mjd = Timestamp(self.timestamps[0]).to_mjd()
        assert_equal(self.dataset.mjd[0], mjd)
        lst = self.array_ant.local_sidereal_time(self.timestamps)
        # Convert LST from radians (katpoint) to hours (katdal)
        assert_array_equal(self.dataset.lst, lst * (12 / np.pi))

    def test_pointing(self):
        az, el = self.target.azel(self.timestamps, self.antennas[1])
        assert_array_equal(self.dataset.az[:, 1], rad2deg(az))
        assert_array_equal(self.dataset.el[:, 1], rad2deg(el))
        ra, dec = self.target.radec(self.timestamps, self.antennas[0])
        assert_array_almost_equal(self.dataset.ra[:, 0], rad2deg(ra), decimal=5)
        assert_array_almost_equal(self.dataset.dec[:, 0], rad2deg(dec), decimal=5)
        angle = self.target.parallactic_angle(self.timestamps, self.antennas[0])
        # TODO: Check why this is so poor... see SR-1882 for progress on this
        assert_array_almost_equal(self.dataset.parangle[:, 0], rad2deg(angle), decimal=0)
        x, y = self.target.sphere_to_plane(az, el, self.timestamps, self.antennas[1])
        assert_array_equal(self.dataset.target_x[:, 1], rad2deg(x))
        assert_array_equal(self.dataset.target_y[:, 1], rad2deg(y))

    def test_uvw(self):
        u0, v0, w0 = self.target.uvw(self.antennas[0], self.timestamps, self.array_ant)
        u1, v1, w1 = self.target.uvw(self.antennas[1], self.timestamps, self.array_ant)
        u = u0 - u1
        v = v0 - v1
        w = w0 - w1
        assert_array_equal(self.dataset.u[:, 4], u)
        assert_array_equal(self.dataset.v[:, 4], v)
        assert_array_equal(self.dataset.w[:, 4], w)
        # Check that both H and V polarisations have the same (u, v, w)
        assert_array_equal(self.dataset.u[:, 5], u)
        assert_array_equal(self.dataset.v[:, 5], v)
        assert_array_equal(self.dataset.w[:, 5], w)
def referencemetrics(ant, data, num_samples_limit=1, power_sample_limit=0):
    """Determine the residual pointing offsets and all-sky RMS from the antenna pointing model.

    On the calculation of all-sky RMS:
    Assume the el and cross-el errors have zero mean, are distributed normally,
    and are uncorrelated. They are therefore described by a 2-dimensional circular
    Gaussian pdf with zero mean and *per-component* standard deviation of sigma.
    The absolute sky error (== Euclidean length of the 2-dim error vector) then has
    a Rayleigh distribution.
    The RMS sky error has a mean value of sqrt(2) * sigma, since each squared error
    term is the sum of two squared Gaussian random values, each with an expected
    value of sigma^2.
        e.g. sky_rms = np.sqrt(np.mean((abs_sky_error - abs_sky_error.mean()) ** 2))
    A more robust estimate of the RMS sky error is obtained via the median of the
    Rayleigh distribution, which is sigma * sqrt(log(4)) -> convert this to the
    RMS sky error = sqrt(2) * sigma
        e.g. robust_sky_rms = np.median(np.sqrt((abs_sky_error - abs_sky_error.mean()) ** 2)) * np.sqrt(2. / np.log(4.))
    """
    # print type(data.shape[0]), type(num_samples_limit)
    beam = data['beam_height_I'].mean()
    good_beam = ((data['beam_height_I'] > beam * .8) *
                 (data['beam_height_I'] < beam * 1.2) *
                 (data['beam_height_I'] > power_sample_limit))
    data = data[good_beam]
    if data.shape[0] > 0 and not np.all(good_beam):
        print("bad scan", data['target'][0])
    if data.shape[0] >= num_samples_limit and (data['timestamp'][-1] - data['timestamp'][0]) < 2000:
        # check all fitted Ipks are valid
        condition_str = ['ideal', 'optimal', 'normal', 'other']
        condition = 3
        text = []  # azimuth, elevation, delta_azimuth, delta_azimuth_std, delta_elevation, delta_elevation_std
        measured_delta_xel = data['delta_azimuth'] * np.cos(data['elevation'])  # scale due to sky shape
        abs_sky_error = measured_delta_xel
        model_delta_az, model_delta_el = ant.pointing_model.offset(
            data['azimuth'], data['elevation'])
        residual_az = data['delta_azimuth'] - model_delta_az
        residual_el = data['delta_elevation'] - model_delta_el
        residual_xel = residual_az * np.cos(data['elevation'])
        delta_xel_std = data['delta_azimuth_std'] * np.cos(data['elevation'])
        abs_sky_delta_std = rad2deg(np.sqrt(delta_xel_std ** 2 +
                                            data['delta_azimuth_std'] ** 2)) * 3600  # make arc seconds
        # for i, val in enumerate(data):
        #     print("Test Target: '%s' fit accuracy %.3f\"" % (data['target'][i], abs_sky_delta_std[i]))
        abs_sky_error = rad2deg(np.sqrt(residual_xel ** 2 + residual_el ** 2)) * 3600
        condition = get_condition(data)
        rms = np.std(abs_sky_error)
        robust = np.median(np.abs(abs_sky_error - abs_sky_error.mean())) * np.sqrt(2. / np.log(4.))
        text.append(
            "Dataset:%s Test Target: '%s' Reference RMS = %.3f\" {fit-accuracy=%.3f\"} (robust %.3f\") "
            "(N=%i Data Points) ['%s']"
            % (data['dataset'][0], data['target'][0], rms, np.mean(abs_sky_delta_std),
               robust, data.shape[0], condition_str[condition]))
        output_data = data[0].copy()  # make a copy of the rec array
        for i, x in enumerate(data[0]):  # make an average of data
            if x.dtype.kind == 'f':  # average floats
                output_data[i] = data.field(i).mean()
            else:
                output_data[i] = data.field(i)[0]
        sun = Target('Sun,special')
        source = Target('%s,azel, %f,%f' % (output_data['target'],
                                            np.degrees(output_data['azimuth']),
                                            np.degrees(output_data['elevation'])))
        sun_sep = np.degrees(source.separation(sun, timestamp=output_data['timestamp'], antenna=ant))
        output_data = recfunctions.append_fields(output_data, 'sun_sep', np.array([sun_sep]),
                                                 dtypes=float, usemask=False, asrecarray=True)
        output_data = recfunctions.append_fields(output_data, 'condition', np.array([condition]),
                                                 dtypes=float, usemask=False, asrecarray=True)
        output_data = recfunctions.append_fields(output_data, 'rms', np.array([rms]),
                                                 dtypes=float, usemask=False, asrecarray=True)
        output_data = recfunctions.append_fields(output_data, 'robust', np.array([robust]),
                                                 dtypes=float, usemask=False, asrecarray=True)
        output_data = recfunctions.append_fields(output_data, 'N', np.array([data.shape[0]]),
                                                 dtypes=float, usemask=False, asrecarray=True)
        #### Debugging
        # residual_az = data['delta_azimuth'] - model_delta_az
        # residual_el = data['delta_elevation'] - model_delta_el
        # residual_xel = residual_az * np.cos(data['elevation'])
        output_data = recfunctions.append_fields(
            output_data, 'residual_az', np.array([rad2deg(residual_az.std()) * 3600]),
            dtypes=float, usemask=False, asrecarray=True)
        output_data = recfunctions.append_fields(
            output_data, 'residual_el', np.array([rad2deg(residual_el.std()) * 3600]),
            dtypes=float, usemask=False, asrecarray=True)
        output_data = recfunctions.append_fields(
            output_data, 'residual_xel', np.array([rad2deg(residual_xel.std()) * 3600]),
            dtypes=float, usemask=False, asrecarray=True)
        # print("%10s %i %3.1f, %s" % (data['target'][0], data['timestamp'][-1] - data['timestamp'][0], rms,
        #                              str(np.degrees(data['delta_elevation'] - data['delta_elevation'].mean()) * 3600)))
        output_data['wind_speed'] = data['wind_speed'].max()
        return text, output_data
    else:
        return None, None
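# Illustrative sketch only (not part of the original script): numerically check
# the relationship used in the docstring above. For 2-D circular Gaussian
# pointing errors with per-component standard deviation sigma, the absolute sky
# error is Rayleigh distributed, the RMS sky error is sqrt(2) * sigma, and the
# Rayleigh median is sigma * sqrt(log(4)), so median * sqrt(2 / log(4))
# recovers the RMS robustly. The sigma value below is assumed for demonstration.
import numpy as np

rng = np.random.default_rng(42)
sigma = 25.0  # assumed per-component pointing error in arcseconds
xel_err = rng.normal(0.0, sigma, size=100000)
el_err = rng.normal(0.0, sigma, size=100000)
abs_sky_error = np.hypot(xel_err, el_err)

sky_rms = np.sqrt(np.mean(abs_sky_error ** 2))
robust_sky_rms = np.median(abs_sky_error) * np.sqrt(2. / np.log(4.))
print(sky_rms, robust_sky_rms, np.sqrt(2.) * sigma)  # all approximately equal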
def move_to_pos(self, az, el=None, for_track=False, azel_off=[0, 0]):
    '''
    Moves the telescopes to a specific azel or radec position, or to a target position.

    :param az: float/int or string
               if float/int: azimuth in degrees
               if string: RA when in format 'HH:MM:SS', otherwise a target name to look up
    :param el: float/int or string
               if float/int: elevation in degrees
               if string: declination in degrees
    '''
    if self.error:
        print "Error has occurred. Cannot move at %s." % Timestamp().to_string()
    elif self.halt:
        print "Halt has occurred. Cannot move at %s." % Timestamp().to_string()

    # Important if you choose another telescope while another is moving
    antennas = self.active_antennas

    # Check whether azel, radec or a target is given
    if isinstance(az, (int, float)) and isinstance(el, (int, float)):
        az = az % 360
        target = construct_azel_target(deg2rad(az), deg2rad(el))
        target.name = 'Moving to az: %d, el: %d at %s' % (az, el, Timestamp().to_string())
    elif isinstance(az, str) and isinstance(el, str):
        target = construct_radec_target(az, el)
        target.name = 'Moving to ra: %s, dec: %s at %s' % (az, el, Timestamp().to_string())
    elif isinstance(az, str) and not el:
        if ',' in az:
            target = Target(az)
        else:
            target = self.Catalogue[az]
    elif isinstance(az, Target):
        target = az
    else:
        raise AttributeError('Wrong format')

    try:
        azel = check_target(self, target)
    except LookupError:
        return 'Target with position az: %s, el: %s is out of telescope range' % (az, el)

    self.enableTelescopes()
    self.inSafetyPosition = False
    moveIncrementalAdapter(self, target, antennas=antennas, azElOff=azel_off)

    # inRange = [[False]*2]*len(antennas)
    inRange = []
    for i in antennas:
        # list of booleans which show whether the antennas are at the right position
        inRange.append([False, False])
    all_inRange = False
    azel = np.add(azel, azel_off)
    while ((self.CurrentSystemStatus.value() == 5 or (not all_inRange))
            and not self.error and not self.halt):
        for n, ant in enumerate(antennas):
            for i, az_el in enumerate(azel[n]):
                # check if the antenna is in range
                if az_el - 0.5 <= ant.azElPos[i].value() <= az_el + 0.5:
                    inRange[n][i] = True
        all_inRange = all(i == [True] * 2 for i in inRange)
        time.sleep(1)
        if (not all_inRange
                and not self.CurrentMotionCommand.value() == self.moveIncrementalValue
                and not self.error and not self.halt):
            moveIncrementalAdapter(self, target, antennas=antennas, azElOff=azel_off)

    # Get position of all telescopes
    pos = self.get_pos()
    if not for_track:
        if inRange:
            print 'Telescopes are in position at %s:' % Timestamp().to_string()
            for i, azel in enumerate(pos):
                print '%s \t Azimuth: %s \t Elevation: %s' % (self.antennaList[i].name, azel[1], azel[2])
        self.disableTelescopes()