def stack_gathering(self, working_surface_location: SurfaceData) -> None:
    """
    call the stack_gathering algorithm and store the results in the
    working surface location object
    """
    self.stack_gathering_algorithm(working_surface_location)

    working_surface_location.stack_bursts = \
        self.stack_gathering_algorithm.stack_bursts
    working_surface_location.beams_surf = \
        self.stack_gathering_algorithm.beams_surf
    working_surface_location.beam_angles_surf = \
        self.stack_gathering_algorithm.beam_angles_surf
    working_surface_location.t0_surf = \
        self.stack_gathering_algorithm.t0_surf
    working_surface_location.doppler_angles_surf = \
        self.stack_gathering_algorithm.doppler_angles_surf
    working_surface_location.look_angles_surf = \
        self.stack_gathering_algorithm.look_angles_surf
    working_surface_location.pointing_angles_surf = \
        self.stack_gathering_algorithm.pointing_angles_surf
    working_surface_location.look_index_surf = \
        self.stack_gathering_algorithm.look_index_surf
    working_surface_location.look_counter_surf = \
        self.stack_gathering_algorithm.look_counter_surf
    working_surface_location.closest_burst_index = \
        self.stack_gathering_algorithm.closest_burst_index

def range_compression(self, working_surface_location: SurfaceData) -> None:
    """
    call the range compression algorithm and store the results
    """
    self.range_compression_algorithm(working_surface_location)

    working_surface_location.beams_range_compr = \
        self.range_compression_algorithm.beam_range_compr
    working_surface_location.beams_range_compr_iq = \
        self.range_compression_algorithm.beam_range_compr_iq

def sigma_zero_scaling(self, working_surface_location: SurfaceData) -> None:
    """
    call the sigma0 scaling algorithm and store the results in the
    surface location object
    """
    working_surface_location.sigma0_scaling_factor = self.sigma_zero_algorithm(
        working_surface_location, self.chd.wv_length_ku, self.chd.chirp_slope_ku
    )
    working_surface_location.sigma0_scaling_factor_beam = \
        self.sigma_zero_algorithm.sigma0_scaling_factor_beam

def stack_masking(self, working_surface_location: SurfaceData) -> None:
    """
    call the stack masking algorithm and store the results
    """
    self.stack_masking_algorithm(working_surface_location)

    # store results in working surface location
    working_surface_location.beams_masked = \
        self.stack_masking_algorithm.beams_masked
    working_surface_location.stack_mask_vector = \
        self.stack_masking_algorithm.stack_mask_vector
    working_surface_location.stack_mask = \
        self.stack_masking_algorithm.stack_mask

def new_surface(self, loc_data: Dict[str, Any]) -> SurfaceData:
    """
    create a new surface location object from the provided data,
    and add it to the list of surface locations
    """
    surf = SurfaceData(
        self.cst, self.chd, self.surfaces_count, **loc_data
    )
    self.surfaces_count += 1

    self.add_surface(surf)
    surf.compute_surf_sat_vector()
    surf.compute_angular_azimuth_beam_resolution(
        self.chd.pri_sar
    )
    return surf

def multilooking(self, working_surface_location: SurfaceData) -> None:
    """
    call the multilooking algorithm and store the results in the
    surface location object
    """
    self.multilooking_algorithm(working_surface_location)

    working_surface_location.stack_std = \
        self.multilooking_algorithm.stack_std
    working_surface_location.stack_skewness = \
        self.multilooking_algorithm.stack_skewness
    working_surface_location.stack_kurtosis = \
        self.multilooking_algorithm.stack_kurtosis
    working_surface_location.waveform_multilooked = \
        self.multilooking_algorithm.waveform_multilooked

def geometry_corrections(self, working_surface_location: SurfaceData) -> None:
    """
    call the geometry corrections algorithm and store the results
    """
    self.geometry_corrections_algorithm(working_surface_location, self.chd.wv_length_ku)

    working_surface_location.slant_range_corrections = \
        self.geometry_corrections_algorithm.slant_range_corrections
    working_surface_location.range_sat_surf = \
        self.geometry_corrections_algorithm.range_sat_surf
    working_surface_location.doppler_corrections = \
        self.geometry_corrections_algorithm.doppler_corrections
    working_surface_location.win_delay_corrections = \
        self.geometry_corrections_algorithm.win_delay_corrections
    working_surface_location.beams_geo_corr = \
        self.geometry_corrections_algorithm.beams_geo_corr

def focus_surface(self, surface_to_move: SurfaceData, previous_surface: SurfaceData) -> None:
    """
    move the location of a surface to the closest approach to the target
    position given by the user in the configuration file

    :param surface_to_move: the surface that will be moved
    :param previous_surface: the surface previous to the surface being moved
    :return:
    """
    self.focus_found = True

    # flag surface as being focused
    surface_to_move.target_focused = True
    # surface_to_move.surface_type = 4

    # get position of target
    lla_target = np.asarray([
        self.cnf.surface_focusing_lat,
        self.cnf.surface_focusing_lon,
        self.cnf.surface_focusing_alt
    ])
    pos_target = lla2ecef(lla_target, self.cst)

    # get current position of focus surface & previous
    pos_focus = np.asmatrix(surface_to_move.ecef_surf)
    pos_prior = np.asmatrix(previous_surface.ecef_surf)

    # calculate along-track direction vector
    along_track = pos_focus - pos_prior
    along_track /= np.linalg.norm(along_track)

    # calculate vector from previous surface to target position
    rel_target = pos_target - pos_prior

    # project relative target vector onto along-track direction
    cos_a = (along_track * rel_target.T) / np.linalg.norm(rel_target)
    rel_focus = (np.linalg.norm(rel_target) * cos_a) * along_track

    # get position to move surface to
    x_move, y_move, z_move = np.ravel(rel_focus)

    focus_x = previous_surface.x_surf + x_move
    focus_y = previous_surface.y_surf + y_move
    focus_z = previous_surface.z_surf + z_move

    focus_ecef = np.asarray([focus_x, focus_y, focus_z])

    # set new position of focus surface
    surface_to_move.x_surf = focus_x
    surface_to_move.y_surf = focus_y
    surface_to_move.z_surf = focus_z

    focus_lla = ecef2lla(focus_ecef, self.cst)
    focus_lat, focus_lon, focus_alt = np.ravel(focus_lla)

    surface_to_move.lat_surf = focus_lat
    surface_to_move.lon_surf = focus_lon
    surface_to_move.alt_surf = focus_alt

    surface_to_move.win_delay_surf = \
        (surface_to_move.alt_sat - surface_to_move.alt_surf) * 2. / self.cst.c

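# The along-track projection used in focus_surface above can be exercised on
# its own. The sketch below is a minimal, self-contained illustration of the
# same geometry (project the previous-surface-to-target vector onto the
# along-track unit vector), written with plain ndarrays instead of
# np.asmatrix; the coordinates in the usage example are arbitrary and purely
# illustrative.
import numpy as np


def project_onto_along_track(pos_prior, pos_focus, pos_target):
    """Return the displacement from pos_prior to the point on the
    along-track line that lies closest to pos_target."""
    along_track = pos_focus - pos_prior
    along_track = along_track / np.linalg.norm(along_track)
    rel_target = pos_target - pos_prior
    # scalar projection of rel_target onto the along-track direction,
    # scaled back along that direction
    return np.dot(rel_target, along_track) * along_track


# usage with made-up ECEF-like positions (metres)
displacement = project_onto_along_track(
    np.array([7000e3, 0.0, 0.0]),
    np.array([7000e3, 7e3, 0.0]),
    np.array([7000e3, 3e3, 1e3]),
)
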
def sigma_zero_scaling(self, working_surface_location: SurfaceData) -> None:
    """
    call the sigma0 scaling algorithm and store the results in the
    surface location object
    """
    working_surface_location.sigma0_scaling_factor = self.sigma_zero_algorithm(
        working_surface_location, self.chd.wv_length_ku, self.chd.chirp_slope_ku
    )

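# The wrapper methods above each invoke one algorithm and copy its outputs
# onto the working surface location. Their calling order is not shown here;
# the sketch below is an assumption inferred from the data dependencies
# visible in the tests in this section (stack gathering feeds geometry
# corrections, whose corrected beams feed range compression, whose output
# feeds stack masking, whose masks feed multilooking, while sigma0 scaling
# uses the range computed by the geometry corrections). It is not the
# processor's actual main loop.
def process_surface_sketch(processor, surface):
    processor.stack_gathering(surface)
    processor.geometry_corrections(surface)
    processor.range_compression(surface)
    processor.stack_masking(surface)
    processor.multilooking(surface)
    processor.sigma_zero_scaling(surface)
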
def _stack_masking_algorithm_tests(self, input_data, expected):
    """
    runs the stack masking algorithm test with provided
    input data and expected values

    :param input_data: TestDataLoader object containing inputs
    :param expected: TestDataLoader object containing expected values
    :return: None
    """
    self.initialise_algorithm(input_data)

    stack_size = input_data["data_stack_size"]
    zp_fact_range = self.cnf.zp_fact_range

    beams_range_compr_samples = \
        np.tile(input_data["beams_range_compr"], 2)
    beams_range_compr = np.reshape(
        beams_range_compr_samples,
        (stack_size, self.chd.n_samples_sar * zp_fact_range))

    working_loc = SurfaceData(
        cst=self.cst, chd=self.chd,
        data_stack_size=stack_size,
        surface_type=SurfaceType(input_data["surface_type"]),
        doppler_corrections=input_data["doppler_corrections"],
        slant_range_corrections=input_data["slant_range_corrections"],
        win_delay_corrections=input_data["win_delay_corrections"],
        beams_range_compr=beams_range_compr)

    # set stack masking cnf parameters
    self.stack_masking_algorithm(working_loc)

    stack_mask_vector_actual = self.stack_masking_algorithm.stack_mask_vector
    beams_masked_actual = self.stack_masking_algorithm.beams_masked

    stack_mask_vector_expected = expected["stack_mask_vector"]
    beams_masked_expected = np.reshape(
        expected["beams_masked"],
        (stack_size, self.chd.n_samples_sar * zp_fact_range))

    for stack_index in range(stack_size):
        # compare beams masked values
        for sample_index in range(self.chd.n_samples_sar * zp_fact_range):
            self.assertEqual(
                beams_masked_actual[stack_index, sample_index],
                beams_masked_expected[stack_index, sample_index],
                msg="stack_index: {}/{}, sample_index: {}/{}".format(
                    stack_index, stack_size,
                    sample_index, self.chd.n_samples_sar))
        # compare stack mask vector value
        self.assertEqual(stack_mask_vector_actual[stack_index],
                         stack_mask_vector_expected[stack_index],
                         msg="stack_index: {}/{}".format(
                             stack_index, stack_size))

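# A concrete test case would normally just load its input and expected data
# files and delegate to the shared helper above. The sketch below mirrors the
# pattern used by the other tests in this section; the attribute names
# self.inputs_01 and self.expected_01 are illustrative assumptions, not
# guaranteed to match the real fixture names.
def test_stack_masking_algorithm_01(self):
    """
    stack masking algorithm test 01
    -------------------------------
    """
    input_data = TestDataLoader(self.inputs_01, delim=' ')
    expected = TestDataLoader(self.expected_01, delim=' ')
    self._stack_masking_algorithm_tests(input_data, expected)
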
def test_range_compression_algorithm_01(self):
    """
    range compression algorithm test 01
    -----------------------------------
    """
    input_data = TestDataLoader(self.inputs_01, delim=' ')
    expected = TestDataLoader(self.expected_01, delim=' ')

    self.initialise_algorithm(input_data)

    input_beams_geo_corr = input_data["beams_geo_corr"]
    complex_beams_geo_corr = input_beams_geo_corr + \
        1j * np.flipud(input_beams_geo_corr)

    stack_size = input_data["data_stack_size"]

    beams_geo_corr = np.reshape(
        complex_beams_geo_corr,
        (stack_size, self.chd.n_samples_sar)
    )

    working_loc = SurfaceData(
        self.cst, self.chd,
        data_stack_size=stack_size,
        beams_geo_corr=beams_geo_corr
    )

    self.range_compression_algorithm(working_loc)

    beam_range_compr = self.range_compression_algorithm.beam_range_compr
    beam_range_compr_i = np.real(
        self.range_compression_algorithm.beam_range_compr_iq
    )
    beam_range_compr_q = np.imag(
        self.range_compression_algorithm.beam_range_compr_iq
    )

    expected_range_compr = np.reshape(
        expected["beams_range_compr"],
        beam_range_compr.shape, order='F'
    )
    expected_range_compr_i = np.reshape(
        expected["beams_range_compr_i"],
        beam_range_compr.shape, order='F'
    )
    expected_range_compr_q = np.reshape(
        expected["beams_range_compr_q"],
        beam_range_compr.shape, order='F'
    )

    self.assertTrue(
        np.allclose(beam_range_compr, expected_range_compr)
    )
    self.assertTrue(
        np.allclose(beam_range_compr_i, expected_range_compr_i)
    )
    self.assertTrue(
        np.allclose(beam_range_compr_q, expected_range_compr_q)
    )

def test_surface_location_algorithm_02(self):
    """
    surface location algorithm test 02
    ----------------------------------

    loads multiple input ISPs and one input surface location.
    expected result is for the surface location algorithm to
    determine that a new surface location should not yet be
    calculated
    """
    # load input data
    inputs = TestDataLoader(self.input_02, delim=' ')

    self.initialise_algorithm(inputs)

    # generate input packet objects
    isps = [
        L1AProcessingData(
            self.cst, self.chd, i,
            time_sar_ku=time,
            lat_sar_sat=inputs["lat_sar_sat"][i],
            lon_sar_sat=inputs["lon_sar_sat"][i],
            alt_sar_sat=inputs["alt_sar_sat"][i],
            win_delay_sar_ku=inputs["win_delay_sar_ku"][i]
        ) for i, time in enumerate(inputs["time_sar_ku"])
    ]
    for packet in isps:
        packet.compute_location_sar_surf()

    # create prior surface location object
    surf = SurfaceData(
        self.cst, self.chd,
        time_surf=inputs["time_surf"],
        x_surf=inputs["x_surf"],
        y_surf=inputs["y_surf"],
        z_surf=inputs["z_surf"],
        x_sat=inputs["x_sat"],
        y_sat=inputs["y_sat"],
        z_sat=inputs["z_sat"],
        x_vel_sat=inputs["x_vel_sat"],
        y_vel_sat=inputs["y_vel_sat"],
        z_vel_sat=inputs["z_vel_sat"]
    )
    surf.compute_surf_sat_vector()
    surf.compute_angular_azimuth_beam_resolution(
        inputs["pri_sar_pre_dat"]
    )

    # execute surface location algorithm
    new_surf = self.surface_location_algorithm([surf], isps)

    # confirm that no new surface is generated
    self.assertFalse(new_surf, msg="erroneously created new surface")

def test_sigma0_algorithm_01(self):
    """
    sigma0 scaling algorithm test 01
    --------------------------------
    """
    input_data = TestDataLoader(self.inputs_01, delim=' ')
    expected = TestDataLoader(self.expected_01, delim=' ')

    self.initialise_algorithm(input_data)

    data_stack_size = input_data['data_stack_size']

    isps = []

    x_vel = input_data['x_vel_sat_sar']
    y_vel = input_data['y_vel_sat_sar']
    z_vel = input_data['z_vel_sat_sar']
    pri_sar = input_data['pri_sar_pre_dat']

    for stack_index in range(data_stack_size):
        packet = L1AProcessingData(
            self.cst, self.chd,
            x_vel_sat_sar=x_vel[stack_index],
            y_vel_sat_sar=y_vel[stack_index],
            z_vel_sat_sar=z_vel[stack_index],
            pri_sar_pre_dat=pri_sar[stack_index]
        )
        isps.append(packet)

    working_loc = SurfaceData(
        self.cst, self.chd,
        data_stack_size=data_stack_size,
        range_sat_surf=input_data['range_sat_surf'],
        stack_bursts=np.asarray(isps)
    )

    sig0_scale_factor = self.sigma0_algorithm(
        working_loc,
        input_data['wv_length_ku'],
        input_data['chirp_slope_ku']
    )

    self.assertAlmostEqual(
        expected['sigma0_scaling_factor'], sig0_scale_factor
    )

def test_surface_location_algorithm_03(self):
    """
    surface location algorithm test 03
    ----------------------------------

    loads multiple input ISPs and one input surface location.
    expected result is for the surface location algorithm to
    generate a new surface location. The attributes of this new
    surface location are then validated against the expected values.
    """
    # load the expected data
    expected_data = TestDataLoader(self.expected_03, delim=' ')

    # load the input data
    inputs = TestDataLoader(self.input_03, delim=' ')

    self.initialise_algorithm(inputs)

    # create all input packet objects
    isps = [
        L1AProcessingData(
            self.cst, self.chd, i,
            time_sar_ku=time,
            lat_sar_sat=inputs["lat_sar_sat"][i],
            lon_sar_sat=inputs["lon_sar_sat"][i],
            alt_sar_sat=inputs["alt_sar_sat"][i],
            win_delay_sar_ku=inputs["win_delay_sar_ku"][i],
            x_sar_sat=0,
            y_sar_sat=0,
            z_sar_sat=0,
            alt_rate_sat_sar=0,
            roll_sar=0,
            pitch_sar=0,
            yaw_sar=0,
            x_vel_sat_sar=0,
            y_vel_sat_sar=0,
            z_vel_sat_sar=0,
            days=inputs["time_sar_ku"] // self.cst.sec_in_day,
            seconds=inputs["time_sar_ku"] % self.cst.sec_in_day
        ) for i, time in enumerate(inputs["time_sar_ku"])
    ]
    # calculate surface position for each packet
    for packet in isps:
        packet.compute_location_sar_surf()

    # create the prior surface location object
    surf = SurfaceData(
        self.cst, self.chd,
        time_surf=inputs["time_surf"],
        x_surf=inputs["x_surf"],
        y_surf=inputs["y_surf"],
        z_surf=inputs["z_surf"],
        x_sat=inputs["x_sat"],
        y_sat=inputs["y_sat"],
        z_sat=inputs["z_sat"],
        x_vel_sat=inputs["x_vel_sat"],
        y_vel_sat=inputs["y_vel_sat"],
        z_vel_sat=inputs["z_vel_sat"]
    )
    # compute properties of the surface location
    surf.compute_surf_sat_vector()
    surf.compute_angular_azimuth_beam_resolution(
        inputs["pri_sar_pre_dat"]
    )

    # execute the surface location algorithm
    new_surf = self.surface_location_algorithm([surf], isps)

    # confirm new surface has been created
    self.assertTrue(new_surf, msg="failed to create new surface")

    # retrieve properties of the surface location
    surf = self.surface_location_algorithm.get_surface()

    # validate properties
    self.assertAlmostEqual(surf["time_surf"], expected_data["time_surf"])
    self.assertAlmostEqual(surf["x_surf"], expected_data["x_surf"], delta=1e-5)
    self.assertAlmostEqual(surf["y_surf"], expected_data["y_surf"], delta=1e-5)
    self.assertAlmostEqual(surf["z_surf"], expected_data["z_surf"], delta=1e-5)
    self.assertAlmostEqual(surf["lat_surf"], expected_data["lat_surf"], delta=1e-12)
    self.assertAlmostEqual(surf["lon_surf"], expected_data["lon_surf"], delta=1e-12)
    self.assertAlmostEqual(surf["alt_surf"], expected_data["alt_surf"], delta=1e-4)

def test_beam_angles_algorithm_02(self):
    """
    beam angles algorithm test #02
    ------------------------------
    """
    # load the expected data object
    expected = TestDataLoader(self.expected_02, delim=' ')

    # load the input data
    input_data = TestDataLoader(self.input_02, delim=' ')

    self.initialise_algorithm(input_data)

    # create surface location objects
    surfs = []
    for i, surf_num in enumerate(input_data["surface_counter"]):
        surf = SurfaceData(
            self.cst, self.chd, surf_num,
            time_surf=input_data["time_surf"][i],
            x_surf=input_data["x_surf"][i],
            y_surf=input_data["y_surf"][i],
            z_surf=input_data["z_surf"][i],
        )
        surfs.append(surf)

    # create packet object
    packet = L1AProcessingData(
        self.cst, self.chd,
        time_sar_ku=input_data["time_sar_ku"],
        x_sar_sat=input_data["x_sar_sat"],
        y_sar_sat=input_data["y_sar_sat"],
        z_sar_sat=input_data["z_sar_sat"],
        x_vel_sat_sar=input_data["x_vel_sat_sar"],
        y_vel_sat_sar=input_data["y_vel_sat_sar"],
        z_vel_sat_sar=input_data["z_vel_sat_sar"],
        pri_sar_pre_dat=input_data["pri_sar_pre_dat"]
    )

    work_loc = input_data["working_surface_location_counter"]

    # execute beam angles algorithm
    self.beam_angles_algorithm(surfs, packet, work_loc)

    # confirm correct number of surfaces seen
    self.assertEqual(len(self.beam_angles_algorithm.surfaces_seen),
                     expected["surfaces_seen"])
    # confirm the working surface location is not seen
    self.assertFalse(self.beam_angles_algorithm.work_location_seen,
                     msg="working location erroneously seen")
    # check beam angles are correct
    self.assertTrue(np.allclose(self.beam_angles_algorithm.beam_angles,
                                expected["beam_ang"]),
                    msg="Beam Angles values are not correct")
    # check surface indices are correct
    self.assertTrue(np.array_equal(
        self.beam_angles_algorithm.surfaces_seen,
        expected["surf_loc_index"]),
        msg="Surface Indices are not correct")

def multilooking(self, working_surface_location: SurfaceData) -> None:
    """
    call the multilooking algorithm and store the results in the
    surface location object
    """
    self.multilooking_algorithm(working_surface_location)

    working_surface_location.stack_max = \
        self.multilooking_algorithm.stack_max
    working_surface_location.stack_std = \
        self.multilooking_algorithm.stack_std
    working_surface_location.stack_skewness = \
        self.multilooking_algorithm.stack_skewness
    working_surface_location.stack_kurtosis = \
        self.multilooking_algorithm.stack_kurtosis

    working_surface_location.n_beams_start_stop = \
        self.multilooking_algorithm.n_beams_start_stop
    working_surface_location.start_look_angle = \
        self.multilooking_algorithm.start_look_angle
    working_surface_location.stop_look_angle = \
        self.multilooking_algorithm.stop_look_angle
    working_surface_location.start_doppler_angle = \
        self.multilooking_algorithm.start_doppler_angle
    working_surface_location.stop_doppler_angle = \
        self.multilooking_algorithm.stop_doppler_angle
    working_surface_location.start_pointing_angle = \
        self.multilooking_algorithm.start_pointing_angle
    working_surface_location.stop_pointing_angle = \
        self.multilooking_algorithm.stop_pointing_angle
    working_surface_location.start_beam_angle = \
        self.multilooking_algorithm.start_beam_angle
    working_surface_location.stop_beam_angle = \
        self.multilooking_algorithm.stop_beam_angle
    working_surface_location.start_burst_index = \
        self.multilooking_algorithm.start_burst_index
    working_surface_location.stop_burst_index = \
        self.multilooking_algorithm.stop_burst_index
    working_surface_location.stack_mask_vector_start_stop = \
        self.multilooking_algorithm.stack_mask_vector_start_stop
    working_surface_location.beam_angles_start_stop = \
        self.multilooking_algorithm.beam_angles_start_stop
    working_surface_location.look_angles_start_stop = \
        self.multilooking_algorithm.look_angles_start_stop

    working_surface_location.waveform_multilooked = \
        self.multilooking_algorithm.waveform_multilooked

def _geometry_corrections_algorithm_tests(self, input_data, expected):
    """
    runs the geometry corrections algorithm test with the provided
    input data and expected values

    :param input_data: TestDataLoader object containing inputs
    :param expected: TestDataLoader object containing expected values
    :return: None
    """
    self.initialize_algorithm(input_data)

    # create stack of ISPs
    isps = []
    stack_size = input_data["data_stack_size"]

    for stack_index in range(stack_size):
        packet = L1AProcessingData(
            self.cst, self.chd,
            x_vel_sat_sar=input_data["x_vel_sat_sar"][stack_index],
            y_vel_sat_sar=input_data["y_vel_sat_sar"][stack_index],
            z_vel_sat_sar=input_data["z_vel_sat_sar"][stack_index],
            x_sar_sat=input_data["x_sar_sat"][stack_index],
            y_sar_sat=input_data["y_sar_sat"][stack_index],
            z_sar_sat=input_data["z_sar_sat"][stack_index],
            win_delay_sar_ku=input_data["win_delay_sar_ku"][stack_index]
        )
        isps.append(packet)

    # create working surface location
    beams_surf = np.reshape(
        input_data["beams_surf"],
        (stack_size, self.chd.n_samples_sar)
    )

    working_loc = SurfaceData(
        self.cst, self.chd,
        stack_bursts=isps,
        data_stack_size=stack_size,
        win_delay_surf=input_data["win_delay_surf"],
        x_surf=input_data["x_surf"],
        y_surf=input_data["y_surf"],
        z_surf=input_data["z_surf"],
        beam_angles_surf=input_data["beam_angles_surf"],
        t0_surf=input_data["T0_surf"],
        beams_surf=beams_surf
    )

    # TODO: add window delay alignment method selection
    self.geometry_corrections_algorithm(working_loc, input_data["wv_length_ku"])

    self.assertTrue(
        np.allclose(
            self.geometry_corrections_algorithm.doppler_corrections,
            expected["doppler_corrections"]
        ),
        msg="Doppler corrections do not match"
    )
    self.assertTrue(
        np.allclose(
            self.geometry_corrections_algorithm.range_sat_surf,
            expected["range_sat_surf"]
        ),
        msg="Range Sat Surf does not match"
    )
    self.assertTrue(
        np.allclose(
            self.geometry_corrections_algorithm.slant_range_corrections,
            expected["slant_range_corrections"], atol=1e-8
        ),
        msg="Slant Range Corrections do not match"
    )
    self.assertTrue(
        np.allclose(
            self.geometry_corrections_algorithm.win_delay_corrections,
            expected["win_delay_corrections"]
        ),
        msg="Window Delay Corrections do not match"
    )

    flat_corr = np.ravel(self.geometry_corrections_algorithm.beams_geo_corr)
    components = zip(np.real(flat_corr), np.imag(flat_corr))

    for index, (i, q) in enumerate(components):
        expected_i = expected["beams_geo_corr_i"][index]
        expected_q = expected["beams_geo_corr_q"][index]

        if expected_i == 0:
            self.assertEqual(expected_i, i)
        else:
            rel_err = abs((expected_i - i) / expected_i)
            self.assertLess(rel_err, 2e-4)

        if expected_q == 0:
            self.assertEqual(expected_q, q)
        else:
            rel_err = abs((expected_q - q) / expected_q)
            self.assertLess(rel_err, 2e-4)

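# The element-wise loop above enforces a relative error below 2e-4, with an
# exact match required wherever the expected component is zero. The sketch
# below is a vectorized formulation of the same check, given for illustration
# only; it is not a helper that exists in the test suite.
import numpy as np


def assert_rel_close(test_case, actual, expected, rtol=2e-4):
    """Assert element-wise relative closeness, requiring exact equality
    where the expected value is zero."""
    actual = np.asarray(actual, dtype=float).ravel()
    expected = np.asarray(expected, dtype=float).ravel()
    zero = expected == 0
    # exact equality where the expected value is zero
    test_case.assertTrue(np.array_equal(actual[zero], expected[zero]))
    # relative error bound everywhere else
    rel_err = np.abs((expected[~zero] - actual[~zero]) / expected[~zero])
    test_case.assertTrue(np.all(rel_err < rtol))
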
def multilooking_tests(self, input_data, expected):
    """
    runs the multilooking algorithm test with the provided
    input data and expected values

    :param input_data: TestDataLoader object containing inputs
    :param expected: TestDataLoader object containing expected values
    :return: None
    """
    self.initialise_algorithm(input_data)

    zp_fact_range = input_data['zp_fact_range_cnf']
    data_stack_size = input_data['data_stack_size']

    input_stack_mask_vector = input_data['stack_mask_vector']
    n_samples_max = self.chd.n_samples_sar * zp_fact_range

    stack_mask = np.reshape(input_data['stack_mask'],
                            (data_stack_size, n_samples_max))
    beams_masked = np.reshape(input_data['beams_masked'],
                              (data_stack_size, n_samples_max))

    stack_bursts = [FakeBurst(i) for i in range(data_stack_size)]

    working_location = SurfaceData(
        self.cst, self.chd,
        data_stack_size=data_stack_size,
        beam_angles_surf=input_data['beam_angles_surf'],
        look_angles_surf=input_data['look_angles_surf'],
        pointing_angles_surf=input_data['pointing_angles_surf'],
        doppler_angles_surf=input_data['doppler_angles_surf'],
        stack_mask_vector=input_stack_mask_vector,
        stack_mask=stack_mask,
        beams_masked=beams_masked,
        stack_bursts=stack_bursts
    )

    self.multilooking_algorithm.zp_fact_range = \
        input_data['zp_fact_range_cnf']
    self.multilooking_algorithm.flag_avoid_zeros_in_multilooking = \
        input_data['flag_avoid_zeros_in_multilooking']
    self.multilooking_algorithm.n_looks_stack = \
        input_data['n_looks_stack_cnf']

    self.multilooking_algorithm(working_location)

    expected_waveform = expected['wfm_ml_sar']
    for sample_index in range(n_samples_max):
        self.assertEqual(
            expected_waveform[sample_index],
            self.multilooking_algorithm.waveform_multilooked[sample_index])

    self.zero_float_assertion(expected['stack_skewness'],
                              self.multilooking_algorithm.stack_skewness)
    self.zero_float_assertion(expected['stack_kurtosis'],
                              self.multilooking_algorithm.stack_kurtosis)
    self.zero_float_assertion(expected['stack_std'],
                              self.multilooking_algorithm.stack_std)
    self.zero_float_assertion(
        expected['stack_look_angle_centre'],
        self.multilooking_algorithm.look_angle_centre)
    self.zero_float_assertion(
        expected['stack_pointing_angle_centre'],
        self.multilooking_algorithm.pointing_angle_centre)

    # look angle
    self.assertEqual(expected['start_look_angle'],
                     self.multilooking_algorithm.start_look_angle)
    self.assertEqual(expected['stop_look_angle'],
                     self.multilooking_algorithm.stop_look_angle)
    # doppler angle
    self.assertEqual(expected['start_doppler_angle'],
                     self.multilooking_algorithm.start_doppler_angle)
    self.assertEqual(expected['stop_doppler_angle'],
                     self.multilooking_algorithm.stop_doppler_angle)
    # pointing angle
    self.assertEqual(expected['start_pointing_angle'],
                     self.multilooking_algorithm.start_pointing_angle)
    self.assertEqual(expected['stop_pointing_angle'],
                     self.multilooking_algorithm.stop_pointing_angle)
    # beams contributing
    self.assertEqual(expected['n_beams_multilooking'],
                     self.multilooking_algorithm.n_beams_multilooking)
    self.assertEqual(expected['n_beams_start_stop'],
                     self.multilooking_algorithm.n_beams_start_stop)

    for expect, actual in zip(
            expected['stack_mask_vector_start_stop'],
            self.multilooking_algorithm.stack_mask_vector_start_stop):
        self.assertEqual(expect, actual)
    self.assertEqual(
        len(expected['stack_mask_vector_start_stop']),
        len(self.multilooking_algorithm.stack_mask_vector_start_stop))

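# zero_float_assertion is referenced above but not defined in this section.
# Below is a minimal sketch of what such a helper might look like, assuming
# it follows the same convention as the geometry corrections test: an exact
# comparison when the expected value is zero, otherwise a relative error
# bound. The tolerance value here is an illustrative assumption only.
def zero_float_assertion(self, expected, actual, rtol=1e-9):
    if expected == 0:
        self.assertEqual(expected, actual)
    else:
        self.assertLess(abs((expected - actual) / expected), rtol)
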
def test_surface_location_algorithm_04(self):
    """
    surface location algorithm test 04
    ----------------------------------

    loads multiple input ISPs and two input surface locations.
    expected result is for the surface location algorithm to
    generate a new surface location and focus the position of the
    previous one towards the target position. The attributes of
    these surface locations are then validated against the
    expected values.
    """
    # load the expected data
    expected_data = TestDataLoader(self.expected_04, delim=' ')

    # load the input data
    inputs = TestDataLoader(self.inputs_04, delim=' ')

    self.initialise_algorithm(inputs)

    # create all input packet objects
    isps = [
        L1AProcessingData(
            self.cst, self.chd, i,
            time_sar_ku=time,
            lat_sar_sat=inputs["lat_sar_sat"][i],
            lon_sar_sat=inputs["lon_sar_sat"][i],
            alt_sar_sat=inputs["alt_sar_sat"][i],
            win_delay_sar_ku=inputs["win_delay_sar_ku"][i],
            x_sar_sat=inputs["x_sar_sat"][i],
            y_sar_sat=inputs["y_sar_sat"][i],
            z_sar_sat=inputs["z_sar_sat"][i],
            alt_rate_sat_sar=0,
            roll_sar=inputs["roll_sar"][i],
            pitch_sar=inputs["pitch_sar"][i],
            yaw_sar=inputs["yaw_sar"][i],
            x_vel_sat_sar=inputs["x_vel_sat_sar"][i],
            y_vel_sat_sar=inputs["y_vel_sat_sar"][i],
            z_vel_sat_sar=inputs["z_vel_sat_sar"][i],
            days=inputs["time_sar_ku"] // self.cst.sec_in_day,
            seconds=inputs["time_sar_ku"] % self.cst.sec_in_day
        ) for i, time in enumerate(inputs["time_sar_ku"])
    ]
    # calculate surface position for each packet
    for packet in isps:
        packet.compute_location_sar_surf()

    surfs = []

    # create the prior surface location objects
    for i, time in enumerate(inputs["time_surf"]):
        surf = SurfaceData(
            self.cst, self.chd,
            time_surf=time,
            x_surf=inputs["x_surf"][i],
            y_surf=inputs["y_surf"][i],
            z_surf=inputs["z_surf"][i],
            lat_surf=inputs["lat_surf"][i],
            lon_surf=inputs["lon_surf"][i],
            alt_surf=inputs["alt_surf"][i],
            x_sat=inputs["x_sat"][i],
            y_sat=inputs["y_sat"][i],
            z_sat=inputs["z_sat"][i],
            lat_sat=inputs["lat_sat"][i],
            lon_sat=inputs["lon_sat"][i],
            alt_sat=inputs["alt_sat"][i],
            x_vel_sat=inputs["x_vel_sat"][i],
            y_vel_sat=inputs["y_vel_sat"][i],
            z_vel_sat=inputs["z_vel_sat"][i],
            focus_target_distance=inputs["focus_target_distance"][i],
            win_delay_surf=inputs["win_delay_surf"][i]
        )
        surf.compute_surf_sat_vector()
        surf.compute_angular_azimuth_beam_resolution(
            inputs["pri_sar_pre_dat"][i]
        )
        surfs.append(surf)

    # execute the surface location algorithm
    new_surf = self.surface_location_algorithm(surfs, isps)

    # confirm new surface has been created
    self.assertTrue(new_surf, msg="failed to create new surface")

    # retrieve properties of the focused surface location
    surf = surfs[1]

    # validate properties
    self.assertAlmostEqual(surf.time_surf, expected_data["time_surf"])
    self.assertAlmostEqual(surf.x_surf, expected_data["x_surf"], delta=1e-5)
    self.assertAlmostEqual(surf.y_surf, expected_data["y_surf"], delta=1e-5)
    self.assertAlmostEqual(surf.z_surf, expected_data["z_surf"], delta=1e-5)
    self.assertAlmostEqual(surf.lat_surf, expected_data["lat_surf"], delta=1e-12)
    self.assertAlmostEqual(surf.lon_surf, expected_data["lon_surf"], delta=1e-12)
    self.assertAlmostEqual(surf.alt_surf, expected_data["alt_surf"], delta=1e-4)
    self.assertAlmostEqual(surf.win_delay_surf, expected_data["win_delay_surf"])

def test_stacking_algorithm_01(self):
    """
    stack_gathering algorithm test 01
    ---------------------------------
    """
    input_data = TestDataLoader(self.inputs_01, delim=' ')
    expected = TestDataLoader(self.expected_01, delim=' ')

    self.initialise_algorithm(input_data)

    all_stack_size = input_data["all_stack_size"]

    input_beam_angles_list = np.reshape(
        input_data["beam_angles_list"],
        (all_stack_size, self.chd.n_ku_pulses_burst)
    )

    isps = []
    for stack_index in range(all_stack_size):
        beams_focused = np.zeros(
            (self.chd.n_ku_pulses_burst, self.chd.n_samples_sar)
        )
        pid = (PacketPid.echo_sar
               if input_data['isp_pid'][stack_index] == 7
               else PacketPid.echo_rmc)
        packet = L1AProcessingData(
            self.cst, self.chd, stack_index,
            t0_sar=input_data["T0_sar"][stack_index],
            doppler_angle_sar_sat=input_data["doppler_angle_sar_sat"][stack_index],
            pitch_sar=input_data["pitch_sar"][stack_index],
            beam_angles_list=input_beam_angles_list[stack_index, :],
            beams_focused=beams_focused,
            isp_pid=pid
        )
        isps.append(packet)

    working_loc = SurfaceData(
        self.cst, self.chd,
        stack_all_bursts=isps
    )
    for beam_index in input_data["stack_all_beam_indexes"]:
        working_loc.add_stack_beam_index(beam_index, 0, 0)

    self.stacking_algorithm(working_loc)

    self.assertEqual(
        self.stacking_algorithm.data_stack_size,
        expected["data_stack_size"]
    )
    self.assertEqual(
        self.stacking_algorithm.surface_type,
        SurfaceType(expected["surface_type"])
    )

    # beam_angles_surf
    self.assertTrue(
        np.allclose(
            self.stacking_algorithm.beam_angles_surf,
            expected["beam_angles_surf"]
        ),
        msg="beam_angles_surf do not match expected values"
    )
    # t0_surf
    self.assertTrue(
        np.allclose(
            self.stacking_algorithm.t0_surf,
            expected["t0_surf"]
        ),
        msg="t0_surf do not match expected values"
    )
    # doppler_angles_surf
    self.assertTrue(
        np.allclose(
            self.stacking_algorithm.doppler_angles_surf,
            expected["doppler_angles_surf"]
        ),
        msg="doppler_angles_surf do not match expected values"
    )
    # look_angles_surf
    self.assertTrue(
        np.allclose(
            self.stacking_algorithm.look_angles_surf,
            expected["look_angles_surf"]
        ),
        msg="look_angles_surf do not match expected values"
    )
    # pointing_angles_surf
    self.assertTrue(
        np.allclose(
            self.stacking_algorithm.pointing_angles_surf,
            expected["pointing_angles_surf"]
        ),
        msg="pointing_angles_surf do not match expected values"
    )
