Example #1
 def test_outlier_reject_nowriteoutbug(self):
     # test that outliers are rejected for a source unaffected by the data corruption (write-out bug).
     data = HipparcosRereductionJavaTool()
     data.parse(star_id='27100', intermediate_data_directory=self.test_data_directory)
     assert len(data) == 147 - 2  # num entries - num outliers
     # outliers are marked with negative AL errors. Assert outliers are gone.
     assert np.all(data.along_scan_errs > 0)
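
The convention checked above is that known rejected observations carry negative along-scan errors. A minimal standalone sketch of counting and masking such entries with numpy; the error values below are made up for illustration and are not from any catalog:

import numpy as np

# Hypothetical along-scan errors in mas; negative entries flag known rejected observations.
along_scan_errs = np.array([0.80, 1.12, -0.95, 0.77, -1.03])
num_known_rejects = np.count_nonzero(along_scan_errs < 0)   # -> 2
kept_errs = along_scan_errs[along_scan_errs > 0]            # drop the flagged entries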
Example #2
 def test_parse_residuals_with_no_changes(self):
     data_recalb = Hipparcos2Recalibrated(cosmic_dispersion=0, residual_offset=0)
     data_recalb.parse('27321', 'htof/test/data_for_tests/Hip21/')
     data = HipparcosRereductionJavaTool()
     data.parse('27321', 'htof/test/data_for_tests/Hip21/', error_inflate=False)
     # test that the new residuals are equal to the old residuals, within 0.01 (round off).
     assert np.allclose(data_recalb.residuals, data.residuals, atol=0.01)
     assert np.allclose(data_recalb.along_scan_errs, data.along_scan_errs, atol=0.01)
Example #3
 def test_write_and_read_recalibrated_data(self):
     data = Hipparcos2Recalibrated()
     data.parse('27321', 'htof/test/data_for_tests/Hip21/')
     with tempfile.TemporaryDirectory() as tmp_dir:
         outpath = os.path.join(tmp_dir, '27321_recalibrated.d')
         data.write_as_javatool_format(outpath)
         reloaded_data = HipparcosRereductionJavaTool()
         reloaded_data.parse('27321', tmp_dir, error_inflate=False, attempt_adhoc_rejection=False, reject_known=False)
         assert np.allclose(reloaded_data.residuals, data.residuals, atol=0.01)
         assert np.allclose(reloaded_data._epoch, data._epoch)
         assert np.allclose(reloaded_data.along_scan_errs, data.along_scan_errs, atol=0.01)
         assert np.allclose(reloaded_data._iorb, data._iorb)
Example #4
File: utils.py Project: gmbrandt/HTOF
def refit_hip21_object(iad_dir, hip_id, use_parallax=False):
    data = HipparcosRereductionJavaTool()
    header, _ = data.parse(star_id=hip_id, intermediate_data_directory=iad_dir)
    plx, cntr_RA, cntr_Dec = header['third']['Plx'], Angle(header['third']['RAdeg'], unit='degree'), Angle(header['third']['DEdeg'], unit='degree')
    pmRA, pmDec, soltype = header['third']['pm_RA'], header['third']['pm_DE'], str(int(header['first']['isol_n']))
    soltype = soltype.strip()
    fit_degree = {'5': 1, '7': 2, '9': 3}.get(soltype[-1], None)  # only refit 5, 7, 9 parameter solutions.
    if fit_degree is not None:
        diffs, errors, chisq, chi2_partials = refit_hip_fromdata(data, fit_degree, cntr_RA=cntr_RA, cntr_Dec=cntr_Dec,
                                                                 use_parallax=use_parallax)
        return diffs, errors, chisq, chi2_partials, soltype
    else:
        return [None] * 9, [None] * 9, None, [None] * 5, soltype
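
A hedged usage sketch for refit_hip21_object, assuming the helper is importable from htof.utils (per the file path above) and that the intermediate-data directory is laid out like the test data used elsewhere on this page; the path and star id are placeholders, not a prescribed layout:

# Hypothetical call; iad_dir must point at a directory of Java-tool intermediate data files.
from htof.utils import refit_hip21_object

iad_dir = 'htof/test/data_for_tests/Hip21/IntermediateData/'
diffs, errors, chisq, chi2_partials, soltype = refit_hip21_object(iad_dir, '27321', use_parallax=False)
print(soltype, chisq)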
Example #5
 def test_parse(self):
     data = HipparcosRereductionJavaTool()
     data.parse(star_id='27321', intermediate_data_directory=self.test_data_directory)
     u = 0.875291  # D. Michalik et al. 2014 Q factor for Hip 27321, calculated by hand
     assert len(data) == 111
     assert data.meta['star_id'] == '27321'
     assert data.meta['catalog_f2'] == -1.63
     assert data.meta['catalog_soltype'] == 5
     assert np.isclose(data.meta['calculated_f2'], -1.64, atol=0.01)
     assert np.isclose(data._epoch[0], 1990.0055)
     assert np.isclose(np.sin(data.scan_angle[0]), -0.9050, rtol=.01)
     assert np.isclose(data.along_scan_errs.values[0], 0.80 * u, atol=0.01)
     assert np.isclose(data._epoch[84], 1991.9523)
     assert np.isclose(np.sin(data.scan_angle[84]), -0.8083, rtol=.01)
Example #6
 def test_parse_and_instantiate_hip2javatool(self):
     star_id, iad_dir = '27321', 'htof/test/data_for_tests/Hip21/IntermediateData/'
     parser = self.factory.parse_and_instantiate(star_id, iad_dir)
     assert type(parser) is HipparcosRereductionJavaTool
     comparison_parser = HipparcosRereductionJavaTool.parse_and_instantiate(star_id, iad_dir)
     assert np.allclose(comparison_parser.scan_angle, parser.scan_angle)
     assert np.allclose(comparison_parser.residuals, parser.residuals)
Example #7
 def test_reject_obs(self, hip_id, rej_obs):
     # hip 651 is a great test source because it has 3 rejected observations (negative AL errors)
     # and it has an uncatalogued rejection that we need to fix (i.e., an extra, 4th, rejection).
     test_data_directory = os.path.join(os.getcwd(), 'htof/test/data_for_tests/Hip21')
     data = HipparcosRereductionJavaTool()
     # get info on the IAD without doing any rejection:
     data.parse(star_id=hip_id, intermediate_data_directory=test_data_directory,
                attempt_adhoc_rejection=False, reject_known=False)
     nobs_initial = len(data)
     num_known_rejects = np.count_nonzero(data.along_scan_errs.values < 0)
     # now run the rejection routine and see how it compares to what we expect:
     data.parse(star_id=hip_id, intermediate_data_directory=test_data_directory)
     nobs_after_rejection = len(data)
     num_rejects_writeout_bug = len(rej_obs.get('orbit/scan_angle/time', []))
     assert nobs_after_rejection == nobs_initial - num_rejects_writeout_bug - num_known_rejects
     # Note that for hip 39, any of the orbits within 1426 are OK to reject, i.e. 70, 71, 72, 73, 74, 75.
     sum_chi2_partials = calculate_chisq_partials(data)
     assert sum_chi2_partials < 0.12  # assert that the IAD reflect a solution that is a stationary point
     if len(rej_obs) > 0:
         assert np.allclose(np.sort(data.additional_rejected_epochs['orbit/scan_angle/time']),
                            np.sort(rej_obs['orbit/scan_angle/time']))
         assert np.allclose(np.sort(data.additional_rejected_epochs['residual/along_scan_error']),
                            np.sort(rej_obs['residual/along_scan_error']))
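
For reference, the rej_obs fixture in the parametrized test above is a dict keyed by which IAD columns were affected by the write-out bug. A hypothetical value is sketched below; the epoch indices are illustrative placeholders, not taken from the catalog:

# Hypothetical shape of the rej_obs fixture; the indices are placeholders.
rej_obs = {'orbit/scan_angle/time': [140],
           'residual/along_scan_error': [141]}
# A source with no uncatalogued rejection would simply pass rej_obs = {}.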
Example #8
        help=
        'If true, this will run the refit test on only 500 sources. Useful to check for '
        'filepath problems before running the full test on all ~100000 sources.'
    )

    args = parser.parse_args()
    import time

    hip_ids = np.genfromtxt(args.inlist).flatten().astype(int)
    if args.debug:
    # test one source with 2 corrupted observations and another with 5.
        hip_ids = [114114, 18517]

    # do the fit.
    for hip_id in hip_ids:
        data = HipparcosRereductionJavaTool()
        # get info on the IAD without doing any rejection:
        header, raw_iad = data.parse(
            star_id=hip_id,
            intermediate_data_directory=args.iad_directory,
            attempt_adhoc_rejection=False,
            reject_known=False)
    n_transits, n_expected_transits = header['first']['NRES'], header['second']['NOB']
        n_additional_reject = int(n_transits) - int(n_expected_transits)
        orbit_number = raw_iad[0].values
        correct_id = header['first']['HIP']
        additional_rejected_epochs = find_epochs_to_reject_java_largest(
            data, n_additional_reject, orbit_number)
    with open(f"{int(correct_id)}.txt", "w") as f:
        f.write(str(additional_rejected_epochs))
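
Since the loop above writes str(additional_rejected_epochs) to one text file per source, the files can be read back as Python literals. A sketch under that assumption (the filename is a placeholder, and the written repr is assumed to contain only plain Python types):

import ast

# Assumes the file holds the repr of a dict, as written by the loop above.
with open('114114.txt') as f:
    additional_rejected_epochs = ast.literal_eval(f.read())
print(additional_rejected_epochs)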
Example #9
 def test_reject_obs_from_precomputed_list(self, hip_id):
     test_data_directory = os.path.join(os.getcwd(), 'htof/test/data_for_tests/Hip21')
     data = HipparcosRereductionJavaTool()
     data.parse(star_id=hip_id, intermediate_data_directory=test_data_directory)
     sum_chi2_partials = calculate_chisq_partials(data)
     assert sum_chi2_partials < 0.12  # assert that the IAD reflect a solution that is a stationary point
Example #10
 def test_outlier_reject_warning(self):
     # test that outlier reject throws a warning
     data = HipparcosRereductionJavaTool()
     data.parse(star_id='4427', intermediate_data_directory=self.test_data_directory)
     assert True
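
The test above only exercises the code path. A sketch of checking the warning explicitly, assuming parse() emits it through the warnings module; the data directory is a placeholder matching the test data used elsewhere on this page, and the warning category is deliberately left unchecked:

import warnings
from htof.parse import HipparcosRereductionJavaTool

# Capture warnings explicitly rather than asserting True; we only check that
# at least one warning was raised, since the category is not specified here.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    data = HipparcosRereductionJavaTool()
    data.parse(star_id='4427', intermediate_data_directory='htof/test/data_for_tests/Hip21')
assert len(caught) > 0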
Example #11
import os

import numpy as np
from astropy.io import ascii as ascii_astropy
from astropy.table import Table
from htof.parse import HipparcosRereductionJavaTool

# Directory containing the residual records corresponding to the Java tool data
# Point this to the directory containing the IAD of all 6617 sources
test_data_directory = os.path.join(os.getcwd(),
                                   'htof/test/data_for_tests/Hip21')

# Read list of the 6617 discrepant sources discussed in Brandt et al. 2021, Section 4
discrepant = ascii_astropy.read("htof/data/hip21_java_nobs_discrepant.txt",
                                names=["HIP", "diffNobs"])
numDis = len(discrepant[discrepant['diffNobs'] > 0])

# Instantiate a data parser
data = HipparcosRereductionJavaTool()

# For each source, let's store:
# HIP
# catalog_f2
# htof_f2 without ad-hoc correction
# htof_f2 with ad-hoc correction
# The difference of the htof_f2 with ad-hoc correction and the catalog f2
results = np.zeros((numDis, 5))

hip_ids_to_parse = discrepant[discrepant['diffNobs'] > 0]["HIP"]
# hip_ids_to_parse = ['27321', '37515'] debug
for idx, hip_id in enumerate(hip_ids_to_parse):
    results[idx][0] = hip_id
    # parse data without ad-hoc correction, compute and store f2
    data.parse(star_id=hip_id,