def test_return_from_file(self):
    observations = mp_ephem.EphemerisReader().read(self.filename)
    img = storage.FitsImage.from_frame(observations[0].comment.frame)

    self.assertIsInstance(img, storage.FitsImage)
    self.assertEqual(img.observation.dataset_name, "2086898")
    self.assertEqual(img.ccd, 24)
    self.assertEqual(img.uri, "vos:cfis/solar_system/dbimages/2086898/2086898p.fits.fz")
Example #2
import logging
import os

import mp_ephem
from astropy import units  # units.au / units.degree used in the orbit checks below
from mp_ephem import BKOrbit


def link(target, candidates):
    """Attempt to link the target's observations with each candidate by fitting a joint orbit.

    :param target: file containing MPC lines of the object to try to link to.
    :param candidates: list of files containing MPC lines of sources that might be matches.
    :return: count of how many candidates provided a link (-1 if the target has no usable observations).
    """

    # Load the observations of the target into a list.
    baseObservations = []
    nlinks = 0
    for obs in mp_ephem.EphemerisReader().read(target):
        if obs.null_observation:
            continue
        baseObservations.append(obs)

    logging.debug(f"Retrieved {len(baseObservations)} for {target}")
    if not len(baseObservations) > 0:
        return -1

    for filename in candidates:
        if filename == target:
            logging.info("No self-linking")
            continue
        logging.debug(f"Attempting to link {filename}")
        new_obs = []
        linkable_candidate = True
        for ob in mp_ephem.EphemerisReader().read(filename):
            if ob.null_observation:
                continue
            new_obs.append(ob)
        if len(new_obs) == 0:
            continue
        cand_prov_name = new_obs[0].provisional_name

        trial_obs = baseObservations + new_obs
        try:
            orbit = BKOrbit(trial_obs)
            orbit.predict("2021-01-01T00:00:00")
            orbit.compute_residuals()
        except Exception as ex:
            logging.error(f"Linking attempting link with {filename} -> {ex}")
            for obs in trial_obs:
                logging.error(f"{obs}")
            continue

        # Check every position for goodness of fit; reject the candidate if any
        # total residual exceeds the 0.2 threshold.
        for obs in orbit.observations:
            if obs.ra_residual**2 + obs.dec_residual**2 > 0.2**2:
                linkable_candidate = False
                break

        if not linkable_candidate:
            logging.debug(
                f"Link with {filename} results in large residuals, rejecting")
            continue

        if orbit.a < 0 * units.au or orbit.inc > 90 * units.degree:
            logging.debug(
                f"Link with {filename} results in non-physical orbit, rejecting"
            )
            continue

        # linkable_candidate is guaranteed True here: rejected candidates were
        # skipped by the checks above.
        print(f"{target} + {filename} ({cand_prov_name})")
        print(orbit.summarize())
        nlinks += 1
        with open(f"{os.path.splitext(target)[0]}_{cand_prov_name}.mpc", 'w') as fobj:
            for obs in orbit.observations:
                fobj.write(obs.to_string() + '\n')

    return nlinks
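
# A minimal sketch of how link() might be driven over a directory of candidate
# .mpc files. The glob pattern and directory layout are assumptions, not part
# of the function above; note that link() already skips self-linking.
import glob

if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    mpc_files = sorted(glob.glob('*.mpc'))
    for target_file in mpc_files:
        nlinks = link(target_file, mpc_files)
        logging.info(f"{target_file}: {nlinks} candidate(s) linked")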
Example #3
def setUp(self):
    mpc_filename = 'data/o3o08.mpc'
    self.abg_filename = 'data/o3o08.abg'
    self.observations = mp_ephem.EphemerisReader().read(mpc_filename)
    self.orbit = mp_ephem.BKOrbit(self.observations)
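
# A hedged sketch of a test method that could follow the setUp above; it only
# checks that the fitted orbit can be predicted to a date, since the actual
# orbital elements for o3o08 are not shown in this excerpt.
def test_predict(self):
    self.orbit.predict('2014-01-01T00:00:00')
    self.assertIsNotNone(self.orbit.coordinate)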
Example #4
import logging

import mp_ephem

# remeasure(), recompute_mag(), compare_orbits() and TOLERANCE are module-level
# helpers/constants defined elsewhere in the source module.


def run(mpc_file,
        cor_file,
        skip_discovery=True,
        skip_mags=False,
        skip_centroids=False,
        do_compare_orbits=False):
    """Recompute the astrometry (and optionally photometry) for each line in mpc_file
    and write the corrected lines to cor_file.tlf.

    :param mpc_file: A file containing the astrometric lines to be updated.
    :param cor_file: The base name for the updated astrometry and diagnostic files.
    :param skip_discovery: If True, skip discovery observations; if False, process only discovery observations.
    :param skip_mags: Should we skip recomputing the magnitude of sources?
    :param skip_centroids: Should we skip recomputing the source centroids?
    :param do_compare_orbits: Should we compare orbits fit to the original and the updated astrometry?
        (Renamed from compare_orbits so the flag no longer shadows the compare_orbits() helper called below.)
    :return: :raise ValueError: If actions on the mpc_obs indicate this is not a valid OSSOS observation.
    """
    observations = mp_ephem.EphemerisReader().read(mpc_file)
    logging.debug("Read in Observations: {}".format(observations))
    original_obs = []
    modified_obs = []
    logging.info("ASTROMETRY FILE: {} --> {}.tlf".format(mpc_file, cor_file))
    for mpc_in in observations:
        try:
            if not isinstance(mpc_in.comment, mp_ephem.ephem.OSSOSComment):
                logging.info(type(mpc_in.comment))
                logging.info("Skipping: {}".format(mpc_in.to_string()))
                continue
            if ((skip_discovery and mpc_in.discovery)
                    or (not skip_discovery and not mpc_in.discovery)):
                logging.info("Discovery mis-match")
                logging.info("Skipping: {}".format(mpc_in.to_string()))
                continue
            logging.info("=" * 220)
            logging.info("   orig: {}".format(mpc_in.to_string()))
            if mpc_in.comment.astrometric_level == 4:
                logging.info("Already at maximum AstLevel, skipping.")
                continue
            if mpc_in.null_observation:
                logging.info("Skipping NULL observation.")
                continue
            mpc_obs = remeasure(mpc_in)
            logging.info("new wcs: {}".format(mpc_obs.to_string()))

            if not skip_mags:
                # and not mpc_obs.comment.photometry_note[0] == "Z":
                mpc_mag = remeasure(recompute_mag(
                    mpc_obs, skip_centroids=skip_centroids),
                                    reset_pixel_coordinates=not skip_centroids)
            else:
                mpc_mag = mpc_obs

            sep = mpc_in.coordinate.separation(mpc_mag.coordinate)
            if sep > TOLERANCE:
                logging.error("Large offset: {} arc-sec".format(sep))
                logging.error("orig: {}".format(mpc_in.to_string()))
                logging.error(" new: {}".format(mpc_mag.to_string()))
                new_comment = "BIG SHIFT HERE"
                mpc_mag.comment.comment = mpc_mag.comment.comment + " " + new_comment
            logging.info("new cen: {}".format(mpc_mag.to_string()))
            original_obs.append(mpc_in)
            modified_obs.append(mpc_mag)
            logging.info("=" * 220)
        except Exception as ex:
            logging.error("Skipping: {} -> {}".format(mpc_in, ex))

    with open(cor_file + ".tlf", 'w') as optr:
        for inp, out in zip(original_obs, modified_obs):
            if inp != out:
                optr.write(out.to_tnodb() + "\n")

    if not do_compare_orbits:
        return True
    try:
        compare_orbits(original_obs, modified_obs, cor_file)
    except Exception as ex:
        logging.error("Orbit comparison failed: {}".format(ex))
    logging.info("=" * 220)

    return True
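
# A hedged example of calling run() from a small command-line wrapper; the
# argument names simply mirror the function parameters and the filenames are
# placeholders.
if __name__ == '__main__':
    import argparse
    wrapper = argparse.ArgumentParser()
    wrapper.add_argument('mpc_file', help="File with the astrometric lines to update.")
    wrapper.add_argument('cor_file', help="Base name for the corrected output (.tlf).")
    wrapper.add_argument('--skip-mags', action='store_true')
    args = wrapper.parse_args()
    logging.basicConfig(level=logging.INFO)
    run(args.mpc_file, args.cor_file, skip_mags=args.skip_mags)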
Example #5
import argparse
import logging
import os
from copy import deepcopy

import mp_ephem
import numpy as np
from astropy.coordinates import SkyCoord

# create_ephemeris_file() and optimize() are helpers defined elsewhere in the
# source module.


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('date',
                        type=str,
                        help="ISO date string to optimize the pointings for")
    parser.add_argument('pointing_objects',
                        type=str,
                        help="Name of file that contains the list of objects to try and observe.")
    parser.add_argument('required_objects',
                        type=str,
                        help="Name of file that contains the list of required objects")
    parser.add_argument('--nattempts', type=int,
                        help="Number of random variations of pointings to try",
                        default=2)
    parser.add_argument('--qrunid', 
                        help="CFHT ID for this QUEUE Run",
                        default='16BQ17')

    args = parser.parse_args()

    reader = mp_ephem.EphemerisReader()

    logging.basicConfig(level=logging.INFO)

    pointing_date = args.date

    logging.info("Date for orbit predictions: {}".format(args.date))
    logging.info("Building pointings that include all mjdates in {}".format(args.required_objects))
    logging.info("Optimized to include as many objects in {} as possible".format(args.pointing_objects))

    required_objects = []
    required_filename = args.required_objects
    for required_object in open(required_filename):
        if '#' in required_object:
            continue
        required_objects.append(required_object.strip())

    pointings_filename = os.path.splitext(required_filename)[0]+"_pointings.txt"

    filenames = open(args.pointing_objects).readlines()

    orbits = {}
    tokens = []

    # load the orbits of all objects of interest.
    for filename in filenames:
        filename = filename.strip()
        token = os.path.splitext(os.path.basename(filename))[0]
        abg_filename = os.path.splitext(filename)[0]+".abg"
        orbits[token] = mp_ephem.BKOrbit(None, ast_filename=filename, abg_file=abg_filename)
        tokens.append(token)
        print(token, orbits[token].r_mag)

    # Turn the object locations at the time of interest into a SkyCoord numpy array.
    tokens = np.array(tokens)
    minimum_number_of_pointings = len(required_objects)
    # Guard against the case where no attempt beats the initial threshold.
    best_pointing_list = {}

    for attempt in range(args.nattempts):
        locations = []
        for token in tokens:
            orbits[token].predict(pointing_date)
            locations.append([orbits[token].coordinate.ra.degree, orbits[token].coordinate.dec.degree])
        locations = SkyCoord(locations, unit='degree')
        logging.info("Attempt : {} \n".format(attempt))
        pointings = optimize(orbits, required_objects, locations, tokens)
        if minimum_number_of_pointings >= len(pointings):
            minimum_number_of_pointings = len(pointings)
            best_pointing_list = deepcopy(pointings)

    with open(pointings_filename, 'w') as pobj:
        pobj.write("index {}\n".format(pointing_date))
        pointing_number = 0
        for token in best_pointing_list:
            pobj.write("{} {} {} {} # {}\n".format(pointing_number + 1,
                                                   token,
                                                   best_pointing_list[token][0].coord.to_string("hmsdms", sep=" "),
                                                   2000,
                                                   len(best_pointing_list[token][1])))
            pointing_number += 1
            create_ephemeris_file(token, best_pointing_list[token][0], best_pointing_list[token][1], orbits, pointing_date, args.qrunid)
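
# Assuming this script is executed directly, the usual entry-point guard (not
# shown in the excerpt above) would be:
if __name__ == '__main__':
    # Example invocation (filenames are placeholders):
    #   python optimize_pointings.py 2016-09-01T10:00:00 pointing_objects.txt required_objects.txt
    main()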