Example #1
def main():
    args = arg_parser().parse_args()

    util.set_logger(args.verbose, args.logfile, args.log)

    if len(args.img) > 1 and not args.output.startswith("."):
        logger.critical(
            "With more than one input IMG file, the --output must start with "
            f"a period, and it does not: {args.output}"
        )
        sys.exit()

    gainsinfo = pvl.load(args.gains)

    for i in args.img:
        out_p = util.path_w_suffix(args.output, i)

        with util.main_exceptions(args.verbose):
            histats = EDR_Stats(
                i, out_p, gainsinfo, args.histmin, args.histmax, keep=args.keep
            )

        # Write the gathered statistics out as a JSON file next to the image.
        db_path = util.path_w_suffix(args.db, i)

        with open(db_path, "w") as f:
            json.dump(histats, f, indent=0, sort_keys=True)

        logger.info(f"Wrote {db_path}")
    return
Example #2
def main():
    try:
        parser = argparse.ArgumentParser(
            description=__doc__,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            parents=[util.parent_parser()],
        )
        parser.add_argument(
            "-o", "--output", required=False, default=".bitunflip.cub"
        )
        parser.add_argument("cube", help="ISIS Cube file.")

        args = parser.parse_args()

        util.set_logger(args.verbose, args.logfile, args.log)

        out_p = util.path_w_suffix(args.output, args.cube)

        unflip(Path(args.cube), out_p, keep=args.keep)

    except subprocess.CalledProcessError as err:
        print("Had an ISIS error:", file=sys.stderr)
        print(" ".join(err.cmd), file=sys.stderr)
        print(err.stdout, file=sys.stderr)
        print(err.stderr, file=sys.stderr)
Example #3
def main():
    args = arg_parser().parse_args()

    util.set_logger(args.verbose, args.logfile, args.log)

    if len(args.img) > 1:
        if not args.output.startswith("."):
            logger.critical(
                "With more than one input IMG file, the --output must start "
                f"with a period, and it does not: {args.output}"
            )
            sys.exit()

        if not args.db.startswith("."):
            logger.critical(
                "With more than one input IMG file, the --db must start with "
                f"a period, and it does not: {args.db}"
            )
            sys.exit()

    gainsinfo = pvl.load(args.gains)

    with util.main_exceptions(args.verbose):
        if len(args.img) == 1:
            # No need to fire up the multiprocessing for just one image.
            db_path = write_json(
                EDR_Stats(
                    args.img[0],
                    util.path_w_suffix(args.output, args.img[0]),
                    gainsinfo,
                    args.histmin,
                    args.histmax,
                    keep=args.keep
                ),
                args.db,
                args.img[0],
            )
            logger.info(f"Wrote {db_path}")
        else:
            with concurrent.futures.ProcessPoolExecutor(
                max_workers=args.max_workers
            ) as executor:
                for img, histats in zip(args.img, executor.map(
                    EDR_Stats,
                    args.img,
                    map(util.path_w_suffix, repeat(args.output), args.img),
                    repeat(gainsinfo),
                    repeat(args.histmin),
                    repeat(args.histmax),
                    repeat(args.keep),
                )):
                    db_path = write_json(histats, args.db, img)
                    logger.info(f"Wrote {db_path}")
    return
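
A note on the parallel branch above: Executor.map() only accepts positional
iterables, so the constant arguments are broadcast with itertools.repeat().
Below is a minimal, self-contained sketch of that technique; the stats_for()
function is a hypothetical stand-in, not part of this codebase.

import concurrent.futures
from itertools import repeat


def stats_for(path, suffix, histmin, histmax):
    # Stand-in for EDR_Stats: pretend to compute per-image statistics.
    return {"img": path, "out": path + suffix, "min": histmin, "max": histmax}


if __name__ == "__main__":
    images = ["a.img", "b.img", "c.img"]
    with concurrent.futures.ProcessPoolExecutor(max_workers=2) as executor:
        # map() preserves input order, so zip(images, ...) pairs each result
        # with the image that produced it.
        for img, stats in zip(images, executor.map(
            stats_for, images, repeat(".json"), repeat(0.01), repeat(99.99)
        )):
            print(img, stats)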
Example #4
def write_json(d: dict, outpath: str, template_path: os.PathLike) -> Path:
    """
    Writes out a Python dict as JSON to *outpath*.

    :param d: Python dictionary to serialize to a JSON file.
    :param outpath: If it starts with a "." it is assumed to be a suffix
        that should be swapped with the suffix of *template_path* to get the
        output filename; otherwise it is used as the output path directly.
    :param template_path: Pathlike that could have its suffix replaced.
    """
    json_path = util.path_w_suffix(outpath, template_path)

    with open(json_path, "w") as f:
        json.dump(d, f, indent=0, sort_keys=True)

    return json_path
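
For illustration only (the filenames here are hypothetical), the
suffix-swapping behavior of *outpath* works like this:

# write_json(histats, ".json", "bar.img")    writes bar.json
# write_json(histats, "out.json", "bar.img") writes out.json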
Example #5
def main():
    args = arg_parser().parse_args()

    util.set_logger(args.verbose, args.logfile, args.log)

    out_p = util.path_w_suffix(args.output, args.file)

    with util.main_exceptions(args.verbose):
        fixed = fix(
            args.file,
            out_p,
        )

    if not fixed:
        sys.exit(f"{args.file} did not need lisfix cleaning.")

    return
Example #6
def main():
    parser = argparse.ArgumentParser(
        description=__doc__, parents=[util.parent_parser()]
    )
    parser.add_argument(
        "-o", "--output", required=False, default=".mdr.iof.cub"
    )
    parser.add_argument("-e", "--edr", required=True)
    parser.add_argument(
        "-c",
        "--conf",
        required=False,
        type=argparse.FileType('r'),
        default=pkg_resources.resource_stream(
            __name__,
            'data/hical.pipelines.conf'
        ),
    )
    parser.add_argument("mdr", metavar="MDR_file")

    args = parser.parse_args()

    util.set_logger(args.verbose, args.logfile, args.log)

    edr_path = Path(args.edr)
    mdr_path = Path(args.mdr)

    to_del = isis.PathSet()

    h2i_path = to_del.add(edr_path.with_suffix(".hi2isis.cub"))

    out_path = util.path_w_suffix(args.output, edr_path)

    # The first thing Alan's program did was to crop the image down to only the
    # 'imaging' parts.  We're not doing that so the resultant file has a
    # geometry similar to what comes out of ISIS hical.

    # Convert the EDR to a cube file
    isis.hi2isis(edr_path, to=h2i_path)

    # Convert Alan's MDR to a cube file
    mdr_cub_path = to_del.add(mdr_path.with_suffix(".alan.cub"))
    logger.info(f"Running gdal_translate {mdr_path} -of ISIS3 {mdr_cub_path}")
    gdal.Translate(str(mdr_cub_path), str(mdr_path), format="ISIS3")

    h2i_s = int(isis.getkey_k(h2i_path, "Dimensions", "Samples"))
    h2i_l = int(isis.getkey_k(h2i_path, "Dimensions", "Lines"))
    mdr_s = int(isis.getkey_k(mdr_cub_path, "Dimensions", "Samples"))
    mdr_l = int(isis.getkey_k(mdr_cub_path, "Dimensions", "Lines"))

    if h2i_s != mdr_s:
        label = pvl.load(str(h2i_path))
        hirise_cal_info = get_one(
            label, "Table", "HiRISE Calibration Ancillary"
        )

        buffer_pixels = get_one(hirise_cal_info, "Field", "BufferPixels")[
            "Size"
        ]
        dark_pixels = get_one(hirise_cal_info, "Field", "DarkPixels")["Size"]
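        # The number of Records in this table is used below as the count of
        # calibration (reverse-clock/mask, TDI-dependent) lines at the top of
        # a "full" channel image.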
        rev_mask_tdi_lines = hirise_cal_info["Records"]

        if h2i_s + buffer_pixels + dark_pixels == mdr_s:
            logger.info(
                f"The file {mdr_cub_path} has "
                f"{buffer_pixels + dark_pixels} more sample pixels "
                f"than {h2i_path}, assuming those are dark and "
                "buffer pixels and will crop accordingly."
            )
            if h2i_l + rev_mask_tdi_lines != mdr_l:
                logger.critical(
                    'Even assuming this is a "full" channel '
                    "image, this has the wrong number of lines. "
                    f"{mdr_cub_path} should have "
                    f"{h2i_l + rev_mask_tdi_lines}, but "
                    f"has {mdr_l} lines. Exiting"
                )
                sys.exit()
            else:
                crop_path = to_del.add(mdr_cub_path.with_suffix(".crop.cub"))
                # We want to start with the next pixel (+1) after the cal
                # pixels.
                isis.crop(
                    mdr_cub_path,
                    to=crop_path,
                    sample=buffer_pixels + 1,
                    nsamples=h2i_s,
                    line=rev_mask_tdi_lines + 1,
                )
                mdr_cub_path = crop_path
                mdr_l = int(isis.getkey_k(mdr_cub_path, "Dimensions", "Lines"))

        else:
            logger.critical(
                f"The number of samples in {h2i_path} ({h2i_s}) "
                f"and {mdr_cub_path} ({mdr_s}) are different. "
                "Exiting."
            )
            sys.exit()

    if h2i_l != mdr_l:
        logger.critical(
            f"The number of lines in {h2i_path} ({h2i_l}) "
            f"and {mdr_cub_path} ({mdr_l}) are different. "
            "Exiting."
        )
        sys.exit()

    # Convert the EDR to the right bit type for post-HiCal Pipeline:
    h2i_16b_p = to_del.add(h2i_path.with_suffix(".16bit.cub"))
    isis.bit2bit(
        h2i_path,
        to=h2i_16b_p,
        bit="16bit",
        clip="minmax",
        minval=0,
        maxval=1.5,
    )
    shutil.copyfile(h2i_16b_p, out_path)

    # If it is a channel 1 file, Alan mirrored it so that he could process
    # the two channels in an identical way (which we also took advantage
    # of above if the buffer and dark pixels were included), so we need to
    # mirror it back.
    cid = hirise.get_ChannelID_fromfile(h2i_16b_p)
    if cid.channel == "1":
        mirror_path = to_del.add(mdr_cub_path.with_suffix(".mirror.cub"))
        isis.mirror(mdr_cub_path, to=mirror_path)
        mdr_cub_path = mirror_path

    # Is the MDR in DN or I/F?
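    # I/F (reflectance) values stay well below ~1.5, while raw DN values are
    # much larger, so the cube's maximum pixel value distinguishes the two.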
    maximum_pxl = float(
        pvl.loads(isis.stats(mdr_cub_path).stdout)["Results"]["Maximum"]
    )
    if maximum_pxl < 1.5:
        logger.info("MDR is already in I/F units.")
        mdr_16b_p = to_del.add(mdr_cub_path.with_suffix(".16bit.cub"))
        isis.bit2bit(
            mdr_cub_path,
            to=mdr_16b_p,
            bit="16bit",
            clip="minmax",
            minval=0,
            maxval=1.5,
        )
        isis.handmos(mdr_16b_p, mosaic=out_path)
    else:
        logger.info("MDR is in DN units and will be converted to I/F.")

        fpa_t = statistics.mean(
            [
                float(
                    isis.getkey_k(
                        h2i_16b_p, "Instrument", "FpaPositiveYTemperature"
                    )
                ),
                float(
                    isis.getkey_k(
                        h2i_16b_p, "Instrument", "FpaNegativeYTemperature"
                    )
                ),
            ]
        )
        print(f"fpa_t {fpa_t}")

        conf = pvl.load(args.conf)

        tdg = t_dep_gain(get_one(conf["Hical"], "Profile", cid.ccdname), fpa_t)
        sclk = isis.getkey_k(
            h2i_16b_p, "Instrument", "SpacecraftClockStartCount"
        )
        target = isis.getkey_k(h2i_16b_p, "Instrument", "TargetName")
        suncorr = solar_correction(sunDistanceAU(sclk, target))
        sed = float(
            isis.getkey_k(h2i_16b_p, "Instrument", "LineExposureDuration")
        )
        zbin = get_one(conf["Hical"], "Profile", "GainUnitConversion")[
            "GainUnitConversionBinFactor"
        ]

        # The 'ziof' name is from the ISIS HiCal/GainUnitConversion.h, it is a
        # divisor in the calibration equation.
        print(f"zbin {zbin}")
        print(f"tdg {tdg}")
        print(f"sed {sed}")
        print(f"suncorr {suncorr}")
        ziof = zbin * tdg * sed * 1e-6 * suncorr
        eqn = rf"\(F1 / {ziof})"

        mdriof_p = to_del.add(mdr_cub_path.with_suffix(".iof.cub"))
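        # ISIS output-cube attributes: write 16-bit SignedWord pixels with a
        # 0 to 1.5 valid range for the output scaling.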
        to_s = "{}+SignedWord+{}:{}".format(mdriof_p, 0, 1.5)
        isis.fx(f1=mdr_cub_path, to=to_s, equ=eqn)

        isis.handmos(mdriof_p, mosaic=out_path)

    if not args.keep:
        to_del.unlink()
Example #7
def edr2stitch(images, conf_dir, bitflipwidth=0, lis_tolerance=1, keep=False):
    chids = list()
    for i in images:
        out_edr = util.path_w_suffix(".EDR_Stats.cub", i)

        # EDR_Stats
        db = EDR_Stats.EDR_Stats(i,
                                 out_edr,
                                 pvl.load(conf_dir /
                                          "EDR_Stats_gains_config.pvl"),
                                 keep=keep)

        # HiCal
        out_hical = util.path_w_suffix(".HiCal.cub", out_edr)

        db = HiCal.HiCal(
            out_edr,
            out_hical,
            db,
            HiCal.conf_setup(pvl.load(conf_dir / "HiCal.conf"),
                             pvl.load(conf_dir / "NoiseFilter.conf")),
            conf_dir / "HiCal.conf",
            None,
            None,
            bitflipwidth,
            lis_tolerance,
            keep=keep,
        )

        chids.append(ChannelCube(out_hical, db))

    # HiStitch
    # get Channel pairs
    cids = list()
    for chid1, chid2 in get_CCDpairs(chids):
        (db, o_path) = HiStitch.HiStitch(
            chid1.nextpath,
            chid2.nextpath,
            chid1.db,
            chid2.db,
            ".HiStitch.cub",
            pvl.load(conf_dir / "HiStitch.conf"),
            keep=keep,
        )
        cid = HiccdStitch.HiccdStitchCube(o_path)
        cid.gather_from_db(db)
        cids.append(cid)

    # HiccdStitch, makes balance cubes
    # need to separate by color:
    color_groups = get_color_groups(cids)
    for color_group in color_groups.values():
        db, out_stitch = HiccdStitch.HiccdStitch(
            color_group,
            ".HiccdStitch.cub",
            pvl.load(conf_dir / "HiccdStitch.conf"),
            sline=None,
            eline=None,
            keep=keep,
        )
    # HiColorInit
    #   takes *balance.cub
    #   creates *[IR|BG]*.balance.precolor.cub
    #   Can then run JitPlot on these *.balance.precolor.cub
    HiColorInit.HiColorInit([c.nextpath for c in cids],
                            ".precolor.cub",
                            keep=keep)

    # HiJitReg
    #   takes tmp/*balance.cub tmp/*balance.precolor.cub
    #   creates *regdef.pvl and *flat.tab files
    for_jitreg = list()
    for color, balcubes in color_groups.items():
        if color == "RED":
            for c in balcubes:
                for_jitreg.append(c.nextpath)
        else:
            for c in balcubes:
                for_jitreg.append(c.nextpath.with_suffix(".precolor.cub"))

    HiJitReg.HiJitReg(for_jitreg,
                      pvl.load(conf_dir / "HiJitReg.conf"),
                      keep=keep)

    # HiSlither
    #   takes same as HiJitReg (and assumes its products are available).
    #   creates *slither.txt, *slither.cub, and *COLOR[4|5].cub
    #   Can then run SlitherStats on the *slither.txt
    HiSlither.HiSlither(for_jitreg)

    return chids
Example #8
def main():
    # The original Perl took a .pvl file as input which mostly just had the
    # filenames of the CCD files to stitch together.  We'll just take those
    # on the command line as args.cubes.

    args = arg_parser().parse_args()

    util.set_logger(args.verbose, args.logfile, args.log)

    # outcub_path = set_outcube(args.output, pid0)

    if args.cubenorm is not None:
        if len(args.cubenorm) != len(args.cubes):
            logger.critical(f"The number of cubes ({len(args.cubes)}) and "
                            "the number of cubenorm flags given "
                            f"({len(args.cubenorm)}) did not match. Exiting.")
            sys.exit()
    else:
        args.cubenorm = [False] * len(args.cubes)

    # Perl: GetConfigurationParameters()
    conf = pvl.load(args.conf)
    conf_check(conf)

    # We may not need to read anything from the DB, only write to it?
    # Nope, we need to read in each of the CHANNEL!(!) HiCat files to get
    # their hical_status value.  Hmm.  Rather than devise a
    # command line strategy for manually loading them, I think we'll just
    # use the args.cubes filenames to find them.

    # Perl: GetPVLParameters()
    # The original Perl read some options and items from the input .pvl file.
    #    Do we need to recreate that?
    # It looks like there's a True/False for each input about whether to
    # cubenorm, then a HiccdStitch/Start_line, HiccdStitch/End_Line,
    # and a HiccdStitch/Reduce_Factor.
    # If those aren't present, they default to 0, 0, -9999.  They should
    # probably all be 'None's.
    # Upon inspection of HiStitch_Next_Pipe, there is no logic to set these
    # values; they are simply always FALSE for each CCD, 0, 0, and -9999.
    # So maybe this was put in for manual use, but it is not a 'normal' part
    # of the pipeline?
    # These are provided here as command-line arguments instead.

    if len(args.cubes) == 1:
        # Assume this is a filepath to a file of cube names
        cubes = list(
            map(
                HiccdStitchCube,
                Path(args.cubes[0]).read_text().splitlines(),
                args.cubenorm,
            ))
    else:
        cubes = list(map(HiccdStitchCube, args.cubes, args.cubenorm))

    for c in cubes:
        c.gather_from_db()

    with util.main_exceptions(args.verbose):
        (db, outpath) = HiccdStitch(
            cubes,
            args.output,
            conf,
            args.sline,
            args.eline,
            keep=args.keep,
        )

    db_path = util.path_w_suffix(args.db, outpath)

    with open(db_path, "w") as f:
        json.dump(db, f, indent=0, sort_keys=True)

    return
Example #9
def test_path_w_suffix(self):
    self.assertEqual("bar.foo", str(util.path_w_suffix(".foo", "bar.cub")))
    self.assertEqual("foo.foo",
                     str(util.path_w_suffix("foo.foo", "bar.cub")))
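
Based only on the docstring in Example #4 and the test above, here is a
minimal sketch of what util.path_w_suffix() likely does; this is an
assumption, not the actual implementation.

from pathlib import Path


def path_w_suffix(output: str, template) -> Path:
    # If *output* starts with ".", treat it as a suffix to swap onto
    # *template*; otherwise use *output* as the path itself.
    if output.startswith("."):
        return Path(template).with_suffix(output)
    return Path(output)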
Example #10
def edr2stitch(
    images,
    conf_dir,
    bitflipwidth=0,
    lis_tolerance=1.0,
    max_workers=None,
    keep=False,
):
    chids = list()
    edr_conf = pvl.load(conf_dir / "EDR_Stats_gains_config.pvl")
    hical_conf = HiCal.conf_setup(
        pvl.load(conf_dir / "HiCal.conf"),
        pvl.load(conf_dir / "NoiseFilter.conf"),
    )
    with concurrent.futures.ProcessPoolExecutor(
        max_workers=max_workers
    ) as executor:
        future_dbs = dict()
        for i in images:
            out_edr = util.path_w_suffix(".EDR_Stats.cub", i)
            out_hical = util.path_w_suffix(".HiCal.cub", out_edr)

            f = executor.submit(
                edr_cal,
                i,
                out_edr,
                out_hical,
                edr_conf,
                hical_conf,
                conf_dir / "HiCal.conf",
                bitflipwidth=bitflipwidth,
                lis_tolerance=lis_tolerance,
                keep=keep,
            )
            future_dbs[f] = out_hical

        for future in concurrent.futures.as_completed(future_dbs):
            out_hical = future_dbs[future]
            chids.append(ChannelCube(out_hical, future.result()))

    # HiStitch
    # get Channel pairs
    cids = list()
    stitch_conf = pvl.load(conf_dir / "HiStitch.conf")
    with concurrent.futures.ProcessPoolExecutor(
        max_workers=max_workers
    ) as executor:
        future_tuples = list()
        for chid1, chid2 in get_CCDpairs(chids):
            f = executor.submit(
                # (db, o_path) = HiStitch.HiStitch(
                HiStitch.HiStitch,
                chid1.nextpath,
                chid2.nextpath,
                chid1.db,
                chid2.db,
                ".HiStitch.cub",
                stitch_conf,
                keep=keep,
            )
            future_tuples.append(f)

        for future in concurrent.futures.as_completed(future_tuples):
            (db, o_path) = future.result()
            cid = HiccdStitch.HiccdStitchCube(o_path)
            cid.gather_from_db(db)
            cids.append(cid)

    # HiccdStitch, makes balance cubes
    # need to separate by color:
    color_groups = get_color_groups(cids)
    for color_group in color_groups.values():
        db, out_stitch = HiccdStitch.HiccdStitch(
            color_group,
            ".HiccdStitch.cub",
            pvl.load(conf_dir / "HiccdStitch.conf"),
            sline=None,
            eline=None,
            keep=keep,
        )
    # HiColorInit
    #   takes *balance.cub
    #   creates *[IR|BG]*.balance.precolor.cub
    #   Can then run JitPlot on these *.balance.precolor.cub
    HiColorInit.HiColorInit(
        [c.nextpath for c in cids], ".precolor.cub", keep=keep
    )

    # HiJitReg
    #   takes tmp/*balance.cub tmp/*balance.precolor.cub
    #   creates *regdef.pvl and *flat.tab files
    for_jitreg = list()
    for color, balcubes in color_groups.items():
        if color == "RED":
            for c in balcubes:
                for_jitreg.append(c.nextpath)
        else:
            for c in balcubes:
                for_jitreg.append(c.nextpath.with_suffix(".precolor.cub"))

    HiJitReg.HiJitReg(
        for_jitreg, pvl.load(conf_dir / "HiJitReg.conf"), keep=keep
    )

    # HiSlither
    #   takes same as HiJitReg (and assumes its products are available).
    #   creates *slither.txt, *slither.cub, and *COLOR[4|5].cub
    #   Can then run SlitherStats on the *slither.txt
    HiSlither.HiSlither(for_jitreg)

    return chids
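
The second pattern used above, submit() plus as_completed() with a dict that
maps each Future back to its context, shown in isolation; the work() function
is a hypothetical stand-in.

import concurrent.futures


def work(name):
    # Stand-in for edr_cal: derive something from the input.
    return name.upper()


if __name__ == "__main__":
    items = ["red4_0", "red4_1", "red5_0"]
    with concurrent.futures.ProcessPoolExecutor() as executor:
        futures = {executor.submit(work, i): i for i in items}
        # as_completed() yields futures as they finish, not in submission
        # order, so the dict recovers which input each result belongs to.
        for future in concurrent.futures.as_completed(futures):
            print(futures[future], "->", future.result())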
Example #11
def main():
    # Set the numpy type for elements in the main data area of the .dat file.
    float_type = np.float32

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        parents=[util.parent_parser()],
    )
    parser.add_argument("-o",
                        "--output",
                        required=False,
                        default=".bitflip.dat")
    parser.add_argument(
        "-w",
        "--width",
        required=False,
        default=5,
        type=int,
        help="The number of medstd widths for bit-flip "
        "cleaning.",
    )
    parser.add_argument(
        "-r",
        "--replacement",
        required=False,
        default=float_type(0),
        type=float_type,
        help="By default, the program will replace "
        "identified pixels with an appropriate NULL data "
        "value, but if provided this value will be used "
        "instead.",
    )
    parser.add_argument(
        "-p",
        "--plot",
        required=False,
        action="store_true",
        help="Displays plot for each area.",
    )
    parser.add_argument(
        "-n",
        "--dryrun",
        required=False,
        action="store_true",
        help="Does not produce a cleaned output file.",
    )
    parser.add_argument("file", help="A .dat file to clean.")

    args = parser.parse_args()

    util.set_logger(args.verbose, args.logfile, args.log)

    out_p = util.path_w_suffix(args.output, args.file)

    clean(
        args.file,
        out_p,
        args.replacement,
        width=args.width,
        plot=args.plot,
        dryrun=args.dryrun,
    )

    sys.exit(0)