예제 #1
0
    def __init__(self, pathlike, cubenormstep=False):
        """Initialize from a cube file, reading its dimensions and binning
        from the ISIS label, and reset all per-step bookkeeping attributes.

        cubenormstep indicates whether the cubenorm step should be applied.
        """
        self.path = Path(pathlike)
        self.nextpath = self.path
        super().__init__(hirise.get_CCDID_fromfile(self.path))
        self.ns = int(isis.getkey_k(self.path, "Dimensions", "Samples"))
        self.nl = int(isis.getkey_k(self.path, "Dimensions", "Lines"))
        self.bin = int(isis.getkey_k(self.path, "Instrument", "Summing"))
        self.cubenormstep = cubenormstep
        # All of these are filled in by later processing steps; start unset.
        for attr in (
            "cubenorm_stddev",
            "sl_cubenorm",
            "nl_cubenorm",
            "ss_balance_left",
            "ns_balance_left",
            "ss_balance_right",
            "ns_balance_right",
            "sl_balance",
            "nl_balance",
            "smag",
            "lmag",
            "ls_path",
            "rs_path",
            "lm_path",
            "rm_path",
            "rstats",
            "lstats",
            "correction",
            "hical_status",
        ):
            setattr(self, attr, None)
        self.snr_list = list()
예제 #2
0
    def get_binning_from(self, color_code, pids=None, db_paths=None):
        """Gather the database dicts for products whose Product ID
        contains *color_code*.

        If *db_paths* is not given, .json files next to this cube that
        match the product IDs are searched for; if none are available,
        the information is read directly from the matching .cub labels.
        """
        if pids is None:
            pids = self.pids

        parent = self.path.parent
        if db_paths is None:
            # Collect every sibling .json whose name starts with a pid.
            db_paths = [
                jp for pid in pids for jp in parent.glob(f"{pid}*.json")
            ]

        dbs = []
        for pid in filter(lambda x: color_code in x, pids):
            if db_paths:
                for db_file in db_paths:
                    with open(db_file, "r") as f:
                        dbs.append(json.load(f))
            else:
                # No .json databases: fall back to reading the cube labels.
                for cub in parent.glob(f"{pid}*.cub"):
                    dbs.append({
                        "PRODUCT_ID": isis.getkey_k(
                            cub, "Archive", "ProductId"),
                        "BINNING": int(
                            isis.getkey_k(cub, "Instrument", "Summing")),
                    })
        return dbs
예제 #3
0
    def __init__(self, pathlike):
        """Initialize from a cube file, reading binning, TDI, and image
        dimensions from the ISIS label."""
        self.path = Path(pathlike)
        super().__init__(hirise.get_CCDID_fromfile(self.path))
        for attr, group, key in (
            ("bin", "Instrument", "Summing"),
            ("tdi", "Instrument", "TDI"),
            ("lines", "Dimensions", "Lines"),
            ("samps", "Dimensions", "Samples"),
        ):
            setattr(self, attr, int(isis.getkey_k(self.path, group, key)))
예제 #4
0
def EDR_Stats(
    img: os.PathLike,
    out_path: os.PathLike,
    gainsinfo: dict,
    histmin=0.01,
    histmax=99.99,
    keep=False,
) -> dict:
    """Convert a HiRISE EDR *img* to an ISIS cube at *out_path* and
    return a dict of statistics gathered from it.

    Raises KeyError if the EDR's LUT header area cannot be read.
    """
    logger.info(f"EDR_Stats start: {img}")
    try:
        logger.info(f"The LUT for this file is: {check_lut(img)}")
    except KeyError as err:
        logger.error("The LUT header area is either corrupted or has a gap.")
        raise err

    # Convert to .cub
    isis.hi2isis(img, to=out_path)

    histats = parse_histat(
        isis.histat(
            out_path,
            useoffsets=True,
            leftimage=0,
            rightimage=1,
            leftcalbuffer=3,
            rightcalbuffer=1,
            leftcaldark=3,
            rightcaldark=1,
            leftbuffer=3,
            rightbuffer=1,
            leftdark=3,
            rightdark=1,
        ).stdout
    )

    # Get some info from the new cube:
    histats["PRODUCT_ID"] = isis.getkey_k(out_path, "Archive", "ProductId")
    for stat, group, key in (
        ("IMAGE_LINES", "Dimensions", "Lines"),
        ("LINE_SAMPLES", "Dimensions", "Samples"),
        ("BINNING", "Instrument", "Summing"),
    ):
        histats[stat] = int(isis.getkey_k(out_path, group, key))

    histats["STD_DN_LEVELS"] = get_dncnt(out_path, histmin, histmax, keep=keep)
    histats["IMAGE_SIGNAL_TO_NOISE_RATIO"] = calc_snr(
        out_path, gainsinfo, histats
    )
    pixel_count = int(histats["IMAGE_LINES"]) * int(histats["LINE_SAMPLES"])
    histats["GAP_PIXELS_PERCENT"] = (
        histats["GAP_PIXELS"] / pixel_count
    ) * 100.0

    # These two only log warnings; they do not modify histats.
    tdi_bin_check(out_path, histats)
    lut_check(out_path, histats)

    logger.info(f"EDR_Stats done: {out_path}")
    return histats
예제 #5
0
    def test_Hidestripe(self):
        """Hidestripe raises KeyError on an uncalibrated cube, and
        returns the expected correction value on a calibrated one."""
        temps = isis.PathSet()
        calcube = temps.add(Path("test_Hidestripe-out.hical.cub"))
        isis.hical(self.cube, to=calcube)
        temps.add(Path(str(self.pid)).with_suffix(".hical.log"))
        outcube = temps.add(Path("test_Hidestripe-out.cub"))
        samps = int(isis.getkey_k(calcube, "Dimensions", "Samples"))

        with self.assertRaises(KeyError):
            hc.Hidestripe(
                self.cube,
                outcube,
                self.binning,
                minimum=0.0,
                maximum=1.5,
                hidcorr="ADD",
                line_samples=samps,
                keep=False,
            )

        result = hc.Hidestripe(
            calcube,
            outcube,
            self.binning,
            minimum=0.0,
            maximum=1.5,
            hidcorr="ADD",
            line_samples=samps,
            keep=False,
        )
        self.assertAlmostEqual(0.000101402295171637, result)
        temps.unlink()
예제 #6
0
 def setUp(self):
     """Build the calibrated test cube and its stats once per test."""
     cube = imgs[0].with_suffix(".TestHiCal.cub")
     self.cube = cube
     self.pid = hirise.get_ChannelID_fromfile(cube)
     self.db = edr.EDR_Stats(imgs[0], cube, gains)
     self.binning = int(isis.getkey_k(cube, "Instrument", "Summing"))
     self.conf = conf
     self.conf["NoiseFilter"] = nf_conf["NoiseFilter"]
예제 #7
0
def sort_input_cubes(a_cub: os.PathLike, b_cub: os.PathLike) -> tuple:
    """Figures out which one is Channel 0 and which is Channel 1.

    Returns a two-tuple (channel-0 cube, channel-1 cube).  Raises
    RuntimeError if both cubes report the same channel or if either
    reports a channel other than 0 or 1.
    """

    channel_of = dict()
    channel_of[a_cub] = isis.getkey_k(a_cub, "Instrument", "ChannelNumber")
    channel_of[b_cub] = isis.getkey_k(b_cub, "Instrument", "ChannelNumber")

    if channel_of[a_cub] == channel_of[b_cub]:
        # Bug fix: the second literal was missing its f prefix, so the
        # message used to contain the literal text "{channel_of[a_cub]}"
        # instead of the actual channel number.
        raise RuntimeError(f"{a_cub} and {b_cub} have the same channel: "
                           f"{channel_of[a_cub]}.")

    for (cub, chan) in channel_of.items():
        if int(chan) != 0 and int(chan) != 1:
            raise RuntimeError(
                f"{cub} has a channel other than 0 or 1: {chan}")

    # Sorting by channel string ("0" < "1") puts Channel 0 first.
    sorted_cubchan = sorted(channel_of.items(), key=lambda x: x[1])
    return (sorted_cubchan[0][0], sorted_cubchan[1][0])
예제 #8
0
def _get_fromfile(path: os.PathLike, IDclass, name, archivekey):
    """Return an *IDclass* built from the file's Archive/*archivekey*
    label value, falling back to parsing the filename (and then parent
    directory names) if the label cannot be read or parsed."""
    p = Path(path)

    try:
        return IDclass(isis.getkey_k(p, "Archive", archivekey))
    except (ValueError, subprocess.CalledProcessError):
        # CalledProcessError: getkey itself failed.
        # ValueError: an ID could not be extracted from the label value.
        # Either way, fall back to extracting the ID from the filename,
        # reverse-recursing up through directory names if needed.
        return _reverse_recurse(p, IDclass, name)
예제 #9
0
def tdi_bin_check(cube: os.PathLike, histats: dict):
    """This function only logs warnings and returns nothing."""

    # TDI and binning check based on the channel's mean DN.
    mean = float(histats["IMAGE_MEAN"])
    if mean >= 8000:
        logger.warning(
            "Channel mean greater than 8000 (TDI or binning too high)."
        )
    elif mean < 2500:
        if isis.getkey_k(cube, "Instrument", "Tdi") in ("32", "64"):
            logger.warning("TDI too low.")
    return
예제 #10
0
def SpecialProcessingFlags(cube: HiccdStitchCube):
    """Set the special processing flags in the ISIS label."""
    # CUBENORM takes precedence over BADCAL, which takes precedence
    # over the NOMINAL default.
    if cube.cubenormstep:
        status = "CUBENORM"
    elif cube.hical_status == "BadCal":
        status = "BADCAL"
    else:
        status = "NOMINAL"

    # If the keyword already exists, modify it; otherwise add it.
    try:
        isis.getkey_k(cube.nextpath, "Instrument", "Special_Processing_Flag")
        option = "MODKEY"
    except subprocess.CalledProcessError:
        option = "ADDKEY"

    isis.editlab(
        cube.nextpath,
        option=option,
        grpname="Instrument",
        keyword="Special_Processing_Flag",
        value=status,
    )
예제 #11
0
    def __init__(self, pathlike, dbs=None):
        """Initialize from a COLOR mosaic cube.

        Reads the CCD number from the filename, the image dimensions
        from the ISIS label, and the source product IDs / binnings of
        the constituent cubes.  Raises ValueError if the filename has
        no COLOR component, and Exception if the cube is not a
        3-band, 900/700/500-center mosaic.
        """
        self.path = Path(pathlike)
        match = re.search(r"COLOR(\d)", str(pathlike))
        if match:
            self.ccdnumber = match.group(1)
        else:
            raise ValueError(f"Could not extract a COLOR from {pathlike}")

        super().__init__(hirise.get_ObsID_fromfile(self.path))
        self.lines = int(isis.getkey_k(self.path, "Dimensions", "Lines"))
        self.samps = int(isis.getkey_k(self.path, "Dimensions", "Samples"))
        self.bands = int(isis.getkey_k(self.path, "Dimensions", "Bands"))

        if self.bands != 3:
            raise Exception("{} must have 3 bands.".format(self.path))

        # Bug fix: the previous code tested the COLOR regex *match* here,
        # which is always truthy at this point (a failed match raised
        # ValueError above), so the BandBin/Center requirement stated in
        # the error message was never actually enforced.  Restore the
        # intended check on the band centers.
        centers = isis.getkey_k(self.path, "BandBin", "Center")
        match_c = re.search(r"900,\s+700,\s+500", centers)
        if not match_c:
            raise Exception(
                f"{self.path} must have BandBin/Center of 900, 700, 500.")
        self.band = {"IR": 1, "RED": 2, "BG": 3}

        self.pids = isis.getkey_k(self.path, "Mosaic",
                                  "SourceProductId").split(", ")
        self.ir_bin = self.get_binning("IR", dbs)
        self.red_bin = self.get_binning("RED", dbs)
        # self.bg_bin = get_binning('BG')

        # Per-color paths produced by later processing steps.
        self.mask_path = {"IR": None, "BG": None}
        self.crop_path = {"IR": None, "BG": None}
        self.nrm_path = {"IR": None, "BG": None}

        self.crop_sline = None
        self.crop_lines = None

        self.final_path = None
예제 #12
0
def tdi_bin_check(cube: os.PathLike, histats: dict, cid=None):
    """This function only logs warnings and returns nothing."""

    # Use the product ID (or the filename-derived channel ID) to tag
    # the warnings so they can be attributed to a specific channel.
    if cid is None:
        try:
            cid = histats["PRODUCT_ID"]
        except KeyError:
            cid = hirise.get_ChannelID_fromfile(cube)

    # TDI and binning check based on the channel's mean DN.
    mean = float(histats["IMAGE_MEAN"])
    if mean >= 8000:
        logger.warning(
            f"{cid}: "
            "Channel mean greater than 8000 (TDI or binning too high)."
        )
    elif mean < 2500:
        if isis.getkey_k(cube, "Instrument", "Tdi") in ("32", "64"):
            logger.warning(f"{cid}: TDI too low.")
    return
예제 #13
0
def fix_labels(cubes: list, path: os.PathLike, matched_cube: str,
               prodid: str) -> None:
    """Update the label of *path* with its product ID, the cube it was
    matched against, and the source product IDs from *cubes*."""
    for grpname, keyword, value in (
        ("Archive", "ProductId", prodid),
        ("Instrument", "MatchedCube", str(matched_cube)),
    ):
        isis.editlab(
            path,
            option="modkey",
            grpname=grpname,
            keyword=keyword,
            value=value,
        )

    # Fix ck kernel in InstrumentPointing in RED label
    # This doesn't seem to be needed, maybe it was HiROC-specific.

    #  Add SourceProductIds to Archive group in label
    logger.info("Original Perl just assumes that both channels are included "
                "in the balance cube.")
    source_ids = [
        isis.getkey_k(c.path, "Instrument", "StitchedProductIds")
        for c in cubes
    ]

    isis.editlab(
        path,
        option="ADDKEY",
        grpname="Archive",
        keyword="SourceProductId",
        value="({})".format(", ".join(source_ids)),
    )
    return
예제 #14
0
 def test_getkey_k(self):
     """getkey_k returns the bare keyword value from the label."""
     self.assertEqual(
         'HIRISE',
         isis.getkey_k(self.cub, 'Instrument', 'InstrumentId'),
     )
예제 #15
0
def main():
    """Convert an EDR and a matching MDR to I/F cubes, mosaicking the
    MDR data into the output cube."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "-o", "--output", required=False, default=".mdr.iof.cub"
    )
    parser.add_argument("-e", "--edr", required=True)
    parser.add_argument(
        "-c",
        "--conf",
        required=False,
        type=argparse.FileType('r'),
        default=pkg_resources.resource_stream(
            __name__,
            'data/hical.pipelines.conf'
        ),
    )
    parser.add_argument("mdr", metavar="MDR_file")
    parser.add_argument(
        "-l",
        "--log",
        required=False,
        default="WARNING",
        help="The log level to show for this program, can "
        "be a named log level or a numerical level.",
    )
    # Bug fix: util.set_logger() below reads args.verbose and
    # args.logfile, but neither argument was defined, so the program
    # always died with an AttributeError right after argument parsing.
    parser.add_argument(
        "-v",
        "--verbose",
        required=False,
        action="count",
        default=0,
        help="Displays additional information as processing progresses.",
    )
    parser.add_argument(
        "--logfile",
        required=False,
        help="If given, log messages will also be written to this file.",
    )
    parser.add_argument(
        "-k",
        "--keep",
        required=False,
        default=False,
        action="store_true",
        help="Normally, the program will clean up any "
        "intermediary files, but if this option is given, it "
        "won't.",
    )

    args = parser.parse_args()

    util.set_logger(args.verbose, args.logfile, args.log)

    edr_path = Path(args.edr)
    mdr_path = Path(args.mdr)

    to_del = isis.PathSet()

    h2i_path = to_del.add(edr_path.with_suffix(".hi2isis.cub"))

    out_path = util.path_w_suffix(args.output, edr_path)

    # The first thing Alan's program did was to crop the image down to only the
    # 'imaging' parts.  We're not doing that so the resultant file has a
    # geometry similar to what comes out of ISIS hical.

    # Convert the EDR to a cube file
    isis.hi2isis(edr_path, to=h2i_path)

    # Convert Alan's MDR to a cube file
    mdr_cub_path = to_del.add(mdr_path.with_suffix(".alan.cub"))
    logger.info(f"Running gdal_translate {mdr_path} -of ISIS3 {mdr_cub_path}")
    gdal.Translate(str(mdr_cub_path), str(mdr_path), format="ISIS3")

    h2i_s = int(isis.getkey_k(h2i_path, "Dimensions", "Samples"))
    h2i_l = int(isis.getkey_k(h2i_path, "Dimensions", "Lines"))
    mdr_s = int(isis.getkey_k(mdr_cub_path, "Dimensions", "Samples"))
    mdr_l = int(isis.getkey_k(mdr_cub_path, "Dimensions", "Lines"))

    if h2i_s != mdr_s:
        # The MDR may be a "full" channel image that still carries the
        # buffer and dark calibration pixels; if so, crop them off.
        label = pvl.load(str(h2i_path))
        hirise_cal_info = get_one(
            label, "Table", "HiRISE Calibration Ancillary"
        )

        buffer_pixels = get_one(hirise_cal_info, "Field", "BufferPixels")[
            "Size"
        ]
        dark_pixels = get_one(hirise_cal_info, "Field", "DarkPixels")["Size"]
        rev_mask_tdi_lines = hirise_cal_info["Records"]

        if h2i_s + buffer_pixels + dark_pixels == mdr_s:
            logger.info(
                f"The file {mdr_cub_path} has "
                f"{buffer_pixels + dark_pixels} more sample pixels "
                f"than {h2i_path}, assuming those are dark and "
                "buffer pixels and will crop accordingly."
            )
            if h2i_l + rev_mask_tdi_lines != mdr_l:
                logger.critical(
                    'Even assuming this is a "full" channel '
                    "image, this has the wrong number of lines. "
                    f"{mdr_cub_path} should have "
                    f"{h2i_l + rev_mask_tdi_lines}, but "
                    f"has {mdr_l} lines. Exiting"
                )
                sys.exit()
            else:
                crop_path = to_del.add(mdr_cub_path.with_suffix(".crop.cub"))
                # We want to start with the next pixel (+1) after the cal
                # pixels.
                isis.crop(
                    mdr_cub_path,
                    to=crop_path,
                    sample=buffer_pixels + 1,
                    nsamples=h2i_s,
                    line=rev_mask_tdi_lines + 1,
                )
                mdr_cub_path = crop_path
                mdr_l = int(isis.getkey_k(mdr_cub_path, "Dimensions", "Lines"))

        else:
            logger.critical(
                f"The number of samples in {h2i_path} ({h2i_s}) "
                f"and {mdr_cub_path} ({mdr_s}) are different. "
                "Exiting."
            )
            sys.exit()

    if h2i_l != mdr_l:
        logger.critical(
            f"The number of lines in {h2i_path} ({h2i_l}) "
            f"and {mdr_cub_path} ({mdr_l}) are different. "
            "Exiting."
        )
        sys.exit()

    # Convert the EDR to the right bit type for post-HiCal Pipeline:
    h2i_16b_p = to_del.add(h2i_path.with_suffix(".16bit.cub"))
    isis.bit2bit(
        h2i_path,
        to=h2i_16b_p,
        bit="16bit",
        clip="minmax",
        minval=0,
        maxval=1.5,
    )
    shutil.copyfile(h2i_16b_p, out_path)

    # If it is a channel 1 file, Alan mirrored it so that he could process
    # the two channels in an identical way (which we also took advantage
    # of above if the buffer and dark pixels were included), so we need to
    # mirror it back.
    cid = hirise.get_ChannelID_fromfile(h2i_16b_p)
    if cid.channel == "1":
        mirror_path = to_del.add(mdr_cub_path.with_suffix(".mirror.cub"))
        isis.mirror(mdr_cub_path, to=mirror_path)
        mdr_cub_path = mirror_path

    # Is the MDR in DN or I/F?
    maximum_pxl = float(
        pvl.loads(isis.stats(mdr_cub_path).stdout)["Results"]["Maximum"]
    )
    if maximum_pxl < 1.5:
        logger.info("MDR is already in I/F units.")
        mdr_16b_p = to_del.add(mdr_cub_path.with_suffix(".16bit.cub"))
        isis.bit2bit(
            mdr_cub_path,
            to=mdr_16b_p,
            bit="16bit",
            clip="minmax",
            minval=0,
            maxval=1.5,
        )
        isis.handmos(mdr_16b_p, mosaic=out_path)
    else:
        logger.info("MDR is in DN units and will be converted to I/F.")

        # Focal plane assembly temperature: mean of the two Y sensors.
        fpa_t = statistics.mean(
            [
                float(
                    isis.getkey_k(
                        h2i_16b_p, "Instrument", "FpaPositiveYTemperature"
                    )
                ),
                float(
                    isis.getkey_k(
                        h2i_16b_p, "Instrument", "FpaNegativeYTemperature"
                    )
                ),
            ]
        )
        # NOTE(review): debug prints left in place; consider logger.debug.
        print(f"fpa_t {fpa_t}")

        conf = pvl.load(args.conf)

        tdg = t_dep_gain(get_one(conf["Hical"], "Profile", cid.ccdname), fpa_t)
        # Bug fix: a bare solar_correction() call here was dead code —
        # its result was overwritten below before ever being used.
        sclk = isis.getkey_k(
            h2i_16b_p, "Instrument", "SpacecraftClockStartCount"
        )
        target = isis.getkey_k(h2i_16b_p, "Instrument", "TargetName")
        suncorr = solar_correction(sunDistanceAU(sclk, target))
        sed = float(
            isis.getkey_k(h2i_16b_p, "Instrument", "LineExposureDuration")
        )
        zbin = get_one(conf["Hical"], "Profile", "GainUnitConversion")[
            "GainUnitConversionBinFactor"
        ]

        # The 'ziof' name is from the ISIS HiCal/GainUnitConversion.h, it is a
        # divisor in the calibration equation.
        print(f"zbin {zbin}")
        print(f"tdg {tdg}")
        print(f"sed {sed}")
        print(f"suncorr {suncorr}")
        ziof = zbin * tdg * sed * 1e-6 * suncorr
        # Raw f-string: the leading backslash escapes the ( for ISIS fx
        # (previously a non-raw string with an invalid escape sequence).
        eqn = rf"\(F1 / {ziof})"

        mdriof_p = to_del.add(mdr_cub_path.with_suffix(".iof.cub"))
        to_s = "{}+SignedWord+{}:{}".format(mdriof_p, 0, 1.5)
        isis.fx(f1=mdr_cub_path, to=to_s, equ=eqn)

        isis.handmos(mdriof_p, mosaic=out_path)

    if not args.keep:
        to_del.unlink()
예제 #16
0
def lut_check(cube: os.PathLike, histats: dict, cid=None):
    """Log a warning if the cube's lookup table (LUT) does not match the
    ideal LUT for the measured image mean.

    The threshold tables below map a CCD name (e.g. "RED4") to a tuple of
    (DN threshold, expected LUT number) pairs, ordered from highest DN
    threshold to lowest.  Which table applies depends on the orbit number,
    since the commanded DN offsets for some CCDs changed over the mission.

    This function only logs warnings and returns nothing.
    """
    # LUT check
    lut = int(isis.getkey_k(cube, "Instrument", "LookupTableNumber"))
    orbit_number = int(isis.getkey_k(cube, "Archive", "OrbitNumber"))
    threshhold = dict()
    # NOTE(review): -9998 appears to mean "no LUT applied" — confirm
    # against the HiRISE EDR documentation.
    if lut != -9998:
        if orbit_number > 65881:
            # After orbit 65881, RED1 RED2 RED3 moved to to DN 900-1000 offset

            threshhold["RED0"] = (
                (6814, 22),
                (5341, 23),
                (3869, 24),
                (3133, 25),
                (2397, 26),
                (1200, 27),
            )
            threshhold["RED1"] = (
                (6619, 1),
                (5116, 2),
                (3614, 3),
                (2863, 4),
                (2112, 5),
                (900, 6),
            )
            threshhold["RED2"] = threshhold["RED1"]
            threshhold["RED3"] = threshhold["RED1"]
            threshhold["RED4"] = (
                (6684, 8),
                (5191, 9),
                (3699, 10),
                (2953, 11),
                (2207, 12),
                (1000, 13)
            )
            threshhold["RED5"] = (
                (6749, 15),
                (5266, 16),
                (3784, 17),
                (3043, 18),
                (2302, 19),
                (1100, 20),
            )
            threshhold["RED6"] = threshhold["RED4"]
            threshhold["RED7"] = threshhold["RED4"]
            threshhold["RED8"] = threshhold["RED0"]
            threshhold["RED9"] = threshhold["RED4"]
            threshhold["IR10"] = threshhold["RED1"]
            threshhold["IR11"] = threshhold["RED4"]
            threshhold["BG12"] = threshhold["RED1"]
            threshhold["BG13"] = threshhold["RED1"]
        elif orbit_number > 13057:
            # After orbit 13057, IR10 moved to DN 900-1000 offset
            threshhold["RED0"] = (
                (6814, 22),
                (5341, 23),
                (3869, 24),
                (3133, 25),
                (2397, 26),
                (1200, 27),
            )
            threshhold["RED1"] = (
                (6684, 8),
                (5191, 9),
                (3699, 10),
                (2953, 11),
                (2207, 12),
                (1000, 13),
            )
            threshhold["RED2"] = threshhold["RED1"]
            threshhold["RED3"] = threshhold["RED1"]
            threshhold["RED4"] = threshhold["RED1"]
            threshhold["RED5"] = (
                (6749, 15),
                (5266, 16),
                (3784, 17),
                (3043, 18),
                (2302, 19),
                (1100, 20),
            )
            threshhold["RED6"] = threshhold["RED1"]
            threshhold["RED7"] = threshhold["RED1"]
            threshhold["RED8"] = threshhold["RED0"]
            threshhold["RED9"] = threshhold["RED1"]
            threshhold["IR10"] = (
                (6619, 1),
                (5116, 2),
                (3614, 3),
                (2863, 4),
                (2112, 5),
                (900, 6),
            )
            threshhold["IR11"] = threshhold["RED1"]
            threshhold["BG12"] = threshhold["IR10"]
            threshhold["BG13"] = threshhold["IR10"]
        elif orbit_number > 11710:
            # After orbit 11710, RED6 moved to DN 1000-1100 offset
            threshhold["RED0"] = (
                (6814, 22),
                (5341, 23),
                (3869, 24),
                (3133, 25),
                (2397, 26),
                (1200, 27),
            )
            threshhold["RED1"] = (
                (6684, 8),
                (5191, 9),
                (3699, 10),
                (2953, 11),
                (2207, 12),
                (1000, 13),
            )
            threshhold["RED2"] = threshhold["RED1"]
            threshhold["RED3"] = threshhold["RED1"]
            threshhold["RED4"] = threshhold["RED1"]
            threshhold["RED5"] = (
                (6749, 15),
                (5266, 16),
                (3784, 17),
                (3043, 18),
                (2302, 19),
                (1100, 20),
            )
            threshhold["RED6"] = threshhold["RED1"]
            threshhold["RED7"] = threshhold["RED1"]
            threshhold["RED8"] = threshhold["RED0"]
            threshhold["RED9"] = threshhold["RED1"]
            threshhold["IR10"] = threshhold["RED1"]
            threshhold["IR11"] = threshhold["RED1"]
            threshhold["BG12"] = (
                (6619, 1),
                (5116, 2),
                (3614, 3),
                (2863, 4),
                (2112, 5),
                (900, 6),
            )
            threshhold["BG13"] = threshhold["BG12"]
        elif orbit_number > 2660:
            # New LUT table after orbit 2660
            threshhold["RED0"] = (
                (6814, 22),
                (5341, 23),
                (3869, 24),
                (3133, 25),
                (2397, 26),
                (1200, 27),
            )
            threshhold["RED1"] = (
                (6684, 8),
                (5191, 9),
                (3699, 10),
                (2953, 11),
                (2207, 12),
                (1000, 13),
            )
            threshhold["RED2"] = threshhold["RED1"]
            threshhold["RED3"] = threshhold["RED1"]
            threshhold["RED4"] = threshhold["RED1"]
            threshhold["RED5"] = (
                (6749, 15),
                (5266, 16),
                (3784, 17),
                (3043, 18),
                (2302, 19),
                (1100, 20),
            )
            threshhold["RED6"] = (
                (6619, 1),
                (5116, 2),
                (3614, 3),
                (2863, 4),
                (2112, 5),
                (900, 6),
            )
            threshhold["RED7"] = threshhold["RED1"]
            threshhold["RED8"] = threshhold["RED0"]
            threshhold["RED9"] = threshhold["RED1"]
            threshhold["IR10"] = threshhold["RED1"]
            threshhold["IR11"] = threshhold["RED1"]
            threshhold["BG12"] = threshhold["RED6"]
            threshhold["BG13"] = threshhold["RED6"]
        else:
            # Original LUTs prior to 2661
            threshhold["RED0"] = (
                (14057, 21),
                (11676, 22),
                (9295, 23),
                (7152, 24),
                (5248, 25),
                (3343, 26),
                (1200, 27),
            )
            threshhold["RED1"] = (
                (13857 + 1, 14),
                (11476 + 1, 15),
                (9095 + 1, 16),
                (6952 + 1, 17),
                (5048 + 1, 18),
                (3143 + 1, 19),
                (1000, 20),
            )
            threshhold["RED2"] = threshhold["RED1"]
            threshhold["RED3"] = threshhold["RED1"]
            threshhold["RED4"] = threshhold["RED1"]
            threshhold["RED5"] = threshhold["RED1"]
            threshhold["RED6"] = (
                (13657 + 1, 7),
                (11276 + 1, 8),
                (8895 + 1, 9),
                (6752 + 1, 10),
                (4848 + 1, 11),
                (2943 + 1, 12),
                (800, 13),
            )
            threshhold["RED7"] = threshhold["RED1"]
            threshhold["RED8"] = threshhold["RED0"]
            threshhold["RED9"] = threshhold["RED1"]
            threshhold["IR10"] = threshhold["RED1"]
            threshhold["IR11"] = threshhold["RED1"]
            threshhold["BG12"] = threshhold["RED6"]
            threshhold["BG13"] = threshhold["RED6"]

        # The CCD name (e.g. "RED4") selects the table; cid is only used
        # to tag the log messages.
        ccd = hirise.ChannelID(
            isis.getkey_k(cube, "Archive", "ProductId")
        ).get_ccd()
        if cid is None:
            cid = ccd

        # Find the first (highest) threshold at or below the image mean;
        # the stored LUT should equal that entry's expected LUT number.
        for (th, ex) in threshhold[ccd]:
            if float(histats["IMAGE_MEAN"]) >= th:
                lut_diff = lut - ex
                if lut_diff >= 1 or lut_diff <= -1:
                    if lut_diff > 0:
                        direction = "to the right"
                    else:
                        direction = "to the left"
                    logger.warning(
                        f"{cid}: LUT is {lut_diff} column(s) ({direction}) "
                        f"from ideal settings - image overcompressed."
                    )
                break
        else:
            # The loop found no threshold at or below the mean: the mean
            # is lower than the lowest DN with a defined LUT.
            logger.warning(
                f"{cid}: DN value, {histats['IMAGE_MEAN']}, lower than lowest "
                f"DN value with defined LUT for this channel."
            )
    return
예제 #17
0
 def test_calc_snr(self):
     """calc_snr reproduces the known SNR value for the test cube."""
     histats = edr.parse_histat(isis.histat(self.hicube).stdout)
     histats["BINNING"] = isis.getkey_k(
         self.hicube, "Instrument", "Summing"
     )
     self.assertAlmostEqual(
         edr.calc_snr(self.hicube, pvl.load(gains), histats),
         291.80442197,
     )
예제 #18
0
 def setUp(self):
     """Convert the test EDR to a cube and record its IDs and binning."""
     cube = imgs[0].with_suffix(".TestHiCal_TestNeedISISCube.cub")
     isis.hi2isis(imgs[0], to=cube)
     self.cube = cube
     self.pid = hirise.get_ChannelID_fromfile(cube)
     self.binning = int(isis.getkey_k(cube, "Instrument", "Summing"))
예제 #19
0
def HiFurrow_Fix(in_cube: os.PathLike,
                 out_cube: os.PathLike,
                 max_mean: float,
                 keep=False):
    """Perform a normalization of the furrow region of bin 2 or 4
    HiRISE images. The input to this script is a HiRISE stitch
    product containing both channels of a CCD.

    *max_mean* selects how wide a furrow region to correct (higher
    means choose a wider region).  If *keep* is True, the intermediate
    files are not deleted.  Raises ValueError for unsupported binning
    or sample counts.
    """
    in_cub = Path(in_cube)

    binning = int(isis.getkey_k(in_cub, "Instrument", "Summing"))
    lines = int(isis.getkey_k(in_cub, "Dimensions", "Lines"))
    samps = int(isis.getkey_k(in_cub, "Dimensions", "Samples"))

    if binning != 2 and binning != 4:
        raise ValueError("HiFurrow_Fix only supports correction for "
                         "bin 2 or 4 data.")
    # NOTE(review): only the bin-2 sample count is validated; bin-4 data
    # (presumably 512 samples) is accepted unchecked — confirm intended.
    if binning == 2 and samps != 1024:
        raise ValueError(f"HiFurrowFix: improper number of samples: {samps}, "
                         "for a stitch product with bin 2 (should be 1024).")

    # This string will get placed in the filename for all of our
    # temporary files. It will (hopefully) prevent collisions with
    # existing files and also allow for easy clean-up if keep=True
    temp_token = datetime.now().strftime("HFF-%y%m%d%H%M%S")
    to_del = isis.PathSet()

    # For bin2 and bin4 imaging, specify width of furrow based on
    # image average DN range
    range_low = {2: (512, 513), 4: (256, 257)}  # 2 pixel furrow width
    range_mid = {2: (511, 514), 4: (255, 258)}  # 4 pixel furrow width
    range_hgh = {2: (511, 514), 4: (255, 258)}  # 4 pixel furrow width
    range_max = {2: (510, 515), 4: (254, 259)}  # 6 pixel furrow width

    # Original code had low/mid/hgh for bin2 and bin4, but they
    # were hard-coded to be identical.
    dn_range_low = 9000
    dn_range_mid = 10000
    dn_range_hgh = 12000

    # Select the furrow (sample) range: brighter images get a wider one.
    if max_mean > dn_range_hgh:
        dn_range = range_max[binning]
    elif max_mean > dn_range_mid:
        dn_range = range_hgh[binning]
    elif max_mean > dn_range_low:
        dn_range = range_mid[binning]
    else:
        dn_range = range_low[binning]

    # Filter kernel sizes derived from the furrow width.
    lpf_samp = int((dn_range[1] - dn_range[0] + 1) / 2) * 4 + 1
    lpf_line = int(lpf_samp / 2) * 20 + 1

    # Create a mask file
    # DN=1 for non-furrow area
    # DN=0 for furrow area
    eqn = rf"\(1*(sample<{dn_range[0]})+ 1*(sample>{dn_range[1]}) + 0)"
    fx_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.fx.cub"))
    isis.fx(to=fx_cub,
            mode="OUTPUTONLY",
            lines=lines,
            samples=samps,
            equation=eqn)

    # Create a file where the furrow area is set to null
    mask1_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.mask1.cub"))
    isis.mask(
        in_cub,
        mask=fx_cub,
        to=mask1_cub,
        min_=1,
        max_=1,
        preserve="INSIDE",
        spixels="NULL",
    )

    # Lowpass filter to fill in the null pixel area
    lpf_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.lpf.cub"))
    isis.lowpass(
        mask1_cub,
        to=lpf_cub,
        sample=lpf_samp,
        line=lpf_line,
        null=True,
        hrs=False,
        his=False,
        lis=False,
    )

    # Create a file where non-furrow columns are set to null
    mask2_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.mask2.cub"))
    isis.mask(
        in_cub,
        mask=fx_cub,
        to=mask2_cub,
        min_=0,
        max_=0,
        preserve="INSIDE",
        spixels="NULL",
    )

    # Highpass filter the furrow region
    hpf_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.hpf.cub"))
    isis.highpass(mask2_cub, to=hpf_cub, sample=1, line=lpf_line)

    # Add lowpass and highpass together to achieve desired result
    alg_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.alg.cub"))
    isis.algebra(from_=lpf_cub,
                 from2=hpf_cub,
                 to=alg_cub,
                 operator="ADD",
                 A=1.0,
                 B=1.0)

    # copy the input file to the output file then mosaic the
    # furrow area as needed.
    logger.info(f"Copy {in_cub} to {out_cube}.")
    shutil.copyfile(in_cub, out_cube)
    isis.handmos(
        alg_cub,
        mosaic=out_cube,
        outsample=1,
        outline=1,
        outband=1,
        insample=1,
        inline=1,
        inband=1,
        create="NO",
    )

    if not keep:
        to_del.unlink()

    return
예제 #20
0
def HiStitch(
    cube0: os.PathLike,
    cube1: os.PathLike,
    db0: dict,
    db1: dict,
    out_cube: os.PathLike,
    conf: dict,
    keep=False,
) -> tuple:
    """Run the HiStitch pipeline step on one or two channel cubes.

    *cube0* and *cube1* are input channel cube paths (*cube1* may be
    falsy for a single-channel run), *db0*/*db1* are their EDR-product
    database dicts, *out_cube* names the stitched output cube, and
    *conf* is the parsed HiStitch configuration.  Temporary files are
    retained when *keep* is true.

    Returns a ``(db, out_path)`` tuple: *db* is a dict of statistics
    destined for HiCat.CCD_Processing_Statistics and *out_path* is the
    path of the stitched cube.
    """
    logger.info(f"HiStitch start: {cube0} {cube1}")

    # GetConfigurationParameters()
    conf_check(conf)

    # GetProductFiles() - order the one or two input cubes by channel.
    if not cube1:
        cubes = (Path(cube0),)
    else:
        cubes = sort_input_cubes(Path(cube0), Path(cube1))

    chids = get_chids(cubes)

    out_path = set_outpath(out_cube, hirise.CCDID(chids[0]), cubes[0].parent)

    # PrepareDBStatements()
    #   select from HiCat.EDR_Products
    # NOTE(review): db1 is passed even in the single-cube case; assumes
    # sort_databases() tolerates a missing second entry -- verify.
    dbs = sort_databases([db0, db1], chids)
    ccd_number = int(chids[0].ccdnumber)

    # Allows for indexing in lists ordered by bin value.
    b = 1, 2, 4, 8, 16

    # This string will get placed in the filename for all of our
    # temporary files. It will (hopefully) prevent collisions with
    # existing files and also allow for easy clean-up if keep=True
    temp_token = datetime.now().strftime("HiStitch-%y%m%d%H%M%S")

    # ProcessingStep() - mostly sets up stuff
    max_mean = max(float(d["IMAGE_MEAN"]) for d in dbs)
    flags = set_flags(conf["HiStitch"], dbs, ccd_number,
                      b.index(int(dbs[0]["BINNING"])), max_mean)

    # HiStitchStep() - runs HiStitch, and originally inserted to db, now we
    # just return at the bottom of this function.
    logger.info(
        HiStitchStep(
            cubes,
            out_path,
            dbs[0]["BINNING"],
            ccd_number,
            conf["HiStitch"],
            flags.balance,
            flags.equalize,
        ).args)

    if len(cubes) == 2 and flags.balance:
        # HiStitch recorded these keywords in the output cube's label.
        truthchannel = isis.getkey_k(out_path, "HiStitch", "TruthChannel")
        balanceratio = isis.getkey_k(out_path, "HiStitch", "BalanceRatio")
    else:
        truthchannel = None
        balanceratio = None

    if flags.furrow:
        # Repair furrowed columns: write the fix to a temp cube, then
        # replace the stitched output with it.
        furrow_file = out_path.with_suffix(f".{temp_token}.temp.cub")
        HiFurrow_Fix(out_path, furrow_file, max_mean, keep=keep)
        furrow_file.rename(out_path)

    # insert ObsID, pid0.ccdname+pid0.ccdnumber, truthchannel, balanceratio
    # into HiCat.CCD_Processing_Statistics
    db = dict()
    db["OBSERVATION_ID"] = str(chids[0].get_obsid())
    db["CCD"] = chids[0].get_ccd()
    db["CONTROL_CHANNEL"] = truthchannel
    db["CHANNEL_MATCHING_CORRECTION"] = balanceratio

    logger.info(f"HiStitch done: {out_path}")
    return db, out_path
예제 #21
0
def make_flats(cubes, common_cube, conf, temp_token, keep=False):
    """Produce a flat.tab file for each cube in *cubes* (other than
    *common_cube*) by running HiJitReg against *common_cube*, retrying
    with relaxed parameters up to the configured number of steps.

    Returns the list of flat-file paths that passed Analyze_Flat.
    Raises RuntimeError if any cube never produces an acceptable flat.
    """
    # If the flat files already exist, don't remake them.
    # "$OBS_ID"."_".$ccd."-".$common.".flat.tab"
    jitter_cubes = list()
    # Number of control-net rows scales with the common cube's length.
    n_row = int(
        common_cube.lines
        / conf["AutoRegistration"]["ControlNet"]["Control_Lines"]
    )

    # Wrap every cube except the registration reference in a JitterCube.
    for c in cubes:
        if c == common_cube:
            continue
        jitter_cubes.append(
            hjr.JitterCube(
                c.next_path, matchccd=common_cube.get_ccd(), config=conf
            )
        )

    successful_flats = list()

    if not all(x.flattab_path.exists() for x in jitter_cubes):
        confauto = conf["AutoRegistration"]
        for c in jitter_cubes:
            params = {
                "ROWS": n_row,
                "TOLERANCE": confauto["Algorithm"]["Tolerance"],
            }
            # NOTE(review): "AnaylyzeFlat" looks like a typo for
            # "AnalyzeFlat", but it is a live config key -- the conf
            # file presumably spells it the same way; verify before
            # renaming.
            min_fraction_good = confauto["AnaylyzeFlat"]["Minimum_Good"]
            # RED CCDs use the "Red" parameter set; IR/BG use "Color".
            if c.ccdname == "RED":
                redcolor = "Red"
            else:
                redcolor = "Color"

            params["GROUP"] = "ResolveJitter"
            params["COLS"] = confauto["ControlNet"]["Control_Cols_" + redcolor]
            params["PATTERN_SAMPLES"] = confauto["PatternChip" + redcolor][
                "Samples"
            ]
            params["PATTERN_LINES"] = confauto["PatternChip" + redcolor][
                "Lines"
            ]
            params["SEARCH_SAMPLES"] = confauto["SearchChip" + redcolor][
                "Samples"
            ]
            params["SEARCH_LINES"] = confauto["SearchChip" + redcolor]["Lines"]

            if c.ccdname != "RED":
                # A stitched color product with only one channel covers
                # half the width: widen the grid and halve the quality
                # threshold to compensate.
                channels = isis.getkey_k(
                    c.path, "Instrument", "StitchedProductIds"
                )
                if len(channels) < 2:
                    logger.info(
                        f"Increasing columns because {c.path} is "
                        "missing a channel."
                    )
                    params["COLS"] += 1
                    min_fraction_good *= 0.5
            logger.info(
                "The minimum allowable Fraction Good "
                f"Matches = {min_fraction_good}"
            )
            step = 0
            # Retry loop: on failure, discard the outputs, loosen the
            # tolerance, and try again up to Algorithm/Steps times.
            while step <= confauto["Algorithm"]["Steps"]:
                logger.info(f"Step {step} begin")

                hjr.run_HiJitReg(
                    common_cube.next_path, c, params, temp_token, keep=keep
                )

                ret = hjr.Analyze_Flat(
                    c, 0, (min_fraction_good * 2), hijitreg=False
                )

                if ret == 1:
                    # Flat accepted; stop retrying this cube.
                    successful_flats.append(c.flattab_path)
                    break
                else:
                    step += 1
                    c.regdef_path.unlink()
                    c.flattab_path.unlink()
                    c.cnet_path.unlink()
                    # NOTE(review): subtracting Increment * step each
                    # pass loosens the tolerance cumulatively (a
                    # quadratic total decrease) -- confirm this is the
                    # intended schedule.
                    params["TOLERANCE"] -= (
                        confauto["Algorithm"]["Increment"] * step
                    )
            else:
                # while/else: runs only if the loop exhausted all steps
                # without a successful break.
                raise RuntimeError(
                    f"Flat file for {c} is not within tolerances."
                )

    else:
        # All flats already exist on disk; reuse them as-is.
        successful_flats = list(x.flattab_path for x in jitter_cubes)

    return successful_flats
예제 #22
0
def jitter_iter(
    red: hicolor.HiColorCube,
    color: hicolor.HiColorCube,
    conf: dict,
    keep=False,
) -> bool:
    """Iterates through hijitreg for the color cube.

    Runs up to two passes of HiJitReg registering *color* against *red*,
    adjusting the search box or grid density between passes based on the
    Analyze_Flat result.  Returns True when registration succeeds (or
    only needs manual validation), False when it fails outright.
    Temporary files are retained when *keep* is true.
    """

    temp_token = datetime.now().strftime("HiJitReg-%y%m%d%H%M%S")

    bin_ratio = color.bin / red.bin

    # Seed the HiJitReg parameters from the AutoRegistration config.
    jit_param = dict()
    conf_ar = conf["AutoRegistration"]
    jit_param["GROUP"] = "HiJitReg"
    jit_param["COLS"] = conf_ar["ControlNet"]["Control_Cols"]
    jit_param["ROWS"] = conf_ar["ControlNet"]["Control_Lines"]
    jit_param["TOLERANCE"] = conf_ar["Algorithm"]["Tolerance"]
    jit_param["PATTERN_SAMPLES"] = conf_ar["PatternChip"]["Samples"]
    jit_param["PATTERN_LINES"] = conf_ar["PatternChip"]["Lines"]
    jit_param["SEARCH_SAMPLES"] = conf_ar["SearchChip"]["Samples"]
    jit_param["SEARCH_LINES"] = conf_ar["SearchChip"]["Lines"]
    jit_param["SEARCHLONGER_SAMPLES"] = conf_ar["SearchLongerChip"]["Samples"]
    jit_param["SEARCHLONGER_LINES"] = conf_ar["SearchLongerChip"]["Lines"]

    if bin_ratio > 3:
        # Fixed: the config key is spelled "Increment" elsewhere in this
        # file; "INCREMENT" would raise KeyError here.
        jit_param["TOLERANCE"] -= conf_ar["Algorithm"]["Increment"]

    channels = isis.getkey_k(color.path, "Instrument", "StitchedProductIds")

    coverage = 1.0

    if len(channels) < 2:
        # Only one stitched channel: half coverage, widen the grid.
        # NOTE(review): true division makes COLS a float here; assumes
        # downstream HiJitReg accepts that -- verify.
        coverage /= 2
        jit_param["COLS"] += jit_param["COLS"] / 2

    # A two-step process with completely different outcomes at each step,
    # so we can't really make a loop.
    step = 1

    logger.info(f"Attempting hijitreg iteration #{step} for {color}")

    color_jitter = JitterCube(color, conf)

    run_HiJitReg(red.path, color_jitter, jit_param, temp_token, keep=keep)

    ret = Analyze_Flat(color_jitter, step, coverage)

    if ret == -1:
        # edgy or suspect points only
        if jit_param["SEARCH_LINES"] == jit_param["SEARCHLONGER_LINES"]:
            # Already at the largest search box; accept as-is.
            return True
        else:
            # use larger search box for all subsequent iterations
            # (other CCDs too)
            jit_param["SEARCH_SAMPLES"] = jit_param["SEARCHLONGER_SAMPLES"]
            jit_param["SEARCH_LINES"] = jit_param["SEARCHLONGER_LINES"]
    elif ret == 0:
        # not enough points found
        # increase grid density
        jit_param["ROWS"] = jit_param["ROWS"] * 2
        if len(channels) >= 2:
            jit_param["COLS"] += 2
            coverage /= 2
    else:
        # First pass succeeded outright.
        return True

    step += 1
    logger.info(f"Attempting hijitreg iteration #{step} for {color}")

    # second pass
    run_HiJitReg(red.path, color_jitter, jit_param, temp_token, keep=keep)

    # analyze output again
    ret = Analyze_Flat(color_jitter, step, coverage)

    if ret == 0:
        logger.info(f"Jitter registration failed for {color}")
        return False
    elif ret < 0:
        logger.info("!!! Validation Required !!!")
        return True
    else:
        return True
예제 #23
0
 def test_getkey_k(self):
     """getkey_k should strip the trailing newline from getkey's stdout."""
     expected = 'HIRISE'
     fake_completed = Mock(stdout=expected + '\n')
     with patch('kalasiris.k_funcs.isis.getkey', return_value=fake_completed):
         result = isis.getkey_k('dummy.cub', 'Instrument', 'InstrumentId')
         self.assertEqual(expected, result)