Exemplo n.º 1
0
def per_color(cube, temp_token, color_code, keep=False):
    """Build the ratio, masked, and cropped products for one color band.

    The *color_code* band of *cube* is ratioed against the RED band,
    masked to the [0.0, 4.0] range, and cropped to the cube's
    configured line window.  Returns a two-tuple of paths:
    (masked ratio cube, cropped ratio cube).  The intermediate ratio
    cube is deleted unless *keep* is true.
    """
    token = f"{temp_token}_{color_code}"

    # Generate the IR/RED and BG/RED ratios for each of the COLOR products
    ratio_path = cube.path.with_suffix(f".{token}.ratio.cub")
    isis.ratio(
        num="{}+{}".format(cube.path, cube.band[color_code]),
        den="{}+{}".format(cube.path, cube.band["RED"]),
        to=ratio_path,
    )

    # mask out invalid pixels
    # Possible future update: Make mosaic of ratio files then run cubnorm
    # correction on these for the unfiltered products, to avoid any null
    # pixels created during the ratio process.
    masked_path = cube.path.with_suffix(f".{token}.mask.cub")
    isis.mask(
        ratio_path,
        mask=ratio_path,
        to=masked_path,
        minimum=0.0,
        maximum=4.0,
        preserve="INSIDE",
    )

    # Crop the masked ratio to the configured line range.
    cropped_path = cube.path.with_suffix(f".{token}.ratcrop.cub")
    isis.crop(
        masked_path,
        to=cropped_path,
        line=cube.crop_sline,
        nlines=cube.crop_lines,
    )

    if not keep:
        ratio_path.unlink()

    return masked_path, cropped_path
Exemplo n.º 2
0
def mask_gap(in_path: Path, out_path: Path):
    """Attempt to mask out pixels beyond the central DNs of the median
    based on gaps in the histogram.

    This approach worked well based on 'ideal' reverse-clocked data
    or 'dark' images, but in 'real' HiRISE images of Mars, the reality
    is that there are 'gaps' everywhere along the DN range, and this
    approach ends up being too 'dumb'.
    """
    hist = isis.Histogram(in_path)
    median = math.trunc(float(hist["Median"]))

    # First histogram gap above and below the median, searching out
    # toward each extreme.
    upper = find_gap(
        hist, median, math.trunc(float(hist["Maximum"])), findfirst=True
    )
    lower = find_gap(
        hist, median, math.trunc(float(hist["Minimum"])), findfirst=True
    )

    # Widen the window to twice each gap's distance from the median,
    # then snap to the nearest gap within that range.
    maskmax = find_gap(hist, median, median + 2 * (upper - median))
    maskmin = find_gap(hist, median, median - 2 * (median - lower))

    isis.mask(in_path, to=out_path, minimum=maskmin, maximum=maskmax)
    return
Exemplo n.º 3
0
def make_dummy_IR(red: hicolor.HiColorCube, bg: hicolor.HiColorCube):
    """Fabricate a stand-in IR cube from the RED cube's pixels.

    A new cube is written next to the BG slither product, named for
    the matching IR CCD, and its labels are rewritten so it reads as
    an IR product.  Returns the path of the new cube.

    Raises FileExistsError if the target path already exists.
    """
    bg_slither_path = get_slither_path(bg)
    ir_name = bg_slither_path.name.replace(bg.get_ccd(),
                                           "IR" + str(int(bg.ccdnumber) - 2))
    ir_path = bg_slither_path.parent / ir_name

    if ir_path.exists():
        raise FileExistsError("{} exists ".format(str(ir_path)) +
                              "and we don't want to overwrite it.")

    ir_ccd = "IR" + str(int(red.ccdnumber) + 6)

    # Seed the new cube from the RED pixels.
    isis.mask(red.path, mask=red.path, to=ir_path, preserve="outside")

    # Rewrite label keywords so the cube identifies as an IR product.
    # Each entry is (group, keyword, value, units-or-None).
    label_edits = (
        ("Instrument", "CpmmNumber", int(red.ccdnumber) + 2, None),
        ("Instrument", "CcdId", ir_ccd, None),
        ("Archive", "ProductID",
         "{}_{}".format(red.get_obsid(), ir_ccd), None),
        ("BandBin", "Name", "NearInfrared", None),
        ("BandBin", "Center", 900, "NANOMETERS"),
        ("BandBin", "Width", 200, "NANOMETERS"),
    )
    for grpname, keyword, value, units in label_edits:
        kwargs = dict(
            options="modkey", grpname=grpname, keyword=keyword, value=value
        )
        if units is not None:
            kwargs["units"] = units
        isis.editlab(ir_path, **kwargs)

    return ir_path
Exemplo n.º 4
0
def subtract_over_thresh(
    in_path: Path, out_path: Path, thresh: int, delta: int, keep=False
):
    """Run ISIS programs to add or subtract a value to DN values for
    pixels that are above or below a threshold.

    For all pixels in the *in_path* ISIS cube, if *delta* is positive,
    then pixels with a value greater than *thresh* will have *delta*
    subtracted from them.  If *delta* is negative, then all pixels
    less than *thresh* will have *delta* added to them.
    """
    # Originally, I wanted to just do this simply with fx:
    # eqn = "\(f1 + ((f1{glt}={thresh}) * {(-1 * delta)}))"
    # However, fx writes out floating point pixel values, and we really
    # need to keep DNs as ints as long as possible.  Sigh.

    # Start the output as a verbatim copy of the input.
    shutil.copyfile(in_path, out_path)

    # Build a mask of only the pixels on the affected side of the
    # threshold.  ("from" is a Python keyword, hence the kwargs dict.)
    threshmask = in_path.with_suffix(".threshmask.cub")
    mask_kwargs = {"from": in_path, "to": threshmask}
    mask_kwargs["min" if delta > 0 else "max"] = thresh
    isis.mask(**mask_kwargs)

    # Combine the mask with the input to apply the -delta adjustment.
    delta_cub = in_path.with_suffix(".delta.cub")
    isis.algebra(
        threshmask, from2=in_path, to=delta_cub, op="add", a=0,
        c=(-1 * delta),
    )

    # Mosaic the adjusted pixels back over the copied output.
    isis.handmos(delta_cub, mosaic=out_path)

    if not keep:
        threshmask.unlink()
        delta_cub.unlink()

    return
Exemplo n.º 5
0
def HiFurrow_Fix(in_cube: os.PathLike,
                 out_cube: os.PathLike,
                 max_mean: float,
                 keep=False):
    """Perform a normalization of the furrow region of bin 2 or 4
    HiRISE images. The input to this script is a HiRISE stitch
    product containing both channels of a CCD.

    The furrow columns are nulled and replaced with the sum of a
    lowpass filter of the surrounding image and a highpass filter of
    the furrow columns themselves, which is mosaicked over a copy of
    the input written to *out_cube*.  *max_mean* selects how wide a
    furrow window to correct.  Temporary files carry a timestamped
    token and are deleted unless *keep* is true.

    Raises ValueError for unsupported binning or sample counts.
    """
    in_cub = Path(in_cube)

    # Read binning mode and image dimensions from the cube label.
    binning = int(isis.getkey_k(in_cub, "Instrument", "Summing"))
    lines = int(isis.getkey_k(in_cub, "Dimensions", "Lines"))
    samps = int(isis.getkey_k(in_cub, "Dimensions", "Samples"))

    if binning != 2 and binning != 4:
        raise ValueError("HiFurrow_Fix only supports correction for "
                         "bin 2 or 4 data.")
    # NOTE(review): only the bin 2 sample count is validated here; a
    # bin 4 stitch product's sample count (presumably 512) is never
    # checked — confirm whether that is intentional.
    if binning == 2 and samps != 1024:
        raise ValueError(f"HiFurrowFix: improper number of samples: {samps}, "
                         "for a stitch product with bin 2 (should be 1024).")

    # This string will get placed in the filename for all of our
    # temporary files. It will (hopefully) prevent collisions with
    # existing files and also allow for easy clean-up if keep=True
    temp_token = datetime.now().strftime("HFF-%y%m%d%H%M%S")
    to_del = isis.PathSet()

    # For bin2 and bin4 imaging, specify width of furrow based on
    # image average DN range
    range_low = {2: (512, 513), 4: (256, 257)}  # 2 pixel furrow width
    range_mid = {2: (511, 514), 4: (255, 258)}  # 4 pixel furrow width
    range_hgh = {2: (511, 514), 4: (255, 258)}  # 4 pixel furrow width
    range_max = {2: (510, 515), 4: (254, 259)}  # 6 pixel furrow width

    # Original code had low/mid/hgh for bin2 and bin4, but they
    # were hard-coded to be identical.
    dn_range_low = 9000
    dn_range_mid = 10000
    dn_range_hgh = 12000

    # Brighter images (higher max_mean) get a wider furrow window.
    if max_mean > dn_range_hgh:
        dn_range = range_max[binning]
    elif max_mean > dn_range_mid:
        dn_range = range_hgh[binning]
    elif max_mean > dn_range_low:
        dn_range = range_mid[binning]
    else:
        dn_range = range_low[binning]

    # Filter kernel sizes derived from the furrow width; both are
    # forced odd, as box-filter dimensions conventionally are.
    lpf_samp = int((dn_range[1] - dn_range[0] + 1) / 2) * 4 + 1
    lpf_line = int(lpf_samp / 2) * 20 + 1

    # Create a mask file
    # DN=1 for non-furrow area
    # DN=0 for furrow area
    eqn = rf"\(1*(sample<{dn_range[0]})+ 1*(sample>{dn_range[1]}) + 0)"
    fx_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.fx.cub"))
    isis.fx(to=fx_cub,
            mode="OUTPUTONLY",
            lines=lines,
            samples=samps,
            equation=eqn)

    # Create a file where the furrow area is set to null
    mask1_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.mask1.cub"))
    isis.mask(
        in_cub,
        mask=fx_cub,
        to=mask1_cub,
        min_=1,
        max_=1,
        preserve="INSIDE",
        spixels="NULL",
    )

    # Lowpass filter to fill in the null pixel area
    lpf_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.lpf.cub"))
    isis.lowpass(
        mask1_cub,
        to=lpf_cub,
        sample=lpf_samp,
        line=lpf_line,
        null=True,
        hrs=False,
        his=False,
        lis=False,
    )

    # Create a file where non-furrow columns are set to null
    mask2_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.mask2.cub"))
    isis.mask(
        in_cub,
        mask=fx_cub,
        to=mask2_cub,
        min_=0,
        max_=0,
        preserve="INSIDE",
        spixels="NULL",
    )

    # Highpass filter the furrow region
    hpf_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.hpf.cub"))
    isis.highpass(mask2_cub, to=hpf_cub, sample=1, line=lpf_line)

    # Add lowpass and highpass together to achieve desired result
    alg_cub = to_del.add(in_cub.with_suffix(f".{temp_token}.alg.cub"))
    isis.algebra(from_=lpf_cub,
                 from2=hpf_cub,
                 to=alg_cub,
                 operator="ADD",
                 A=1.0,
                 B=1.0)

    # copy the input file to the output file then mosaic the
    # furrow area as needed.
    logger.info(f"Copy {in_cub} to {out_cube}.")
    shutil.copyfile(in_cub, out_cube)
    # create="NO": overlay onto the existing copy instead of starting
    # a new mosaic file.
    isis.handmos(
        alg_cub,
        mosaic=out_cube,
        outsample=1,
        outline=1,
        outband=1,
        insample=1,
        inline=1,
        inband=1,
        create="NO",
    )

    if not keep:
        to_del.unlink()

    return
Exemplo n.º 6
0
def HiColorNorm(cubes: list,
                output,
                conf: dict,
                make_unfiltered=True,
                db_list=None,
                keep=False):
    """Perform color normalization on a set of HiRISE color cubes.

    *cubes* are wrapped as ColorCube objects and sorted; each of the
    three bands is masked to the [0.0, 2.0] DN range and reassembled,
    per-color ratio/mask/crop products are made (per_color), and
    per-band normalization is run (per_band) for IR and BG.  If the
    configuration enables it, the normalized CCDs are stitched into
    the output cube.  Temporary products are removed unless *keep*
    is true.

    Returns a two-tuple: (IR ratio stddev, BG ratio stddev) as
    reported by per_band().
    """
    logger.info(f"HiColorNorm start: {cubes}")

    # GetConfigurationParameters()
    conf_check(conf)

    cubes = list(map(ColorCube, cubes, repeat(db_list)))
    cubes.sort()

    outcub_path = set_outpath(output, cubes)

    temp_token = datetime.now().strftime("HiColorNorm-%y%m%d%H%M%S")
    out_p = Path(outcub_path)

    furrow_flag = FurrowCheck(cubes)

    to_del = isis.PathSet()

    for i, _ in enumerate(cubes):
        cubes[i].set_crop_lines(conf)

    for i, c in enumerate(cubes):
        # Protect the processing from erroneous/spurious pixels by
        # masking each band to the valid [0.0, 2.0] DN range.
        mask_list = list()
        for b in (1, 2, 3):
            tmp_p = to_del.add(
                c.path.with_suffix(f".{temp_token}.temp{b}.cub"))
            isis.mask(
                f"{c.path}+{b}",
                mask=f"{c.path}+{b}",
                to=tmp_p,
                minimum=0.0,
                maximum=2.0,
                preserve="INSIDE",
            )
            mask_list.append(tmp_p)

        # Reassemble the masked bands into a single cube.
        # (Fixed: removed needless f-prefix on a placeholder-free
        # string literal, ruff F541.)
        c.final_path = c.path.with_suffix(".HiColorNorm.cub")
        isis.cubeit_k(mask_list, to=c.final_path)

        (cubes[i].mask_path["IR"],
         cubes[i].crop_path["IR"]) = per_color(c, temp_token, "IR", keep=keep)
        (cubes[i].mask_path["BG"],
         cubes[i].crop_path["BG"]) = per_color(c, temp_token, "BG", keep=keep)

    ir_ratio_stddev = per_band(cubes,
                               out_p,
                               temp_token,
                               "IR",
                               furrow_flag,
                               make_unfiltered,
                               keep=keep)
    bg_ratio_stddev = per_band(cubes,
                               out_p,
                               temp_token,
                               "BG",
                               furrow_flag,
                               make_unfiltered,
                               keep=keep)

    if conf["HiColorNorm"]["HiColorNorm_Make_Stitch"]:
        # listpath = to_del.add(c.path.with_suffix(f'.{temp_token}.list.txt'))
        # listpath.write_text(
        #   '\n'.join(str(c.final_path) for c in cubes) + '\n')

        with isis.fromlist.temp([str(c.final_path) for c in cubes]) as f:
            isis.hiccdstitch(fromlist=f, to=out_p)

        for c in cubes:
            to_del.add(c.final_path)

    if not keep:
        to_del.unlink()
        for c in cubes:
            for cc in ("IR", "BG"):
                # NOTE(review): nrm_path is presumably populated by
                # per_band(); confirm it is always set before unlink.
                c.mask_path[cc].unlink()
                c.crop_path[cc].unlink()
                c.nrm_path[cc].unlink()

    # Fixed: removed needless f-prefix (ruff F541).
    logger.info("HiColorNorm done.")
    return ir_ratio_stddev, bg_ratio_stddev
Exemplo n.º 7
0
def BalanceStep(cubes, conf, keep=False) -> list:
    """Compute and apply DN balance corrections across a run of CCD cubes.

    Adjacent CCDs' overlapping edges are masked to common coverage,
    edge statistics are gathered, and a per-CCD correction (normalized
    within each contiguous CCD group) is applied by make_balance(),
    which writes ``*.balance.cub`` files and updates each cube's
    ``nextpath``.  Returns the updated cube list.  Temporary files
    are removed unless *keep* is true.
    """
    to_del = isis.PathSet()

    # Sort the cubes so that they are in CCD order
    cubes.sort()

    cubes, to_delete = crop_and_scale(cubes)
    to_del.update(to_delete)

    # Original Perl: Generate CCD number array for each CCD file, not needed
    # now, since we can just query the object.

    # Original Perl: Used $0 (the program name) instead of the index 0
    # here, but I've arranged things with the set_ls_path() and set_rs_path()
    # to just make these a full copy so there's no
    # need to mess with a conditional assignment and also streamlines
    # the following logic, since they're identical except when you
    # explicitly change them.

    # The third step is to mask the left and right overlap areas. We
    # want to zap pixels where there is not common coverage.
    for i, c in enumerate(cubes):
        # Only consecutive CCD numbers share an overlap region.
        if i + 1 < len(cubes) and int(cubes[i].ccdnumber) + 1 == int(
                cubes[i + 1].ccdnumber):
            cubes[i].rm_path = to_del.add(
                c.nextpath.with_suffix(".right.mask.cub"))
            cubes[i + 1].lm_path = to_del.add(
                cubes[i + 1].nextpath.with_suffix(".left.mask.cub"))

            # Mask each cube's overlap strip with its neighbor's strip,
            # so only pixels covered in both survive.
            for f, m, t in zip(
                [cubes[i].rs_path, cubes[i + 1].ls_path],
                [cubes[i + 1].ls_path, cubes[i].rs_path],
                [cubes[i].rm_path, cubes[i + 1].lm_path],
            ):
                isis.mask(
                    f,
                    mask=m,
                    to=t,
                    preserve="INSIDE",
                    min_=conf["HiccdStitch_Normalization_Minimum"],
                    max_=conf["HiccdStitch_Normalization_Maximum"],
                )

    # The fourth step is to get image statistics for left and right
    # overlap areas of each CCD image.
    cubes = get_stats(cubes)

    # Look for a break in joining CCDs, defined by a break in the CCD number,
    # or the right or left statistics are undefined, due to an all null channel
    #
    # In the original Perl there was a loop to determine if there was a break,
    # but then nothing was done with that information?  Oh, it was used
    # differently: the code past that point develops a series of sequences
    # from $first to $last.  If there are no breaks, then it only runs a
    # single sequence.  If there are breaks, it runs the sequences it finds.
    #
    # Here's the pythonic version:
    cubes.sort()
    for (offset, group) in get_group_i(cubes):
        logger.info("Correction before redistribution.")
        for ccd in group:
            # The group holds CCD numbers; offset maps them back to
            # indices into the sorted cubes list.
            i = ccd + offset
            # NOTE(review): cubes[i - 1] is the previous list element
            # (and wraps to the last element when i == 0) — presumably
            # get_correction handles the start-of-group case; confirm.
            cubes[i].correction = get_correction(
                cubes[i],
                cubes[i - 1],
                conf["HiccdStitch_Balance_Correction"],
                i,
            )
            logger.info(
                f"CCDID: {cubes[i]}, correction: {cubes[i].correction}")

        # Redistribute corrections relative to the configured control
        # CCD; exact semantics live in get_normalization().
        normalization = get_normalization(cubes, group, offset,
                                          conf["HiccdStitch_Control_CCD"])

        logger.info("Correction after redistribution.")
        for ccd in group:
            i = ccd + offset
            cubes[i].correction /= normalization
            logger.info(
                f"CCDID: {cubes[i]}, correction: {cubes[i].correction}, "
                f"left: {cubes[i].lstats}, right: {cubes[i].rstats}")

            # In the original Perl, they wrote out to the DB here, but we'll
            # do it later.  There was also a distinction that if it was
            # HiccdStitchC that the data was written out to a PVL file.  Not
            # sure why.

        # Create the balance cubes
        for ccd in group:
            i = ccd + offset
            balance_path = cubes[i].nextpath.with_suffix(".balance.cub")
            make_balance(cubes[i], conf, balance_path)
            cubes[i].nextpath = balance_path

    if not keep:
        to_del.unlink()

    return cubes