Example #1
def test_import_into_sqla_object():
    product = Product()
    data = {"name": "Acme anvils", "color": "black", "price": 99}

    import_into_sqla_object(product, data)

    assert product.id is None
    assert product.name == "Acme anvils"
    assert product.color == "black"
    assert product.price == 99
Example #2
    def test_model_class_without_export_data_method(self, query, request_ctx):
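        # `query` and `request_ctx` are presumably pytest fixtures providing a
        # mocked SQLAlchemy query and a Flask test request context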
        item = Product(id=4, name="Acme explosive tennis balls")

        query.paginate().items = [item]
        query.paginate().total = 1

        with request_ctx('/?select={"id": 1}&page=3'):
            rv = collection()(lambda: Product)()

        assert rv[0] == [{"id": 4}]
        assert rv[1] == {"X-Total": 1, "X-Page": 3}
Example #3
    def test_non_persisted_untouched_object(self):
        export_data = ExportData()
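        # ExportData presumably serializes a SQLAlchemy model instance into a
        # plain dict keyed by column name, surfacing declared column defaults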

        data = export_data(Product())
        assert len(data) == 7

        for prop in ["id", "name", "created_at", "updated_at"]:
            assert data[prop] is None

        assert data["color"] == "white"
        assert data["price"] == 0
        assert data["enabled"] is False
Example #4
    def test_non_persisted_touched_object(self):
        export_data = ExportData()

        data = export_data(Product(id=9, name="TNT", price=799, color="red"))

        assert len(data) == 7
        assert data["id"] == 9
        assert data["name"] == "TNT"
        assert data["color"] == "red"
        assert data["price"] == 799
        assert data["enabled"] is False
        assert data["created_at"] is None
        assert data["updated_at"] is None
Example #5
def insert_products():
    lst = [
        {
            "name": "Explosive Tennis Balls",
            "color": "white",
            "price": 9
        },
        {
            "name": "Binocular",
            "color": "black",
            "price": 99
        },
        {
            "name": "Acme anvils",
            "color": "black",
            "price": 999
        },
    ]

    database.session.add_all([Product(**item) for item in lst])
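    # NOTE: objects are only staged on the session here; committing (or
    # flushing) is presumably left to the caller or a test fixture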
Example #6
def main():
    # Argument parser
    parser = argparse.ArgumentParser()
    parser.add_argument("list", help="List of paths of collection")
    parser.add_argument("--band",
                        help="Specific band in acix band definition",
                        type=int,
                        required=True)
    parser.add_argument(
        "--samples",
        help="Reflectance sampling, defaults to 100 (ie. 0.01)",
        type=int,
        default=100)
    parser.add_argument(
        "-s",
        "--save",
        help="Write location results as npy instead of stacking in memory",
        action="store_true",
        default=False)
    parser.add_argument("-v",
                        "--verbose",
                        help="Set verbosity to DEBUG level",
                        action="store_true",
                        default=False)
    parser.add_argument("--negative",
                        help="Save sr lt 0",
                        action="store_true",
                        default=False)
    parser.add_argument("--keepall",
                        help="Keep sr <= 0 but cloudfree",
                        action="store_true",
                        default=False)
    parser.add_argument("--stack",
                        help="Stack all sites in one file",
                        action="store_true",
                        default=False)

    args = parser.parse_args()

    # <maja_band>, <hdf_band>, <resolution>, <ref_samples>, <maja_samples>
    bdef_acix = (["band02", "SRE_B2.", "R1",
                  []], ["band03", "SRE_B3.", "R1",
                        []], ["band04", "SRE_B4.", "R1",
                              []], ["band05", "SRE_B5.", "R2",
                                    []], ["band06", "SRE_B6.", "R2",
                                          []], ["band07", "SRE_B7.", "R2", []],
                 ["band08", "SRE_B8.", "R1",
                  []], ["band8a", "SRE_B8A.", "R2",
                        []], ["band11", "SRE_B11.", "R2",
                              []], ["band12", "SRE_B12.", "R2", []])

    band_id = args.band

    # Validate the band index before it is used to index bdef_acix
    if band_id < 0 or band_id >= len(bdef_acix):
        print("Band ID out of range with value %i" % band_id, file=sys.stderr)
        sys.exit(3)

    # Create the logger
    logger = utl.get_logger('acix_validate_' + bdef_acix[band_id][0],
                            args.verbose)

    # vector containers for stacked data
    v_stacked_valid_ref = np.zeros((0))
    v_stacked_valid_maja = np.zeros((0))
    v_stacked_lt0_ref = np.zeros((0))
    v_stacked_lt0_maja = np.zeros((0))
    v_stacked_keep_all_ref = np.zeros((0))
    v_stacked_keep_all_maja = np.zeros((0))

    match_count = 0
    len_check = 0

    with open(args.list, 'r') as f:
        paths_list = f.read().splitlines()

    for p in paths_list:
        paths = p.split(',')
        location_name = paths[0].split('/')[-1]

        # vector containers for location specific data
        v_local_valid_ref = np.zeros((0))
        v_local_valid_maja = np.zeros((0))
        v_local_lt0_ref = np.zeros((0))
        v_local_lt0_maja = np.zeros((0))
        v_local_keep_all_ref = np.zeros((0))
        v_local_keep_all_maja = np.zeros((0))

        acix_vermote_collection = clc.Collection(paths[0], logger)
        acix_maja_collection = clc.Collection(paths[1], logger)
        compare = cmp.Comparison(acix_vermote_collection, acix_maja_collection,
                                 logger)

        for match in compare.matching_products:
            logger.info("One-by-one for %s between %s and %s" %
                        (match[0], match[1], match[2]))
            p_ref = prd.Product_hdf_acix(match[1], logger)
            p_maja = prd.Product_dir_maja(match[2], logger)

            try:
                b_ref = p_ref.get_band(p_ref.find_band(bdef_acix[band_id][0]),
                                       scalef=p_ref.sre_scalef)
                m_ref_qa = p_ref.get_band(p_ref.find_band("refqa"))
                b_maja = p_maja.get_band(p_maja.find_band(
                    bdef_acix[band_id][1]),
                                         scalef=p_maja.sre_scalef)
                clm = p_maja.get_band(
                    p_maja.find_band("CLM_" + bdef_acix[band_id][2]))
                edg = p_maja.get_band(
                    p_maja.find_band("EDG_" + bdef_acix[band_id][2]))
                m_maja_qa, ratio = p_maja.get_mask(clm, edg, stats=True)
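                # ratio is presumably the valid-pixel fraction reported by
                # get_mask; it is not used further here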
                del clm
                del edg

                # default filter: pixels flagged cloud-free by both ref and maja, with sr > 0
                is_valid = np.where((b_ref > 0)
                                    & (b_maja > 0)
                                    & (m_ref_qa == 1)
                                    & (m_maja_qa == 1))

                if args.negative:
                    # get sr < 0 even though flagged cloud-free
                    is_cloudfree_but_negative = np.where((m_maja_qa == 1)
                                                         & (m_ref_qa == 1)
                                                         & ((b_maja < 0)
                                                            | (b_ref < 0)))

                if args.keepall:
                    # keep every cloud-free pixel, regardless of the sr sign
                    is_cloudfree_keep_all = np.where((m_maja_qa == 1)
                                                     & (m_ref_qa == 1))

                # stack local valid values for all timestamp matches
                v_local_valid_ref = np.append(v_local_valid_ref,
                                              (b_ref[is_valid]))
                v_local_valid_maja = np.append(v_local_valid_maja,
                                               (b_maja[is_valid]))

                if args.negative:
                    # stack local cloudfree negative sr for all timestamp matches
                    v_local_lt0_ref = np.append(
                        v_local_lt0_ref, (b_ref[is_cloudfree_but_negative]))
                    v_local_lt0_maja = np.append(
                        v_local_lt0_maja, (b_maja[is_cloudfree_but_negative]))

                if args.keepall:
                    v_local_keep_all_ref = np.append(
                        v_local_keep_all_ref, (b_ref[is_cloudfree_keep_all]))
                    v_local_keep_all_maja = np.append(
                        v_local_keep_all_maja, (b_maja[is_cloudfree_keep_all]))

                match_count += 1
                len_check += len(b_ref[is_valid])

                if args.stack:
                    # if all locations have to be stacked in one single vector,
                    # append only the samples from the current match so they
                    # are not duplicated across iterations
                    v_stacked_valid_ref = np.append(v_stacked_valid_ref,
                                                    b_ref[is_valid])
                    v_stacked_valid_maja = np.append(v_stacked_valid_maja,
                                                     b_maja[is_valid])

                    if args.negative:
                        v_stacked_lt0_ref = np.append(
                            v_stacked_lt0_ref,
                            b_ref[is_cloudfree_but_negative])
                        v_stacked_lt0_maja = np.append(
                            v_stacked_lt0_maja,
                            b_maja[is_cloudfree_but_negative])

                    if args.keepall:
                        v_stacked_keep_all_ref = np.append(
                            v_stacked_keep_all_ref,
                            b_ref[is_cloudfree_keep_all])
                        v_stacked_keep_all_maja = np.append(
                            v_stacked_keep_all_maja,
                            b_maja[is_cloudfree_keep_all])

                else:
                    # save local vectors in one compressed file
                    if not args.keepall:
                        np.savez_compressed(
                            location_name + "_valid_" + bdef_acix[band_id][0],
                            [
                                v_local_valid_ref.astype('float32'),
                                v_local_valid_maja.astype('float32')
                            ])
                    if args.negative:
                        np.savez_compressed(
                            location_name + "_sr_lt_0_" +
                            bdef_acix[band_id][0], [
                                v_local_lt0_ref.astype('float32'),
                                v_local_lt0_maja.astype('float32')
                            ])

                    if args.keepall:
                        np.savez_compressed(
                            location_name + "_keep_all_" +
                            bdef_acix[band_id][0], [
                                v_local_keep_all_ref.astype('float32'),
                                v_local_keep_all_maja.astype('float32')
                            ])

            except TypeError as err:
                logger.warning(
                    "Skipping comparison for %s due to an unexpected product dimension: %s"
                    % (match[0], err))

    if args.stack:
        # if --stack, save stacked vector in one single compressed file
        np.savez_compressed("Stacked_valid_" + bdef_acix[band_id][0], [
            v_stacked_valid_ref.astype('float32'),
            v_stacked_valid_maja.astype('float32')
        ])
        if args.negative:
            np.savez_compressed("Stacked_sr_lt_0_" + bdef_acix[band_id][0], [
                v_stacked_lt0_ref.astype('float32'),
                v_stacked_lt0_maja.astype('float32')
            ])

        if args.keepall:
            np.savez_compressed("Stacked_sr_keep_all_" + bdef_acix[band_id][0],
                                [
                                    v_stacked_keep_all_ref.astype('float32'),
                                    v_stacked_keep_all_maja.astype('float32')
                                ])

        if len_check == len(v_stacked_valid_ref) and len_check == len(
                v_stacked_valid_maja):
            logger.info("Saved %i stacked samples to Stacked_valid_%s.npz" %
                        (len_check, bdef_acix[band_id][0]))
        else:
            logger.error(
                "Inconsistent sample length between len_check=%i and len(v_stacked_valid_ref)=%i"
                % (len_check, len(v_stacked_valid_ref)))

    sys.exit(0)
Example #7
def main():
    # Argument parser
    parser = argparse.ArgumentParser()
    parser.add_argument("list", help="List of paths of collection")
    parser.add_argument("--saveto",
                        help="subdirectory to save figs to",
                        type=str)
    parser.add_argument("--hist",
                        help="Display quicklooks with histograms",
                        action="store_true",
                        default=False)
    parser.add_argument(
        "--keepall",
        help="Display quicklooks with histograms with keep_all",
        action="store_true",
        default=False)
    parser.add_argument("-v",
                        "--verbose",
                        help="Set verbosity to DEBUG level",
                        action="store_true",
                        default=False)

    args = parser.parse_args()
    gain_true = 3.
    gain_false = 2.5
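    # display stretch gains applied to the reflectances when building the
    # true-colour (B4/B3/B2) and, presumably, false-colour (B8/B3/B2) quicklooks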

    # per-(location, timestamp) statistics; only filled when --keepall and
    # --hist are set, but always initialised so the final report cannot fail
    l_stats = []

    # Create the logger
    logger = utl.get_logger('Diag', args.verbose)

    bdef_acix = (["band02", "SRE_B2.", "R1",
                  []], ["band03", "SRE_B3.", "R1",
                        []], ["band04", "SRE_B4.", "R1",
                              []], ["band05", "SRE_B5.", "R2",
                                    []], ["band06", "SRE_B6.", "R2",
                                          []], ["band07", "SRE_B7.", "R2", []],
                 ["band08", "SRE_B8.", "R1",
                  []], ["band8a", "SRE_B8A.", "R2",
                        []], ["band11", "SRE_B11.", "R2",
                              []], ["band12", "SRE_B12.", "R2", []])

    with open(args.list, 'r') as f:
        paths_list = f.read().splitlines()

    for p in paths_list:
        paths = p.split(',')
        location_name = paths[0].split('/')[-1]

        acix_vermote_collection = clc.Collection(paths[0], logger)
        acix_maja_collection = clc.Collection(paths[1], logger)
        compare = cmp.Comparison(acix_vermote_collection, acix_maja_collection,
                                 logger)

        for match in compare.matching_products:
            logger.info("One-by-one for %s between %s and %s" %
                        (match[0], match[1], match[2]))
            p_ref = prd.Product_hdf_acix(match[1], logger)
            p_maja = prd.Product_dir_maja(match[2], logger)
            timestamp = match[0]

            try:
                b_ref_b2 = p_ref.get_band(p_ref.find_band(bdef_acix[0][0]),
                                          scalef=p_ref.sre_scalef)
                b_ref_b3 = p_ref.get_band(p_ref.find_band(bdef_acix[1][0]),
                                          scalef=p_ref.sre_scalef)
                b_ref_b4 = p_ref.get_band(p_ref.find_band(bdef_acix[2][0]),
                                          scalef=p_ref.sre_scalef)
                b_ref_b8 = p_ref.get_band(p_ref.find_band(bdef_acix[6][0]),
                                          scalef=p_ref.sre_scalef)
                m_ref_qa = p_ref.get_band(p_ref.find_band("refqa"))
                b_maja_b2 = p_maja.get_band(p_maja.find_band(bdef_acix[0][1]),
                                            scalef=p_maja.sre_scalef)
                b_maja_b3 = p_maja.get_band(p_maja.find_band(bdef_acix[1][1]),
                                            scalef=p_maja.sre_scalef)
                b_maja_b4 = p_maja.get_band(p_maja.find_band(bdef_acix[2][1]),
                                            scalef=p_maja.sre_scalef)
                b_maja_b8 = p_maja.get_band(p_maja.find_band(bdef_acix[6][1]),
                                            scalef=p_maja.sre_scalef)
                b_maja_aot = p_maja.get_band(p_maja.find_band("ATB_R1"),
                                             layer=1,
                                             scalef=p_maja.aot_scalef)
                b_maja_vap = p_maja.get_band(p_maja.find_band("ATB_R1"),
                                             layer=0,
                                             scalef=p_maja.vap_scalef)
                clm = p_maja.get_band(
                    p_maja.find_band("CLM_" + bdef_acix[0][2]))
                edg = p_maja.get_band(
                    p_maja.find_band("EDG_" + bdef_acix[0][2]))
                m_maja_qa, ratio = p_maja.get_mask(clm, edg, stats=True)

                if args.hist:
                    fig, axs = pl.subplots(nrows=3, ncols=3, figsize=[12, 12])
                    fig.suptitle(location_name + ' ' + timestamp[0:4] + '-' +
                                 timestamp[4:6] + '-' + timestamp[6:8],
                                 fontsize=16)

                    cset_true = axs[0, 0].imshow(np.dstack(
                        (b_maja_b4 * gain_true, b_maja_b3 * gain_true,
                         b_maja_b2 * gain_true)),
                                                 interpolation='none',
                                                 aspect='equal')
                    axs[0, 0].set_title("Maja quicklook (B4, B3, B2)")
                    cset_maja_cloud_contour = axs[0, 0].contour(m_maja_qa)
                    # axs[0, 0].clabel(cset_maja_cloud_contour, inline=1, fontsize=10)

                    cset_maja_qa = axs[1, 0].imshow(m_maja_qa,
                                                    interpolation='none',
                                                    aspect='equal',
                                                    vmin=0,
                                                    vmax=1,
                                                    cmap='gray')
                    axs[1, 0].set_title("Maja CLM & EDG (valid=1)")
                    divider = make_axes_locatable(axs[1, 0])
                    cax = divider.append_axes("right", size="5%", pad=0.05)
                    pl.colorbar(cset_maja_qa,
                                cax=cax)  # , orientation='horizontal')

                    # cset_ref_qa = axs[2, 0].imshow(m_ref_qa, interpolation='none', aspect='equal', vmin=0, vmax=1, cmap='gray')
                    # axs[2, 0].set_title("Reference QA (valid=1)")
                    # divider = make_axes_locatable(axs[2, 0])
                    # cax = divider.append_axes("right", size="5%", pad=0.05)
                    # pl.colorbar(cset_ref_qa, cax=cax)  # , orientation='horizontal')

                    cset_true = axs[2, 0].imshow(np.dstack(
                        (b_ref_b4 * gain_true, b_ref_b3 * gain_true,
                         b_ref_b2 * gain_true)),
                                                 interpolation='none',
                                                 aspect='equal')
                    axs[2, 0].set_title("Ref quicklook (B4, B3, B2)")
                    axs[2, 0].contour(m_ref_qa)

                    # cset_false = axs[0, 1].imshow(np.dstack((b_maja_b8*gain_false, b_maja_b3*gain_false, b_maja_b2*gain_false)), interpolation='none', aspect='equal')
                    # axs[0, 1].set_title("%s %s (B8,B3,B2)" % (location_name, timestamp))

                    cset_maja_vap = axs[0, 1].imshow(b_maja_vap,
                                                     interpolation='none',
                                                     aspect='equal',
                                                     cmap='RdBu')
                    axs[0, 1].set_title("Maja VAP $(g.cm^{-2})$")
                    divider = make_axes_locatable(axs[0, 1])
                    cax = divider.append_axes("right", size="5%", pad=0.05)
                    pl.colorbar(cset_maja_vap, cax=cax,
                                format='%4.2f')  # , orientation='horizontal')

                    # cset_maja_vap = axs[0, 1].imshow(np.dstack((b_maja_b4*gain_true, b_maja_b3*gain_true, b_maja_b2*gain_true)), interpolation='none', aspect='equal')
                    # axs[0, 1].set_title("Maja VAP")
                    # divider = make_axes_locatable(axs[0, 1])
                    # cax = divider.append_axes("right", size="5%", pad=0.05)
                    # pl.colorbar(cset_maja_vap, cax=cax)  # , orientation='horizontal')
                    # cset_maja_vap_contour = axs[0, 1].contour(b_maja_vap)
                    # axs[0, 1].clabel(cset_maja_vap_contour, inline=1, fontsize=10)

                    cset_maja_aot = axs[0, 2].imshow(b_maja_aot, cmap='Wistia')
                    axs[0, 2].imshow(np.dstack(
                        (b_maja_b4 * gain_true, b_maja_b3 * gain_true,
                         b_maja_b2 * gain_true)),
                                     interpolation='none',
                                     aspect='equal')
                    axs[0, 2].set_title("Maja AOT $(-)$")
                    divider = make_axes_locatable(axs[0, 2])
                    cax = divider.append_axes("right", size="5%", pad=0.05)
                    pl.colorbar(cset_maja_aot, cax=cax,
                                format='%4.2f')  # , orientation='horizontal')
                    cset_maja_aot_contour = axs[0, 2].contour(b_maja_aot,
                                                              cmap='Wistia')
                    axs[0, 2].clabel(cset_maja_aot_contour,
                                     inline=1,
                                     fontsize=10)

                    # B2
                    if args.keepall:
                        is_valid = np.where((m_ref_qa == 1) & (m_maja_qa == 1))
                        min_sr = -0.1
                        max_sr = 0.7
                        is_log = False
                        filter_label = "(QA=1)"
                        b_ref_b2_is_valid_count = len(
                            b_ref_b2[is_valid].flatten())
                        search = np.where(b_ref_b2[is_valid] < 0)
                        b_ref_b2_is_valid_and_lt0_count = len(
                            b_ref_b2[is_valid][search].flatten())
                        search = np.where(b_maja_b2[is_valid] < 0)
                        b_maja_b2_is_valid_and_lt0_count = len(
                            b_maja_b2[is_valid][search].flatten())

                    else:
                        is_valid = np.where((b_ref_b2 > 0)
                                            & (b_maja_b2 > 0)
                                            & (m_ref_qa == 1)
                                            & (m_maja_qa == 1))
                        min_sr = 0
                        max_sr = 1
                        is_log = False
                        filter_label = "(QA=1 & sr>0)"

                    axs[1, 1].hist(b_ref_b2[is_valid].flatten(),
                                   bins=200,
                                   histtype='step',
                                   log=is_log,
                                   label='Ref',
                                   range=(min_sr, max_sr),
                                   density=False)
                    axs[1, 1].hist(b_maja_b2[is_valid].flatten(),
                                   bins=200,
                                   histtype='step',
                                   log=is_log,
                                   label='Maja',
                                   range=(min_sr, max_sr),
                                   density=False)
                    axs[1, 1].set_title(
                        "B2 " + filter_label + " RMSE=%8.6f" %
                        utl.rmse(b_ref_b2[is_valid].flatten(),
                                 b_maja_b2[is_valid].flatten()))
                    axs[1, 1].legend()

                    # B3
                    if args.keepall:
                        is_valid = np.where((m_ref_qa == 1) & (m_maja_qa == 1))
                        min_sr = -0.1
                        max_sr = 0.7
                        is_log = False
                        filter_label = "(QA=1)"
                        b_ref_b3_is_valid_count = len(
                            b_ref_b3[is_valid].flatten())
                        search = np.where(b_ref_b3[is_valid] < 0)
                        b_ref_b3_is_valid_and_lt0_count = len(
                            b_ref_b3[is_valid][search].flatten())
                        search = np.where(b_maja_b3[is_valid] < 0)
                        b_maja_b3_is_valid_and_lt0_count = len(
                            b_maja_b3[is_valid][search].flatten())

                    else:
                        is_valid = np.where((b_ref_b3 > 0)
                                            & (b_maja_b3 > 0)
                                            & (m_ref_qa == 1)
                                            & (m_maja_qa == 1))
                        min_sr = 0
                        max_sr = 1
                        is_log = False
                        filter_label = "(QA=1 & sr>0)"

                    axs[1, 2].hist(b_ref_b3[is_valid].flatten(),
                                   bins=200,
                                   histtype='step',
                                   log=is_log,
                                   label='Ref',
                                   range=(min_sr, max_sr))
                    axs[1, 2].hist(b_maja_b3[is_valid].flatten(),
                                   bins=200,
                                   histtype='step',
                                   log=is_log,
                                   label='Maja',
                                   range=(min_sr, max_sr))
                    axs[1, 2].set_title(
                        "B3 " + filter_label + " RMSE=%8.6f" %
                        utl.rmse(b_ref_b3[is_valid].flatten(),
                                 b_maja_b3[is_valid].flatten()))
                    axs[1, 2].legend()

                    # B4
                    if args.keepall:
                        is_valid = np.where((m_ref_qa == 1) & (m_maja_qa == 1))
                        min_sr = -0.1
                        max_sr = 0.7
                        is_log = False
                        filter_label = "(QA=1)"
                        b_ref_b4_is_valid_count = len(
                            b_ref_b4[is_valid].flatten())
                        search = np.where(b_ref_b4[is_valid] < 0)
                        b_ref_b4_is_valid_and_lt0_count = len(
                            b_ref_b4[is_valid][search].flatten())
                        search = np.where(b_maja_b4[is_valid] < 0)
                        b_maja_b4_is_valid_and_lt0_count = len(
                            b_maja_b4[is_valid][search].flatten())

                    else:
                        is_valid = np.where((b_ref_b4 > 0)
                                            & (b_maja_b4 > 0)
                                            & (m_ref_qa == 1)
                                            & (m_maja_qa == 1))
                        min_sr = 0
                        max_sr = 1
                        is_log = False
                        filter_label = "(QA=1 & sr>0)"

                    axs[2, 1].hist(b_ref_b4[is_valid].flatten(),
                                   bins=200,
                                   histtype='step',
                                   log=is_log,
                                   label='Ref',
                                   range=(min_sr, max_sr))
                    axs[2, 1].hist(b_maja_b4[is_valid].flatten(),
                                   bins=200,
                                   histtype='step',
                                   log=is_log,
                                   label='Maja',
                                   range=(min_sr, max_sr))
                    axs[2, 1].set_title(
                        "B4 " + filter_label + " RMSE=%8.6f" %
                        utl.rmse(b_ref_b4[is_valid].flatten(),
                                 b_maja_b4[is_valid].flatten()))
                    axs[2, 1].legend()

                    # B8
                    if args.keepall:
                        is_valid = np.where((m_ref_qa == 1) & (m_maja_qa == 1))
                        min_sr = -0.1
                        max_sr = 0.7
                        is_log = False
                        filter_label = "(QA=1)"
                        b_ref_b8_is_valid_count = len(
                            b_ref_b8[is_valid].flatten())
                        search = np.where(b_ref_b8[is_valid] < 0)
                        b_ref_b8_is_valid_and_lt0_count = len(
                            b_ref_b8[is_valid][search].flatten())
                        search = np.where(b_maja_b8[is_valid] < 0)
                        b_maja_b8_is_valid_and_lt0_count = len(
                            b_maja_b8[is_valid][search].flatten())

                    else:
                        is_valid = np.where((b_ref_b8 > 0)
                                            & (b_maja_b8 > 0)
                                            & (m_ref_qa == 1)
                                            & (m_maja_qa == 1))
                        min_sr = 0
                        max_sr = 1
                        is_log = False
                        filter_label = "(QA=1 & sr>0)"

                    axs[2, 2].hist(b_ref_b8[is_valid].flatten(),
                                   bins=200,
                                   histtype='step',
                                   log=is_log,
                                   label='Ref',
                                   range=(min_sr, max_sr))
                    axs[2, 2].hist(b_maja_b8[is_valid].flatten(),
                                   bins=200,
                                   histtype='step',
                                   log=is_log,
                                   label='Maja',
                                   range=(min_sr, max_sr))
                    axs[2, 2].set_title(
                        "B8 " + filter_label + " RMSE=%8.6f" %
                        utl.rmse(b_ref_b8[is_valid].flatten(),
                                 b_maja_b8[is_valid].flatten()))
                    axs[2, 2].legend()

                    fig.tight_layout()
                    fig.subplots_adjust(top=0.88)
                    pl.savefig(location_name + '_' + timestamp +
                               '_All_quicklooks.png')
                    pl.close('all')

                    if args.keepall:
                        l_stats = l_stats + [[
                            location_name + timestamp, b_ref_b2_is_valid_count,
                            b_ref_b3_is_valid_count, b_ref_b4_is_valid_count,
                            b_ref_b8_is_valid_count,
                            b_ref_b2_is_valid_and_lt0_count,
                            b_ref_b3_is_valid_and_lt0_count,
                            b_ref_b4_is_valid_and_lt0_count,
                            b_ref_b8_is_valid_and_lt0_count,
                            b_maja_b2_is_valid_and_lt0_count,
                            b_maja_b3_is_valid_and_lt0_count,
                            b_maja_b4_is_valid_and_lt0_count,
                            b_maja_b8_is_valid_and_lt0_count
                        ]]

                else:
                    try:
                        fig, axs = pl.subplots(figsize=[12, 12])
                        pl.title(location_name + ' ' + timestamp[0:4] + '-' +
                                 timestamp[4:6] + '-' + timestamp[6:8],
                                 fontsize=16)

                        axs.imshow(np.dstack(
                            (b_maja_b4 * gain_true, b_maja_b3 * gain_true,
                             b_maja_b2 * gain_true)),
                                   interpolation='none',
                                   aspect='equal')
                        axs.set_title("MAJA Quicklook (B4, B3, B2)")
                        axs.contour(m_maja_qa)

                        pl.savefig(location_name + '_' + timestamp +
                                   '_Maja_quicklooks.png')
                        pl.close('all')

                        fig, axs = pl.subplots(figsize=[12, 12])
                        pl.title(location_name + ' ' + timestamp[0:4] + '-' +
                                 timestamp[4:6] + '-' + timestamp[6:8],
                                 fontsize=16)

                        axs.imshow(np.dstack(
                            (b_ref_b4 * gain_true, b_ref_b3 * gain_true,
                             b_ref_b2 * gain_true)),
                                   interpolation='none',
                                   aspect='equal')
                        axs.set_title("REF Quicklook (B4, B3, B2)")
                        axs.contour(m_ref_qa)

                        pl.savefig(location_name + '_' + timestamp +
                                   '_Ref_quicklooks.png')
                        pl.close('all')
                    except Exception as err:
                        logger.error(err)

            except Exception as err:
                logger.error(err)

    b_common_stats_dataset_count = 0

    b_ref_stats_dataset_with_any_sr_lt0_count = 0
    b_ref_stats_dataset_with_atmost_025prc_sr_lt0_count = 0
    b_ref_stats_dataset_with_atmost_05prc_sr_lt0_count = 0
    b_ref_stats_dataset_with_atmost_10prc_sr_lt0_count = 0
    b_ref_stats_dataset_with_morethan_10prc_sr_lt0_count = 0

    b_maja_stats_dataset_with_any_sr_lt0_count = 0
    b_maja_stats_dataset_with_atmost_025prc_sr_lt0_count = 0
    b_maja_stats_dataset_with_atmost_05prc_sr_lt0_count = 0
    b_maja_stats_dataset_with_atmost_10prc_sr_lt0_count = 0
    b_maja_stats_dataset_with_morethan_10prc_sr_lt0_count = 0
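
    # Each l_stats record, appended above when both --hist and --keepall are
    # set, holds: [location+timestamp, valid-pixel counts for B2/B3/B4/B8
    # (indices 1-4), ref sr<0 counts (5-8), maja sr<0 counts (9-12)]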

    for l in range(len(l_stats)):
        b_common_stats_dataset_count += 1
        if (l_stats[l][5] > 0) or (l_stats[l][6] > 0) or (
                l_stats[l][7] > 0) or (l_stats[l][8] > 0):
            b_ref_stats_dataset_with_any_sr_lt0_count += 1
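
            # the absolute thresholds below (20250 / 40500 / 81000 pixels)
            # appear to correspond to 2.5 %, 5 % and 10 % of 810 000 samples,
            # matching the percentages quoted in the log messages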

            if max(l_stats[l][5], l_stats[l][6], l_stats[l][7],
                   l_stats[l][8]) <= 20250:
                logger.info(
                    "STATS REF_LT025: %s: b2_ratio=%i/%i, b3_ratio=%i/%i, b4_ratio=%i/%i, b8_ratio=%i/%i"
                    % (l_stats[l][0], l_stats[l][5], l_stats[l][1],
                       l_stats[l][6], l_stats[l][2], l_stats[l][7],
                       l_stats[l][3], l_stats[l][8], l_stats[l][4]))
                b_ref_stats_dataset_with_atmost_025prc_sr_lt0_count += 1

            elif max(l_stats[l][5], l_stats[l][6], l_stats[l][7],
                     l_stats[l][8]) <= 40500:
                logger.info(
                    "STATS REF_LT05: %s: b2_ratio=%i/%i, b3_ratio=%i/%i, b4_ratio=%i/%i, b8_ratio=%i/%i"
                    % (l_stats[l][0], l_stats[l][5], l_stats[l][1],
                       l_stats[l][6], l_stats[l][2], l_stats[l][7],
                       l_stats[l][3], l_stats[l][8], l_stats[l][4]))
                b_ref_stats_dataset_with_atmost_05prc_sr_lt0_count += 1

            elif max(l_stats[l][5], l_stats[l][6], l_stats[l][7],
                     l_stats[l][8]) <= 81000:
                b_ref_stats_dataset_with_atmost_10prc_sr_lt0_count += 1
                logger.info(
                    "STATS REF_LT10: %s: b2_ratio=%i/%i, b3_ratio=%i/%i, b4_ratio=%i/%i, b8_ratio=%i/%i"
                    % (l_stats[l][0], l_stats[l][5], l_stats[l][1],
                       l_stats[l][6], l_stats[l][2], l_stats[l][7],
                       l_stats[l][3], l_stats[l][8], l_stats[l][4]))

            else:
                b_ref_stats_dataset_with_morethan_10prc_sr_lt0_count += 1
                logger.info(
                    "STATS REF_GT10: %s: b2_ratio=%i/%i, b3_ratio=%i/%i, b4_ratio=%i/%i, b8_ratio=%i/%i"
                    % (l_stats[l][0], l_stats[l][5], l_stats[l][1],
                       l_stats[l][6], l_stats[l][2], l_stats[l][7],
                       l_stats[l][3], l_stats[l][8], l_stats[l][4]))

        if (l_stats[l][9] > 0) or (l_stats[l][10] > 0) or (
                l_stats[l][11] > 0) or (l_stats[l][12] > 0):
            b_maja_stats_dataset_with_any_sr_lt0_count += 1

            if max(l_stats[l][9], l_stats[l][10], l_stats[l][11],
                   l_stats[l][12]) <= 20250:
                logger.info(
                    "STATS MAJA_LT025: %s: b2_ratio=%i/%i, b3_ratio=%i/%i, b4_ratio=%i/%i, b8_ratio=%i/%i"
                    % (l_stats[l][0], l_stats[l][9], l_stats[l][1],
                       l_stats[l][10], l_stats[l][2], l_stats[l][11],
                       l_stats[l][3], l_stats[l][12], l_stats[l][4]))
                b_maja_stats_dataset_with_atmost_025prc_sr_lt0_count += 1

            elif max(l_stats[l][9], l_stats[l][10], l_stats[l][11],
                     l_stats[l][12]) <= 40500:
                logger.info(
                    "STATS LT05: %s: b2_ratio=%i/%i, b3_ratio=%i/%i, b4_ratio=%i/%i, b8_ratio=%i/%i"
                    % (l_stats[l][0], l_stats[l][9], l_stats[l][1],
                       l_stats[l][10], l_stats[l][2], l_stats[l][11],
                       l_stats[l][3], l_stats[l][12], l_stats[l][4]))
                b_maja_stats_dataset_with_atmost_05prc_sr_lt0_count += 1

            elif max(l_stats[l][9], l_stats[l][10], l_stats[l][11],
                     l_stats[l][12]) <= 81000:
                b_maja_stats_dataset_with_atmost_10prc_sr_lt0_count += 1
                logger.info(
                    "STATS LT10: %s: b2_ratio=%i/%i, b3_ratio=%i/%i, b4_ratio=%i/%i, b8_ratio=%i/%i"
                    % (l_stats[l][0], l_stats[l][9], l_stats[l][1],
                       l_stats[l][10], l_stats[l][2], l_stats[l][11],
                       l_stats[l][3], l_stats[l][12], l_stats[l][4]))

            else:
                b_maja_stats_dataset_with_morethan_10prc_sr_lt0_count += 1
                logger.info(
                    "STATS GT10: %s: b2_ratio=%i/%i, b3_ratio=%i/%i, b4_ratio=%i/%i, b8_ratio=%i/%i"
                    % (l_stats[l][0], l_stats[l][9], l_stats[l][1],
                       l_stats[l][10], l_stats[l][2], l_stats[l][11],
                       l_stats[l][3], l_stats[l][12], l_stats[l][4]))

    logger.info("STATS: Tested %i location and timestamps" %
                b_common_stats_dataset_count)
    logger.info("STATS: Found %i datasets with sr_ref < 0" %
                b_ref_stats_dataset_with_any_sr_lt0_count)
    logger.info("STATS:     %i datasets with at most 2.5%% of sr_ref < 0" %
                b_ref_stats_dataset_with_atmost_025prc_sr_lt0_count)
    logger.info("STATS:     %i datasets with 2.5%% to 5%% of sr_ref < 0" %
                b_ref_stats_dataset_with_atmost_05prc_sr_lt0_count)
    logger.info("STATS:     %i datasets with 5%% to 10%% of sr_ref < 0" %
                b_ref_stats_dataset_with_atmost_10prc_sr_lt0_count)
    logger.info("STATS:     %i datasets with more than 10%% of sr_ref < 0" %
                b_ref_stats_dataset_with_morethan_10prc_sr_lt0_count)

    logger.info("STATS: Found %i datasets with sr_maja < 0" %
                b_maja_stats_dataset_with_any_sr_lt0_count)
    logger.info("STATS:     %i datasets with at most 2.5%% of sr_maja < 0" %
                b_maja_stats_dataset_with_atmost_025prc_sr_lt0_count)
    logger.info("STATS:     %i datasets with 2.5%% to 5%% of sr_maja < 0" %
                b_maja_stats_dataset_with_atmost_05prc_sr_lt0_count)
    logger.info("STATS:     %i datasets with 5%% to 10%% of sr_maja < 0" %
                b_maja_stats_dataset_with_atmost_10prc_sr_lt0_count)
    logger.info("STATS:     %i datasets with more than 10%% of sr_maja < 0" %
                b_maja_stats_dataset_with_morethan_10prc_sr_lt0_count)

    print(b_common_stats_dataset_count,
          b_ref_stats_dataset_with_any_sr_lt0_count,
          b_ref_stats_dataset_with_atmost_10prc_sr_lt0_count)
    print(l_stats)

    sys.exit(0)