Example #1
def extract_unique_craters(CP, craters_unique):
    """Top level function that extracts craters from model predictions,
    converts craters from pixel to real (degree, km) coordinates, and filters
    out duplicate detections across images.

    Parameters
    ----------
    CP : dict
        Crater Parameters needed to run the code.
    craters_unique : array
        Empty master array of unique crater tuples in the form 
        (long, lat, radius).

    Returns
    -------
    craters_unique : array
        Filled master array of unique crater tuples.
    """

    # Load/generate model preds
    try:
        preds = h5py.File(CP['dir_preds'], 'r')[CP['datatype']]
        print("Loaded model predictions successfully")
    except (OSError, KeyError):
        print("Couldn't load model predictions; generating them instead")
        preds = get_model_preds(CP)

    # needed for long/lat bounds
    P = h5py.File(CP['dir_data'], 'r')
    llbd, pbd, distcoeff = ('longlat_bounds', 'pix_bounds',
                            'pix_distortion_coefficient')
    #r_moon = 1737.4
    dim = (float(CP['dim']), float(CP['dim']))

    N_matches_tot = 0
    for i in range(CP['start_of_images'],
                   CP['start_of_images'] + CP['n_imgs']):
        id = proc.get_id(i)

        coords = tmt.template_match_t(preds[i])

        # convert, add to master dist
        if len(coords) > 0:

            new_craters_unique = estimate_longlatdiamkm(
                dim, P[llbd][id], P[distcoeff][id][0], coords)
            N_matches_tot += len(coords)

            # Only add unique (non-duplicate) craters
            if len(craters_unique) > 0:
                craters_unique = add_unique_craters(new_craters_unique,
                                                    craters_unique, CP['llt2'],
                                                    CP['rt2'])
            else:
                craters_unique = np.concatenate(
                    (craters_unique, new_craters_unique))

    np.save(CP['dir_result'], craters_unique)
    return craters_unique
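
A minimal usage sketch for the function above, assuming numpy/h5py and the project's own helpers (proc, tmt, get_model_preds, estimate_longlatdiamkm, add_unique_craters) are importable. The CP keys are the ones read in the function body; the concrete values and file paths are hypothetical.

# Usage sketch (hypothetical paths and values; CP keys taken from the function body).
import numpy as np

CP = {
    'dim': 256,                           # pixel dimension of the input images
    'datatype': 'test',                   # dataset name inside the predictions HDF5
    'n_imgs': 30,                         # number of images to process
    'start_of_images': 0,                 # index of the first image
    'llt2': 0.5,                          # long/lat tolerance for duplicate matching
    'rt2': 0.5,                           # radius tolerance for duplicate matching
    'dir_data': 'test_images.hdf5',       # hypothetical path to images + long/lat bounds
    'dir_preds': 'test_preds.hdf5',       # hypothetical path to model predictions
    'dir_result': 'test_craterdist.npy',  # hypothetical output path
}

craters_unique = np.empty([0, 3])         # empty (long, lat, radius) master array
craters_unique = extract_unique_craters(CP, craters_unique)
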
def get_data(CP):
    """Reads in or generates model predictions.

    Parameters
    ----------
    CP : dict
        Containins directory locations for loading data and storing
        predictions.

    Returns
    -------
    craters : h5py
        Model predictions.
    """
    n_imgs, dtype = CP['n_imgs'], CP['datatype']

    data = h5py.File(CP['dir_data'], 'r')

    Data = {
        dtype: [
            data['input_images'][:n_imgs].astype('float32'),
            data['target_masks'][:n_imgs].astype('float32')
        ]
    }
    data.close()
    craters = pd.HDFStore(CP['crater_data'], 'r')
    csvs = []
    minrad, maxrad, cutrad, n_csvs, dim = 3, 50, 0.8, len(craters), 256
    diam = 'Diameter (pix)'
    for i in range(n_csvs):
        csv = craters[proc.get_id(i, 2)]
        # remove small/large/half craters
        csv = csv[(csv[diam] < 2 * maxrad) & (csv[diam] > 2 * minrad)]
        csv = csv[(csv['x'] + cutrad * csv[diam] / 2 <= dim)]
        csv = csv[(csv['y'] + cutrad * csv[diam] / 2 <= dim)]
        csv = csv[(csv['x'] - cutrad * csv[diam] / 2 > 0)]
        csv = csv[(csv['y'] - cutrad * csv[diam] / 2 > 0)]

        if len(csv) < 1:  # Exclude csvs with no craters left after the cuts
            csvs.append([-1])
        else:
            csv_coords = np.asarray((csv['x'], csv['y'], csv[diam] / 2)).T
            csvs.append(csv_coords)
    return Data, csvs
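
To make the filtering step in get_data concrete, here is a small self-contained sketch that applies the same size and edge cuts to a toy DataFrame using the column names above; the numbers are made up for illustration.

# Sketch of the size/edge filter from get_data, on made-up data.
import numpy as np
import pandas as pd

minrad, maxrad, cutrad, dim = 3, 50, 0.8, 256
diam = 'Diameter (pix)'

csv = pd.DataFrame({
    'x': [30.0, 250.0, 128.0],
    'y': [40.0, 128.0, 120.0],
    diam: [20.0, 20.0, 200.0],  # third crater is too large, second spills off the edge
})

# Keep craters with 2*minrad < diameter < 2*maxrad ...
csv = csv[(csv[diam] < 2 * maxrad) & (csv[diam] > 2 * minrad)]
# ... whose cutrad-scaled radius lies fully inside the dim x dim image.
csv = csv[(csv['x'] + cutrad * csv[diam] / 2 <= dim) &
          (csv['y'] + cutrad * csv[diam] / 2 <= dim) &
          (csv['x'] - cutrad * csv[diam] / 2 > 0) &
          (csv['y'] - cutrad * csv[diam] / 2 > 0)]

csv_coords = np.asarray((csv['x'], csv['y'], csv[diam] / 2)).T  # rows of (x, y, radius)
print(csv_coords)  # only the first toy crater survives: [[30. 40. 10.]]
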
Example #4
def get_metrics(data, craters, dim, model, beta=1):
    """Function that prints pertinent metrics at the end of each epoch. 

    Parameters
    ----------
    data : hdf5
        Input images.
    craters : hdf5
        Pandas arrays of human-counted crater data. 
    dim : int
        Dimension of input images (assumes square).
    model : keras model object
        Keras model
    beta : int, optional
        Beta value when calculating F-beta score. Defaults to 1.
    """
    X, Y = data[0], data[1]
    dim = 256  # images here are 256 x 256, overriding the dim argument
    # Get csvs of human-counted craters
    csvs = []
    X = X[1:]
    Y = Y[1:]
    minrad, maxrad, cutrad, n_csvs = 1, 50, 0.8, len(X)
    diam = 'Diameter (pix)'
    for i in range(1, n_csvs):
        try:
            csv = craters[proc.get_id(i, 4)]
        except KeyError:
            csvs.append([-1])
            print('Skipping iteration %d: no craters available in that area' % i)
            continue
        # remove small/large/half craters
        csv = csv[(csv[diam] < 2 * maxrad) & (csv[diam] > 2 * minrad)]
        csv = csv[(csv['x'] + cutrad * csv[diam] / 2 <= dim)]
        csv = csv[(csv['y'] + cutrad * csv[diam] / 2 <= dim)]
        csv = csv[(csv['x'] - cutrad * csv[diam] / 2 > 0)]
        csv = csv[(csv['y'] - cutrad * csv[diam] / 2 > 0)]
        if len(csv) < 1:  # Exclude csvs with no craters left after the cuts
            csvs.append([-1])
        else:
            try:
                csv_coords = np.asarray((csv['x'], csv['y'], csv[diam] / 2)).T
                csvs.append(csv_coords)
            except Exception:
                # Workaround for malformed Mars crater csvs; append the
                # placeholder so csvs stays aligned with the predictions.
                csvs.append([-1])
                print('Skipping iteration %d: malformed crater csv' % i)
    # Calculate custom metrics
    print("")
    print("*********Custom Loss*********")
    recall, precision, fscore = [], [], []
    frac_new, frac_new2, maxrad = [], [], []
    err_lo, err_la, err_r = [], [], []
    frac_duplicates = []
#    print(len(csvs[1]))
#    print(len(csvs[2]))
    preds = model.predict(X)
    for i in range(len(csvs)):
        if len(csvs[i]) < 1:
            continue

        # csvs[i] is the [-1] placeholder when no usable crater csv exists
        # for this image, so skip it.
        if isinstance(csvs[i], list) and csvs[i].count(-1) == 1:
            print('Skipping image %d: no usable crater csv' % i)
            continue

        (N_match, N_csv, N_detect, maxr,
         elo, ela, er, frac_dupes) = tmt.template_match_t2c(preds[i], csvs[i],
                                                            rmv_oor_csvs=0)
        if N_match > 0:
            p = float(N_match) / float(N_match + (N_detect - N_match))
            r = float(N_match) / float(N_csv)
            f = (1 + beta**2) * (r * p) / (p * beta**2 + r)
            diff = float(N_detect - N_match)
            fn = diff / (float(N_detect) + diff)
            fn2 = diff / (float(N_csv) + diff)
            recall.append(r)
            precision.append(p)
            fscore.append(f)
            frac_new.append(fn)
            frac_new2.append(fn2)
            maxrad.append(maxr)
            err_lo.append(elo)
            err_la.append(ela)
            err_r.append(er)
            frac_duplicates.append(frac_dupes)
        else:
            print("skipping iteration %d,N_csv=%d,N_detect=%d,N_match=%d" %
                  (i, N_csv, N_detect, N_match))

    print("binary XE score = %f" % model.evaluate(X, Y))
    if len(recall) > 0:
        print("mean and std of N_match/N_csv (recall) = %f, %f" %
              (np.mean(recall), np.std(recall)))
        print("""mean and std of N_match/(N_match + (N_detect-N_match))
              (precision) = %f, %f""" % (np.mean(precision), np.std(precision)))
        print("mean and std of F_%d score = %f, %f" %
              (beta, np.mean(fscore), np.std(fscore)))
        print("""mean and std of (N_detect - N_match)/N_detect (fraction
              of craters that are new) = %f, %f""" %
              (np.mean(frac_new), np.std(frac_new)))
        print("""mean and std of (N_detect - N_match)/N_csv (fraction of
              "craters that are new, 2) = %f, %f""" %
              (np.mean(frac_new2), np.std(frac_new2)))
        print("median and IQR fractional longitude diff = %f, 25:%f, 75:%f" %
              (np.median(err_lo), np.percentile(err_lo, 25),
               np.percentile(err_lo, 75)))
        print("median and IQR fractional latitude diff = %f, 25:%f, 75:%f" %
              (np.median(err_la), np.percentile(err_la, 25),
               np.percentile(err_la, 75)))
        print("median and IQR fractional radius diff = %f, 25:%f, 75:%f" %
              (np.median(err_r), np.percentile(err_r, 25),
               np.percentile(err_r, 75)))
        print("mean and std of frac_duplicates: %f, %f" %
              (np.mean(frac_duplicates), np.std(frac_duplicates)))
        print("""mean and std of maximum detected pixel radius in an image =
              %f, %f""" % (np.mean(maxrad), np.std(maxrad)))
        print("""absolute maximum detected pixel radius over all images =
              %f""" % np.max(maxrad))
        print("")
def extract_unique_craters(CP, craters_unique):
    """Top level function that extracts craters from model predictions,
    converts craters from pixel to real (degree, km) coordinates, and filters
    out duplicate detections across images.

    Parameters
    ----------
    CP : dict
        Crater Parameters needed to run the code.
    craters_unique : array
        Empty master array of unique crater tuples in the form
        (long, lat, radius).

    Returns
    -------
    craters_unique : array
        Filled master array of unique crater tuples.
    """

    # Load/generate model preds
    try:
        preds = h5py.File(CP['dir_preds'], 'r')[CP['datatype']]
        print("Loaded model predictions successfully")
    except (OSError, KeyError):
        print("Couldn't load model predictions; generating them instead")
        preds = get_model_preds(CP)
    Data, Carters = get_data(CP)
    # needed for long/lat bounds
    P = h5py.File(CP['dir_data'], 'r')
    llbd, pbd, distcoeff = ('longlat_bounds', 'pix_bounds',
                            'pix_distortion_coefficient')
    #r_moon = 1737.4
    dim = (float(CP['dim']), float(CP['dim']))

    N_matches_tot = 0
    if not os.path.exists(CP['result_img']):
        os.mkdir(CP['result_img'])
    lenstr = ""
    lenstr1 = "true_carter"
    lenstr2 = "detect_carter"
    lenstr3 = "undetected_carter"
    num = 0
    num1 = 0
    num2 = 0
    num3 = 0
    for i in range(CP['n_imgs']):
        id = proc.get_id(i, 2)
        print("Drawing picture:%d" % i)
        input_images = Data[CP['datatype']][0][i]
        imgs = Image.fromarray(input_images.astype('uint8')).convert('RGB')
        img = cv2.cvtColor(np.asarray(imgs), cv2.COLOR_RGB2BGR)

        coords = tmt.template_match_t(preds[i])
        num = num + len(coords)
        lenstr = lenstr + " " + str(len(coords))
        matplotlib.image.imsave(CP['result_img'] + "/" + str(i) + '_mask.jpg',
                                preds[i])
        true_carter, detect_carter, Undetected_carter = get_coords_classification(
            coords, Carters[i])
        lenstr1 = lenstr1 + " " + str(len(true_carter))
        num1 = num1 + len(true_carter)
        lenstr2 = lenstr2 + " " + str(len(detect_carter))
        num2 = num2 + len(detect_carter)
        lenstr3 = lenstr3 + " " + str(len(Undetected_carter))
        num3 = num3 + len(Undetected_carter)
        draw_pic(img, coords, Carters[i],
                 CP['result_img'] + "/" + str(i) + '.jpg')
        if len(coords) > 0:
            # for i in range(len(coords)):
            new_craters_unique = estimate_longlatdiamkm(
                dim, P[llbd][id], P[distcoeff][id][0], coords)
            N_matches_tot += len(coords)
            #print(id,new_craters_unique)
            # Only add unique (non-duplicate) craters
            if len(craters_unique) > 0:
                craters_unique = add_unique_craters(new_craters_unique,
                                                    craters_unique, CP['llt2'],
                                                    CP['rt2'])
            else:
                craters_unique = np.concatenate(
                    (craters_unique, new_craters_unique))
    print(lenstr)
    print("total num:%d" % num)
    print(lenstr1)
    print(num1)
    print(lenstr2)
    print(num2)
    print(lenstr3)
    print(num3)
    np.save(CP['dir_result'], craters_unique)
    return craters_unique
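
As with Example #1, a usage sketch for this visualising variant; it additionally needs 'crater_data' (the human-counted crater HDFStore read by get_data) and 'result_img' (the directory for the per-image plots), and assumes PIL, cv2, matplotlib and the project helpers (get_coords_classification, draw_pic, etc.) are available. All concrete paths and values are hypothetical.

# Usage sketch (hypothetical paths and values).
import numpy as np

CP = {
    'dim': 256,
    'datatype': 'test',
    'n_imgs': 30,
    'llt2': 0.5,
    'rt2': 0.5,
    'dir_data': 'test_images.hdf5',       # hypothetical path
    'dir_preds': 'test_preds.hdf5',       # hypothetical path
    'dir_result': 'test_craterdist.npy',  # hypothetical path
    'crater_data': 'test_craters.hdf5',   # hypothetical path, read by get_data
    'result_img': 'result_images',        # directory created for the per-image plots
}

craters_unique = extract_unique_craters(CP, np.empty([0, 3]))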