Example #1
import os
import numpy as np
import h5py
from shutil import copyfile
from icesatReader import get_atl03_struct, get_atl08_struct
# create_atl08_bin is assumed to be provided by the same icesatReader-based
# toolkit; its module is not shown in these examples.


def main(args):
    print(args.atl08)
    atl08file = os.path.basename(args.atl08[0])
    newfilename = 'ATL08_30m_' + atl08file.split('ATL08_')[1]
    newatl08file = os.path.join(args.outdir[0], newfilename)

    copyfile(args.atl08[0], newatl08file)
    h5f = h5py.File(newatl08file, 'a')
    gt_list = ['gt1r', 'gt1l', 'gt2r', 'gt2l', 'gt3r', 'gt3l']
    for gt in gt_list:
        base_key = gt + '/land_segments/30m_segment/'
        print('ATL03 Heights')
        atl03 = get_atl03_struct(args.atl03[0], gt, args.atl08[0])

        print('ATL08 Land Segments')
        atl08 = get_atl08_struct(args.atl08[0], gt, atl03)

        print('Match ATL08 to ATL03 by segment')
        upsampled_atl08_bin = create_atl08_bin(atl03, atl08, res_at=30)

        print('Append Data to ATL08')
        all_cols = upsampled_atl08_bin.columns

        for col in all_cols:
            h5f[base_key + col] = np.array(upsampled_atl08_bin[col])

    h5f.close()
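A minimal driver sketch for the main() above, assuming the script is meant to be run from the command line; the argument names match the attributes the function reads, and nargs=1 is an assumption made so the args.atl03[0]-style indexing works.

if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        description='Copy an ATL08 file and append upsampled 30 m segment data.')
    parser.add_argument('--atl03', nargs=1, required=True, help='input ATL03 .h5 file')
    parser.add_argument('--atl08', nargs=1, required=True, help='input ATL08 .h5 file')
    parser.add_argument('--outdir', nargs=1, required=True, help='output directory')
    main(parser.parse_args())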
Example #2
def copy_and_append_atl08(atl03filepath, atl08filepath, out_dir, in_res=30):
    atl08file = os.path.basename(atl08filepath)
    newatl08file = os.path.join(out_dir, atl08file)

    copyfile(atl08filepath, newatl08file)
    h5f = h5py.File(newatl08file, 'a')
    gt_list = ['gt1r', 'gt1l', 'gt2r', 'gt2l', 'gt3r', 'gt3l']
    for gt in gt_list:
        base_key = gt + '/land_segments/30m_segment/'
        print('ATL03 Heights')
        atl03 = get_atl03_struct(atl03filepath, gt, atl08filepath)

        print('ATL08 Land Segments')
        atl08 = get_atl08_struct(atl08filepath, gt, atl03)

        print('Match ATL08 to ATL03 by segment')
        upsampled_atl08_bin = create_atl08_bin(atl03, atl08, res_at=in_res)

        print('Append Data to ATL08')
        all_cols = upsampled_atl08_bin.columns

        for col in all_cols:
            h5f[base_key + col] = np.array(upsampled_atl08_bin[col])

    h5f.close()
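A short usage sketch for copy_and_append_atl08(); the paths below are placeholders, and the ATL03/ATL08 granules are assumed to come from the same orbit and release.

atl03filepath = '/path/to/ATL03_20181118120428_07770103_002_01.h5'  # placeholder
atl08filepath = '/path/to/ATL08_20181118120428_07770103_002_01.h5'  # placeholder
out_dir = '/path/to/output/'                                        # placeholder
copy_and_append_atl08(atl03filepath, atl08filepath, out_dir, in_res=30)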
Example #3
        # 	continue
        # file_03 = file_03_test[0]
        print('warning: using alt file_03')
        print(file_03)
        print(file_08)

    # if file_03 != '/bigtex_data/data/release/002/ATL03_r002/Finland/ATL03_20181019013011_03120105_002_01.h5':
    # 	continue

    if gt_03 != gt_08:
        print('gt_03 != gt_08')
        print(file_03)
        print(file_08)
        continue

    atl03 = icesatReader.get_atl03_struct(file_03, gt, file_08)
    atl03_g = icesatReader.read_atl03_geolocation(file_03, gt)
    atl08 = icesatReader.get_atl08_struct(file_08, gt, atl03)
    # atl08 = icesatReader.get_atl08_struct(file_08, gt)

    # fp = h5.File(file_08, 'r')
    # delta_time_08_h5 = np.array(fp[gt + '/land_segments/delta_time'])
    # t_08_h5 = delta_time_08_h5 - delta_time_08_h5[0]
    # fp.close()

    seg_id_beg_08 = np.array(atl08.df.segment_id_beg).astype(int)
    seg_id_end_08 = np.array(atl08.df.segment_id_end).astype(int)

    t = np.array(atl03.df.time).astype(float)
    # t_08 = np.array(atl08.df.time).astype(float)
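A minimal sketch of one way the arrays above could be used, assuming the geolocation frame carries a segment_id column (as in the later examples) and that segment_id_beg increases monotonically: map each 20 m geolocation segment to the ATL08 land segment whose [segment_id_beg, segment_id_end] range contains it.

    seg_id = np.array(atl03_g.segment_id).astype(int)
    idx = np.searchsorted(seg_id_beg_08, seg_id, side='right') - 1
    idx = np.clip(idx, 0, len(seg_id_beg_08) - 1)
    in_segment = (seg_id >= seg_id_beg_08[idx]) & (seg_id <= seg_id_end_08[idx])
    atl08_index = np.where(in_segment, idx, -1)  # -1 marks records outside any ATL08 land segment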
Example #4
    atl08file = 'ATL08_20181118120428_07770103_002_01.h5'

    # Inputs
    atl03filepath = basepath03 + atl03file
    atl08filepath = basepath08 + atl08file
    gt = 'gt1r'

    header_file_path =\
        '/LIDAR/server/USERS/eric/1_experiment/Finland_HeaderData.mat'

    kml_bounds_txt1 = '/LIDAR/server/USERS/eric/2_production/kmlBounds.txt'

    print('Generate ATL03 Struct')
    atl03 = get_atl03_struct(atl03filepath,
                             gt,
                             atl08filepath,
                             epsg='32635',
                             kml_bounds_txt=kml_bounds_txt1,
                             header_file_path=header_file_path)

    atl03.df = atl03.df[atl03.df['time'] < 12]

    df, rotation_data = get_atl_alongtrack(atl03.df)

    atl03.df = df
    atl03.rotationData = rotation_data

    print('Convert Struct to Legacy')
    atl03legacy, rotationData, headerData = convert_atl03_to_legacy(atl03)

    # Legacy Truth Swath Inputs
    buffer = 50  # Distance in cross-track (meters) around ATL03 track to look for truth data
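A sketch of how the legacy structs and the buffer above could feed the truth-swath extraction, following the getAtlTruth call signature used in the segment_analysis example later in this section; the directories and flags are placeholders.

    useExistingTruth = False                      # reuse an existing truth swath if available
    createTruthFile = True                        # write the output truth .las file
    truthSwathDir = '/path/to/truth/las'          # placeholder
    outFilePath_truth = '/path/to/output/truth'   # placeholder

    atlTruthData = getAtlTruth(atl03legacy, headerData, rotationData,
                               useExistingTruth, truthSwathDir, buffer,
                               outFilePath_truth, createTruthFile)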
Example #5
    if overwrite_h5:
        ovrh5 = True
        # per ground track

    # if 1:
    #     gt = f_debug[f]
    for gt in gt_all:
        # if gt != 'gt1r':
        #   continue

        print(gt)

        try:
            # reads and classifies photons, if valid
            atl03 = ir.get_atl03_struct(file_03, gt, file_08)
        except (KeyError, UnboundLocalError) as e:
            print(file_03)
            print('warning:', e)
            # usually when "heights" dataset isn't found, or atl file is corrupted
            continue

        if not atl03.dataIsMapped:
            print('warning: data not mapped')
            # cannot do analysis without classified photons
            continue

        df_03 = atl03.df

        df1 = df_03[df_03.classification == 1] # ground
        df2 = df_03[df_03.classification == 2] # low veg
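A short follow-on sketch: counting photons per class for the ground track, using the same class codes the other examples use (-1 unclassified, 0 noise/DRAGANN, 1 ground, 2 canopy, 3 high canopy).

        df3 = df_03[df_03.classification == 3]  # high canopy
        class_counts = df_03.classification.value_counts().sort_index()
        print(gt, 'photons per class:')
        print(class_counts)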
Example #6
def segment_analysis(header_file_path, kml_bounds_txt1, truthSwathDir,
                     outFilePath_truth, outFilePath_corrected, epsg_str,
                     atl03filepath, atl08filepath, gt, min_time, max_time,
                     outfolder):

    # header_file_path =\
    #     '/LIDAR/server/USERS/eric/1_experiment/Finland_HeaderData.mat'

    # kml_bounds_txt1 = '/LIDAR/server/USERS/eric/2_production/kmlBounds.txt'
    # epsg_str = '32635'
    # Read ATL03 Struct
    print('Generate ATL03 Struct')
    atl03 = get_atl03_struct(atl03filepath,
                             gt,
                             atl08filepath,
                             epsg=epsg_str,
                             kml_bounds_txt=kml_bounds_txt1,
                             header_file_path=header_file_path)

    # Read ATL03 Geolocation Subgroup as DF
    print('Read Geolocation Subgroup')
    geolocation = read_atl03_geolocation(atl03filepath, gt)
    atl03.df = append_atl03_geolocation(atl03.df,
                                        geolocation,
                                        fields=['segment_id'])
    geolocation, gl_rotation_data, gl_epsg = match_atl_to_atl03(
        geolocation, atl03)

    # Trim Data by Time
    print('Trim Data by Time')
    atl03.df = atl03.df[atl03.df['time'] < max_time]
    geolocation = geolocation[geolocation['time'] < max_time]

    atl03.df = atl03.df[atl03.df['time'] > min_time]
    geolocation = geolocation[geolocation['time'] > min_time]

    atl03.df = atl03.df.reset_index(drop=True)
    geolocation = geolocation.reset_index(drop=True)

    # Recalculate alongtrack/crosstrack for shortened granule
    atl03.df, rotation_data = get_atl_alongtrack(atl03.df)
    atl03.rotationData = rotation_data

    # "Match" the geolocation df to the ATL03 struct
    print('Match Geolocation to ATL03')

    geolocation, gl_rotation_data, gl_epsg = match_atl_to_atl03(
        geolocation, atl03)
    # Convert the ATL03 Struct to the legacy ATL03 Struct
    print('Convert Struct to Legacy')
    atl03legacy, rotationData, headerData = convert_atl03_to_legacy(atl03)

    # Legacy Truth Swath Inputs
    buffer = 50  # Distance in cross-track (meters) around ATL03 track
    useExistingTruth = False  # Option to use existing truth data if it exists
    # truthSwathDir = '/laserpewpew/data/validation/data/Finland/LAS_UTM'
    # outFilePath_truth = '/LIDAR/server/USERS/eric/1_experiment/finland_analysis/' +\
    #     'las/truth'
    createTruthFile = True  # Option to create output truth .las file

    # Call getAtlTruthSwath (with ACE)
    print('Run Legacy Truth Swath')
    try:
        timeStart = time.time()
        atlTruthData = getAtlTruth(atl03legacy, headerData, rotationData,
                                   useExistingTruth, truthSwathDir, buffer,
                                   outFilePath_truth, createTruthFile)

        atlTruthData.classification[atlTruthData.classification == 3] = 4
        atlTruthData.classification[atlTruthData.classification == 5] = 4

        timeEnd = time.time()
        timeElapsedTotal = timeEnd - timeStart
        timeElapsedMin = np.floor(timeElapsedTotal / 60)
        timeElapsedSec = timeElapsedTotal % 60
        print('   Script Completed in %d min %d sec.' %
              (timeElapsedMin, timeElapsedSec))
        print('\n')

        # outFilePath_corrected = '/LIDAR/server/USERS/eric/1_experiment/' +\
        #     'finland_analysis/las/truth_corrected'

        print('Run Legacy Offset Correction')
        atlCorrections = legacy_get_meas_error(atl03legacy, atlTruthData,
                                               rotationData,
                                               outFilePath_corrected)

        # Apply ATLCorrections to the Geolocation
        geolocation.alongtrack = geolocation.alongtrack +\
            atlCorrections.alongTrack
        geolocation.crosstrack = geolocation.crosstrack +\
            atlCorrections.crossTrack

        # Apply ATLCorrections to ATL03 Legacy
        atl03legacy.alongTrack = atl03legacy.alongTrack +\
            atlCorrections.alongTrack
        atl03legacy.crossTrack = atl03legacy.crossTrack +\
            atlCorrections.crossTrack
        atl03legacy.z = atl03legacy.z +\
            atlCorrections.z

        # Apply ATLCorrections to ATL03 DF
        atl03.df.alongtrack = atl03.df.alongtrack +\
            atlCorrections.alongTrack
        atl03.df.crosstrack = atl03.df.crosstrack +\
            atlCorrections.crossTrack
        atl03.df.h_ph = atl03.df.h_ph +\
            atlCorrections.z

        # Run Superfilter Legacy
        superTruth, sortedMeasured = superFilter(atl03legacy,
                                                 atlTruthData,
                                                 xBuf=5.5,
                                                 classCode=[])

        # Run Perfect Classifier Legacy and assign to ATL03
        truthgroundclass = 2
        truthcanopyclass = [3, 4, 5]
        unclassedlist = [6, 9, 13, 18]
        measpc, measoc = perfectClassifier(sortedMeasured,
                                           superTruth,
                                           ground=[truthgroundclass],
                                           canopy=truthcanopyclass,
                                           unclassed=unclassedlist,
                                           keepsize=True)

        # Sort ATL03 by Along Track

        ##Create new column for Index1
        print('Sort Alongtrack')
        atl03.df['index1'] = atl03.df.index

        ##Sort by Along-track
        atl03.df = atl03.df.sort_values(by=['alongtrack'])

        ##Reset Index
        atl03.df = atl03.df.reset_index(drop=True)

        ##Join PC to DF
        atl03.df = pd.concat(
            [atl03.df,
             pd.DataFrame(measpc, columns=['perfect_class'])],
            axis=1)
        atl03.df = pd.concat(
            [atl03.df,
             pd.DataFrame(measoc, columns=['generic_class'])],
            axis=1)

        ##Sort by Index1
        atl03.df = atl03.df.sort_values(by=['index1'])

        ##Reset Index
        atl03.df = atl03.df.reset_index(drop=True)

        ##Remove Index1
        atl03.df = atl03.df.drop(columns=['index1'])

        # Read ATL09
        # atl09 = get_atl09_struct(atl09filepath, gt, atl03)

        # df_seg = geolocation.merge(atl09.df, on="segment_id",how='left')
        # df_seg = df_seg.fillna(method='ffill',limit=14)

        # Assign Segment ID Values to Truth Data
        seg_id_truth, include = estimate_segment_id_legacy(
            geolocation, gt, superTruth)

        # Calculate Error and Metrics

        ##Filter Truth Data by Include
        alongtrack = superTruth.alongTrack.flatten()
        crosstrack = superTruth.crossTrack.flatten()
        z = superTruth.z.flatten()
        easting = superTruth.easting.flatten()
        northing = superTruth.northing.flatten()
        classification = superTruth.classification.flatten()
        intensity = superTruth.intensity.flatten()

        alongtrack = alongtrack[include == 1]
        crosstrack = crosstrack[include == 1]
        z = z[include == 1]
        easting = easting[include == 1]
        northing = northing[include == 1]
        classification = classification[include == 1]
        intensity = intensity[include == 1]
        seg_id_truth = seg_id_truth[include == 1]
        truth_flag = True
    except Exception as e:
        print('Truth failed, continuing with rest of code:', e)
        atl03.df['perfect_class'] = np.nan
        atl03.df['generic_class'] = np.nan

        truth_flag = False

    ##Create ATLTruth DF
    if truth_flag:
        df_truth = pd.DataFrame(z, columns=['z'])
        df_truth = pd.concat(
            [df_truth,
             pd.DataFrame(crosstrack, columns=['crosstrack'])],
            axis=1)
        df_truth = pd.concat(
            [df_truth,
             pd.DataFrame(alongtrack, columns=['alongtrack'])],
            axis=1)
        df_truth = pd.concat(
            [df_truth, pd.DataFrame(easting, columns=['easting'])], axis=1)
        df_truth = pd.concat(
            [df_truth, pd.DataFrame(northing, columns=['northing'])], axis=1)
        df_truth = pd.concat([
            df_truth,
            pd.DataFrame(classification, columns=['classification'])
        ],
                             axis=1)
        # df_truth = pd.concat([df_truth,pd.DataFrame(
        # intensity,columns=['intensity'])],axis=1)
        df_truth = pd.concat(
            [df_truth,
             pd.DataFrame(seg_id_truth, columns=['segment_id'])],
            axis=1)

    ##Find ATL08 Segment Range

    ###Read ATL08
    atl08 = get_atl08_struct(atl08filepath, gt, atl03)

    atl08.df = atl08.df[atl08.df['time'] <= (max_time - min_time)]
    atl08.df = atl08.df[atl08.df['time'] >= 0]
    atl08.df = atl08.df.reset_index(drop=True)

    atl08.df, atl08_rotation_data, atl08_epsg = match_atl_to_atl03(
        atl08.df, atl03)

    atl03.df = atl03.df.reset_index(drop=True)
    geolocation = geolocation.reset_index(drop=True)

    ###Get ATL08 Keys
    atl08_seg = np.array(atl08.df.segment_id_beg)
    seg_id = np.array(geolocation.segment_id)
    atl08_key_df = pd.DataFrame(atl08_seg, columns=['segment_id'])
    atl08_key_df = pd.concat(
        [atl08_key_df,
         pd.DataFrame(atl08_seg, columns=['segment_id_beg'])],
        axis=1)
    atl08_key_df = pd.concat(
        [atl08_key_df,
         pd.DataFrame(atl08_seg, columns=['seg_id'])], axis=1)

    key_df = pd.DataFrame(seg_id, columns=['segment_id'])
    key_df = key_df.merge(atl08_key_df, on="segment_id", how='left')
    key_df = key_df.fillna(method='ffill', limit=4)

    max_seg = max(geolocation.segment_id)
    min_seg = min(geolocation.segment_id)

    key_df = key_df[key_df['segment_id'] <= max_seg]
    key_df = key_df[key_df['segment_id'] >= min_seg]

    ###Merge Geolocation/ATL09
    # df_atl09 = df_seg.merge(key_df, on="segment_id", how="left")

    ###Merge ATL08 Keys to Truth
    if truth_flag:
        df_truth = df_truth.merge(key_df, on="segment_id", how="left")

    ###Merge ATL08 Keys to ATL03
    df_atl03 = atl03.df.merge(key_df, on="segment_id", how="left")

    # Calculate Metrics

    # Assign Geolocation/ATL09

    # zgroup = df_atl09.groupby('segment_id_beg')
    # zout = zgroup.aggregate(pd.Series.mode)
    # zout = zgroup.aggregate(np.median)
    # zout['segment_id_beg'] = zout.index
    # zout = zout.reset_index(drop = True)
    # zout['segment_id_beg'] = zout['seg_id']
    # df_out = df_out.merge(zout, on="segment_id_beg",how='left')
    # return df_out
    # zout = zout.reset_index().T.drop_duplicates().T
    # atl09_list = ['aclr_true','apparent_surf_reflec','backg_c',
    #               'backg_theoret','beam_azimuth','beam_elevation','segment_id_beg']
    # zout = zout.filter(['segment_id_beg'])

    # zout2 = zout2.filter([outfield,'segment_id_beg'])
    # df_test3 = atl08.df.merge(zout, on="segment_id_beg",how='left')

    # Assign Tif
    ##Cornie
    cornie = '/LIDAR/server/USERS/eric/1_experiment/global_products/' +\
        'Corine_LandCover_europe/cornie_landcover_finland_UTM.tif'
    data, epsg, ulx, uly, resx, resy = read_geotiff(cornie)

    x = np.array(atl08.df.easting)
    y = np.array(atl08.df.northing)
    result = find_intersecting_values(x, y, data, ulx, uly, resx, resy)

    atl08.df['Corine_LC'] = result

    ##Forest Canopy Height
    simard = '/LIDAR/server/USERS/eric/1_experiment/global_products/' +\
        'Corine_LandCover_europe/Simard_Forest_Height_Finland_UTM_auto.tif'
    data, epsg, ulx, uly, resx, resy = read_geotiff(simard)

    x = np.array(atl08.df.easting)
    y = np.array(atl08.df.northing)
    result = find_intersecting_values(x, y, data, ulx, uly, resx, resy)

    atl08.df['Simard_Forest_Height'] = result

    ##Truth Metrics
    ###Truth Ground Median
    print('Apply metrics')
    if truth_flag:
        atl08.df = calculate_seg_meteric(df_truth, atl08.df, [2], np.median,
                                         'z', 'truth_ground_median')

        ###Truth Canopy Max 98
        atl08.df = calculate_seg_meteric(df_truth, atl08.df, [4], get_max98,
                                         'z', 'truth_canopy_max98')

        ##Measured Metrics
        ###ATL03 Ground Median
        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [1],
                                         np.median,
                                         'h_ph',
                                         'atl03_ground_median',
                                         classfield='generic_class')

        ###ATL03 Canopy Max 98
        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [2],
                                         get_max98,
                                         'h_ph',
                                         'atl03_canopy_max98',
                                         classfield='generic_class')
    else:
        atl08.df['truth_ground_median'] = np.nan
        atl08.df['truth_canopy_max98'] = np.nan
        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [1],
                                         np.median,
                                         'h_ph',
                                         'atl03_ground_median',
                                         classfield='classification')

        ###ATL03 Canopy Max 98
        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [2, 3],
                                         get_max98,
                                         'h_ph',
                                         'atl03_canopy_max98',
                                         classfield='classification')

    ##Perfect Classifier Metrics
    ###ATL03 Ground Median
    if truth_flag:
        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [1],
                                         np.median,
                                         'h_ph',
                                         'pc_ground_median',
                                         classfield='perfect_class')

        ###ATL03 Canopy Max 98
        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [2],
                                         get_max98,
                                         'h_ph',
                                         'pc_canopy_max98',
                                         classfield='perfect_class')

        ##Truth Size
        atl08.df = calculate_seg_meteric(df_truth, atl08.df, [2], np.size, 'z',
                                         'truth_n_ground')

        atl08.df = calculate_seg_meteric(df_truth, atl08.df, [4], np.size, 'z',
                                         'truth_n_canopy')

        atl08.df = calculate_seg_meteric(df_truth, atl08.df, [0], np.size, 'z',
                                         'truth_n_unclassed')
    else:
        atl08.df['pc_ground_median'] = np.nan
        atl08.df['pc_canopy_max98'] = np.nan
        atl08.df['truth_n_ground'] = np.nan
        atl08.df['truth_n_canopy'] = np.nan
        atl08.df['truth_n_unclassed'] = np.nan

    ##ATL03 Size
    atl08.df = calculate_seg_meteric(df_atl03, atl08.df, [-1], np.size, 'h_ph',
                                     'atl03_n_unclassified')

    atl08.df = calculate_seg_meteric(df_atl03, atl08.df, [0], np.size, 'h_ph',
                                     'atl03_n_draggan')

    atl08.df = calculate_seg_meteric(df_atl03, atl08.df, [1], np.size, 'h_ph',
                                     'atl03_n_ground')

    atl08.df = calculate_seg_meteric(df_atl03, atl08.df, [2], np.size, 'h_ph',
                                     'atl03_n_canopy')

    atl08.df = calculate_seg_meteric(df_atl03, atl08.df, [3], np.size, 'h_ph',
                                     'atl03_n_high_canopy')

    ##GC Size
    if truth_flag:
        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [0],
                                         np.size,
                                         'h_ph',
                                         'gc_n_unclassified',
                                         classfield='generic_class')

        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [1],
                                         np.size,
                                         'h_ph',
                                         'gc_n_ground',
                                         classfield='generic_class')

        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [2],
                                         np.size,
                                         'h_ph',
                                         'gc_n_canopy',
                                         classfield='generic_class')
    else:
        atl08.df = calculate_seg_meteric(df_atl03, atl08.df, [-1, 0], np.size,
                                         'h_ph', 'gc_n_unclassified')

        atl08.df = calculate_seg_meteric(df_atl03, atl08.df, [1], np.size,
                                         'h_ph', 'gc_n_ground')

        atl08.df = calculate_seg_meteric(df_atl03, atl08.df, [2, 3], np.size,
                                         'h_ph', 'gc_n_canopy')

    ##PC Size
    if truth_flag:
        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [0],
                                         np.size,
                                         'h_ph',
                                         'pc_n_unclassified',
                                         classfield='perfect_class')

        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [1],
                                         np.size,
                                         'h_ph',
                                         'pc_n_ground',
                                         classfield='perfect_class')

        atl08.df = calculate_seg_meteric(df_atl03,
                                         atl08.df, [2],
                                         np.size,
                                         'h_ph',
                                         'pc_n_canopy',
                                         classfield='perfect_class')
    else:
        atl08.df['pc_n_unclassified'] = np.nan
        atl08.df['pc_n_ground'] = np.nan
        atl08.df['pc_n_canopy'] = np.nan

    ##GC Unique Time
    atl08.df = calculate_seg_meteric(df_atl03,
                                     atl08.df, [0],
                                     get_len_unique,
                                     'time',
                                     'gc_nshots_unclassed',
                                     classfield='generic_class')

    atl08.df = calculate_seg_meteric(df_atl03,
                                     atl08.df, [1],
                                     get_len_unique,
                                     'time',
                                     'gc_nshots_ground',
                                     classfield='generic_class')

    atl08.df = calculate_seg_meteric(df_atl03,
                                     atl08.df, [2],
                                     get_len_unique,
                                     'time',
                                     'gc_nshots_canopy',
                                     classfield='generic_class')

    if truth_flag:
        atl08.df['alongtrackoffset'] = float(atlCorrections.alongTrack)
        atl08.df['crosstrackoffset'] = float(atlCorrections.crossTrack)
        atl08.df['zoffset'] = float(atlCorrections.z)
    else:
        atl08.df['alongtrackoffset'] = np.nan
        atl08.df['crosstrackoffset'] = np.nan
        atl08.df['zoffset'] = np.nan

    # ATL03
    outfilename = outfolder + '/mat/atl03/' + atl03.atlFileName + '_' + gt + "_" +\
        str(min_time) + '_' + str(max_time) + '.mat'
    convert_df_to_mat(atl03.df, outfilename)

    outfilename = outfolder + '/pkl/atl03/' + atl03.atlFileName + '_' + gt + "_" +\
        str(min_time) + '_' + str(max_time) + '.pkl'
    atl03.df.to_pickle(outfilename)

    # ATL08
    outfilename = outfolder + '/mat/atl08/' + atl08.atlFileName + '_' + gt + "_" +\
        str(min_time) + '_' + str(max_time) + '.mat'
    convert_df_to_mat(atl08.df, outfilename)

    outfilename = outfolder + '/csv/atl08/' + atl08.atlFileName + '_' + gt + "_" +\
        str(min_time) + '_' + str(max_time) + '.csv'
    atl08.df.to_csv(outfilename)

    outfilename = outfolder + '/pkl/atl08/' + atl08.atlFileName + '_' + gt + "_" +\
        str(min_time) + '_' + str(max_time) + '.pkl'
    atl08.df.to_pickle(outfilename)

    # Truth
    if truth_flag:
        truth_file = atl03.atlFileName.split('ATL03_')[1]
        outfilename = outfolder + '/mat/truth/' + "truth_" + truth_file +\
            '_' + gt + "_" + str(min_time) + '_' + str(max_time) + '.mat'
        convert_df_to_mat(df_truth, outfilename)

        outfilename = outfolder + '/pkl/truth/' + "truth_" + truth_file +\
            '_' + gt + "_" + str(min_time) + '_' + str(max_time) + '.pkl'
        df_truth.to_pickle(outfilename)

        outfilename = atl03.atlFileName + '_' + gt + "_" + str(min_time) +\
            '_' + str(max_time)
        generate_atl03_truth_plot(atl03, outfolder, outfilename, df_truth)
        outfilename = atl08.atlFileName + '_' + gt + "_" + str(min_time) +\
            '_' + str(max_time)
        generate_atl08_truth_plot(atl03, atl08, outfolder, outfilename,
                                  df_truth)
    else:
        outfilename = atl03.atlFileName + '_' + gt + "_" + str(min_time) +\
            '_' + str(max_time)
        generate_atl03_plot(atl03, outfolder, outfilename)

        outfilename = atl08.atlFileName + '_' + gt + "_" + str(min_time) +\
            '_' + str(max_time)
        generate_atl08_plot(atl03, atl08, outfolder, outfilename)
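A usage sketch for segment_analysis(); every path below is a placeholder, the EPSG code and the 0–12 s window mirror values used in the other examples, and outfolder is assumed to already contain the mat/, pkl/, and csv/ subdirectories the function writes into.

segment_analysis(header_file_path='/path/to/Finland_HeaderData.mat',
                 kml_bounds_txt1='/path/to/kmlBounds.txt',
                 truthSwathDir='/path/to/truth/las',
                 outFilePath_truth='/path/to/las/truth',
                 outFilePath_corrected='/path/to/las/truth_corrected',
                 epsg_str='32635',
                 atl03filepath='/path/to/ATL03_20181118120428_07770103_002_01.h5',
                 atl08filepath='/path/to/ATL08_20181118120428_07770103_002_01.h5',
                 gt='gt1r',
                 min_time=0,
                 max_time=12,
                 outfolder='/path/to/output')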
Example #7
    from icesatReader import get_atl03_struct
    from icesatReader import get_atl08_struct

    from shutil import copyfile
    import h5py
    import numpy as np

    # Copy the source ATL08 file, then append the upsampled 30 m segment data
    # to the copy (stored under e.g. gt1l/land_segments/30m_segment/).
    copyfile(atl08filepath, newatl08file)
    h5f = h5py.File(newatl08file, 'a')

    gt_list = ['gt1r', 'gt1l', 'gt2r', 'gt2l', 'gt3r', 'gt3l']
    for gt in gt_list:
        base_key = gt + '/land_segments/30m_segment/'
        print('ATL03 Heights')
        atl03 = get_atl03_struct(atl03filepath, gt, atl08filepath)

        print('ATL08 Land Segments')
        atl08 = get_atl08_struct(atl08filepath, gt, atl03)

        print('Match ATL08 to ATL03 by segment')
        upsampled_atl08_bin = create_atl08_bin(atl03, atl08, res_at=30)

        all_cols = upsampled_atl08_bin.columns

        for col in all_cols:
            h5f[base_key + col] = np.array(upsampled_atl08_bin[col])

    h5f.close()