# Example #1
# Shared imports assumed by the excerpts below; project-specific helpers
# (create_filename_output, create_nc_history_list, SHIP_CALL_SIGN_LIST,
# get_imos_parameter_info, generate_netcdf_att, get_relevant_timestamps,
# get_axes, fill_cube_utm, LOGGER and the gb module) are assumed to be
# defined elsewhere in the surrounding projects. The disabled plotting blocks
# additionally assume matplotlib and Basemap.
import datetime
import difflib
import logging
import os

import numpy as np
from netCDF4 import Dataset, date2num, num2date


def generate_xbt_nc(gatts, data, annex, output_folder):
    """Create an XBT profile NetCDF file and return its path."""
    netcdf_filepath = os.path.join(
        output_folder, "%s.nc" % create_filename_output(gatts, data))
    LOGGER.info('Creating output %s' % netcdf_filepath)

    output_netcdf_obj = Dataset(netcdf_filepath, "w", format="NETCDF4")
    # set global attributes
    for gatt_name, gatt_value in gatts.items():
        setattr(output_netcdf_obj, gatt_name, gatt_value)

    history_att = create_nc_history_list(annex)
    if history_att != '':
        setattr(output_netcdf_obj, 'history', history_att)

    # this will overwrite the value found in the original NetCDF file
    ships = SHIP_CALL_SIGN_LIST
    close_matches = difflib.get_close_matches(
        gatts['Platform_code'], ships, n=1, cutoff=0.8)
    if gatts['Platform_code'] in ships:
        output_netcdf_obj.ship_name = ships[gatts['Platform_code']]
        output_netcdf_obj.Callsign = gatts['Platform_code']
    elif close_matches:
        output_netcdf_obj.Callsign = close_matches[0]
        output_netcdf_obj.Platform_code = output_netcdf_obj.Callsign
        output_netcdf_obj.ship_name = ships[output_netcdf_obj.Callsign]
        LOGGER.warning(
            'Vessel call sign %s seems to be wrong. Using its closest match in the AODN vocabulary: %s'
            % (gatts['Platform_code'], output_netcdf_obj.Callsign))
    else:
        LOGGER.warning(
            'Vessel call sign %s is unknown in the AODN vocabulary. Please contact [email protected]'
            % gatts['Platform_code'])

    # use UTC so the trailing 'Z' designator in the timestamp is accurate
    output_netcdf_obj.date_created = datetime.datetime.utcnow().strftime(
        "%Y-%m-%dT%H:%M:%SZ")
    output_netcdf_obj.geospatial_vertical_min = min(data['DEPTH'])
    output_netcdf_obj.geospatial_vertical_max = max(data['DEPTH'])
    output_netcdf_obj.geospatial_lat_min = data['LATITUDE']
    output_netcdf_obj.geospatial_lat_max = data['LATITUDE']
    output_netcdf_obj.geospatial_lon_min = data['LONGITUDE']
    output_netcdf_obj.geospatial_lon_max = data['LONGITUDE']
    output_netcdf_obj.time_coverage_start = data['TIME'].strftime(
        '%Y-%m-%dT%H:%M:%SZ')
    output_netcdf_obj.time_coverage_end = data['TIME'].strftime(
        '%Y-%m-%dT%H:%M:%SZ')

    output_netcdf_obj.createDimension('DEPTH', len(data['DEPTH']))
    output_netcdf_obj.createVariable("DEPTH", "f", "DEPTH")
    output_netcdf_obj.createVariable('DEPTH_quality_control', "b", "DEPTH")

    var_time = output_netcdf_obj.createVariable(
        "TIME", "d", fill_value=get_imos_parameter_info('TIME', '_FillValue'))
    output_netcdf_obj.createVariable("TIME_quality_control",
                                     "b",
                                     fill_value=99)

    output_netcdf_obj.createVariable("LATITUDE",
                                     "f",
                                     fill_value=get_imos_parameter_info(
                                         'LATITUDE', '_FillValue'))
    output_netcdf_obj.createVariable("LATITUDE_quality_control",
                                     "b",
                                     fill_value=99)

    output_netcdf_obj.createVariable("LONGITUDE",
                                     "f",
                                     fill_value=get_imos_parameter_info(
                                         'LONGITUDE', '_FillValue'))
    output_netcdf_obj.createVariable("LONGITUDE_quality_control",
                                     "b",
                                     fill_value=99)

    output_netcdf_obj.createVariable("TEMP",
                                     "f", ["DEPTH"],
                                     fill_value=get_imos_parameter_info(
                                         'TEMP', '_FillValue'))
    output_netcdf_obj.createVariable(
        "TEMP_quality_control",
        "b", ["DEPTH"],
        fill_value=data['TEMP_quality_control'].fill_value)

    conf_file_generic = os.path.join(os.path.dirname(__file__),
                                     'generate_nc_file_att')
    generate_netcdf_att(output_netcdf_obj,
                        conf_file_generic,
                        conf_file_point_of_truth=True)

    for var in data.keys():
        if var == 'TIME':
            time_val_dateobj = date2num(data['TIME'],
                                        output_netcdf_obj['TIME'].units,
                                        output_netcdf_obj['TIME'].calendar)
            var_time[:] = time_val_dateobj
        else:
            output_netcdf_obj[var][:] = data[var]

    # default value for abstract
    if not hasattr(output_netcdf_obj, 'abstract'):
        setattr(output_netcdf_obj, 'abstract', output_netcdf_obj.title)

    output_netcdf_obj.close()
    return netcdf_filepath
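
# A minimal usage sketch (hypothetical values; the real gatts/data/annex
# structures are produced by the XBT parsing code upstream of this function):
#
#   gatts = {'Platform_code': 'VLHJ', 'title': 'XBT profile'}
#   data = {'TIME': datetime.datetime(2020, 1, 1, 12, 0),
#           'LATITUDE': -42.0, 'LONGITUDE': 145.0,
#           'DEPTH': np.arange(0.0, 800.0, 2.0),
#           ...}  # plus TEMP and the *_quality_control arrays
#   path = generate_xbt_nc(gatts, data, annex, '/tmp')
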
def process_flight_plan(prd, USES_CUR, USES_FORE, fore_start, file):
    """Extract weather cubes along a flight trajectory and write to NetCDF."""
    logging.basicConfig(filename=prd['log path'],
                        filemode='a',
                        level=logging.INFO)
    # Load Flight Data and EchoTop Coordinates
    flight_tr = np.loadtxt(file, delimiter=',')
    flt_time = flight_tr[:, 0]
    flt_lat = flight_tr[:, 1]
    flt_lon = flight_tr[:, 2]
    flt_alt = flight_tr[:, 3]

    relevant_data = np.zeros(
        (len(gb.LOOKAHEAD_SECONDS), len(prd['products']), prd['cube height'],
         prd['lats'].shape[0], prd['lats'].shape[1]),
        dtype=float)
    idx_active_cur_file, idx_forecast_times = None, [-1] * (
        len(gb.LOOKAHEAD_SECONDS) - fore_start)

    flt_startdate = num2date(flt_time[0],
                             units='seconds since 1970-01-01T00:00:00',
                             calendar='gregorian')
    flt_enddate = num2date(flt_time[-1],
                           units='seconds since 1970-01-01T00:00:00',
                           calendar='gregorian')
    # Generate list of EchoTop Report Times
    (cur_timestamps, fore_timestamps, idx_fore_day_split, idx_cur_day_split,
     PATH_DATA_CUR_DATE, PATH_DATA_FORE_DATE) = get_relevant_timestamps(
         flt_startdate, flt_enddate, flt_time, file, prd['products'],
         prd['sorted path'], USES_FORE, USES_CUR)

    aligned_cur_start = flt_time[0] - (flt_time[0] % prd['refresh rate'])
    aligned_cur_end = flt_time[-1] - (flt_time[-1] % prd['refresh rate'])
    # np.arange excludes its stop value, so extend by one refresh interval to
    # keep the file covering the end of the flight in the expected set
    expected_cur_timestamps = np.arange(aligned_cur_start,
                                        aligned_cur_end + prd['refresh rate'],
                                        prd['refresh rate'])
    diff = set(expected_cur_timestamps) - set(cur_timestamps)
    if len(diff) > 0:
        logging.error(
            "EchoTop Current Data Missing {} Entries During Flight {} ({} - {})"
            .format(len(diff), file, flt_startdate.isoformat(),
                    flt_enddate.isoformat()))
    '''
    # Create Basemap, plot on Latitude/Longitude scale
    m = Basemap(width=12000000, height=9000000, rsphere=gb.R_EARTH,
                resolution='l', area_thresh=1000., projection='lcc',
                lat_0=gb.LAT_ORIGIN, lon_0=gb.LON_ORIGIN)
    m.drawcoastlines()
    Parallels = np.arange(0., 80., 10.)
    Meridians = np.arange(10., 351., 20.)

    # Labels = [left,right,top,bottom]
    m.drawparallels(Parallels, labels=[False, True, True, False])
    m.drawmeridians(Meridians, labels=[True, False, False, True])
    fig2 = plt.gca()
    '''

    # Closest-Approximation - From Weather Data
    weather_cubes_time = np.zeros((len(flt_time)), dtype=float)
    weather_cubes_lat = np.zeros((len(flt_time), gb.CUBE_SIZE, gb.CUBE_SIZE))
    weather_cubes_lon = np.zeros((len(flt_time), gb.CUBE_SIZE, gb.CUBE_SIZE))
    weather_cubes_alt = np.zeros((len(flt_time), prd['cube height']))
    weather_cubes_data = np.zeros(
        (len(flt_time), len(gb.LOOKAHEAD_SECONDS), len(prd['products']),
         prd['cube height'], gb.CUBE_SIZE, gb.CUBE_SIZE))
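    # cube stack dims: (fix, lookahead, product, z, x, y); one CUBE_SIZE x
    # CUBE_SIZE slice is filled per trajectory point in the loop below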

    print('Data Collection Begin\t', str(datetime.datetime.now()))
    for i in range(len(flight_tr)):
        # altitude layer closest to the aircraft, computed once per fix so it
        # is always defined for the cube extraction below
        idx_alt = np.abs(prd['alts'] - flt_alt[i]).argmin()
        if idx_alt == 0:
            idx_alt = 1
        # Open EchoTop File Covering the Current Time
        if USES_CUR:
            # argmin of flt_time % cur_timestamps selects the latest report at
            # or before the current fix (both are epoch seconds)
            idx_cur_file = np.argmin(flt_time[i] % cur_timestamps)
            if idx_cur_file != idx_active_cur_file:
                idx_active_cur_file = idx_cur_file
                if idx_active_cur_file < idx_cur_day_split:
                    idx_cur_day = 0
                else:
                    idx_cur_day = 1
                PATH_DATA_CUR = PATH_DATA_CUR_DATE[idx_cur_day] + os.listdir(
                    PATH_DATA_CUR_DATE[idx_cur_day])[idx_active_cur_file -
                                                     (idx_cur_day *
                                                      idx_cur_day_split)]
                data_cur_rootgrp = Dataset(PATH_DATA_CUR,
                                           'r',
                                           format='NETCDF4')

                for v in range(len(prd['products'])):
                    data_cur_rootgrp.variables[
                        prd['products'][v]].set_auto_mask(False)
                    # layers below, at, and above the aircraft altitude
                    relevant_data[0][v] = data_cur_rootgrp[prd['products'][v]][
                        0, idx_alt - 1:idx_alt + 2]
                data_cur_rootgrp.close()
        if USES_FORE:
            idx_fore_data = np.argmin(flt_time[i] % fore_timestamps)
            if idx_fore_data < idx_fore_day_split:
                idx_fore_day = 0
            else:
                idx_fore_day = 1
            PATH_DATA_FORE = PATH_DATA_FORE_DATE[idx_fore_day] + os.listdir(
                PATH_DATA_FORE_DATE[idx_fore_day])[idx_fore_data -
                                                   (idx_fore_day *
                                                    idx_fore_day_split)]
            data_fore_rootgrp = Dataset(PATH_DATA_FORE, 'r', format='NETCDF4')
            data_fore_timestamps = data_fore_rootgrp['time'][:]
            for v in prd['products']:
                data_fore_rootgrp.variables[v].set_auto_mask(False)
            for t in range(fore_start, len(gb.LOOKAHEAD_SECONDS)):
                # smallest forecast valid-time at or after the lookahead target
                idx_time = np.argmin(data_fore_timestamps %
                                     (flt_time[i] + gb.LOOKAHEAD_SECONDS[t]))
                if idx_time != idx_forecast_times[t - fore_start]:
                    idx_forecast_times[t - fore_start] = idx_time
                    for v in range(len(prd['products'])):
                        # read the matched valid-time from the forecast file
                        # (assuming its first dimension is the valid-time axis)
                        relevant_data[t][v] = data_fore_rootgrp[
                            prd['products'][v]][idx_time,
                                                idx_alt - 1:idx_alt + 2]
            data_fore_rootgrp.close()

        # Heading Projection & Ortho for point
        if i == len(flt_time) - 1:
            heading = gb.heading_a_to_b(flt_lon[i - 1], flt_lat[i - 1],
                                        flt_lat[i], flt_lon[i])
        else:
            heading = gb.heading_a_to_b(flt_lon[i], flt_lat[i], flt_lat[i + 1],
                                        flt_lon[i + 1])
        unitstep_x, unitstep_y, unitstep_ortho_x, unitstep_ortho_y = get_axes(
            prd['lats'], prd['lons'], flt_lat[i], flt_lon[i], heading,
            prd['spatial res'])
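        # the unit steps are lon/lat increments per cube cell along the
        # heading and orthogonal to it; they scale the cube axes built next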

        # Generate 20-point axis orthogonal to heading
        centerline_ortho_x, actual_ortho_delta_x = np.linspace(
            -(gb.CUBE_SIZE / 2) * unitstep_ortho_x,
            (gb.CUBE_SIZE / 2) * unitstep_ortho_x,
            num=gb.CUBE_SIZE,
            retstep=True)
        centerline_ortho_y, actual_ortho_delta_y = np.linspace(
            -(gb.CUBE_SIZE / 2) * unitstep_ortho_y,
            (gb.CUBE_SIZE / 2) * unitstep_ortho_y,
            num=gb.CUBE_SIZE,
            retstep=True)
        # Generate 20-point axis along heading
        centerline_x, actual_delta_x = np.linspace(
            -(gb.CUBE_SIZE / 2) * unitstep_x, (gb.CUBE_SIZE / 2) * unitstep_x,
            num=gb.CUBE_SIZE,
            retstep=True)
        centerline_y, actual_delta_y = np.linspace(
            -(gb.CUBE_SIZE / 2) * unitstep_y, (gb.CUBE_SIZE / 2) * unitstep_y,
            num=gb.CUBE_SIZE,
            retstep=True)

        # Collect and Append Single Cube
        weather_cube_proj = np.zeros((2, gb.CUBE_SIZE, gb.CUBE_SIZE),
                                     dtype=float)
        weather_cube_actual = np.zeros((2, gb.CUBE_SIZE, gb.CUBE_SIZE),
                                       dtype=float)
        weather_cube_alt = np.zeros((prd['cube height']), dtype=float)
        # Cube Dims (lookahead x products x height x lat x lon) (t,v,z,lat,lon)
        weather_cube_data = np.zeros(
            (len(gb.LOOKAHEAD_SECONDS), len(prd['products']),
             prd['cube height'], gb.CUBE_SIZE, gb.CUBE_SIZE),
            dtype=float)

        # Vectorized Cube Data Extraction
        weather_cube_proj[0] = flt_lon[i] + np.tile(
            centerline_x, (gb.CUBE_SIZE, 1)) + np.tile(centerline_ortho_x,
                                                       (gb.CUBE_SIZE, 1)).T
        weather_cube_proj[1] = flt_lat[i] + np.tile(
            centerline_y, (gb.CUBE_SIZE, 1)) + np.tile(centerline_ortho_y,
                                                       (gb.CUBE_SIZE, 1)).T
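        # np.tile stacks each axis CUBE_SIZE times; adding the transposed
        # orthogonal axis sums every along-track offset with every cross-track
        # offset, yielding the full CUBE_SIZE x CUBE_SIZE projected grid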
        '''
        m.scatter(prd['lons'],prd['lats'],latlon=True)
        m.scatter(weather_cube_proj[0],weather_cube_proj[1],latlon=True)
        '''

        weather_cube_alt = prd['alts'][idx_alt - 1:idx_alt + 2]

        weather_cube_actual, weather_cube_data = fill_cube_utm(
            weather_cube_proj, relevant_data, prd['UTM'],
            prd['UTM-latlon idxs'], prd['lats'], prd['lons'],
            len(gb.LOOKAHEAD_SECONDS), len(prd['products']),
            prd['cube height'])

        # Print the max Error between cube points
        if i % 30 == 0:
            err = np.abs(weather_cube_actual - weather_cube_proj)
            err_dist = np.sqrt(np.square(err[0]) + np.square(err[1]))
            maxerr = err_dist.max()
            print("{}\tMax Distance Err:\t".format(datetime.datetime.now()),
                  "{:10.4f}\t".format(maxerr), "\t", str(i + 1), ' / ',
                  len(flight_tr), '\t',
                  file.split('/')[-1])

        # Append current cube to list of data
        weather_cubes_lat[i] = weather_cube_actual[1]
        weather_cubes_lon[i] = weather_cube_actual[0]
        weather_cubes_alt[i] = weather_cube_alt
        weather_cubes_data[i] = weather_cube_data
        weather_cubes_time[i] = flt_time[i]
    '''
    # Verification: Plot collected cubes v. actual flight points
    m.scatter(weather_cubes_lon, weather_cubes_lat, marker=',', color='blue', latlon=True)
    m.scatter(flight_tr[:, 2], flight_tr[:, 1], marker=',', color='red', latlon=True)
    plt.show(block=False)
    PATH_FIGURE_PROJECTION = gb.PATH_PROJECT + '/Output/Weather Cubes/Plots/' \
                             + flt_startdate.isoformat().replace(':', '_') + '.' + gb.FIGURE_FORMAT
    plt.savefig(PATH_FIGURE_PROJECTION, format=gb.FIGURE_FORMAT)
    plt.close()
    '''

    # write to NetCDF
    file_local = file.split('/')[-1]
    date_dir = os.path.join(prd['output path'], flt_startdate.isoformat()[:10])
    PATH_NC_FILENAME = os.path.join(date_dir,
                                    file_local.split('.')[0] + '.nc')
    print('WRITING TO:\t', PATH_NC_FILENAME)
    if not os.path.isdir(date_dir):
        os.mkdir(date_dir)
    cubes_rootgrp = Dataset(PATH_NC_FILENAME, 'w', format='NETCDF4')

    # Add Dimensions
    cubes_rootgrp.createDimension('time', size=None)
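    # size=None makes 'time' an unlimited (appendable) dimension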
    cubes_rootgrp.createDimension('lookahead', size=len(gb.LOOKAHEAD_SECONDS))
    cubes_rootgrp.createDimension('XPoints', size=gb.CUBE_SIZE)
    cubes_rootgrp.createDimension('YPoints', size=gb.CUBE_SIZE)
    cubes_rootgrp.createDimension('ZPoints', size=prd['cube height'])

    # Add Variables
    cubes_rootgrp.createVariable('time', datatype=float, dimensions=('time'))
    cubes_rootgrp.variables['time'].units = 'Seconds since 1970-01-01T00:00:00'
    cubes_rootgrp.variables['time'].calendar = 'gregorian'
    cubes_rootgrp.createVariable('lookahead',
                                 datatype=float,
                                 dimensions=('lookahead'))
    cubes_rootgrp.variables[
        'lookahead'].units = 'Seconds ahead of current time'
    cubes_rootgrp.createVariable('XPoints',
                                 datatype=float,
                                 dimensions=('XPoints'))
    cubes_rootgrp.variables['XPoints'].units = 'indexing for each weather cube'
    cubes_rootgrp.createVariable('YPoints',
                                 datatype=float,
                                 dimensions=('YPoints'))
    cubes_rootgrp.variables['YPoints'].units = 'indexing for each weather cube'
    cubes_rootgrp.createVariable('latitude',
                                 datatype=float,
                                 dimensions=('time', 'XPoints', 'YPoints'))
    cubes_rootgrp.createVariable('longitude',
                                 datatype=float,
                                 dimensions=('time', 'XPoints', 'YPoints'))
    cubes_rootgrp.createVariable('altitudes',
                                 datatype=float,
                                 dimensions=('time', 'ZPoints'))
    for prod in prd['products']:
        cubes_rootgrp.createVariable(prod,
                                     datatype=float,
                                     dimensions=('time', 'lookahead',
                                                 'ZPoints', 'XPoints',
                                                 'YPoints'))

    # Add Metadata: Flight Callsign, Earth-radius,
    cubes_rootgrp.Callsign = file.split('_')[-1].split('.')[0]
    cubes_rootgrp.rEarth = gb.R_EARTH

    # Assign Weather Cube Data to netCDF Variables
    cubes_rootgrp.variables['XPoints'][:] = np.arange(0, gb.CUBE_SIZE, 1)
    cubes_rootgrp.variables['YPoints'][:] = np.arange(0, gb.CUBE_SIZE, 1)
    cubes_rootgrp.variables['time'][:] = weather_cubes_time
    cubes_rootgrp.variables['latitude'][:] = weather_cubes_lat
    cubes_rootgrp.variables['longitude'][:] = weather_cubes_lon
    for p, prod in enumerate(prd['products']):
        cubes_rootgrp.variables[prod][:] = weather_cubes_data[:, :, p, :, :, :]

    cubes_rootgrp.close()
    return 0
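
# An invocation sketch (hypothetical prd keys shown; the real dict is built by
# the surrounding pipeline):
#
#   prd = {'log path': 'cubes.log', 'output path': 'Output/Weather Cubes/',
#          'products': ['Echo_Top'], 'cube height': 3, 'refresh rate': 150,
#          'sorted path': ..., 'spatial res': ..., 'lats': ..., 'lons': ...,
#          'alts': ..., 'UTM': ..., 'UTM-latlon idxs': ...}
#   process_flight_plan(prd, USES_CUR=True, USES_FORE=False, fore_start=1,
#                       file='Flight_Plans/2019-01-01/Flight_Plan_AAL123.txt')
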
def downsample_file(decimation_factor: int, abspath: str):
    """Decimate a cube file (NetCDF or CSV), keeping every Nth record."""
    newfile = None

    # Save interpolated files to sorted location
    newdir = '\\'.join(abspath.split('\\')[:-3]) + '\\Interpolated'
    if not os.path.isdir(newdir):
        os.mkdir(newdir)
    newdate = abspath.split('\\')[-2]
    newdir = newdir + '\\' + newdate
    if not os.path.isdir(newdir):
        os.mkdir(newdir)

    abspath_newfile = os.path.join(newdir, os.path.split(abspath)[1])

    if abspath.endswith('.nc'):
        grp = Dataset(abspath, 'r')
        # keep every decimation_factor-th record along the time axis
        decimation_idx = range(0, len(grp['Echo_Top']), decimation_factor)

        newfile = Dataset(abspath_newfile, 'w', format='NETCDF4')

        # Add Dimensions: t, X/YPoints
        newfile.createDimension('time', size=None)
        newfile.createDimension('XPoints', size=gb.CUBE_SIZE)
        newfile.createDimension('YPoints', size=gb.CUBE_SIZE)

        # Add Variables: t, X/YPoints, lat/lon, echotop
        newfile.createVariable('time', datatype=float, dimensions=('time'))
        newfile.variables['time'].units = 'Seconds since 1970-01-01T00:00:00'
        newfile.variables['time'].calendar = 'gregorian'
        newfile.createVariable('XPoints',
                               datatype=float,
                               dimensions=('XPoints'))
        newfile.variables['XPoints'].units = 'indexing for each weather cube'
        newfile.createVariable('YPoints',
                               datatype=float,
                               dimensions=('YPoints'))
        newfile.variables['YPoints'].units = 'indexing for each weather cube'
        newfile.createVariable('Latitude',
                               datatype=float,
                               dimensions=('time', 'XPoints', 'YPoints'))
        newfile.createVariable('Longitude',
                               datatype=float,
                               dimensions=('time', 'XPoints', 'YPoints'))
        newfile.createVariable('Echo_Top',
                               datatype=float,
                               dimensions=('time', 'XPoints', 'YPoints'))

        # Add Metadata: Flight Callsign, Earth-radius,
        newfile.Callsign = os.path.split(abspath_newfile)[1].split('_')[4][:-3]
        newfile.rEarth = gb.R_EARTH
        if len(decimation_idx) > 0:
            # Assign Weather Cube Data to netCDF Variables
            newfile.variables['XPoints'][:] = np.arange(0, gb.CUBE_SIZE, 1)
            newfile.variables['YPoints'][:] = np.arange(0, gb.CUBE_SIZE, 1)
            newfile.variables['time'][:] = grp['time'][decimation_idx]
            newfile.variables['Latitude'][:] = grp['Latitude'][decimation_idx]
            newfile.variables['Longitude'][:] = grp['Longitude'][
                decimation_idx]
            newfile.variables['Echo_Top'][:] = grp['Echo_Top'][decimation_idx]
        newfile.close()

    else:
        nda_tmp = np.genfromtxt(abspath, delimiter=',')
        # guard against empty or degenerate input before decimating rows
        if isinstance(nda_tmp, np.ndarray) and nda_tmp.ndim >= 1 \
                and len(nda_tmp) > 0:
            newfile = nda_tmp[range(0, len(nda_tmp), decimation_factor)]
            np.savetxt(abspath_newfile, newfile, fmt='%s', delimiter=',')
        else:
            print('{} invalid file, no entries'.format(abspath))
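
# Usage sketch (hypothetical Windows path, matching the '\\' handling above):
#
#   downsample_file(4, 'D:\\Data\\Sorted\\2019-01-01\\cubes_AAL123.nc')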