def extract_TRMM_HDF(hdf_list, layer_indexs, outdir, resolution):
    """
    Extracts TRMM products from HDF to tif.

    http://pmm.nasa.gov/data-access/data-products

    :param hdf_list:        list of hdf files or directory with hdfs
    :param layer_indexs:    list of integer layer indices
    :param outdir:          directory to place outputs
    :param resolution:      The size of a pixel in degrees, either "0.25", "0.5",
                            "1.0", or "5.0", depending on the specific TRMM
                            product you are extracting.

    :return: a list of all files created as output
    """

    hdf_list = core.enf_filelist(hdf_list)
    output_filelist = []

    # load the TRMM datatype from the library
    datatype = datatype_library()["TRMM_{0}_GLOBAL".format(resolution)]

    # for every hdf file in the input list
    for hdf in hdf_list:

        # extract layers and add the new filepaths to the output filelist
        hdf_output_filelist = _extract_HDF_datatype(hdf, layer_indexs, outdir, datatype)
        output_filelist += hdf_output_filelist

    return output_filelist
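# Hedged usage sketch for extract_TRMM_HDF: the directories below are hypothetical
# placeholders, and the assumption that layer index 0 holds the precipitation grid
# should be verified against the specific TRMM product being processed.
#
#     trmm_tifs = extract_TRMM_HDF(hdf_list=r"C:\data\TRMM",      # folder of downloaded TRMM HDFs
#                                  layer_indexs=[0],              # extract only the first layer
#                                  outdir=r"C:\data\TRMM\tifs",   # GeoTIFF output folder
#                                  resolution="0.25")             # 0.25 degree global product
#     print(trmm_tifs)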
def extract_GPM_IMERG(hdf_list, layer_indexs, outdir=None, resolution="0.1"):
    """
    Extracts GPM_IMERG data from its HDF5 format.

    :param hdf_list:        list of hdf files or directory with hdfs
    :param layer_indexs:    list of integer layer indices
    :param outdir:          directory to place outputs
    :param resolution:      The size of a pixel in degrees, either "0.1" or "0.15",
                            depending on the GPM product.

    :return: a list of all files created as output

    Typical contents of a GPM HDF are:

    == =========== ================================ ==============
    ID layer shape Layer name                       data type
    == =========== ================================ ==============
    0  [3600x1800] HQobservationTime                (16-bit int)
    1  [3600x1800] HQprecipSource                   (16-bit int)
    2  [3600x1800] HQprecipitation                  (32-bit float)
    3  [3600x1800] IRkalmanFilterWeight             (16-bit int)
    4  [3600x1800] IRprecipitation                  (32-bit float)
    5  [3600x1800] precipitationCal                 (32-bit float)
    6  [3600x1800] precipitationUncal               (32-bit float)
    7  [3600x1800] probabilityLiquidPrecipitation   (16-bit int)
    8  [3600x1800] randomError                      (32-bit float)
    == =========== ================================ ==============
    """

    hdf_list = core.enf_filelist(hdf_list)
    output_filelist = []

    # load the GPM datatype from the library
    datatype = datatype_library()["GPM_IMERG_{0}_GLOBAL".format(resolution)]

    # for every hdf file in the input list
    for hdf in hdf_list:

        # extract layers and add the new filepaths to the output filelist
        hdf_output_filelist = _extract_HDF_datatype(hdf, layer_indexs, outdir, datatype,
                                                    nodata_value=-9999.9)
        output_filelist += hdf_output_filelist

    return output_filelist
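# Hedged usage sketch for extract_GPM_IMERG: the directories below are hypothetical.
# Layer index 5 corresponds to precipitationCal per the table in the docstring, and
# "0.1" assumes the 0.1 degree IMERG grid.
#
#     gpm_tifs = extract_GPM_IMERG(hdf_list=r"C:\data\GPM",       # folder of downloaded GPM HDF5s
#                                  layer_indexs=[5],              # precipitationCal layer
#                                  outdir=r"C:\data\GPM\tifs",    # GeoTIFF output folder
#                                  resolution="0.1")
#     print(gpm_tifs)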
def extract_MPE_NetCDF(netcdf_list, layer_indexs, outdir, area):
    """
    Extracts MPE data from its native NetCDF format.

    :param netcdf_list:     list of netcdf files or directory with netcdfs
    :param layer_indexs:    list of integer layer indices
    :param outdir:          directory to place outputs
    :param area:            presently only supports "CONUS"

    :return: a list of all files created as output
    """

    netcdf_list = core.enf_filelist(netcdf_list)
    output_filelist = []

    # load the MPE datatype from the library
    dtype = datatype_library()["MPE_HRAP_{0}".format(area)]

    # for every netcdf file in the input list
    for netcdf in netcdf_list:
        data = _extract_NetCDF_layer_data(netcdf, layer_indexs)

        for layer_index in layer_indexs:
            dataset = data[layer_index]
            outpath = core.create_outname(outdir, netcdf, str(layer_index), "tif")
            print("creating dataset at {0}".format(outpath))

            _gdal_dataset_to_tif(dataset, outpath,
                                 cust_projection=dtype.projectionTXT,
                                 cust_geotransform=dtype.geotransform,
                                 force_custom=False,
                                 nodata_value=-1)

            output_filelist.append(outpath)

    return output_filelist
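# Hedged usage sketch for extract_MPE_NetCDF: the directories and layer index are
# hypothetical placeholders; "CONUS" is currently the only supported area string.
#
#     mpe_tifs = extract_MPE_NetCDF(netcdf_list=r"C:\data\MPE",    # folder of downloaded MPE NetCDFs
#                                   layer_indexs=[0],              # hypothetical layer choice
#                                   outdir=r"C:\data\MPE\tifs",    # GeoTIFF output folder
#                                   area="CONUS")
#     print(mpe_tifs)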