def _set_time_coverage_metadata(self, gdal_metadata):
    ### GET START TIME from METADATA
    time_coverage_start = None
    if 'time_coverage_start' in gdal_metadata:
        time_coverage_start = parse_time(
            gdal_metadata['time_coverage_start'])

    ### GET END TIME from METADATA
    time_coverage_end = None
    if 'time_coverage_end' in gdal_metadata:
        time_coverage_end = parse_time(gdal_metadata['time_coverage_end'])

    # set time_coverage_start if available
    if time_coverage_start is not None:
        self.dataset.SetMetadataItem('time_coverage_start',
                                     time_coverage_start.isoformat())

    # set time_coverage_end if available
    if time_coverage_end is not None:
        self.dataset.SetMetadataItem('time_coverage_end',
                                     time_coverage_end.isoformat())
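# Illustrative sketch (not part of the mapper): the same pick-and-normalise
# pattern as above, factored into a pure function over a metadata dict. The
# helper name is hypothetical; it assumes parse_time (defined further below)
# is importable.
def _iso_time_coverage(gdal_metadata):
    coverage = {}
    for key in ('time_coverage_start', 'time_coverage_end'):
        if key in gdal_metadata:
            # normalise any recognisable time stamp to ISO 8601
            coverage[key] = parse_time(gdal_metadata[key]).isoformat()
    return coverage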
def __init__(self, filename, gdalDataset, gdalMetadata,
             resolution='low', **kwargs):
    ''' Create LANDSAT VRT from multiple tif files or single tar.gz file'''
    mtlFileName = ''
    bandFileNames = []
    bandSizes = []
    bandDatasets = []
    fname = os.path.split(filename)[1]

    if (filename.endswith('.tar') or
            filename.endswith('.tar.gz') or
            filename.endswith('.tgz')):
        # try to open .tar or .tar.gz or .tgz file with tar
        try:
            tarFile = tarfile.open(filename)
        except Exception:
            raise WrongMapperError

        # collect names of bands and corresponding sizes
        # into bandFileNames and bandSizes lists
        tarNames = sorted(tarFile.getnames())
        for tarName in tarNames:
            # check if TIF files inside TAR qualify
            if (tarName[0] in ['L', 'M'] and
                    os.path.splitext(tarName)[1] in ['.TIF', '.tif']):
                # open TIF file from TAR using VSI
                sourceFilename = '/vsitar/%s/%s' % (filename, tarName)
                gdalDatasetTmp = gdal.Open(sourceFilename)
                # keep name, GDALDataset and size
                bandFileNames.append(sourceFilename)
                bandSizes.append(gdalDatasetTmp.RasterXSize)
                bandDatasets.append(gdalDatasetTmp)
            elif (tarName.endswith('MTL.txt') or
                    tarName.endswith('MTL.TXT')):
                # get mtl file
                mtlFileName = tarName

    elif ((fname.startswith('L') or fname.startswith('M')) and
          (fname.endswith('.tif') or
           fname.endswith('.TIF') or
           fname.endswith('._MTL.txt'))):
        # try to find TIF/tif files with the same name as the input file
        path, coreName = os.path.split(filename)
        coreName = os.path.splitext(coreName)[0].split('_')[0]
        coreNameMask = coreName + '*[tT][iI][fF]'
        tifNames = sorted(glob.glob(os.path.join(path, coreNameMask)))
        for tifName in tifNames:
            sourceFilename = tifName
            gdalDatasetTmp = gdal.Open(sourceFilename)
            # keep name, GDALDataset and size
            bandFileNames.append(sourceFilename)
            bandSizes.append(gdalDatasetTmp.RasterXSize)
            bandDatasets.append(gdalDatasetTmp)

        # get mtl file
        mtlFiles = glob.glob(coreName + '*[mM][tT][lL].[tT][xX][tT]')
        if len(mtlFiles) > 0:
            mtlFileName = mtlFiles[0]
    else:
        raise WrongMapperError

    # if no TIF files were found - this is not the appropriate mapper
    if not bandFileNames:
        raise WrongMapperError

    # get appropriate band size based on the unique sizes and
    # the required resolution
    if resolution == 'low':
        bandXSize = min(bandSizes)
    elif resolution in ['high', 'hi']:
        bandXSize = max(bandSizes)
    else:
        raise ValueError('Wrong resolution %s for file %s'
                         % (resolution, filename))

    # find bands with appropriate size and put them into metaDict
    metaDict = []
    for bandFileName, bandSize, bandDataset in zip(bandFileNames,
                                                   bandSizes,
                                                   bandDatasets):
        if bandSize == bandXSize:
            # let the last part of the file name be the band suffix
            bandSuffix = os.path.splitext(bandFileName)[0].split('_')[-1]

            metaDict.append({
                'src': {'SourceFilename': bandFileName,
                        'SourceBand': 1,
                        'ScaleRatio': 0.1},
                'dst': {'wkv': 'toa_outgoing_spectral_radiance',
                        'suffix': bandSuffix}})
            gdalDataset4Use = bandDataset

    # create empty VRT dataset with geolocation only
    self._init_from_gdal_dataset(gdalDataset4Use)

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    if len(mtlFileName) > 0:
        mtlFileName = os.path.join(os.path.split(bandFileNames[0])[0],
                                   mtlFileName)
        mtlFileLines = [line.strip() for line in
                        self.read_vsi(mtlFileName).split('\n')]
        dateString = [line.split('=')[1].strip()
                      for line in mtlFileLines
                      if ('DATE_ACQUIRED' in line or
                          'ACQUISITION_DATE' in line)][0]
        timeStr = [line.split('=')[1].strip()
                   for line in mtlFileLines
                   if ('SCENE_CENTER_TIME' in line or
                       'SCENE_CENTER_SCAN_TIME' in line)][0]
        time_start = parse_time(dateString + 'T' + timeStr).isoformat()
        time_end = (parse_time(dateString + 'T' + timeStr) +
                    datetime.timedelta(microseconds=60000000)).isoformat()
        self.dataset.SetMetadataItem('time_coverage_start', time_start)
        self.dataset.SetMetadataItem('time_coverage_end', time_end)

    # set platform
    platform = 'LANDSAT'
    if fname[2].isdigit():
        platform += '-' + fname[2]
    ee = pti.get_gcmd_platform(platform)
    self.dataset.SetMetadataItem('platform', json.dumps(ee))

    # set instrument
    instrument = {
        'LANDSAT': 'MSS',
        'LANDSAT-1': 'MSS',
        'LANDSAT-2': 'MSS',
        'LANDSAT-3': 'MSS',
        'LANDSAT-4': 'TM',
        'LANDSAT-5': 'TM',
        'LANDSAT-7': 'ETM+',
        'LANDSAT-8': 'OLI'}[platform]
    ee = pti.get_gcmd_instrument(instrument)
    self.dataset.SetMetadataItem('instrument', json.dumps(ee))
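# Hedged sketch of the MTL time stamping used above: combine DATE_ACQUIRED and
# SCENE_CENTER_TIME into the coverage start, and assume a nominal 60-second
# scene duration for the end (60000000 microseconds in the mapper). The helper
# name is illustrative only; parse_time is the wrapper defined further below.
import datetime

def _landsat_time_coverage(date_string, time_string):
    start = parse_time(date_string + 'T' + time_string)
    end = start + datetime.timedelta(seconds=60)  # nominal scene length
    return start.isoformat(), end.isoformat()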
def __init__(self, filename, gdalDataset, gdalMetadata,
             GCP_STEP=20, MAX_LAT=90, MIN_LAT=50, resolution='low',
             **kwargs):
    ''' Create VRT

    Parameters
    ----------
    GCP_STEP : int
        step between GCPs along each dimension
    '''
    ifile = os.path.split(filename)[1]
    if not ifile.startswith('GW1AM2_') or not ifile.endswith('.h5'):
        raise WrongMapperError
    try:
        ProductName = gdalMetadata['ProductName']
        PlatformShortName = gdalMetadata['PlatformShortName']
        SensorShortName = gdalMetadata['SensorShortName']
    except (TypeError, KeyError):
        raise WrongMapperError

    if (not ProductName == 'AMSR2-L1R' or
            not PlatformShortName == 'GCOM-W1' or
            not SensorShortName == 'AMSR2'):
        raise WrongMapperError

    if resolution == 'low':
        subDatasetWidth = 243
    else:
        subDatasetWidth = 486

    # get GCPs from lon/lat grids
    latGrid = gdal.Open(
        'HDF5:"%s"://Latitude_of_Observation_Point_for_89A'
        % filename).ReadAsArray()
    lonGrid = gdal.Open(
        'HDF5:"%s"://Longitude_of_Observation_Point_for_89A'
        % filename).ReadAsArray()
    if subDatasetWidth == 243:
        latGrid = latGrid[:, ::2]
        lonGrid = lonGrid[:, ::2]

    dx = .5
    dy = .5
    gcps = []
    k = 0
    maxY = 0
    minY = latGrid.shape[0]
    for i0 in range(0, latGrid.shape[0], GCP_STEP):
        for i1 in range(0, latGrid.shape[1], GCP_STEP):
            # create GCP with X,Y,pixel,line from lat/lon matrices
            lon = float(lonGrid[i0, i1])
            lat = float(latGrid[i0, i1])
            if (lon >= -180 and lon <= 180 and
                    lat >= MIN_LAT and lat <= MAX_LAT):
                gcp = gdal.GCP(lon, lat, 0, i1 + dx, i0 + dy)
                gcps.append(gcp)
                k += 1
                maxY = max(maxY, i0)
                minY = min(minY, i0)
    yOff = minY
    ySize = maxY - minY

    # remove Y-offset from gcps
    for gcp in gcps:
        gcp.GCPLine -= yOff

    metaDict = []

    subDatasets = gdalDataset.GetSubDatasets()
    metadata = gdalDataset.GetMetadata()
    for subDataset in subDatasets:
        # select subdatasets for that resolution (width)
        if (subDatasetWidth == int(subDataset[1].split(']')[0].split('x')[-1]) and
                'Latitude' not in subDataset[0] and
                'Longitude' not in subDataset[0]):
            name = subDataset[0].split('/')[-1]
            # find scale
            scale = 1
            for meta in metadata:
                if name + '_SCALE' in meta:
                    scale = float(metadata[meta])
            # create meta entry
            metaEntry = {'src': {'SourceFilename': subDataset[0],
                                 'SourceBand': 1,
                                 'ScaleRatio': scale,
                                 'ScaleOffset': 0,
                                 'yOff': yOff,
                                 'ySize': ySize},
                         'dst': {'name': name}}
            metaDict.append(metaEntry)

    # create VRT from one of the subdatasets
    gdalSubDataset = gdal.Open(metaEntry['src']['SourceFilename'])
    self._init_from_dataset_params(subDatasetWidth, ySize,
                                   (1, 0, 0, ySize, 0, -1),
                                   NSR().wkt)
    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    self.dataset.SetMetadataItem(
        'time_coverage_start',
        parse_time(gdalMetadata['ObservationStartDateTime']).isoformat())
    self.dataset.SetMetadataItem(
        'time_coverage_end',
        parse_time(gdalMetadata['ObservationEndDateTime']).isoformat())

    # append GCPs and lat/lon projection to the vsiDataset
    self.dataset.SetGCPs(gcps, NSR().wkt)
    self.reproject_gcps(
        '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lon_0=0 +no_defs')
    self.tps = True

    mm = pti.get_gcmd_instrument('AMSR2')
    ee = pti.get_gcmd_platform('GCOM-W1')
    self.dataset.SetMetadataItem('instrument', json.dumps(mm))
    self.dataset.SetMetadataItem('platform', json.dumps(ee))
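# Illustrative standalone version of the GCP-collection loop above: walk the
# lat/lon grids with a fixed step and keep only the points inside the latitude
# band of interest. The function name and defaults mirror the mapper but are
# hypothetical.
from osgeo import gdal

def _collect_gcps(lon_grid, lat_grid, step=20, min_lat=50, max_lat=90):
    gcps = []
    for row in range(0, lat_grid.shape[0], step):
        for col in range(0, lat_grid.shape[1], step):
            lon = float(lon_grid[row, col])
            lat = float(lat_grid[row, col])
            if -180 <= lon <= 180 and min_lat <= lat <= max_lat:
                # pixel/line refer to the cell centre, hence the 0.5 offsets
                gcps.append(gdal.GCP(lon, lat, 0, col + 0.5, row + 0.5))
    return gcps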
def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs):
    ''' Create CSKS VRT '''

    if filename.split('/')[-1][0:4] != "CSKS":
        raise WrongMapperError

    # Get coordinates
    metadata = gdalMetadata['Estimated_Bottom_Left_Geodetic_Coordinates']
    bottom_left_lon = float(metadata.split(' ')[1])
    bottom_left_lat = float(metadata.split(' ')[0])
    metadata = gdalMetadata['Estimated_Bottom_Right_Geodetic_Coordinates']
    bottom_right_lon = float(metadata.split(' ')[1])
    bottom_right_lat = float(metadata.split(' ')[0])
    metadata = gdalMetadata['Estimated_Top_Left_Geodetic_Coordinates']
    top_left_lon = float(metadata.split(' ')[1])
    top_left_lat = float(metadata.split(' ')[0])
    metadata = gdalMetadata['Estimated_Top_Right_Geodetic_Coordinates']
    top_right_lon = float(metadata.split(' ')[1])
    top_right_lat = float(metadata.split(' ')[0])
    metadata = gdalMetadata['Scene_Centre_Geodetic_Coordinates']
    center_lon = float(metadata.split(' ')[1])
    center_lat = float(metadata.split(' ')[0])

    # Get sub-datasets
    subDatasets = gdalDataset.GetSubDatasets()

    # Get file names from dataset or subdataset
    if len(subDatasets) == 1:
        filenames = [filename]
    else:
        filenames = [f[0] for f in subDatasets]

    # drop quick-look (QLK) subdatasets
    filenames = [f for f in filenames if f[-3:] != 'QLK']

    subDataset = gdal.Open(filenames[0])

    # generate list of GCPs with X,Y,Z,pixel,line
    # from the corner and centre coordinates
    gcps = [gdal.GCP(float(bottom_left_lon), float(bottom_left_lat),
                     0, 0, 0),
            gdal.GCP(float(bottom_right_lon), float(bottom_right_lat),
                     0, subDataset.RasterXSize, 0),
            gdal.GCP(float(top_left_lon), float(top_left_lat),
                     0, 0, subDataset.RasterYSize),
            gdal.GCP(float(top_right_lon), float(top_right_lat),
                     0, subDataset.RasterXSize, subDataset.RasterYSize),
            gdal.GCP(float(center_lon), float(center_lat),
                     0, int(np.round(subDataset.RasterXSize / 2.)),
                     int(round(subDataset.RasterYSize / 2.)))]

    # lat/lon projection for the GCPs
    latlongSRS = osr.SpatialReference()
    latlongSRS.ImportFromProj4(
        "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs")
    latlongSRSWKT = latlongSRS.ExportToWkt()

    # create empty VRT dataset with geolocation only
    # (x_size, y_size, geo_transform, projection, gcps, gcp_projection)
    self._init_from_dataset_params(subDataset.RasterXSize,
                                   subDataset.RasterYSize,
                                   (0, 1, 0, subDataset.RasterYSize, 0, -1),
                                   latlongSRSWKT, gcps, latlongSRSWKT)

    # Use only full size "original" (SBI) datasets
    for i, elem in enumerate(filenames):
        if filenames[i][-3:] == 'SBI':
            # Add real and imaginary raw counts as bands
            src = {'SourceFilename': filenames[i],
                   'SourceBand': 1,
                   'DataType': gdal.GDT_Int16}
            dst = {'dataType': gdal.GDT_Float32,
                   'name': 'RawCounts_%s_real'
                           % gdalMetadata[filenames[i][-7:-4] +
                                          '_Polarisation']}
            self.create_band(src, dst)

            src = {'SourceFilename': filenames[i],
                   'SourceBand': 2,
                   'DataType': gdal.GDT_Int16}
            dst = {'dataType': gdal.GDT_Float32,
                   'name': 'RawCounts_%s_imaginary'
                           % gdalMetadata[filenames[i][-7:-4] +
                                          '_Polarisation']}
            self.create_band(src, dst)

        self.dataset.FlushCache()

    for i, elem in enumerate(filenames):
        if filenames[i][-3:] == 'SBI':
            # Calculate sigma0 scaling factor
            Rref = float(gdalMetadata['Reference_Slant_Range'])
            Rexp = float(gdalMetadata['Reference_Slant_Range_Exponent'])
            alphaRef = float(gdalMetadata['Reference_Incidence_Angle'])
            F = float(gdalMetadata['Rescaling_Factor'])
            K = float(gdalMetadata[filenames[i][-7:-4] +
                                   '_Calibration_Constant'])
            Ftot = Rref ** (2. * Rexp)
            Ftot *= np.sin(alphaRef * np.pi / 180.0)
            Ftot /= F ** 2.
            Ftot /= K

            src = [{'SourceFilename': self.filename,
                    'DataType': gdal.GDT_Float32,
                    'SourceBand': 2 * i + 1,
                    'ScaleRatio': np.sqrt(Ftot)},
                   {'SourceFilename': self.filename,
                    'DataType': gdal.GDT_Float32,
                    'SourceBand': 2 * i + 2,
                    'ScaleRatio': np.sqrt(Ftot)}]
            dst = {'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
                   'PixelFunctionType': 'RawcountsToSigma0_CosmoSkymed_SBI',
                   'polarisation': gdalMetadata[filenames[i][-7:-4] +
                                                '_Polarisation'],
                   'name': 'sigma0_%s'
                           % gdalMetadata[filenames[i][-7:-4] +
                                          '_Polarisation'],
                   'SatelliteID': gdalMetadata['Satellite_ID'],
                   'dataType': gdal.GDT_Float32}
            # NB: the 'pass' (orbit direction) attribute cannot be set -
            # it is not available in the metadata
            self.create_band(src, dst)

        self.dataset.FlushCache()

    self.dataset.SetMetadataItem(
        'time_coverage_start',
        parse_time(gdalMetadata['Scene_Sensing_Start_UTC']).isoformat())
    self.dataset.SetMetadataItem(
        'time_coverage_end',
        parse_time(gdalMetadata['Scene_Sensing_Stop_UTC']).isoformat())
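# Hedged sketch of the sigma0 scaling computed above: the square root of Ftot
# is applied as ScaleRatio to both the real and imaginary raw-count bands
# before the pixel function squares and sums them. Parameter names mirror the
# COSMO-SkyMed metadata fields; the helper itself is illustrative.
import numpy as np

def _csk_sigma0_scale(rref, rexp, alpha_ref_deg, rescaling_factor, calib_const):
    ftot = rref ** (2.0 * rexp)               # reference slant-range term
    ftot *= np.sin(np.radians(alpha_ref_deg))  # reference incidence angle
    ftot /= rescaling_factor ** 2.0
    ftot /= calib_const
    return np.sqrt(ftot)  # applied as ScaleRatio on each I/Q band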
def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
             rmMetadatas=['NETCDF_VARNAME', '_Unsigned',
                          'ScaleRatio', 'ScaleOffset', 'dods_variable'],
             **kwargs):
    # Remove 'NC_GLOBAL#', 'GDAL_' and 'NANSAT_'
    # from keys in gdalMetadata
    tmpGdalMetadata = {}
    geoMetadata = {}
    origin_is_nansat = False
    if not gdalMetadata:
        raise WrongMapperError
    for key in gdalMetadata.keys():
        newKey = key.replace('NC_GLOBAL#', '').replace('GDAL_', '')
        if 'NANSAT_' in newKey:
            geoMetadata[newKey.replace('NANSAT_', '')] = gdalMetadata[key]
            origin_is_nansat = True
        else:
            tmpGdalMetadata[newKey] = gdalMetadata[key]
    gdalMetadata = tmpGdalMetadata
    fileExt = os.path.splitext(inputFileName)[1]

    # Get file names from dataset or subdataset
    subDatasets = gdalDataset.GetSubDatasets()
    if len(subDatasets) == 0:
        filenames = [inputFileName]
    else:
        filenames = [f[0] for f in subDatasets]

    # add bands with metadata and corresponding values to the empty VRT
    metaDict = []
    xDatasetSource = ''
    yDatasetSource = ''
    firstXSize = 0
    firstYSize = 0
    for filename in filenames:
        subDataset = gdal.Open(filename)
        # choose the first dataset with a grid
        if (firstXSize == 0 and firstYSize == 0 and
                subDataset.RasterXSize > 1 and subDataset.RasterYSize > 1):
            firstXSize = subDataset.RasterXSize
            firstYSize = subDataset.RasterYSize
            firstSubDataset = subDataset
            # get projection from the first subDataset
            projection = firstSubDataset.GetProjection()

        # take bands whose sizes are same as the first band
        if (subDataset.RasterXSize == firstXSize and
                subDataset.RasterYSize == firstYSize):
            if projection == '':
                projection = subDataset.GetProjection()
            if ('GEOLOCATION_X_DATASET' in filename or
                    'longitude' in filename):
                xDatasetSource = filename
            elif ('GEOLOCATION_Y_DATASET' in filename or
                    'latitude' in filename):
                yDatasetSource = filename
            else:
                for iBand in range(subDataset.RasterCount):
                    subBand = subDataset.GetRasterBand(iBand + 1)
                    bandMetadata = subBand.GetMetadata_Dict()
                    if 'PixelFunctionType' in bandMetadata:
                        bandMetadata.pop('PixelFunctionType')
                    sourceBands = iBand + 1

                    # generate src metadata
                    src = {'SourceFilename': filename,
                           'SourceBand': sourceBands}
                    # set scale ratio and scale offset
                    scaleRatio = bandMetadata.get(
                        'ScaleRatio',
                        bandMetadata.get(
                            'scale', bandMetadata.get('scale_factor', '')))
                    if len(scaleRatio) > 0:
                        src['ScaleRatio'] = scaleRatio
                    scaleOffset = bandMetadata.get(
                        'ScaleOffset',
                        bandMetadata.get(
                            'offset', bandMetadata.get('add_offset', '')))
                    if len(scaleOffset) > 0:
                        src['ScaleOffset'] = scaleOffset
                    # set DataType
                    src['DataType'] = subBand.DataType

                    # generate dst metadata:
                    # get all metadata from the input band
                    dst = bandMetadata
                    # set wkv and band name
                    dst['wkv'] = bandMetadata.get('standard_name', '')
                    # first, try the name metadata
                    if 'name' in bandMetadata:
                        bandName = bandMetadata['name']
                    else:
                        # if it doesn't exist, get name from NETCDF_VARNAME
                        bandName = bandMetadata.get('NETCDF_VARNAME', '')
                        if len(bandName) == 0:
                            bandName = bandMetadata.get('dods_variable', '')

                        # remove digits added by gdal when
                        # exporting to netcdf
                        if (len(bandName) > 0 and origin_is_nansat and
                                fileExt == '.nc'):
                            if bandName[-1:].isdigit():
                                bandName = bandName[:-1]
                            if bandName[-1:].isdigit():
                                bandName = bandName[:-1]

                        # if still no band name, create one
                        if len(bandName) == 0:
                            bandName = 'band_%03d' % iBand

                    dst['name'] = bandName

                    # remove unnecessary metadata from dst
                    for rmMetadata in rmMetadatas:
                        if rmMetadata in dst:
                            dst.pop(rmMetadata)

                    # append band with src and dst dictionaries
                    metaDict.append({'src': src, 'dst': dst})

    # create empty VRT dataset with geolocation only
    self._init_from_gdal_dataset(firstSubDataset, metadata=gdalMetadata)

    # add bands with metadata and corresponding values to the empty VRT
    self.create_bands(metaDict)

    self._create_complex_bands(filenames)

    if len(projection) == 0:
        # projection was not set automatically:
        # get projection from GCPProjection
        projection = geoMetadata.get('GCPProjection', '')
    if len(projection) == 0:
        # no projection was found in dataset or metadata:
        # generate WGS84 by default
        projection = NSR().wkt
    # fix problem with MET.NO files where a, b are given in m and XC/YC in km
    if ('UNIT["kilometre"' in projection and
            ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]'
            in projection):
        projection = projection.replace(
            ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]', '')
    # set projection
    self.dataset.SetProjection(self.repare_projection(projection))

    # check if GCPs were added from the input dataset
    gcps = firstSubDataset.GetGCPs()
    gcpProjection = firstSubDataset.GetGCPProjection()

    # if no GCPs in input dataset: try to add GCPs from metadata
    if not gcps:
        gcps = self.add_gcps_from_metadata(geoMetadata)
    # if still no GCPs: try to add GCPs from variables
    if not gcps:
        gcps = self.add_gcps_from_variables(inputFileName)

    if gcps:
        if len(gcpProjection) == 0:
            # get GCP projection and repare
            gcpProjection = self.repare_projection(
                geoMetadata.get('GCPProjection', ''))
        # add GCPs to dataset
        self.dataset.SetGCPs(gcps, gcpProjection)
        self.dataset.SetProjection('')
        self._remove_geotransform()

    # Find proper bands and insert GEOLOCATION ARRAY into dataset
    if len(xDatasetSource) > 0 and len(yDatasetSource) > 0:
        self._add_geolocation(
            Geolocation.from_filenames(xDatasetSource, yDatasetSource))
    elif not gcps:
        # if no GCPs found and no GEOLOCATION ARRAY set:
        # set Nansat GeoTransform if it was not set automatically
        geoTransform = self.dataset.GetGeoTransform()
        if len(geoTransform) == 0:
            geoTransformStr = geoMetadata.get('GeoTransform',
                                              '(0|1|0|0|0|0|1)')
            geoTransform = eval(geoTransformStr.replace('|', ','))
            self.dataset.SetGeoTransform(geoTransform)

    subMetadata = firstSubDataset.GetMetadata()

    ### GET START TIME from METADATA
    time_coverage_start = None
    if 'start_time' in gdalMetadata:
        time_coverage_start = parse_time(gdalMetadata['start_time'])
    elif 'start_date' in gdalMetadata:
        time_coverage_start = parse_time(gdalMetadata['start_date'])
    elif 'time_coverage_start' in gdalMetadata:
        time_coverage_start = parse_time(
            gdalMetadata['time_coverage_start'])

    ### GET END TIME from METADATA
    time_coverage_end = None
    if 'stop_time' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['stop_time'])
    elif 'stop_date' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['stop_date'])
    elif 'time_coverage_stop' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['time_coverage_stop'])
    elif 'end_time' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['end_time'])
    elif 'end_date' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['end_date'])
    elif 'time_coverage_end' in gdalMetadata:
        time_coverage_end = parse_time(gdalMetadata['time_coverage_end'])

    ### GET start time from the time variable
    if (time_coverage_start is None and
            'time#standard_name' in subMetadata and
            subMetadata['time#standard_name'] == 'time' and
            'time#units' in subMetadata):
        # get data from the netCDF file
        ncFile = Dataset(inputFileName, 'r')
        time_var = ncFile.variables['time']
        t0 = time_var[0]
        if len(time_var) == 1:
            t1 = t0 + 1
        else:
            t1 = time_var[-1]

        time_units_start = parse(time_var.units, fuzzy=True, ignoretz=True)
        time_units_to_seconds = {'second': 1.0,
                                 'hour': 60 * 60.0,
                                 'day': 24 * 60 * 60.0}
        factor = 1.0  # fall back to seconds if the units name is unrecognised
        for key in time_units_to_seconds:
            if key in time_var.units:
                factor = time_units_to_seconds[key]
                break

        time_coverage_start = time_units_start + datetime.timedelta(
            seconds=t0 * factor)
        time_coverage_end = time_units_start + datetime.timedelta(
            seconds=t1 * factor)

    ## finally set values of time_coverage start and end if available
    if time_coverage_start is not None:
        self.dataset.SetMetadataItem('time_coverage_start',
                                     time_coverage_start.isoformat())
    if time_coverage_end is not None:
        self.dataset.SetMetadataItem('time_coverage_end',
                                     time_coverage_end.isoformat())

    if 'sensor' not in gdalMetadata:
        self.dataset.SetMetadataItem('sensor', 'unknown')
    if 'satellite' not in gdalMetadata:
        self.dataset.SetMetadataItem('satellite', 'unknown')
    if 'source_type' not in gdalMetadata:
        self.dataset.SetMetadataItem('source_type', 'unknown')
    if 'platform' not in gdalMetadata:
        self.dataset.SetMetadataItem('platform', 'unknown')
    if 'instrument' not in gdalMetadata:
        self.dataset.SetMetadataItem('instrument', 'unknown')

    self.logger.info('Use generic mapper - OK!')
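# Standalone sketch of the CF time-axis conversion above: parse the reference
# date out of a units string such as 'seconds since 1981-01-01 00:00:00' and
# offset it by a raw time value. The function name is hypothetical; the
# fallback to seconds mirrors the mapper's behaviour.
import datetime
from dateutil.parser import parse

def _cf_time_to_datetime(units, value):
    factors = {'second': 1.0, 'hour': 3600.0, 'day': 86400.0}
    # fuzzy parsing extracts the epoch date embedded in the units string
    epoch = parse(units, fuzzy=True, ignoretz=True)
    factor = next((f for key, f in factors.items() if key in units), 1.0)
    return epoch + datetime.timedelta(seconds=value * factor)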
def parse_time(time_string):
    return utils.parse_time(time_string)
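# Example: the wrapper delegates to utils.parse_time, which accepts loosely
# formatted stamps (see the test below), e.g.
#
#     parse_time('2016-01-19Z')  # -> datetime.datetime(2016, 1, 19, 0, 0)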
def test_parse_time_incorrect(self):
    dt = parse_time('2016-01-19Z')
    self.assertEqual(type(dt), datetime.datetime)