import os
import fnmatch
import shutil

import affine
from osgeo import gdal

# project-specific helpers (fio, ps, parser, reproject) come from the project's own modules


def getDataset(scene_path, pol):

    ds = None
    band = None
    transform = {}

    # locate the calibrated, terrain-corrected dB product for the requested polarisation
    filelist = fio.getFileList(
        'S1*_*{}_tnr_bnr_Orb_Cal_ML_TF_TC_dB.tif'.format(pol), scene_path)
    if len(filelist) == 1:

        # open raster
        ds = gdal.Open(filelist[0])
        if ds is not None:

            # forward (pixel -> map) and reverse (map -> pixel) affine transforms
            transform['fwd'] = affine.Affine.from_gdal(*ds.GetGeoTransform())
            transform['rev'] = ~transform['fwd']

            band = ds.GetRasterBand(1)

    return {'ds': ds, 'band': band, 'transform': transform}
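# The forward/reverse Affine pair returned above moves between map and pixel
# coordinates. A minimal usage sketch, assuming the scene path and VV
# polarisation match exactly one product on disk (path and coordinates below
# are illustrative only):
data = getDataset('/data/ard/fiji/S1A_example_scene', 'VV')
if data['ds'] is not None:

    # map coordinate -> fractional column / row via the reverse affine transform
    easting, northing = 600000.0, 8000000.0
    col, row = data['transform']['rev'] * (easting, northing)

    # read a single backscatter value (dB) at that pixel
    value = data['band'].ReadAsArray(int(col), int(row), 1, 1)[0][0]
    print(value)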
def compileVrt(scene, product):

    out_pathname = None

    # generate file list
    filelist = fio.getFileList('*', scene)
    if len(filelist) > 0:

        # need to guarantee consistent band ordering
        sorted_list = []

        # for each band defined in the product configuration
        bands = product.getElementsByTagName('band')
        for band in bands:
            if band.hasAttribute("filename"):
                for obj in filelist:
                    # add entry to sort list if configuration matches argument
                    if fnmatch.fnmatch(os.path.basename(obj),
                                       str(band.attributes["filename"].value)):
                        sorted_list.append(obj)
                        break

        # build vrt on validation of successful sort
        if len(sorted_list) == len(bands):

            out_pathname = scene + '/' + product.attributes["name"].value + '.vrt'
            updateImages(sorted_list)

            # stack matched images as separate bands and flush to disk
            vrt = gdal.BuildVRT(out_pathname, sorted_list,
                                options=gdal.BuildVRTOptions(separate=True))
            vrt = None

    return out_pathname
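# compileVrt expects 'product' to be an element drawn from an XML configuration
# that names the product and lists the filename pattern for each band. A
# hypothetical configuration and driver loop illustrating the attributes the
# function actually reads (element layout and patterns below are assumptions):
from xml.dom import minidom

config = minidom.parseString("""
<products>
    <product name="backscatter">
        <band filename="S1*_VV_*_dB.tif"/>
        <band filename="S1*_VH_*_dB.tif"/>
    </product>
</products>
""")

for product in config.getElementsByTagName('product'):
    print(compileVrt('/data/ard/fiji/example_scene', product))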
def checkScene(pathname):

    sceneOK = False

    # decompress product file (unzip exit code 1 signals warnings only)
    path = os.path.dirname(pathname)
    out, err, code = ps.execute('unzip', ["-o", "-d", path, pathname])
    if code <= 1:

        filelist = fio.getFileList('*.tiff', path)
        if len(filelist) > 0:

            # open scene and extract gcps
            in_ds = gdal.Open(filelist[0])
            gcps = in_ds.GetGCPs()

            # track the longitude range spanned by the ground control points
            min_x = 180
            max_x = -180
            for gcp in gcps:
                min_x = min(min_x, gcp.GCPX)
                max_x = max(max_x, gcp.GCPX)

            # large longitude difference when crossing antimeridian
            if max_x - min_x < 10:
                sceneOK = True
            else:
                print('... scene crosses antimeridian - skipping: {}'.format(pathname))

    # housekeeping of raw zip sub-folder
    zip_path = os.path.splitext(pathname)[0]
    if os.path.exists(zip_path):
        shutil.rmtree(zip_path)

    return sceneOK
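# ps.execute is a project wrapper rather than a library call; a minimal sketch
# of the behaviour checkScene assumes, returning stdout, stderr and the exit
# code of the spawned process:
import subprocess

def execute(command, arguments):
    # run an external command and hand back decoded output plus the return code
    result = subprocess.run([command] + arguments,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    return result.stdout.decode(), result.stderr.decode(), result.returncode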
def getSceneList(args):

    scene_list = []

    # single file argument
    if args.file is not None:
        scene_list.append(args.file)
    else:

        # generate file list
        filelist = fio.getFileList('*.zip', args.path)
        if len(filelist) > 0:

            # parse datetime from pathname
            for obj in filelist:
                dt = parser.getDateTime(obj)

                # keep files satisfying the temporal constraints
                if args.start.timestamp() <= dt.timestamp() <= args.end.timestamp():
                    scene_list.append(obj)

    return sorted(scene_list)
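# fio.getFileList is another project helper used throughout these snippets; a
# minimal sketch of the behaviour assumed here, recursively matching a glob
# pattern against filenames beneath a root directory:
import fnmatch
import os

def getFileList(pattern, root_path):
    # walk the tree and collect full paths whose basename matches the pattern
    matches = []
    for folder, _, filenames in os.walk(root_path):
        for filename in fnmatch.filter(filenames, pattern):
            matches.append(os.path.join(folder, filename))
    return matches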
    out_path = os.path.join(ard_path, args.product)

    # geocode scene
    geocode(
        scene=scene,
        dem=args.dem,
        tempdir=os.path.join(out_path, 'process'),
        outdir=out_path,
        targetres=args.res,
        scaling='db',
        func_geoback=1,
        # cleanup=False,
        export_extra=['inc_geo', 'ls_map_geo'])

    # reproject imagery to epsg:32760 (WGS 84 / UTM zone 60S) at 20m resolution
    image_list = fio.getFileList('S1*.tif', out_path)
    options = reproject.getTransform(image_list[0], {
        't_epsg': 32760,
        'res_x': 20,
        'res_y': 20
    })

    for img_pathname in image_list:
        warp_pathname = img_pathname.replace('.tif', '_warp.tif')
        reproject.toEpsg(
            img_pathname, warp_pathname, options,
            ['TILED=YES', 'BLOCKXSIZE=256', 'BLOCKYSIZE=256'])

else:
    print('scene crosses anti-meridian: ' + scene)
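# reproject.getTransform and reproject.toEpsg are project wrappers; a minimal
# sketch of an equivalent single-image reprojection using gdal.Warp directly,
# carrying over the target EPSG code, resolution and creation options from the
# call above (the helper name and example paths are assumptions):
from osgeo import gdal

def warp_to_epsg(src_pathname, dst_pathname, epsg, res, creation_options):
    # reproject one image to the target CRS at the requested pixel size
    gdal.Warp(dst_pathname,
              src_pathname,
              dstSRS='EPSG:{}'.format(epsg),
              xRes=res,
              yRes=res,
              creationOptions=creation_options)

warp_to_epsg('/data/ard/fiji/S1A_example_dB.tif',
             '/data/ard/fiji/S1A_example_dB_warp.tif',
             32760, 20, ['TILED=YES', 'BLOCKXSIZE=256', 'BLOCKYSIZE=256'])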
plt.ylabel("relative information")
plt.xlabel("features")
plt.show()

# compute classifier fit - rbf-kernel SVC left as a commented-out alternative
# model = SVC(kernel='rbf')
model = MLPClassifier(hidden_layer_sizes=(30, 30, 30), max_iter=1000)
model.fit(X, y)

# evaluate on the training scene
y_pred = model.predict(X)
print(confusion_matrix(y, y_pred))
print(classification_report(y, y_pred))

# generate file list of VV products for the remaining scenes
filelist = fio.getFileList('S1*_VV_tnr_bnr_Orb_Cal_ML_TF_TC_dB.tif', '/data/ard/fiji')
for obj in filelist:

    path = os.path.dirname(obj)

    # get feature samples for this scene
    feature, name_list = getFeatureFrame(sample, path, 1000)

    # get scatter matrix
    X, y = getScatterMatrix(feature)
    # plotFeatureSeparability(X, y, name_list)

    # apply the fitted model to this scene and report the confusion matrix
    y_pred = model.predict(X)
    print(path)
    print(confusion_matrix(y, y_pred))
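# MLPClassifier is sensitive to unscaled inputs, so in practice the feature
# matrix would usually be standardised before fitting and the same scaler
# reused when predicting on other scenes. A small sketch, assuming X and y are
# the training arrays produced by getScatterMatrix:
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.neural_network import MLPClassifier

# wrap scaling and the network in one estimator so training-set statistics
# are applied consistently to every scene passed to predict()
model = make_pipeline(StandardScaler(),
                      MLPClassifier(hidden_layer_sizes=(30, 30, 30), max_iter=1000))
model.fit(X, y)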