def trigger_radio_processing(product):
    """Run the radiometric-stability chain on every ROI of *product*.

    For each ``ROI*`` sub-directory: read the Landsat MTL metadata,
    convert to radiance/TOA reflectance, extract per-image statistics
    and append them to the stability results text file.

    [product] path to a Landsat product directory containing ROI* folders.
    Side effect: sets the module-level PROCESSING_STATUS flag.
    """
    global PROCESSING_STATUS
    PROCESSING_STATUS = 'PASSED'  # to be passed in this interface (as parameter)
    infra = main_infra.mainInfra()
    stat_table = []
    stat_table_sort = []
    # One entry per ROI sub-directory of the product
    roi_list = [rec for rec in glob.glob(os.path.join(product, 'ROI*'))]
    for roi in roi_list:
        log.infog(' -- Processing of ' + roi + ' : ')
        mtl = metadata_extraction.LandsatMTL(roi)
        mtl.set_test_site_information(
            infra.configuration_site_description_file)
        log.info(' -- Convert to RAD / TOA')
        mtl.display_mtl_info()
        r = rad.radiometric_calibration(product, mtl)
        log.info(' -- Extract / store statistics : ')
        image_file_list = mtl.rhotoa_image_list
        output_txt_file = RADIOMETRIC_STABILITY_RHO_RESULTS  # infra.result_radiometricStability
        roi_id = os.path.basename(roi)
        gainList = [str(rec) for rec in mtl.rescaling_gain]
        # print mtl.band_sequence
        for image_file in image_file_list:
            a = None
            a = Image.Statistics(image_file, roi_id)
            r = a.get_statistics()
            stat_table.append(r)
        # NOTE(review): sort/update assumed to run once per ROI, after the
        # per-image loop -- TODO confirm against original layout.
        stat_table_sort = sorted(stat_table, key=lambda x: x[1])  # sort by band_number
        infra.update_text_file(mtl, stat_table_sort)
def trigger_radio_processing():
    """Stability-monitoring variant of the radiometric trigger.

    Works on a hard-coded product id under
    ``<processing_location>/stabilityMonitoring/input`` and moves the
    product to ``done`` when finished.

    NOTE(review): this redefines the name used by the parameterized
    version above; only the last definition in the module survives.
    """
    infra = main_infra.mainInfra()
    product_id = 'LC81810402013218LGN00'
    # Assumption: we are inside the stabilityMonitoring directory
    interest = 'stabilityMonitoring'
    product_list = [
        rec for rec in glob.glob(
            os.path.join(infra.processing_location, interest, 'input',
                         product_id, 'ROI*'))
    ]
    for product in product_list:
        log.infog(' -- Processing of ' + product + ' : ')
        mtl = metadata_extraction.LandsatMTL(product)
        log.info(' -- Convert to RAD / TOA')
        # r=rad.radiometric_calibration(product,mtl)
        log.info(' -- Extract / store statistics ')
        mtl.update_image_file_list()
        # mtl.display_mtl_info()
        image_file_list = mtl.rhotoa_image_list
        output_txt_file = infra.result_radiometricStability
        roi_id = os.path.basename(product)
        gainList = [str(rec) for rec in mtl.rescaling_gain]
        # print mtl.band_sequence
        extract_reflectance_in_roy.reduction_on_roi(mtl, image_file_list,
                                                    output_txt_file, roi_id)
        print '\n'
        print output_txt_file
    # Move Product From input to done
    input_product = os.path.join(infra.processing_location, interest,
                                 'input', product_id)
    output_rep = os.path.join(infra.processing_location, interest, 'done')
    cmd = ' '.join(['mv', input_product, output_rep])
    os.system(cmd)
def search_func(): msg = """ What do you want to search by: \t1. Search by Name \t2. Search by Address \t3. Search by City \t4. Search by Vendor Enter the number you want to search by: """ # use check_input function to get "bywhat" we will query the database bywhat = check_input(1, 4, msg) if bywhat == 1: bywhat = "Name" elif bywhat == 2: bywhat = "Address" elif bywhat == 3: bywhat = "City" elif bywhat == 4: bywhat = "Vendor" # get "what" the user wants to find what = raw_input("Search by %s: " % bywhat) # build query string to pass to the database query function q = ("SELECT * FROM `customers` WHERE `%s` REGEXP \'%s\' LIMIT 0 , 30" % (bywhat, what)) # build query string to pass to the database query function but only for Mgmt IP column qIP = ("SELECT `Mgmt IP` FROM `customers` WHERE `%s` REGEXP \'%s\' LIMIT 0 , 30" % (bywhat, what)) # store database query result in "search" variable to later iterate search = db.query(str(q)) # this is just to enumerate the results e = 0 for entry in search: print "----------------------------------------------------" e += 1 print "%d. " % e, print "%s" % entry['Name'], print "\n\tAddress: %s" % entry['Address'], print "\n\tVendor: %s" % entry['Vendor'], print "\n\tCity: %s" % entry['City'], print "\n\tMgmt IP: %s" % entry['Mgmt IP'], # for each iteration, we will use subprocess to ping the 'Mgmt IP' element status = subprocess.call( ['ping', '-c1', '-W10', '-w2', entry['Mgmt IP']], stdout = open(os.devnull, 'wb')) if status == 0: print "is", # this is to colorize the word UP log.infog("UP") else: print "is", # this is to colorize the word DOWN log.err("DOWN") print "----------------------------------------------------" # store the database query for the Mgmt IP column only in a variable called "search_IP" search_IP = list(db.query(str(qIP))) # return this variable to later use it to find out user's desired IP to connect to return search_IP
def search_func():
    """Search the `customers` table by a chosen field and ping each match.

    Duplicate (uncommented) variant of the search function above; returns
    the `Mgmt IP` rows of the matching customers.

    NOTE(review): only the last definition of `search_func` in the module
    is effective; also this variant pings with `-w1` where the sibling
    uses `-w2` -- confirm which deadline is intended.
    """
    msg = """
What do you want to search by:
\t1. Search by Name
\t2. Search by Address
\t3. Search by City
\t4. Search by Vendor
Enter the number you want to search by: """
    # Map the menu choice to a column name (acts as a whitelist)
    bywhat = check_input(1, 4, msg)
    if bywhat == 1:
        bywhat = "Name"
    elif bywhat == 2:
        bywhat = "Address"
    elif bywhat == 3:
        bywhat = "City"
    elif bywhat == 4:
        bywhat = "Vendor"
    what = raw_input("Search by %s: " % bywhat)
    # NOTE(review): `what` is interpolated unescaped into SQL -- injection
    # risk; parameterize if the db API supports it.
    q = ("SELECT * FROM `customers` WHERE `%s` REGEXP \'%s\' LIMIT 0 , 30" %
         (bywhat, what))
    qIP = (
        "SELECT `Mgmt IP` FROM `customers` WHERE `%s` REGEXP \'%s\' LIMIT 0 , 30"
        % (bywhat, what))
    search = db.query(str(q))
    # Enumerate and display each matching customer
    e = 0
    for entry in search:
        print "----------------------------------------------------"
        e += 1
        print "%d. " % e,
        print "%s" % entry['Name'],
        print "\n\tAddress: %s" % entry['Address'],
        print "\n\tVendor: %s" % entry['Vendor'],
        print "\n\tCity: %s" % entry['City'],
        print "\n\tMgmt IP: %s" % entry['Mgmt IP'],
        # Ping the management IP once to show a live UP/DOWN status
        status = subprocess.call(
            ['ping', '-c1', '-W10', '-w1', entry['Mgmt IP']],
            stdout=open(os.devnull, 'wb'))
        if status == 0:
            print "is",
            log.infog("UP")
        else:
            print "is",
            log.err("DOWN")
        print "----------------------------------------------------"
    # Return only the Mgmt IP rows for the later connection prompt
    search_IP = list(db.query(str(qIP)))
    return search_IP
def computeRadialError(self, dst_file_name):
    """Write sqrt(DX^2 + DY^2) of the two images in ``self.image_list``
    to *dst_file_name* as a GeoTIFF.

    Expects exactly two images (DX then DY displacements); returns True
    on success, False when the input list is not a pair.
    """
    if len(self.image_list) != 2:
        # Guard clause: radial error needs exactly a DX/DY pair.
        log.warning(' - Missing Inputs \n')
        return False
    log.infog(' - Start -> Radial Error Computation \n')
    dx_path = self.image_list[0]  # DX
    dy_path = self.image_list[1]  # DY
    log.infog(' -- File 1 -> ' + dx_path)
    log.infog(' -- File 2 -> ' + dy_path)
    dx_ds = gdal.Open(str(dx_path))
    dy_ds = gdal.Open(str(dy_path))
    dx_arr = dx_ds.GetRasterBand(1).ReadAsArray()
    dy_arr = dy_ds.GetRasterBand(1).ReadAsArray()
    radial = np.sqrt(dx_arr * dx_arr + dy_arr * dy_arr)
    # Stage the result in an in-memory copy of the DX dataset (keeps the
    # georeferencing), then materialise it as a GeoTIFF.
    mem_ds = gdal.GetDriverByName('MEM').CreateCopy('', dx_ds, 0)
    mem_ds.GetRasterBand(1).WriteArray(radial, 0, 0)
    gdal.GetDriverByName('GTiff').CreateCopy(dst_file_name, mem_ds, 0)
    # Release GDAL handles
    dx_ds = None
    dy_ds = None
    log.infog(' - [End] -> Radial Error Computation \n')
    log.info(' -- Create : ' + dst_file_name + '\n')
    return True
def Mlab_run(mtl, mat_filename, productWorkingDirectory, pixelSpacing):
    """Hand a product working directory over to the MATLAB multitemporal
    .mat-file updater.

    [mtl] Landsat metadata object; [mat_filename] target .mat file;
    [productWorkingDirectory] Medicis output directory; [pixelSpacing]
    pixel spacing of the original image.

    NOTE(review): relies on module-level globals `infra` and
    `activityLabel` being defined by the caller's module.
    """
    result_file = mat_filename
    log.infog(' -- Save in : ')
    log.infog(' --- ' + result_file + ' \n')
    # Hard-coded MATLAB root on the processing host
    rootdir = '/home/saunier/Documents/MATLAB'
    updateMultitemporalMatFile(mtl, productWorkingDirectory, result_file,
                               infra.matlab_processing, activityLabel,
                               pixelSpacing, rootdir)
def image_matching(mtl, infra, interband, cor):
    """Prepare a working directory and run the Medicis image correlator
    on the interband reference/work image pair.

    Returns the product working directory path, or None when either image
    of the pair is missing.

    NOTE(review): uses the module-level global `interest` to build the
    working-directory path.
    """
    refImage = interband.refImage
    workImage = interband.workImage
    # Guard: both images of the pair must be set before matching
    if workImage is None :
        print ('missing product')
        return
    if refImage is None :
        print ('missing product')
        return
    band_type = interband.band_type
    roi = interband.roi
    ref_channel = interband.ref_channel
    work_channel = interband.work_channel
    band_number = work_channel
    #Define the working directory
    wd_name=mtl.landsat_scene_id+'_'+os.path.basename(roi)+'_BD_'+band_number
    productWorkingDirectory=os.path.join(infra.processing_location,interest,'wd',wd_name)
    if os.path.exists(productWorkingDirectory) is False:
        cmd=' '.join(['mkdir -v',productWorkingDirectory])
        os.system(cmd)
        print ('\n')
    else :
        # Re-run case: wipe previous correlation results
        # NOTE(review): shell `rm -rf` on a built path -- safe only while
        # wd_name components are controlled; confirm inputs are trusted.
        log.warn('-- Existing Product Working Directory / Remove Content')
        cmd=' '.join(['rm -rf',os.path.join(productWorkingDirectory,'*')])
        os.system(cmd)
        print ('\n')
    refimageName=refImage
    inputImage=workImage
    grille=cor.grille
    paramMedicis=infra.configuration_medicis
    log.infog(" -- Execute Medicis ")
    executeMedicis(refImage,inputImage,cor.grille,paramMedicis)
    return productWorkingDirectory
def display_list(interest, status): path_to_product = os.path.join(infra.processing_location, interest, status, '*') product_list = glob.glob(path_to_product) log.infog(' -- Interest / status ' + interest + ' / ' + status + ' :') if len(product_list) > 0: log.info(' --- Number of processed products : ' + str(len(product_list))) for rec in product_list: log.info(rec) else: log.info(' --- No product ') print ' '
def checkDirectoryPresence(self): print ' ' print 'Landsat 8 Processing of Cyclic Report ' print ' ' log.infog(' Check Directory Presence : \n') if os.path.exists(self.input_data_location): log.info(' -- Input Data Location Directory exist') if os.path.exists(self.code_location): print ' -- Code Location Directory exist' else: log.err(' -- Code Location Directory is Missing \n') if os.path.exists(self.processing_location): print ' -- Processing Directory exist' else: log.err(' -- Processing Directory is Missing \n') if os.path.exists(self.result_location): print ' -- Result Directory exist' else: log.err(' -- Result Directory is Missing \n') if os.path.exists(self.configuration): print ' -- Configuration Directory exist' else: log.err(' -- Configuration Directory is Missing \n') if os.path.exists(self.reference_data_vector_file_location): print ' -- Vector - Shape File Directory exist' else: log.err(' -- Vector - Shape File is Missing \n') if os.path.exists(self.reference_data_raster_file_location): print ' -- Raster Reference Directory exist' else: log.err(' -- Raster Reference Directory is Missing \n') if os.path.exists(self.reference_data_dem_file_location): print ' -- DEM Reference Directory exist' else: log.err(' -- DEM Reference Directory is Missing \n') if os.path.exists(self.reference_data_meteo_file_location): print ' -- Meteo Files Directory exist \n' else: log.err(' -- Meteo Files is Missing \n')
def gpio_8_7_callback(channel):
    """GPIO edge callback on pin P8_7: exit BRM antenna pointing.

    Logs the edge direction, swaps the LED timers (t1 -> t0), then sends
    the AT_IPOINT=1 command to the BRM over HTTPS and restores the timers.

    [channel] GPIO channel identifier passed by the GPIO library.

    NOTE(review): credentials are hardcoded and TLS verification is
    disabled (verify=False) -- acceptable only on a closed local link;
    move credentials to configuration.
    """
    #if GPIO.input('P8_7'):  # if port P8_7 == 1
    logging.info('Exit Antenna-pointing triggered.')
    if GPIO.event_detected("P8_7"):
        log.info("Rising edge detected on " + channel)
    else:  # if port P8_7 != 1
        log.info("Falling edge detected on " + channel)
    # Swap timers: stop signal-strength polling, start LED animation
    t1.stop()
    t0.start()
    if signalstrength < 0.0:
        log.err('Signal not available!')
    # else:
    #     start_leds()
    logging.debug('AT Cmd: AT_IPOINT=1')
    at_data = {
        'command': 'AT_IPOINT=1'
    }  # Exit antenna-poiting At-cmd (AT_IPOINT)
    at_data = json.dumps(at_data)
    log.infog(at_data)
    req = requests.post('https://192.168.1.1/auth/v1/at',
                        auth=('admin', 'm@nufacturing'),
                        verify=False,
                        data=at_data,
                        headers={'Content-Type': 'application/json'})
    if req.status_code == 200:
        d = json.loads(req.text)
        at_resp = d['response']
        log.infog('AT:' + at_resp)
        logging.debug('AT Resp: ' + at_resp)
    else:
        log.err('AT command failure!')
        print(req.text)
        logging.warning('AT command failure: ' + req.text)
    # Restore timers: LEDs off, resume signal-strength polling
    t0.stop()
    t1.start()
def Run():
    """Main OSC/laser event loop.

    Pumps OSC frames, forwards Ableton Link beat events when linked, runs
    the effect chain and draws destinations until ``lj.oscrun`` goes
    false; always shuts the OSC layer down in the ``finally`` block.
    """
    log.infog("Running...")
    try:
        while lj.oscrun:
            # If you want an idea
            # t0 = time.time()
            lj.OSCframe()
            if linked:
                alink.BeatEvent()
            AllFX()
            time.sleep(0.002)
            #t1 = time.time()
            # looptime = t1 - t0
            # 25 frames/sec -> 1 frame is 0.04 sec long
            # if looptime is 0.01 sec
            # 0.04/0.01 = 4 loops with the same anim
            # so speedanim is 1 / 4 = 0.25
            # speedanim = 1 / (0.04 / looptime)
            lj.DrawDests()
            #print("Took %f" % (t1 - t0, ))
    #except KeyboardInterrupt:
    #    pass
    except Exception:
        log.err("Exception")
        traceback.print_exc()
    # Gently stop on CTRL C
    finally:
        lj.WebStatus("Aurora Disconnected")
        log.info("Stopping OSC...")
        lj.OSCstop()
        log.infog("Aurora Stopped.")
def threshold_displacement_image(self, cor_conf_image, d_image): #[cor_conf_image] Input confidence Image #[d_image] DC, DX or DY Image. #Applied Confidence Image to d_image #Threshold is 0.8 : conf_threshold = 0.8 log.infog(' - Start threshold_displacement_image \n') #save input cor_conf_image to old d_image_old = d_image.replace('.TIF', '_OLD.TIF') shutil.copy(d_image, d_image_old) #Define destination file dst_file_name = d_image #cor_conf_image src_ds = gdal.Open(str(d_image)) geotransform = src_ds.GetGeoTransform() print '-- Origin = (', geotransform[0], ',', geotransform[3], ')' print '-- Input Pixel Size = (', geotransform[1], ',', geotransform[ 5], ')' conf_ds = gdal.Open(str(cor_conf_image)) dc_array = conf_ds.GetRasterBand(1).ReadAsArray() d_array = src_ds.GetRasterBand(1).ReadAsArray() nb_line = dc_array.shape[0] nb_col = dc_array.shape[1] m1 = d_array m1[dc_array <= conf_threshold] = 10 tmp_ds = gdal.GetDriverByName('MEM').CreateCopy('', src_ds, 0) tmp_ds.GetRasterBand(1).WriteArray(m1, 0, 0) #Write output gdal.GetDriverByName('GTiff').CreateCopy(dst_file_name, tmp_ds, 0) src_ds = None conf_ds = None tmp_ds = None log.infog(' -- Create : ' + dst_file_name + '\n') log.infog(' -- Create : ' + dst_file_name + '\n') log.infog(' - End threshold_confidence_image \n')
def trigger_directLocation_processing(product,interband):
    """Run the full direct-location (geometric) assessment on *product*.

    For each ROI of the product and each band pair of the interband
    configuration: match the work image against the site reference with
    Medicis, geocode the DX/DY/DC results, filter them (land/sea mask,
    confidence and sigma thresholds), compute the radial error image and
    quicklooks, update the multitemporal MATLAB .mat file and publish the
    results to the HTTP report area. Finally moves the product to 'done'.

    NOTE(review): indentation of this body was reconstructed from a
    collapsed source line; loop nesting (band loop inside ROI loop, final
    move at function level) is the most plausible reading -- confirm.
    Uses module-level globals: interest, band_combination, confidence.
    """
    global PROCESSING_STATUS
    PROCESSING_STATUS='PASSED' #to be passed in this interface (as parameter)
    infra=main_infra.mainInfra()
    mtl = metadata_extraction.LandsatMTL(product)
    scene_id = mtl.landsat_scene_id
    mtl.set_test_site_information(infra.configuration_site_description_file)
    # Test-site string is "<country> <site>"
    country_name=((mtl.test_site[0]).split())[0]
    site_name=((mtl.test_site[0]).split())[1]
    repo_ref=os.path.join(infra.reference_data_raster_file_location,country_name,site_name,'ROI');
    #Access to assessment manager ?
    band_list=assessmentManager_get_band_list(infra,mtl)
    mtl=None
    #http class : interface with web server
    http = http_side.performance_report(product)
    #-- MLAB output file initialisation
    file_label = interband.file_label
    band_type = interband.band_type
    #-- Loop on all image roi, perform correlation and statistics
    #LOOP OVER ROIs
    roi_list=[rec for rec in glob.glob(os.path.join(product,'ROI*'))]
    for roi in roi_list:
        roi_name=os.path.basename(roi).split('_')[1]
        print ' '
        print ' -------------------------'
        log.infog(' -- Processing of '+roi_name+' : ')
        mtl = metadata_extraction.LandsatMTL(roi)
        mtl.set_test_site_information(infra.configuration_site_description_file)
        mtl.add_roi_name_information(roi_name)
        #START Image Matching Loop
        wd_list = []
        interband.roi = roi
        for band_twin in band_combination[band_type]:
            interband.set_ref_channel(band_twin)
            ref_channel = interband.ref_channel
            work_channel = interband.work_channel
            #Define the working directory
            wd_name=mtl.landsat_scene_id+'_ROI_'+roi_name+'_BD_'+ref_channel
            productWorkingDirectory=os.path.join(infra.processing_location,interest,'wd',wd_name)
            # Reference tile from the site repository, work band from the product
            ref_channel_reg = os.path.join(os.path.join(repo_ref,roi_name,'*B0'+ref_channel+'*.tiff'))
            work_channel_reg = os.path.join(os.path.join(product,'ROI_'+roi_name,'*B'+work_channel+'*.TIF'))
            if len(glob.glob(ref_channel_reg)) > 0 :
                interband.refImage = glob.glob(ref_channel_reg)[0]
                interband.refImageTrue = True
            else :
                log.err('Missing reference product ')
                return
            if len(glob.glob(work_channel_reg)) > 0 :
                interband.workImage = glob.glob(work_channel_reg)[0]
                interband.workImageTrue = True
            else :
                log.err('Missing one product as input ')
                log.err(work_channel_reg)
                return
            #CORREL
            log.infog(" -- Prepare Parameters")
            cor = correl_image(os.path.join(productWorkingDirectory,'grille.hdf'))
            log.infog(" -- Start Matching")
            productWorkingDirectory = image_matching(mtl, infra, interband, cor)
            #productWorkingDirectory = '/home/saunier/DEV/LS08_ideas_cyclic_report__processing/PROCESSING/directLocation/wd/LC81980332017172MTI00_ROI_ibiza_BD_8'
            # NOTE(review): the parentheses do not build a tuple -- wd_list
            # ends up a plain string here.
            wd_list = (productWorkingDirectory)
            log.infog(" -- Processing of : "+productWorkingDirectory)
            # cor = correl_image(os.path.join(productWorkingDirectory,'grille.hdf'))
            #GEOCODING
            log.infog(" -- Geocoded Medicis Results")
            inputImage = interband.workImage
            refimageName = interband.workImage #?
            #Size of input Full Image (correlation undersampled input images ...)
            src_filename = inputImage
            src_ds = gdal.Open(str(src_filename))
            image_width = src_ds.RasterXSize
            image_length = src_ds.RasterYSize
            src_ds = None
            cor.geocoded(inputImage,refimageName,productWorkingDirectory)
            if (cor.geocoded_valid) :
                dx = cor.dx
                dy = cor.dy
                dc = cor.dc
                mask = cor.mask
            else :
                log.warning(" -- [ERROR ] No Correlation Results in ")
                log.warning(" -- [ERROR ] No Correlation Results in ")
                return
            #FILTERING : IF MASK AVAILABLE - APPLIED
            log.infog(" -- Mask Confidence Image with land_sea mask \n")
            land_sea_mask = os.path.join(product,roi,'land_sea_mask.tif')
            if os.path.exists(land_sea_mask):
                log.infog(" --- Land Sea Mask Exist : "+land_sea_mask)
                input_image = dc
                mask = land_sea_mask
                log.infog(" --- Applied Land Sea Mask to confidence image")
                list_image = []
                list_image.append(input_image)
                im_st = im_p.image(list_image)
                im_st.maskImage(mask,productWorkingDirectory)
                log.info(" -- Create "+im_st.masked_image+' \n')
                # Overwrite DC with its masked version
                cmd = ' '.join(['mv', im_st.masked_image,dc])
                os.system(cmd)
                im_st = None
            #FILTERING : APPLIED CONFIDENCE THRESHOLD ON DC - can applied to DX,DY,DC
            list_image = []
            list_image.append(dc)
            im_st = im_p.image(list_image)
            im_st.histerysisThreshold(confidence,productWorkingDirectory)
            dc = im_st.masked_image
            dc_mask = im_st.mask_image #BINARY MASK
            im_st = None
            #UPDATE DC / DC MASK WITH 3 SIGMA THRESHOLD ON DX , DY
            im_st = im_p.image([dc,dc_mask])
            n_value = 3.5 #SIGMA VALUE
            threshold = confidence #CONFIDENCE VALUE -
            im_st.sigma_threshold(dx,n_value,confidence)
            im_st.sigma_threshold(dy,n_value,confidence)
            dc = im_st.masked_image
            dc_mask = im_st.mask_image #BINARY MASK
            cmd = ' '.join(['mv', im_st.masked_image,cor.dc]) #Overwrites DC - original kept in "OLD"
            os.system(cmd)
            im_st = None
            #APPLIED DC BINARY MASK TO DX, DY
            mask = dc_mask
            list_image = []
            list_image.append(dx)
            im_st = im_p.image(list_image)
            im_st.maskImage(mask,productWorkingDirectory)
            log.info(" -- Create "+im_st.masked_image+' \n')
            dx = im_st.masked_image
            im_st = None
            list_image = []
            list_image.append(dy)
            im_st = im_p.image(list_image)
            im_st.maskImage(mask,productWorkingDirectory)
            log.info(" -- Create "+im_st.masked_image+' \n')
            dy = im_st.masked_image
            im_st = None
            log.infog(" -- Mask :"+dc_mask)
            log.infog(" -- DX :"+dx)
            log.infog(" -- DY :"+dy)
            log.infog(" -- DC :"+dc+'\n')
            #COMPUTE DX**2 + DY**2 - RADIAL ERROR
            list_image = [dx,dy]
            # NOTE(review): np.int is removed in NumPy >= 1.24 -- use int().
            dst_file_name = os.path.join(productWorkingDirectory,scene_id+
                                         '_B'+interband.ref_channel+
                                         '_B'+interband.work_channel+
                                         '_radialError_'+
                                         str(np.int(confidence*100))+'.tif',)
            im_st = im_p.image(list_image)
            if (im_st.computeRadialError(dst_file_name)):
                cor.radial_error = dst_file_name
                cor.radial_error_valid = True
            im_st = None
            #STATISTICS DX
            #STATISTICS DY
            #STATISTICS DC
            #-- QL Radial Error - Overlayed Image Band with B1 B2 B3 Image
            #Rescale QL Radial Error to size of B1 , B2 , B3
            list_image = [interband.workImage]
            im1 = cor.radial_error
            dst_filename = im1.replace('.tif','_rescale.tif')
            im_st = im_p.image(list_image)
            im_st.rescaleImage(im1,dst_filename)
            cor.radial_error_rescaled = dst_filename
            im1 = dc_mask
            dst_filename = im1.replace('.TIF','_rescale.tif')
            im_st.rescaleImage(im1,dst_filename) #Geometric Rescaling
            cor.dc_mask_rescaled = dst_filename
            im_st = None
            #BURN TIF IMAGES WITH RADIAL ERRORS
            #Convert each image to 8 Bits
            list_image = []
            list_image.append(glob.glob(os.path.join(product,roi,'*B8.TIF'))[0]) # QL Generation
            list_image.append(glob.glob(os.path.join(product,roi,'*B8.TIF'))[0]) # QL Generation
            list_image.append(glob.glob(os.path.join(product,roi,'*B8.TIF'))[0]) # QL Generation
            im_st = im_p.image(list_image)
            dst_repo = productWorkingDirectory
            output_list = im_st.byte_rescaling(dst_repo )
            im_st = None
            #Burn each one tif images
            list_image = output_list
            output_list = []
            for image in list_image:
                output_list.append(image.replace('.tif','_burn.tif'))
            im_st = im_p.image(list_image)
            im_filename = cor.radial_error_rescaled #Radial Error Value map to QL
            mask_filename = cor.dc_mask_rescaled #DC Mask to select pixel
            im_st.burn_image(output_list,im_filename,mask_filename)
            im_st = None
            #Create the quick look
            image_data_list = output_list
            ql_name = os.path.join(productWorkingDirectory,scene_id+
                                   '_B'+interband.ref_channel+
                                   '_B'+interband.work_channel+
                                   '_Radial_Error_QL.jpg')
            outputdir = productWorkingDirectory
            quick_look_resolution = 30
            im_st = im_p.image(image_data_list) # Create Object instance
            im_st.createimageQuicklook(ql_name,outputdir,quick_look_resolution)
            #-- MLAB Production
            #result file is matlab file (.mat)
            src_ds = gdal.Open(str(interband.refImage))
            pixelSpacing= (src_ds.GetGeoTransform())[1] #Pixel spacing of the orginal Image
            src_ds = None
            #-- Option
            #createimageQuicklook(inputImage,productWorkingDirectory)
            #addDigitalElevationImage(inputImage,productWorkingDirectory,demReference)
            #-- End Option
            # ext = '.mat'
            # result_mat_file= interband.get_output_filename(mtl,infra,ext)
            #-- Summary Statistisque file_label - same level of mat file but
            # just list results in a CSV.
            # ext = '.txt'
            # result_sta_file= interband.get_output_filename(mtl,infra,ext)
            log.infog(" -- Update Multi Temporal MAT File \n")
            metadataFile=mtl
            # Set Filename of ".mat" file
            filename = site_name+'_multiTemp.mat'
            result_mat_file=os.path.join(infra.result_location,interest,filename)
            medicisDirectory=productWorkingDirectory
            #[NFO] Mlab_run - waits for the normalized dx,dy,dc files
            # M=dir([obj.repName filesep '*_dx-displacement.TIF']);
            # M=dir([obj.repName filesep '*_dy-displacement.TIF']);
            # M=dir([obj.repName filesep '*_dc-confidence.TIF']);
            Mlab_run(mtl, result_mat_file, productWorkingDirectory, pixelSpacing)
            ## MINIMUM TIME 30 s - MAKE SURE THE PROCESSING IS FINISHED
            print (' ')
            log.warn('TIME SLEEP ACTIVIATED - 30 s - Wait end of M LAB PROCESSING')
            log.warn('TIME SLEEP ACTIVIATED - 30 s - Important to let Completion')
            log.warn('TIME SLEEP ACTIVIATED - 30 s - of the mat file ')
            print (' ')
            time.sleep(30)
            #--- 2. Repatriate BD2 BD3 BD4 Images
            #--- 3. Open Images and Burn Values
            #--- 4. Create QL
            #-- CP to the HTTP
            src_directory = productWorkingDirectory
            #-- Copy MLAB Results (PNG) to repository
            #-- Copy DC mask image (PNG)to http repository
            #-- Copy Radial Error image (JPG)to http repository
            http.updateContent(productWorkingDirectory,interest,band_type)
            #Generate A single stat/csv for all Report WD in this processing
            result_sta_file = os.path.join(productWorkingDirectory,'stat.txt')
            log.infog(" - [STA File] STA File : "+result_sta_file)
            #HTTP EXPORT PART - TO BE SPLIT OUT OF THIS FUNCTION
            print ( ' ')
            log.infog(" - Populate HTTP \n")
            #-- STA FILE :
            dst_file_name = ''.join([mtl.landsat_scene_id,'_','roi_',roi_name,'_multiTemp_',band_type,'.txt'])
            http.updateContentWith_FILE(result_sta_file,dst_file_name,interest,band_type)
            log.infog(" - CP Matlab File \n")
            http.updateWithDirectLocationMATFile(result_mat_file)
    # Move the whole product from input to done once all ROIs are processed
    input_product=product
    output_rep=os.path.join(product,'../../done')
    cmd = ' '.join(['mv',input_product,output_rep])
    log.info(' - [CMD ] '+cmd)
    os.system(cmd)
def main():
    """ONYX entry point: bring up the BRM link and run the monitor loop.

    Waits for the BRM connection, dumps device information, wires the
    P8_7 push-button to the antenna-pointing exit callback, starts the
    signal-strength timer, then loops forever polling signal strength,
    USIM status, GPS location and (once) selecting/setting the highest-
    elevation satellite.

    NOTE(review): indentation reconstructed from a collapsed source line.
    Credentials are hardcoded and TLS verification is disabled
    (verify=False) -- acceptable only on the closed local link; move
    credentials to configuration. `gps_fix`/`sat_id` may be unbound if
    the first location/satellite request fails -- confirm.
    """
    global skywire, latitude, longitude, ledState, signalstrength, t1
    url = ""
    device_id = ""
    fw_ver = ""
    user = "******"
    password = "******"
    url = 'https://192.168.1.1/auth/'
    brm_err_cnt = 5
    logging.info('ONYX main started.')
    # LED animation while waiting for the BRM
    t0 = RepeatedTimer(2, start_leds)
    t0.start()
    # start_leds()
    # time.sleep(20)
    for i in range(0, 5, 1):
        print("Waiting BRM connection...")
        if connected_to_brm():
            break
    check_usim = True
    set_ant_flag = True
    s = requests.Session()
    s.auth = (user, password)
    t0.stop()
    try:
        # initial BRM websocket checking
        response = s.get(url, verify=False)
        print response.status_code
    except requests.HTTPError as e:
        print("Checking internet connection failed, status code {0}.".format(
            e.response.status_code))
        logging.warning('No internet connection available.')
    except requests.ConnectionError:
        print("No internet connection available.")
        logging.error(
            'Initial BRM websocket checking error. \nExit Onyx main!')
        sys.exit(1)
    # start_leds()
    # t0 = RepeatedTimer(1, start_leds,sb)
    # t0.start()
    # req = requests.get(url,auth=(user, password), verify=False)
    # s = requests.Session()
    # s.get(url,auth=('admin', 'm@nufacturing'), cert=False, verify=False)
    #r = requests.get(url, auth=('admin', 'm@nufacturing'), cert=False, verify=False)
    # r = requests.get('https://192.168.1.1/auth/', auth=('admin', 'm@nufacturing'))
    # print req.status_code
    log.info('== BRM INFORMATION ==')
    req = s.get(url + "v1/config/setting/serial_number",
                auth=('admin', 'm@nufacturing'), verify=False)
    print 'Serial number :' + req.text
    logging.info('Serial number :' + req.text)
    req = s.get(url + "v1/device/temp",
                auth=('admin', 'm@nufacturing'), verify=False)
    data = json.loads(req.text)
    print 'BRM Temperature :', data['brmTemp'], 'Celcius'
    logging.info('Temperature :' + req.text)
    # geocode = getGeoCode('tai seng mrt')
    # latitude = geocode['results'][0]['geometry']['location']['lat']
    # longitude = geocode['results'][0]['geometry']['location']['lng']
    # print("Latitude:%s\nLongitude:%s\n"%(latitude, longitude))
    req = s.get(url + "v1/device/signalstrength",
                auth=('admin', 'm@nufacturing'), verify=False)
    if req.status_code == 200:
        data = json.loads(req.text)
        print 'Signal Strength :', data, 'dBHz'
    else:
        log.err('Signal strength request failed!')
        print(req.status_code)
    # Switch to the REST helper for the remaining device queries
    rest = MyRestFul()
    r = rest.get('v1/config/setting/serial_number')
    print r.text
    r = rest.get('v1/device/temp')
    print ' temperature: ' + r.text
    r = rest.get('v1/device/id')
    print 'id.: ' + r.text
    # r = rest.get('v1/device/firmware')
    # #r.text = r.text.encode('utf-8').strip()
    # print 'firmware ver.: ' + r.text
    r = rest.get('v1/device/bist')
    print 'BIST: ' + r.text
    r = rest.get('v1/config')
    print 'configuration: ' + r.text
    log.info('== BRM INFORMATION ==')
    logging.info(r.text)
    #rest.logout()
    # Get antenna pointing status (bypass antenna pointing)
    req = rest.get('v1/device/antenna_pointing')
    if req.status_code == 200:
        data = json.loads(req.text)
        ant_pointing = data['enabled']
        print ant_pointing
    else:
        log.err('antenna_pointing!')
        print(req.text)
    # time.sleep(5)
    # Setup GPS
    # setupAGPS()
    # Create push button for BRM exit antenna pointing
    GPIO.setup("P8_7", GPIO.IN)
    GPIO.add_event_detect("P8_7", GPIO.RISING, callback=gpio_8_7_callback)
    #GPIO.wait_for_edge('P8_7', GPIO.RISING)
    #your amazing code here
    #detect wherever:
    #if GPIO.event_detected("P8_7"):
    #    print "event detected!"
    ledState = False
    ######### timer ##############
    sb = signalBar()
    t1 = RepeatedTimer(1, signalStrength, sb)
    logging.debug('Signal strength timer started...')
    t1.start()
    # t0 = RepeatedTimer(1, start_leds)
    # t0.start()
    # t2.start()
    # logging.debug('waiting before canceling %s', t2.getName())
    # time.sleep(2)
    # logging.debug('canceling %s', t2.getName())
    # print 'before cancel t2.is_alive() = ', t2.is_alive()
    # t2.cancel()
    # time.sleep(1)
    # print 'after cancel t2.is_alive() = ', t2.is_alive()
    # t1.join()
    # t2.join()
    # t1.stop()
    logging.debug('done')
    ########################################
    print("Main loop...\r\n")
    while True:
        try:
            # TODO: write code...
            req = rest.get('v1/device/signalstrength')
            if req.status_code == 200:
                data = json.loads(req.text)
                # print data
                # Empty JSON body means no signal value available
                if req.text == '{}':  # or data == '':
                    # print req.text
                    signalstrength = -1.0
                else:
                    signalstrength = data['signalstrength']
                    print(req.text)
                    logging.debug(req.text)
            else:
                log.err('Signal strength request failed!')
                print(req.text)
                signalstrength = -1.0
                logging.warn(req.text)
            # USIM Status (checked once, until it succeeds)
            if check_usim:
                req = rest.get('v1/device/usim/status')
                if req.status_code == 200:
                    data = json.loads(req.text)
                    sim_status = data['status']
                    log.infog('USIM Status: ' + sim_status)
                    logging.debug(req.text)
                    check_usim = False
                else:
                    #if req.status_code == 403:
                    data = json.loads(req.text)
                    err_msg = data['errors']['message']
                    log.err('USIM error: ' + err_msg)
                    logging.err(req.text)
            # Location Update
            req = rest.get('v1/location')
            if req.status_code == 200:
                data = json.loads(req.text)
                #print data
                gps_fix = data['fix']['fixdesc']
                lon = data['lon']
                lat = data['lat']
                log.info('Location (GPS) : %s (Lon:%s Lat:%s)' %
                         (gps_fix, lon, lat))
                logging.info(data)
                # print ' Lon : %s Lat: %s' % (lon, lat)
            else:
                log.err('location request failed!')
                print(req.text)
                logging.err(req.text)
            # Once we have a 3D fix, pick the highest-elevation satellite
            # and set it as current (one shot, gated by set_ant_flag).
            if (gps_fix == '3d') and (set_ant_flag) and (ant_pointing):
                req = rest.get('v1/device/satellites_table')
                if req.status_code == 200:
                    data = json.loads(req.text)
                    print data
                    ele_max = max(data, key=lambda item: item['elevation'])
                    sat_id = ele_max['id']
                    sat_elevation = ele_max['elevation']
                    sat_azimuth = ele_max['azimuth']
                    log.infog(
                        'Select satellites id: #%s (Elevation:%s Azimuth:%s)'
                        % (sat_id, sat_elevation, sat_azimuth))
                    logging.info(
                        'Select satellites id: #%s (Elevation:%s Azimuth:%s)'
                        % (sat_id, sat_elevation, sat_azimuth))
                    #set_ant_flag = True
                    # set current satellte id
                else:
                    log.err('location satellites request failed!')
                    print(req.text)
                    logging.err('location satellites request failed!')
                time.sleep(3)
                #if set_ant_flag:
                ## Set current satellite
                data = {'satid': sat_id}
                print data
                req = rest.put('v1/device/current_satellite', payload=data)
                if req.status_code == 200:
                    d = json.loads(req.text)
                    log.infog('Set current satellite id:' + str(sat_id))
                    logging.info('Set current satellite id:' + str(sat_id))
                    # connection = data['co']
                else:
                    log.err('command failure')
                    print(req.text)
                    logging.err(req.text)
                set_ant_flag = False
            time.sleep(1)
        except requests.exceptions.Timeout as e:
            # Maybe set up for a retry, or continue in a retry loop
            print("Error Timeout:", e)
            logging.error("Error Timeout:", e)
        except requests.exceptions.ConnectionError as e:
            print("Error Connecting:", e)
            logging.error("Error Connecting:", e)
        except requests.exceptions.TooManyRedirects as e:
            # Tell the user their URL was bad and try a different one
            print("Error TooManyRedirects:", e)
            logging.error("Error TooManyRedirects:", e)
        except requests.exceptions.RequestException as e:
            # catastrophic error. bail.
            print("Error RequestException:", e)
            print e
            logging.error("Error RequestException:", e)
            sys.exit(1)
def updateContent(self, src, interest, bandType):
    """Publish the working-directory results of one assessment to the
    matching HTTP report area.

    [src] product working directory whose basename encodes
    ``..._<roi>_..._<ref>_..._<work>``; [interest] one of
    'interbandRegistration', 'directLocation', 'stabilityMonitoring';
    [bandType] 'ms' or 'pan'.

    NOTE(review): the interband and directLocation branches are near
    duplicates and the stabilityMonitoring branch only selects the
    destination -- candidates for factoring out.
    """
    #Manage output when Interband registration
    if interest == 'interbandRegistration':
        if bandType == 'ms':
            dst = self.interband_report_location_ms
            roi = ((os.path.basename(src)).split('_'))[2]
            ref_ch = ((os.path.basename(src)).split('_'))[4]
            work_ch = ((os.path.basename(src)).split('_'))[-1]
        if bandType == 'pan':
            dst = self.interband_report_location_pan
            roi = ((os.path.basename(src)).split('_'))[2]
            ref_ch = 'REF8'
            work_ch = ((os.path.basename(src)).split('_'))[-1]
        dst_rep = 'roi_' + roi + '_' + ref_ch + '_' + work_ch
        #Prepare output directory
        if not os.path.exists(os.path.join(dst, dst_rep)):
            log.infog('-- Create Output Directory on http')
            cmd = ' '.join(['mkdir -v ', os.path.join(dst, dst_rep)])
            os.system(cmd)
        #Copy PNG files
        cmd = ' '.join([
            'cp -r ',
            os.path.join(src, '*.png'),
            os.path.join(dst, dst_rep)
        ])
        os.system(cmd)
        #Copy JPG files
        cmd = ' '.join([
            'cp -r ',
            os.path.join(src, '*Radial_Error_QL.jpg'),
            os.path.join(dst, dst_rep)
        ])
        os.system(cmd)
        #Prepare Files (gdal warp): rescale the 0/1 binary mask to 0-255 PNG
        src_file = glob.glob(
            os.path.join(src, '*confidence_binaryMask_*rescale.tif'))[0]
        dst_file = os.path.join(dst, dst_rep, 'dc-confidence_mask.png')
        cmd = ' '.join([
            'gdal_translate -ot Byte -scale 0 1 0 255', src_file, dst_file,
            '-of PNG'
        ])
        os.system(cmd)
        #Copy Results
        print cmd
    #Manage output when Direct Location
    if interest == 'directLocation':
        dst = self.geolocation_report_location
        print 'DST : ' + dst
        if bandType == 'ms':
            roi = ((os.path.basename(src)).split('_'))[2]
            ref_ch = ((os.path.basename(src)).split('_'))[4]
            work_ch = ((os.path.basename(src)).split('_'))[-1]
        if bandType == 'pan':
            roi = ((os.path.basename(src)).split('_'))[2]
            ref_ch = 'REF8'
            work_ch = ((os.path.basename(src)).split('_'))[-1]
        dst_rep = 'roi_' + roi + '_' + ref_ch + '_' + work_ch
        #Prepare output directory
        if not os.path.exists(os.path.join(dst, dst_rep)):
            log.infog('-- Create Output Directory on http')
            cmd = ' '.join(['mkdir -v ', os.path.join(dst, dst_rep)])
            os.system(cmd)
        #Copy PNG files
        cmd = ' '.join([
            'cp -r ',
            os.path.join(src, '*.png'),
            os.path.join(dst, dst_rep)
        ])
        os.system(cmd)
        #Copy JPG files
        cmd = ' '.join([
            'cp -r ',
            os.path.join(src, '*Radial_Error_QL.jpg'),
            os.path.join(dst, dst_rep)
        ])
        os.system(cmd)
        #Prepare Files (gdal warp): rescale the 0/1 binary mask to 0-255 PNG
        src_file = glob.glob(
            os.path.join(src, '*confidence_binaryMask_*rescale.tif'))[0]
        dst_file = os.path.join(dst, dst_rep, 'dc-confidence_mask.png')
        cmd = ' '.join([
            'gdal_translate -ot Byte -scale 0 1 0 255', src_file, dst_file,
            '-of PNG'
        ])
        os.system(cmd)
        #Copy Results
    #Manage output when Stability Monitoring
    if interest == 'stabilityMonitoring':
        dst = self.stability_monitoring_report_location
def geocoded(self,workimage,refimageName,DIROUT) :
    """Geocode the four Medicis correlation sub-datasets (mask, dx, dy, dc)
    from the grille.hdf grid onto the geometry of *workimage*.

    Each subdataset receives the projection of *workimage* and a geotransform
    whose pixel size is scaled by the (rounded) image/grid size ratio, is
    saved as GTiff, then tagged with an IMAGEDESCRIPTION metadata item via an
    external gdal_edit.py call.

    [workimage]    full-resolution image providing projection/geotransform
    [refimageName] reference image name, only used in the metadata tag
    [DIROUT]       output directory passed to set_output_name()
    """
    if not self.grille_valid :
        log.warn('Missing Grille.hdf file')
        # NOTE(review): if DC already exists the results are assumed to be
        # geocoded from a previous run - confirm this is the intent
        if self.dc_valid :
            self.geocoded_valid = True
    else :
        self.set_output_name(workimage,DIROUT)
        log.infog('--- Geocoded Medicis Results --- ')
        src_filename = self.grille
        dst_ds0_filename = self.mask
        dst_ds1_filename = self.dx
        dst_ds2_filename = self.dy
        dst_ds3_filename = self.dc
        src_ds = gdal.Open(str(src_filename))
        src_sds_name = src_ds.GetSubDatasets()
        # subdataset order assumed: 0=mask, 1=dx, 2=dy, 3=dc - TODO confirm
        # against the grille.hdf layout produced by Medicis
        mask_data = gdal.Open(src_sds_name[0][0], gdal.GA_Update)
        dx_displacement_data = gdal.Open(src_sds_name[1][0], gdal.GA_Update)
        dy_displacement_data = gdal.Open(src_sds_name[2][0], gdal.GA_Update)
        dc_confidence_data = gdal.Open(src_sds_name[3][0], gdal.GA_Update)
        format = 'GTiff'
        driver = gdal.GetDriverByName(format)
        src_ds_prj = gdal.Open(workimage)
        projection = src_ds_prj.GetProjection()
        geotransform = src_ds_prj.GetGeoTransform()
        log.info(' ')
        log.info('Origin = ('+str(geotransform[0])+' , '+str(geotransform[3])+')')
        log.info('Input Pixel Size = ('+str(geotransform[1])+' , '+str(geotransform[5])+')')
        pixelXSize_Geo = geotransform[1]
        pixelYSize_Geo = geotransform[5]
        geotransformOut = geotransform
        # rounded undersampling factor: full image size / correlation grid size
        scX = int(np.true_divide(src_ds_prj.RasterXSize, mask_data.RasterXSize) + 0.5)
        scY = int(np.true_divide(src_ds_prj.RasterYSize, mask_data.RasterYSize) + 0.5)
        l = list(geotransformOut)
        l[1] = pixelXSize_Geo * scX
        l[5] = pixelYSize_Geo * scY
        geotransformOut = tuple(l)
        #print ' ==> ' + str(scX) + ' ' + str(scY)
        log.info('Output Pixel Size = ('+str(geotransformOut[1])+' , '+str(geotransformOut[5])+')\n')
        #print ' '
        mask_data.SetProjection(projection)
        dx_displacement_data.SetProjection(projection)
        dy_displacement_data.SetProjection(projection)
        dc_confidence_data.SetProjection(projection)
        mask_data.SetGeoTransform(geotransformOut)
        dx_displacement_data.SetGeoTransform(geotransformOut)
        dy_displacement_data.SetGeoTransform(geotransformOut)
        dc_confidence_data.SetGeoTransform(geotransformOut)
        dst_ds0 = driver.CreateCopy(dst_ds0_filename, mask_data, 0)
        dst_ds1 = driver.CreateCopy(dst_ds1_filename, dx_displacement_data, 0)
        dst_ds2 = driver.CreateCopy(dst_ds2_filename, dy_displacement_data, 0)
        dst_ds3 = driver.CreateCopy(dst_ds3_filename, dc_confidence_data, 0)
        log.info( ' ')
        log.info('Creation de Mask data :' + dst_ds0_filename)
        log.info('Creation de DX Displacement :' + dst_ds1_filename)
        log.info('Creation de DY Displacement :' + dst_ds2_filename)
        log.info('Creation de DC Confidence :' + dst_ds3_filename + '\n')
        # release GDAL handles so files are flushed to disk
        mask_data = None
        # NOTE(review): the three names below do not match the *_data handles
        # opened above (dx_displacement_data / dy_displacement_data /
        # dc_confidence_data), so those datasets are never released here -
        # confirm whether this is a latent bug
        dx_displacement = None
        dy_displacement = None
        dc_confidence = None
        dst_ds0 = None
        dst_ds1 = None
        dst_ds2 = None
        dst_ds3 = None
        src_ds = None
        workimageName = workimage
        # hard-coded user path to gdal_edit.py - TODO move to configuration
        gdal_edit = os.path.join('/home/saunier/swig/python/scripts/', 'gdal_edit.py')
        param = [ 'python2.7', gdal_edit,
            '-mo META-TAG_IMAGEDESCRIPTION="Correlation Validity Flag, Ref/work Images:',
            str(refimageName), '/', str(workimageName), '"', dst_ds0_filename]
        cmd = ' '.join(param)
        os.system(cmd)
        param = [ 'python2.7', gdal_edit,
            '-mo META-TAG_IMAGEDESCRIPTION="Correlation Line Displacements, Ref/work Images:',
            str(refimageName), '/', str(workimageName), '"', dst_ds1_filename]
        cmd = ' '.join(param)
        os.system(cmd)
        param = [ 'python2.7', gdal_edit,
            '-mo META-TAG_IMAGEDESCRIPTION="Correlation Pixel Displacements, Ref/work Images:',
            str(refimageName), '/', str(workimageName), '"', dst_ds2_filename]
        cmd = ' '.join(param)
        os.system(cmd)
        param = [ 'python2.7', gdal_edit,
            '-mo META-TAG_IMAGEDESCRIPTION="Correlation Confidence Matrix, Ref/work Images:',
            str(refimageName), '/', str(workimageName), '"', dst_ds3_filename]
        cmd = ' '.join(param)
        os.system(cmd)
        self.dx_valid = True
        self.dy_valid = True
        self.dc_valid = True
        self.mask_valid = True
        self.geocoded_valid = True
def get_statistics(self,productWorkingDirectory,im,confidence_threshold) : #Filter DX , DY with IM (radial error) print ' ' log.infog(' -- Get statistics ') dc = self.dc dx = self.dx dy = self.dy log.info(' -- Create binary mask') log.info(' -- Threshold confidence Image @ value > '+str(confidence_threshold)) list_image = [] list_image.append(dc) im_st = im_p.image(list_image) im_st.histerysisThreshold(confidence_threshold,productWorkingDirectory) dc = im_st.masked_image dc_mask = im_st.mask_image #BINARY MASK im_st = None im_st = im_p.image([dc,dc_mask]) n_value = 3.5 #SIGMA VALUE threshold = confidence_threshold #CONFIDENCE VALUE - log.info(' -- Set null value on DC for Im value > '+str(n_value)+ 'sigma') #@Coordinates where IM values > 3.5 sigma, DC == 0 if (im_st.sigma_threshold(im,n_value,confidence_threshold)) : print 'Update of DC Image :'+im_st.masked_image print 'Update of DC Mask :'+im_st.mask_image im_masked = im_st.masked_image #UPDATE DC / DC MASK WITH 3 SIGMA THRESHOLD ON DX , DY dc_filename = im_masked dc_ds = gdal.OpenShared(dc_filename) dc_array = dc_ds.GetRasterBand(1).ReadAsArray() #dc_mask_ds = gdal.OpenShared(dc_mask_filename) #dc_mask_array = dc_mask_ds.GetRasterBand(1).ReadAsArray().astype(np.bool) #Load Input Dataset - Image (DX ou DY) imx = dx im_ds = gdal.OpenShared(imx) im_array_x = im_ds.GetRasterBand(1).ReadAsArray() imy = dy im_ds2 = gdal.OpenShared(imy) im_array_y = im_ds2.GetRasterBand(1).ReadAsArray() #SELECT PIXELS WHERE CONFIDENCE > Threshold #Statistiscs - keep only values of IM where DC above threshold vx = im_array_x[dc_array >= confidence_threshold] vy = im_array_y[dc_array >= confidence_threshold] log.info (' DX / DY Native statistics : ' ) if (np.size(vx)) > 0 : print('%%%%% min max median mean std ' ) mi = np.min(vx) mx = np.max(vx) md = np.median(vx) moy = np.mean(vx) std = np.std(vx) print ('%%%%% DX (line) : '+str(mi)+' '+str(mx)+' '+str(md)+' '+str(moy)+' '+str(std)+'\n') mi = np.min(vy) mx = np.max(vy) md = 
np.median(vy) moy = np.mean(vy) std = np.std(vy) print ('%%%%% DY (pixel) : '+str(mi)+' '+str(mx)+' '+str(md)+' '+str(moy)+' '+str(std)+'\n') SUCCESS = True else : log.warn ('No data in DC above confidence threshold ' ) SUCCESS = False
def maskImage(self, mask, dst_rep_name):
    # Applied mask_to_image_list can rescale mask to fit with image size
    #Only one Mask applied to the list
    #For each image in image_list, if Binary mask = 1 then pixel value of image is kept
    #For each image in image_list, if Binary mask = 0 then pixel value of image is set to '0'
    # [msk ] Name of the binary mask
    # [dst_rep_name ] Repository where new images are stored
    #
    #MASK is rescalled to the input image size
    # NOTE(review): despite the comments above only image_list[0] is masked,
    # not every image in the list. On success sets self.mask_image_valid,
    # self.mask_image (rescaled mask) and self.masked_image (output); on a
    # size mismatch it warns and returns without setting them.
    log.infog(' - Start mask the confidence image with land sea mask \n')
    #Name of Rescaled Land Sea Mask
    input_image = self.image_list[0]
    mask_rad = os.path.basename(mask).split('.')[0]
    mask_path = os.path.dirname(mask)
    mask_rescale = os.path.join(mask_path, mask_rad + '_rescale.tif')
    self.mask_image = mask_rescale
    mask_tmp = os.path.join(mask_path, mask_rad + '_tmp.tif')
    if os.path.exists(mask_rescale):
        os.remove(glob.glob(mask_rescale)[0])
    #save input to OLD
    # 'r' is assumed to be the re module imported under that alias - confirm
    input_image_old = r.sub('.tif|.TIF', '_OLD.TIF', input_image)
    shutil.copy(input_image, input_image_old)
    #define output image
    input_file_name = os.path.basename(input_image)
    output_file_name = r.sub('.tif|.TIF', '_masked.TIF', input_image)
    dst_file_name = os.path.join(dst_rep_name, output_file_name)
    self.masked_image = dst_file_name
    #src_ds - cor_conf_image
    src_ds = gdal.Open(str(input_image))
    geotransform = src_ds.GetGeoTransform()
    print '-- Origin = (', geotransform[0], ',', geotransform[3], ')'
    print '-- Input Pixel Size = (', geotransform[1], ',', geotransform[
        5], ')'
    #msk_ds - land_sea_mask
    mask_ds = gdal.Open(str(mask))
    #Get bands
    dc_array = src_ds.GetRasterBand(1).ReadAsArray()
    mask_array = mask_ds.GetRasterBand(1).ReadAsArray()
    nb_line = dc_array.shape[0]
    nb_col = dc_array.shape[1]
    #Rescale mask to input image scale - 2 stages
    # stage 1: match the pixel size of the input image
    # NOTE(review): geotransform[1] is used for BOTH -tr arguments, assuming
    # square pixels - confirm
    cmd = ' '.join(['gdal_translate -of GTiff','-strict -tr ',str(geotransform[1]),' ', \
                    str(geotransform[1]),
                    # '-outsize ',str(nb_line),' ',str(nb_col),
                    ' -r nearest', mask,mask_tmp])
    os.system(cmd)
    # stage 2: force the exact pixel dimensions of the input image
    cmd = ' '.join([
        'gdal_translate -of GTiff', '-strict ', '-outsize ',
        str(nb_col), ' ',
        str(nb_line), ' -r nearest', mask_tmp, mask_rescale
    ])
    os.system(cmd)
    #Applied Land Sea Mask to Correlation Confidence Image :
    mask_ds = gdal.Open(str(mask_rescale))
    mask_array = mask_ds.GetRasterBand(1).ReadAsArray()
    nb_line_1 = mask_array.shape[0]
    nb_col_2 = mask_array.shape[1]
    if (nb_line_1 != nb_line):
        log.warn('-- The image size of Mask and Confidence Different \n')
        log.warn(' '.join([
            '-- Masque line_nbr x col_nbr : ',
            str(nb_line_1), 'X',
            str(nb_col_2), ' \n'
        ]))
        log.warn(' '.join([
            '-- Confident Image line_nbr x col_nbr : ',
            str(nb_line), 'X',
            str(nb_col), ' ', ' \n'
        ]))
        return
    else:
        self.mask_image_valid = True
    # zero out confidence wherever the (rescaled) mask is 0
    m1 = np.copy(dc_array)
    m1[mask_array == 0] = 0
    tmp_ds = gdal.GetDriverByName('MEM').CreateCopy('', src_ds, 0)
    tmp_ds.GetRasterBand(1).WriteArray(m1, 0, 0)
    #Write output
    gdal.GetDriverByName('GTiff').CreateCopy(dst_file_name, tmp_ds, 0)
    tmp_ds = None
    src_ds = None
    os.remove(mask_tmp)
    log.infog(
        ' - [End] -> Mask the confidence image with land sea mask \n')
    return
def __init__(self):
    """Build the on-disk layout for processing, results, configuration and
    reference data.

    Creates (via shell mkdir) the processing tree
        <PROCESSING>/<interest>/{done,input,wd}
    and the result tree <RESULT>/<interest> for each interest, then records
    all configuration-file and reference-data paths as attributes.

    Fixes vs original: the three copy-pasted per-subdirectory mkdir blocks
    are collapsed into one loop, and the log message now names the actual
    sub-directory (the 'wd' branch previously logged "input directories").
    Reads the module-level constants REFERENCE, INPUT_DATA, CODE, CODE_m,
    PROCESSING, RESULT and CONFIGURATION.
    """
    infra_dir = REFERENCE
    self.input_data_location = INPUT_DATA
    self.code_location = CODE
    self.matlab_processing = CODE_m
    self.processing_location = PROCESSING
    if os.path.exists(self.processing_location) is False:
        cmd = ' '.join(['mkdir -v', self.processing_location])
        os.system(cmd)
    interest_list = [
        'stabilityMonitoring', 'directLocation', 'interbandRegistration'
    ]
    for interest in interest_list:
        # one 'done' / 'input' / 'wd' working sub-directory per interest
        for sub_rep in ('done', 'input', 'wd'):
            interest_dir = os.path.join(self.processing_location, interest,
                                        sub_rep)
            if os.path.exists(interest_dir) is False:
                cmd = ' '.join(['mkdir -vp', interest_dir])
                os.system(cmd)
                log.infog(' -- Create ' + interest + ' and ' + sub_rep +
                          ' directories \n')
    self.result_location = RESULT
    if os.path.exists(self.result_location) is False:
        cmd = ' '.join(['mkdir -v', self.result_location])
        os.system(cmd)
    for interest in interest_list:
        th_rep = os.path.join(self.result_location, interest)
        if os.path.exists(th_rep) is False:
            cmd = ' '.join(['mkdir -v', th_rep])
            os.system(cmd)
    #RESULT
    self.result_radiometricStability = Radio_Txt_File(
        os.path.join(self.result_location, 'stabilityMonitoring',
                     'radio_stability.txt'))
    #CONFIGURATION
    self.configuration = CONFIGURATION
    self.configuration_assessment_description = os.path.join(
        self.configuration, 'assessmentControler.xml')
    self.configuration_site_description_file = os.path.join(
        self.configuration, 'desc_Site.xml')
    self.configuration_reference_description = os.path.join(
        self.configuration, 'reference_data_description.xml')
    self.configuration_medicis = os.path.join(
        self.configuration, 'Medicis_General_correl_tm.txt')
    #REFERENCE DATA
    self.reference_data_raster_file_location = os.path.join(
        infra_dir, 'RASTER')
    self.reference_data_vector_file_location = os.path.join(
        infra_dir, 'VECTOR')
    self.reference_data_dem_file_location = os.path.join(infra_dir, 'DEM')
    self.reference_data_meteo_file_location = os.path.join(
        infra_dir, 'METEO')
def histerysisThreshold(self, threshold, dst_rep_name):
    # HISTERYSIS Thresholding -
    #
    # Output images are "Int" type and ready for QL Generation
    # [threshold] : To apply to input image
    # [dst_rep_name] : Where to store results
    # |__> maskedImage (input with values < threshold zeroed)
    # |__> binaryMask (pixels affected by thresholding: 0 below, 1 at/above)
    #
    # Side effects: writes both files under dst_rep_name and sets
    # self.confidence_threshold, self.masked_image and self.mask_image.
    src_ds = gdal.Open(str(self.image_list[0])) #DC
    self.confidence_threshold = threshold
    array = src_ds.GetRasterBand(1).ReadAsArray() #dc_array
    log.infog(' - Start -> Threshold Input Images \n')
    output_array = np.copy(array)
    ##Applied Threshold
    output_array[array < threshold] = 0
    binary_array = np.copy(array)
    binary_array[array < threshold] = 0
    binary_array[array >= threshold] = 1
    #Save Masked Image
    tmp_ds = gdal.GetDriverByName('MEM').CreateCopy('', src_ds, 0)
    tmp_ds.GetRasterBand(1).WriteArray(output_array, 0, 0)
    ##Write output
    # output name embeds the threshold as a percentage, e.g. _maskedImage_80
    tmp_file_name = os.path.basename(self.image_list[0])
    suff = ''.join(['_maskedImage_', str(np.int(threshold * 100)), '.TIF'])
    new_name = r.sub('.tif|.TIF', suff, tmp_file_name)
    dst_file_name = os.path.join(dst_rep_name, new_name)
    gdal.GetDriverByName('GTiff').CreateCopy(dst_file_name, tmp_ds, 0)
    self.masked_image = dst_file_name
    tmp_ds = None
    #Save Binary Mask Image
    tmp_ds = gdal.GetDriverByName('MEM').CreateCopy('', src_ds, 0)
    tmp_ds.GetRasterBand(1).WriteArray(binary_array, 0, 0)
    ##Write output
    # the mask is first written full-depth to a *tmp.TIF, then converted to
    # Byte with gdal_translate and the tmp file removed
    tmp_file_name = os.path.basename(self.image_list[0])
    suff = ''.join(
        ['_binaryMask_', str(np.int(threshold * 100)), 'tmp.TIF'])
    new_name1 = r.sub('.tif|.TIF', suff, tmp_file_name)
    dst_file_name1 = os.path.join(dst_rep_name, new_name1)
    suff = ''.join(['_binaryMask_', str(np.int(threshold * 100)), '.TIF'])
    # NOTE(review): this sub only matches upper-case '.TIF' (unlike the
    # '.tif|.TIF' pattern above) - a lower-case input would keep its suffix;
    # confirm inputs are always .TIF
    new_name2 = r.sub('.TIF', suff, tmp_file_name)
    dst_file_name2 = os.path.join(dst_rep_name, new_name2)
    gdal.GetDriverByName('GTiff').CreateCopy(dst_file_name1, tmp_ds, 0)
    cmd = ' '.join(
        ['gdal_translate -ot Byte', dst_file_name1, dst_file_name2])
    os.system(cmd)
    cmd = ' '.join(['rm -f', dst_file_name1])
    os.system(cmd)
    self.mask_image = dst_file_name2
    tmp_ds = None
    src_ds = None
    log.info(' -- Create : ' + self.mask_image + '\n')
    log.info(' -- Create : ' + self.masked_image + '\n')
    log.infog(' - [End] -> Hysterisis threshold \n')
# Artnet bridge start-up: open the Art-Net UDP socket, init DMX state and
# parse command-line arguments.
OSCinPort = 8032
# UDP socket bound on all interfaces; 6454 is presumably the standard
# Art-Net port - confirm
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('', 6454))
dmxeq = {}
dmxstates = []
dmxinit = False
universe = []
# -1 marks a channel whose state is not yet known.
# NOTE(review): range(1, 514) appends 513 entries while a DMX universe has
# 512 channels - confirm the extra slot is intentional (e.g. 1-based index)
for i in range(1, 514):
    dmxstates.append(-1)
print("")
log.infog("Artnet v0.1")
print("Arguments parsing if needed...")
argsparser = argparse.ArgumentParser(description="Artnet & DMX for LJ")
argsparser.add_argument("-u",
                        "--universe",
                        help="Universe, not implemented (0 by default)",
                        type=int)
argsparser.add_argument("-s",
                        "--subuniverse",
                        help="Subniverse, not implemented (0 by default)",
                        type=int)
argsparser.add_argument(
    "-r",
    "--redisIP",
    help="IP of the Redis server used by LJ (127.0.0.1 by default) ",
    type=str)
if __name__ == '__main__': if len(sys.argv) == 2 : #The block below is not Valid - Enter Valid atest block print sys.argv[1] product = glob.glob(sys.argv[1]) print product if len(product) == 0 : log.err('-- Invalid Product') else : product = os.path.abspath(product[0]) log.infog(" -- Interband Registration with product : "+product+"\n") infra=main_infra.mainInfra() band_type = 'pan' interband = interband(band_type); trigger_interband_processing(product,interband) else: infra=main_infra.mainInfra() infra.checkDirectoryPresence() path_to_product=os.path.join(infra.processing_location,interest,'input','*') product_list=glob.glob(path_to_product) if len(product_list) > 0 :
def trigger_interband_processing(product,interband):
    """Run the full inter-band registration chain for one Landsat *product*.

    For every ROI of the product and every band pair of the configured
    band_type: correlate (Medicis), geocode the results, filter by land/sea
    mask, confidence and 3-sigma thresholds, compute the radial error,
    generate quicklooks, run the MATLAB post-processing and publish results
    to the HTTP report area.

    [product]   path to the product directory (contains ROI* sub-dirs)
    [interband] interband configuration object (band_type, channels, paths)

    NOTE(review): relies on module-level globals `interest`, `confidence`
    and `band_combination` - confirm they are set before this is called.
    NOTE(review): on a geocoding or 3-sigma failure the function returns
    immediately, skipping all remaining band pairs AND ROIs.
    """
    #Recall Object
    infra=main_infra.mainInfra()
    mtl = metadata_extraction.LandsatMTL(product)
    scene_id = mtl.landsat_scene_id
    mtl.set_test_site_information(infra.configuration_site_description_file)
    country_name=((mtl.test_site[0]).split())[0]
    site_name=((mtl.test_site[0]).split())[1]
    repo_ref=os.path.join(infra.reference_data_raster_file_location,country_name,site_name,'ROI');
    #Access to assessment manager ?
    band_list=assessmentManager_get_band_list(infra,mtl)
    # mtl=None ? (review: should mtl be released here?)
    roi_list=[rec for rec in glob.glob(os.path.join(product,'ROI*'))]
    #http class : interface with web server
    http = http_side.performance_report(product)
    band_type = interband.band_type
    interband.set_main_wd(infra,band_type)
    #-- MLAB output file initialisation
    file_label = interband.file_label
    ext = '.mat'
    result_mat_file= interband.get_output_filename(mtl,infra,ext)
    #-- Summary Statistisque file_label - same level of mat file but
    # just list results in a CSV.
    ext = '.txt'
    result_sta_file= interband.get_output_filename(mtl,infra,ext)
    #-- Loop on all image roi, perform correlation and statistics
    for roi in roi_list:
        interband.roi = roi
        roi_name=os.path.basename(roi).split('_')[1]
        print ' '
        print ' -------------------------'
        log.infog(' -- Processing of '+roi_name+' : ')
        mtl = metadata_extraction.LandsatMTL(roi)
        mtl.set_test_site_information(infra.configuration_site_description_file)
        mtl.add_roi_name_information(roi_name)
        #START Image Matching Loop
        wd_list = []
        for band_twin in band_combination[band_type]:
            interband.set_ref_channel(band_twin)
            test1=interband.search_image_twin(product)
            if test1 :
                if (interband.ref_channel == '8') :
                    # Landsat pan band: rescale the 15 m reference to 30 m
                    if interband.ref_pixel_size == 15 :
                        log.infog(' -- Input Image scale is '+str(interband.ref_pixel_size)+' m --')
                        log.infog(' -- Input Image is rescaled '+str(interband.work_pixel_size)+' m --')
                        px_size = str(interband.work_pixel_size)
                        i_file = interband.refImage
                        o_file = interband.refImage.replace('B8.TIF','B8_30.TIF')
                        if not os.path.exists(o_file) :
                            cmd = ' '.join(['gdalwarp -tr ', px_size,px_size, i_file, o_file])
                            os.system(cmd)
                        interband.refImage = o_file
                        src_ds = None
                band_type = interband.band_type
                roi = interband.roi
                ref_channel = interband.ref_channel
                work_channel = interband.work_channel
                wd_name = (mtl.landsat_scene_id+'_'+os.path.basename(roi)+'_BD_REF'+ref_channel+'_BD_WORK'+work_channel)
                productWorkingDirectory=os.path.join(infra.processing_location,interest,'wd_'+band_type,wd_name)
                cor = correl_image(os.path.join(productWorkingDirectory,'grille.hdf'))
                print cor.grille
                print interband.refImage
                # run the Medicis correlation; returns the (possibly new) wd
                productWorkingDirectory = image_matching(mtl, infra, interband, cor)
                wd_list.append(productWorkingDirectory)
                cor = correl_image(os.path.join(productWorkingDirectory,'grille.hdf'))
                #GEOCODING
                log.infog(" -- Geocoded Medicis Results")
                inputImage = interband.workImage
                refimageName = interband.workImage #? (review: ref == work here, confirm)
                #Size of input Full Image (correlation undersampled input images ...)
                src_filename = inputImage
                src_ds = gdal.Open(str(src_filename))
                image_width = src_ds.RasterXSize
                image_length = src_ds.RasterYSize
                src_ds = None
                cor.geocoded(inputImage,refimageName,productWorkingDirectory)
                if (cor.geocoded_valid) :
                    dx = cor.dx
                    dy = cor.dy
                    dc = cor.dc
                    mask = cor.mask
                else :
                    log.warning(" -- [ERROR ] No Correlation Results in ")
                    log.warning(" -- [ERROR ] No Correlation Results in ")
                    return
                #FILTERING : IF MASK AVAILABLE - APPLIED
                log.infog(" -- Mask Confidence Image with land_sea mask \n")
                land_sea_mask = os.path.join(product,roi,'land_sea_mask.tif')
                if os.path.exists(land_sea_mask):
                    log.infog(" --- Land Sea Mask Exist : "+land_sea_mask)
                    input_image = dc
                    mask = land_sea_mask
                    log.infog(" --- Applied Land Sea Mask to confidence image")
                    list_image = []
                    list_image.append(input_image)
                    im_st = im_p.image(list_image)
                    im_st.maskImage(mask,productWorkingDirectory)
                    log.info(" -- Create "+im_st.masked_image+' \n')
                    # overwrite the original DC with the masked version
                    cmd = ' '.join(['mv', im_st.masked_image,dc])
                    os.system(cmd)
                    im_st = None
                #FILTERING : APPLIED CONFIDENCE THRESHOLD ON DC - can applied to DX,DY,DC
                list_image = []
                list_image.append(dc)
                im_st = im_p.image(list_image)
                im_st.histerysisThreshold(confidence,productWorkingDirectory)
                dc = im_st.masked_image
                dc_mask = im_st.mask_image #BINARY MASK
                im_st = None
                #UPDATE DC / DC MASK WITH 3 SIGMA THRESHOLD ON DX , DY
                im_st = im_p.image([dc,dc_mask])
                n_value = 3 #SIGMA VALUE
                threshold = confidence #CONFIDENCE VALUE -
                log.infog(" --- Applied Sigma Threshold to DX ")
                DX_proc_flag = im_st.sigma_threshold(dx,n_value,confidence)
                log.infog(" --- Applied Sigma Threshold to DY ")
                DY_proc_flag = im_st.sigma_threshold(dy,n_value,confidence)
                dc = im_st.masked_image
                dc_mask = im_st.mask_image #BINARY MASK
                # overwrites the original DC - a copy is kept in "OLD"
                cmd = ' '.join(['mv', im_st.masked_image,cor.dc])
                os.system(cmd)
                im_st = None
                print "Exit Processing if cannot applied 3 sigma threshold "
                if ((not DX_proc_flag) or (not DY_proc_flag)) :
                    log.err(" -- Correlation is NOT successfull")
                    log.err(" -- After 3 Sigma threshold ")
                    log.err(" -- There is no sufficient point above the confidence threshold ")
                    log.err(" -- Processing Abort and record not added to the m lab structure ")
                    return
                #APPLIED DC BINARY MASK TO DX, DY
                mask = dc_mask
                list_image = []
                list_image.append(dx)
                im_st = im_p.image(list_image)
                im_st.maskImage(mask,productWorkingDirectory)
                log.info(" -- Create "+im_st.masked_image+' \n')
                dx = im_st.masked_image
                im_st = None
                list_image = []
                list_image.append(dy)
                im_st = im_p.image(list_image)
                im_st.maskImage(mask,productWorkingDirectory)
                log.info(" -- Create "+im_st.masked_image+' \n')
                dy = im_st.masked_image
                im_st = None
                log.infog(" -- Mask :"+dc_mask)
                log.infog(" -- DX :"+dx)
                log.infog(" -- DY :"+dy)
                log.infog(" -- DC :"+dc+'\n')
                #COMPUTE DX squared + DY squared - RADIAL ERROR
                list_image = [dx,dy]
                dst_file_name = os.path.join(productWorkingDirectory,scene_id+
                                             '_B'+interband.ref_channel+
                                             '_B'+interband.work_channel+
                                             '_radialError_'+
                                             str(np.int(confidence*100))+'.tif',)
                im_st = im_p.image(list_image)
                if (im_st.computeRadialError(dst_file_name)):
                    cor.radial_error = dst_file_name
                    cor.radial_error_valid = True
                im_st = None
                #STATISTICS DX
                #STATISTICS DY
                #STATISTICS DC
                #-- QL Radial Error - Overlayed Image Band with B1 B2 B3 Image
                #Rescale QL Radial Error to size of B1 , B2 , B3
                list_image = [interband.workImage]
                im1 = cor.radial_error
                dst_filename = im1.replace('.tif','_rescale.tif')
                im_st = im_p.image(list_image)
                im_st.rescaleImage(im1,dst_filename)
                cor.radial_error_rescaled = dst_filename
                im1 = dc_mask
                dst_filename = im1.replace('.TIF','_rescale.tif')
                im_st.rescaleImage(im1,dst_filename) #Geometric Rescaling
                cor.dc_mask_rescaled = dst_filename
                im_st = None
                #BURN TIF IMAGES WITH RADIAL ERRORS
                #Convert each image to 8 Bits
                list_image = []
                list_image.append(glob.glob(os.path.join(product,roi,'*B2.TIF'))[0]) # QL Generation
                list_image.append(glob.glob(os.path.join(product,roi,'*B3.TIF'))[0]) # QL Generation
                list_image.append(glob.glob(os.path.join(product,roi,'*B4.TIF'))[0]) # QL Generation
                im_st = im_p.image(list_image)
                dst_repo = productWorkingDirectory
                output_list = im_st.byte_rescaling(dst_repo )
                im_st = None
                #Burn each one tif images
                list_image = output_list
                output_list = []
                for image in list_image:
                    output_list.append(image.replace('.tif','_burn.tif'))
                im_st = im_p.image(list_image)
                im_filename = cor.radial_error_rescaled #Radial Error Value map to QL
                mask_filename = cor.dc_mask_rescaled #DC Mask to select pixel
                im_st.burn_image(output_list,im_filename,mask_filename)
                im_st = None
                #Create the quick look
                image_data_list = output_list
                ql_name = os.path.join(productWorkingDirectory,scene_id+
                                       '_B'+interband.ref_channel+
                                       '_B'+interband.work_channel+
                                       '_Radial_Error_QL.jpg')
                outputdir = productWorkingDirectory
                quick_look_resolution = 30
                im_st = im_p.image(image_data_list) # Create Object instance
                im_st.createimageQuicklook(ql_name,outputdir,quick_look_resolution)
                #-- MLAB Production
                #result file is matlab file (.mat)
                pixelSpacing=interband.ref_pixel_size #Pixel spacing of the orginal Image
                #[NFO] Mlab_run - expects the normalised dx,dy,dc files:
                # M=dir([obj.repName filesep '*_dx-displacement.TIF']);
                # M=dir([obj.repName filesep '*_dy-displacement.TIF']);
                # M=dir([obj.repName filesep '*_dc-confidence.TIF']);
                Mlab_run(mtl, result_mat_file, productWorkingDirectory, pixelSpacing)
                # MINIMUM WAIT OF 30 s: let the asynchronous MATLAB run
                # finish writing the .mat file before continuing
                print (' ')
                log.warn('TIME SLEEP ACTIVIATED - 30 s - Wait end of M LAB PROCESSING')
                log.warn('TIME SLEEP ACTIVIATED - 30 s - Important to let Completion')
                log.warn('TIME SLEEP ACTIVIATED - 30 s - of the mat file ')
                print (' ')
                time.sleep(30)
                #--- 2. Repatriate BD2 BD3 BD4 Images
                #--- 3. Open Images and Burn Values
                #--- 4. Create QL
                #-- CP to the HTTP
                src_directory = productWorkingDirectory
                #-- Copy MLAB Results (PNG) to repository
                #-- Copy DC mask image (PNG)to http repository
                #-- Copy Radial Error image (JPG)to http repository
                http.updateContent(productWorkingDirectory,interest,band_type)
                #Remove the hdf file as output from medicis
                hdf_grille = os.path.join(productWorkingDirectory,'grille.hdf')
                if os.path.exists(hdf_grille):
                    cmd = ' '.join(['rm -f ',hdf_grille])
                    os.system(cmd)
        #[END] of processing the band twin LOOP
        log.infog(" - End Loop on WD LIST \n")
        #Generate A single stat/csv for all Report WD in this processing
        log.infog(" - [STA File] Create STA File : "+result_sta_file)
        create_statistics_report(wd_list,result_sta_file)
        # HTTP EXPORT - review: should be split out of this function
        print ( ' ')
        log.infog(" - Populate HTTP \n")
        # POPULATE HTTP WITH ALL RESULTS :
        #-- MAT FILE :
        dst_file_name = ''.join([mtl.landsat_scene_id,'_','roi_',roi_name,'_inter_',band_type,'.mat'])
        if not (http.updateContentWith_FILE(result_mat_file,dst_file_name,interest,band_type)):
            # log.info(" -- [MAT File] Copy From : "+result_mat_file)
            # log.info(" -- [MAT File] to : "+dst_file_name)
            # else :
            log.err(" -- [ERROR ] Input / Output MAT FILE Missing+'\n' ")
        #-- STA FILE :
        dst_file_name = ''.join([mtl.landsat_scene_id,'_','roi_',roi_name,'_inter_',band_type,'.txt'])
        if not (http.updateContentWith_FILE(result_sta_file,dst_file_name,interest,band_type)):
            # log.info(" -- [MAT File] Copy From : "+result_sta_file)
            # log.info(" -- [MAT File] to : "+dst_file_name)
            # else :
            log.err(" - [ERROR ] Input / Output TXT-STA FILE Missing+'\n' ")
        #-- QL
        listql=[]
        listql.append(glob.glob(os.path.join(product,roi,'*B2.TIF'))[0]) # QL Generation
        listql.append(glob.glob(os.path.join(product,roi,'*B3.TIF'))[0]) # QL Generation
        listql.append(glob.glob(os.path.join(product,roi,'*B4.TIF'))[0]) # QL Generation
        image_data_list = listql
        ql_file_name = ''.join([mtl.landsat_scene_id,'_','roi_',roi_name,'_bd234.jpg'])
        ql_name = os.path.join(interband.main_wd,ql_file_name)
        log.info(" -- [QL ] Create ROI QL : "+ql_name)
        dst_dir = productWorkingDirectory
        quick_look_resolution = 30
        outputdir = interband.main_wd
        im_st = im_p.image(listql) # Create Object instance
        im_st.createimageQuicklook(ql_name,outputdir,quick_look_resolution)
        http.updateContentWitQL(infra,interest,band_type,ql_name)
    #End of processing each ROI
    log.infog(" - [PROC ] End ")
def makeProductSubview(self, metadata):
    #make product subview - needed for subsequent processing_location
    # -- Create repository
    # -- Copy files
    # -- Crop data according to ROI
    # -- Convert to TOA
    #
    # For every test site covered by the product and every interest
    # applicable to that site, creates
    #   <processing>/<interest>/input/<scene_id>/ROI_<ref_name>/
    # populated with the MTL file and the band images cropped to each
    # reference shapefile.  All file operations shell out via os.system.
    # [metadata] LandsatMTL-like object (test_site, interest, sensor,
    #            mission, product_path, mtl_file_name, landsat_scene_id)
    metadata.display_mtl_info()
    country_list = metadata.test_site
    site_list = metadata.test_site
    desc_site_xmldoc = minidom.parse(
        self.configuration_site_description_file)
    sites = desc_site_xmldoc.getElementsByTagName('site')
    #Loop on each site covered by the product
    #site_name corresponds to "site id" in the xml file, refer to configuration
    for k, site_chain in enumerate(site_list):
        country_name = site_chain.split(' ')[0]
        site_name = site_chain.split(' ')[1]
        site_interest = []
        for site in sites:
            site_name_att = site.getElementsByTagName(
                'id')[0].childNodes[0].data
            if (site_name_att == site_name):
                site_interest = site.getElementsByTagName('interest')
                log.infog(' -- Site Interest Found ')
        #Once the site is found - loop on its interests
        #Check if based on Assessment Manager Applicable to the input product
        #For each interest - get processing parameters :
        # -> list of images to process
        # -> shape file for ROI to be cropped
        #while verifying the interest is covered by the Manager
        for interest in metadata.interest[k].split():
            xmldoc = minidom.parse(
                self.configuration_assessment_description)
            missions = xmldoc.getElementsByTagName('mission')
            for mission in missions:
                sensor = mission.attributes["sensor"].value
                plateform = mission.attributes["plateform"].value
                if (metadata.sensor == sensor) and (metadata.mission ==
                                                    plateform):
                    # -- For interest Get Mission specific Processing paramters
                    print(' ')
                    log.info(' -- ' + interest + ' :')
                    interest_block = (
                        mission.getElementsByTagName(interest))
                    band_list = (interest_block[0].
                                 getElementsByTagName('channel')
                                 [0].childNodes[0].data).split(' ')
                    # -- Create repository
                    outputrep = os.path.join(
                        self.processing_location, interest, 'input',
                        metadata.landsat_scene_id)
                    if os.path.exists(outputrep) is False:
                        cmd = ' '.join(['mkdir -v', outputrep])
                        os.system(cmd)
                    # -- Copy initial info
                    inputrep = metadata.product_path
                    metadatafile_name = metadata.mtl_file_name
                    cmd = ' '.join([
                        'cp',
                        os.path.join(inputrep, metadatafile_name),
                        outputrep
                    ])
                    os.system(cmd)
                    # -- Define the image_file_list
                    # NOTE(review): each entry is the LIST returned by
                    # glob.glob (possibly empty), not a path - confirm
                    # crop_on_roi_gdal_warp expects that shape
                    image_file_list = []
                    for rec in band_list:
                        reg = ''.join(
                            ['*B', rec.replace(' ', ''), '.TIF'])
                        image_file_list.append(
                            glob.glob(os.path.join(inputrep, reg)))
                    # -- Append Mask if exists
                    msk_file_name = 'land_sea_mask.tif'
                    mask_image = os.path.join(
                        inputrep, msk_file_name)
                    if os.path.exists(mask_image):
                        image_file_list.append(
                            glob.glob(mask_image))
                        log.info(' --- mask Image :' + mask_image)
                    for rec in image_file_list:
                        log.info(' --- Image List record :' + str(rec))
                    # -- For each interest, site Crop Data According to roi defined by shapefile
                    country = country_name
                    vector = self.reference_data_vector_file_location
                    ref_type = 'vector'
                    # -- Print self.configuration_reference_description
                    # -- Print self.reference_data_vector_file_location
                    ref_1 = ref.referenceData(
                        self.configuration_reference_description,
                        self.reference_data_vector_file_location)
                    reference_file_list = ref_1.get_data_list(
                        site_name, country, interest, 'vector')
                    if reference_file_list:
                        for ref_file in reference_file_list:
                            log.info(' --- Reference File : ' +
                                     os.path.basename(
                                         ref_file).split('.')[0])
                            #Build the ROI_<site name> directory
                            roi_rep = os.path.join(
                                outputrep, ''.join([
                                    'ROI_',
                                    os.path.basename(
                                        ref_file).split('.')[0]
                                ]))
                            if os.path.exists(roi_rep) is False:
                                cmd = ' '.join(
                                    ['mkdir -v', roi_rep])
                                os.system(cmd)
                            #Copy the mtl_file_name file
                            metadatafile_name = metadata.mtl_file_name
                            if os.path.exists(
                                    os.path.join(
                                        inputrep, metadatafile_name)):
                                cmd = ' '.join([
                                    'cp',
                                    os.path.join(
                                        inputrep, metadatafile_name),
                                    roi_rep
                                ])
                                os.system(cmd)
                            #Copy the mtl_file_name file
                            #For each REF FILE crop along the roi definition
                            crop_on_roi_gdal_warp(
                                image_file_list, roi_rep, ref_file)
                            log.info(' --- End crop')
                    else:
                        log.info(
                            ' -- Site not appropriate for this interest, no reference found '
                        )
                    #Reset the image file list
                    image_file_list = []
    print('\n')
    log.infog(' - End of makeProductSubview \n')
# Aurora start-up: extend the import path for the bundled libs, pick the
# OSC implementation matching the running Python major version, and define
# the shared colour / geometry constants.
sys.path.append('../libs3')
sys.path.append(ljpath + '/../../libs3')
import gstt
is_py2 = sys.version[0] == '2'
if is_py2:
    from OSC import OSCServer, OSCClient, OSCMessage
else:
    from OSC3 import OSCServer, OSCClient, OSCMessage
import lj23layers as lj
import argparse

print()
log.infog("Aurora v0.1b")
OSCinPort = 8090
ljscene = 0

# Useful variables init.
white = lj.rgb2int(255, 255, 255)
red = lj.rgb2int(255, 0, 0)
blue = lj.rgb2int(0, 0, 255)
green = lj.rgb2int(0, 255, 0)
# Bug fix: cyan is (0, 255, 255); the original passed (255, 0, 255),
# which is magenta.
cyan = lj.rgb2int(0, 255, 255)
yellow = lj.rgb2int(255, 255, 0)

screen_size = [700, 700]
xy_center = [screen_size[0] / 2, screen_size[1] / 2]
def trigger_interband_processing_with_ms(product, interband):
    """MS variant of trigger_interband_processing: set up the product,
    then for every ROI and band pair prepare the reference image (rescaling
    the 15 m pan band 8 to 30 m when it is the reference).

    [product]   path to the product directory (contains ROI* sub-dirs)
    [interband] interband configuration object (band_type, channels, paths)

    NOTE(review): unlike the pan variant, `test1` (search_image_twin result)
    is computed but never checked here, so the band-8 rescaling runs even
    when no image twin was found - confirm this is intentional.
    """
    #Recall Object
    infra = main_infra.mainInfra()
    mtl = metadata_extraction.LandsatMTL(product)
    scene_id = mtl.landsat_scene_id
    mtl.set_test_site_information(infra.configuration_site_description_file)
    country_name = ((mtl.test_site[0]).split())[0]
    site_name = ((mtl.test_site[0]).split())[1]
    repo_ref = os.path.join(infra.reference_data_raster_file_location,
                            country_name, site_name, 'ROI')
    #Access to assessment manager ?
    band_list = assessmentManager_get_band_list(infra, mtl)
    # mtl=None ? (review: should mtl be released here?)
    roi_list = [rec for rec in glob.glob(os.path.join(product, 'ROI*'))]
    #http class : interface with web server
    http = http_side.performance_report(product)
    band_type = interband.band_type
    interband.set_main_wd(infra, band_type)
    #-- MLAB output file initialisation
    file_label = interband.file_label
    ext = '.mat'
    result_mat_file = interband.get_output_filename(mtl, infra, ext)
    #-- Summary Statistisque file_label - same level of mat file but
    # just list results in a CSV.
    ext = '.txt'
    result_sta_file = interband.get_output_filename(mtl, infra, ext)
    #-- Loop on all image roi, perform correlation and statistics
    for roi in roi_list:
        print 'roi ' + roi
        interband.roi = roi
        roi_name = os.path.basename(roi).split('_')[1]
        print ' '
        log.infog(' -------------------------')
        log.infog(' -- Processing of ' + roi_name + ' -- ')
        log.infog(' -------------------------')
        mtl = metadata_extraction.LandsatMTL(roi)
        mtl.set_test_site_information(
            infra.configuration_site_description_file)
        mtl.add_roi_name_information(roi_name)
        #START Image Matching Loop
        wd_list = []
        for band_twin in band_combination[band_type]:
            interband.set_ref_channel(band_twin)
            test1 = interband.search_image_twin(product)
            if (interband.ref_channel == '8'
                ):  # Landsat pan band: rescale the reference to 30 m
                if interband.ref_pixel_size == 15:
                    log.infog(' -- Input Image scale is ' +
                              str(interband.ref_pixel_size) + ' m --')
                    log.infog(' -- Input Image is rescaled ' +
                              str(interband.work_pixel_size) + ' m --')
                    px_size = str(interband.work_pixel_size)
                    i_file = interband.refImage
                    o_file = interband.refImage.replace('B8.TIF', 'B8_30.TIF')
                    if not os.path.exists(o_file):
                        cmd = ' '.join([
                            'gdalwarp -tr ', px_size, px_size, i_file,
                            o_file
                        ])
                        os.system(cmd)
                    interband.refImage = o_file
                    src_ds = None
def create_rgb_image(DX, DY, DC, image_rgb):
    """Stub for RGB composite creation: currently only logs start and end
    markers.  The DX/DY/DC arrays and the image_rgb destination are
    accepted but not yet used.
    """
    for marker in (' - Create RGB Image \n',
                   ' - End of Create RGB Image \n'):
        log.infog(marker)
def threshold_confidence_image(self, land_sea_mask, cor_conf_image, dst_file_name): confidence_threshold = 0.8 log.infog(' - Start mask the confidence image with land sea mask \n') #Name of Rescaled Land Sea Mask input_file = land_sea_mask land_sea_mask_rad = os.path.basename(input_file).split('.')[0] land_sea_mask_path = os.path.dirname(input_file) land_sea_mask_rescale = os.path.join( land_sea_mask_path, land_sea_mask_rad + '_rescale.tif') land_sea_tmp = os.path.join(land_sea_mask_path, land_sea_mask_rad + '_tmp.tif') if os.path.exists(land_sea_mask_rescale): os.remove(glob.glob(land_sea_mask_rescale)[0]) #save input cor_conf_image to old cor_conf_image_old = cor_conf_image.replace('.TIF', '_OLD.TIF') shutil.copy(cor_conf_image, cor_conf_image_old) #cor_conf_image src_ds = gdal.Open(str(cor_conf_image)) geotransform = src_ds.GetGeoTransform() print '-- Origin = (', geotransform[0], ',', geotransform[3], ')' print '-- Input Pixel Size = (', geotransform[1], ',', geotransform[ 5], ')' msk_ds = gdal.Open(str(land_sea_mask)) dc_array = src_ds.GetRasterBand(1).ReadAsArray() msk_array = msk_ds.GetRasterBand(1).ReadAsArray() nb_line = dc_array.shape[0] nb_col = dc_array.shape[1] #Rescale land sea mask to confidence image scale cmd = ' '.join(['gdal_translate -of GTiff','-strict -tr ',str(geotransform[1]),' ', \ str(geotransform[1]), # '-outsize ',str(nb_line),' ',str(nb_col), ' -r nearest', land_sea_mask,land_sea_tmp]) os.system(cmd) cmd = ' '.join([ 'gdal_translate -of GTiff', '-strict ', '-outsize ', str(nb_col), ' ', str(nb_line), ' -r nearest', land_sea_tmp, land_sea_mask_rescale ]) os.system(cmd) #Applied Land Sea Mask to Correlation Confidence Image msk_ds = gdal.Open(str(land_sea_mask_rescale)) msk_array = msk_ds.GetRasterBand(1).ReadAsArray() nb_line_1 = msk_array.shape[0] nb_col_2 = msk_array.shape[1] if (nb_line_1 != nb_line): log.warn('-- The image size of Mask and Confidence Different \n') log.warn(' '.join([ '-- Masque line_nbr x col_nbr : ', 
str(nb_line_1), 'X', str(nb_col_2), ' \n' ])) log.warn(' '.join([ '-- Confident Image line_nbr x col_nbr : ', str(nb_line), 'X', str(nb_col), ' ', ' \n' ])) return False m1 = dc_array m1[msk_array == 0] = 0 tmp_ds = gdal.GetDriverByName('MEM').CreateCopy('', src_ds, 0) tmp_ds.GetRasterBand(1).WriteArray(m1, 0, 0) #Write output gdal.GetDriverByName('GTiff').CreateCopy(dst_file_name, tmp_ds, 0) tmp_ds = None log.infog( ' - End -> Mask the confidence image with land sea mask \n') log.infog(' - Start -> Create Mask of the confidence image \n') #Create MASK of DC 0 below threshold and 1 above threshold m2 = dc_array m2[m1 <= confidence_threshold] = 0 m2[m1 > confidence_threshold] = 1 tmp_ds = gdal.GetDriverByName('MEM').CreateCopy('', src_ds, 0) tmp_ds.GetRasterBand(1).WriteArray(m2, 0, 0) #Write output dst2_file_name = dst_file_name.replace( '.TIF', ''.join( ['_mask_', str(np.int(confidence_threshold * 100)), '.TIF'])) gdal.GetDriverByName('GTiff').CreateCopy(dst2_file_name, tmp_ds, 0) src_ds = None msk_ds = None tmp_ds = None log.infog(' -- Create : ' + dst2_file_name + '\n') log.infog(' - End -> Create Mask of the confidence image \n') return dst_file_name, dst2_file_name