def main():
    # Connect to the PDS database - ignore engine information
    pds_session, pds_engine = db_connect(pds_db)

    # Connect to the UPC database - ignore engine information
    session, upc_engine = db_connect(upc_db)

    # ***************** Set up logging *****************
    logger = logging.getLogger('UPC_Process')
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Process.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    PDSinfoDICT = json.load(open(pds_info, 'r'))

    # Redis Queue Objects
    RQ_main = RedisQueue('UPC_ReadyQueue')
    logger.info("UPC Processing Queue: %s", RQ_main.id_name)
    RQ_lock = RedisLock(lock_obj)
    # If the queue isn't registered, add it and set it to "running"
    RQ_lock.add({RQ_main.id_name: '1'})

    proc_date_tid = get_tid('processdate', session)
    err_type_tid = get_tid('errortype', session)
    err_msg_tid = get_tid('errormessage', session)
    err_flag_tid = get_tid('error', session)
    isis_footprint_tid = get_tid('isisfootprint', session)
    isis_centroid_tid = get_tid('isiscentroid', session)
    start_time_tid = get_tid('starttime', session)
    stop_time_tid = get_tid('stoptime', session)
    checksum_tid = get_tid('checksum', session)

    # While there are items in the Redis queue and the queue is not locked
    while int(RQ_main.QueueSize()) > 0 and RQ_lock.available(RQ_main.id_name):
        # Get an (inputfile, fid, archive) tuple from the queue
        item = literal_eval(RQ_main.QueueGet().decode("utf-8"))
        inputfile = item[0]
        fid = item[1]
        archive = item[2]
        if not os.path.isfile(inputfile):
            print("{} is not a file\n".format(inputfile))
        if os.path.isfile(inputfile):
            logger.info('Starting Process: %s', inputfile)

            # @TODO refactor this logic. We're using an object to find a path,
            # returning it, then passing it back to the object so that the
            # object can use it.
            recipeOBJ = Recipe()
            recipe_json = recipeOBJ.getRecipeJSON(archive)
            recipeOBJ.AddJsonFile(recipe_json, 'upc')
            infile = workarea + os.path.splitext(
                str(os.path.basename(inputfile)))[0] + '.UPCinput.cub'
            outfile = workarea + os.path.splitext(
                str(os.path.basename(inputfile)))[0] + '.UPCoutput.cub'
            caminfoOUT = workarea + os.path.splitext(
                str(os.path.basename(inputfile)))[0] + '_caminfo.pvl'
            EDRsource = inputfile.replace(
                '/pds_san/PDS_Archive/',
                'https://pdsimage.wr.usgs.gov/Missions/')

            status = 'success'
            # Iterate through each process listed in the recipe
            for item in recipeOBJ.getProcesses():
                # If any of the processes failed, discontinue processing
                if status.lower() == 'error':
                    break
                elif status.lower() == 'success':
                    processOBJ = Process()
                    processOBJ.ProcessFromRecipe(item, recipeOBJ.getRecipe())

                    # Handle processing based on string description.
                    if '2isis' in item:
                        processOBJ.updateParameter('from_', inputfile)
                        processOBJ.updateParameter('to', outfile)
                    elif item == 'thmproc':
                        processOBJ.updateParameter('from_', inputfile)
                        processOBJ.updateParameter('to', outfile)
                        thmproc_odd = str(workarea) + str(os.path.splitext(
                            os.path.basename(inputfile))[0]) + '.UPCoutput.raw.odd.cub'
                        thmproc_even = str(workarea) + str(os.path.splitext(
                            os.path.basename(inputfile))[0]) + '.UPCoutput.raw.even.cub'
                    elif item == 'handmos':
                        processOBJ.updateParameter('from_', thmproc_even)
                        processOBJ.updateParameter('mosaic', thmproc_odd)
                    elif item == 'spiceinit':
                        processOBJ.updateParameter('from_', infile)
                    elif item == 'cubeatt':
                        band_infile = infile + '+' + str(1)
                        processOBJ.updateParameter('from_', band_infile)
                        processOBJ.updateParameter('to', outfile)
                    elif item == 'footprintinit':
                        processOBJ.updateParameter('from_', infile)
                    elif item == 'caminfo':
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', caminfoOUT)
                    else:
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)

                    pwd = os.getcwd()
                    # Iterate through the ISIS functions listed in the process object
                    for k, v in processOBJ.getProcess().items():
                        # Load a function into func
                        func = getattr(isis, k)
                        try:
                            os.chdir(workarea)
                            # Execute the function
                            func(**v)
                            os.chdir(pwd)
                            if item == 'handmos':
                                if os.path.isfile(thmproc_odd):
                                    os.rename(thmproc_odd, infile)
                            else:
                                if os.path.isfile(outfile):
                                    os.rename(outfile, infile)
                            status = 'success'
                            if '2isis' in item:
                                label = pvl.load(infile)
                                infile_bandlist = label['IsisCube']['BandBin'][
                                    PDSinfoDICT[archive]['bandbinQuery']]
                                infile_centerlist = label['IsisCube'][
                                    'BandBin']['Center']
                            elif item == 'thmproc':
                                pass
                            elif item == 'handmos':
                                label = pvl.load(infile)
                                infile_bandlist = label['IsisCube']['BandBin'][
                                    PDSinfoDICT[archive]['bandbinQuery']]
                                infile_centerlist = label['IsisCube'][
                                    'BandBin']['Center']
                        except ProcessError as e:
                            print(e)
                            status = 'error'
                            processError = item

            # Keyword definitions
            keywordsOBJ = None
            if status.lower() == 'success':
                try:
                    keywordsOBJ = UPCkeywords(caminfoOUT)
                except:
                    # Scrub characters that break PVL parsing, then retry
                    with open(caminfoOUT, 'r') as f:
                        filedata = f.read()
                    filedata = filedata.replace(';', '-').replace('&', '-')
                    filedata = re.sub(r'\-\s+', r'', filedata, flags=re.M)
                    with open(caminfoOUT, 'w') as f:
                        f.write(filedata)
                    keywordsOBJ = UPCkeywords(caminfoOUT)

                target_Qobj = session.query(upc_models.Targets).filter(
                    upc_models.Targets.targetname == keywordsOBJ.getKeyword(
                        'TargetName').upper()).first()
                instrument_Qobj = session.query(upc_models.Instruments).filter(
                    upc_models.Instruments.instrument ==
                    keywordsOBJ.getKeyword('InstrumentId')).first()

                if session.query(upc_models.DataFiles).filter(
                        upc_models.DataFiles.isisid == keywordsOBJ.getKeyword(
                            'IsisId')).first() is None:
                    test_input = upc_models.DataFiles(
                        isisid=keywordsOBJ.getKeyword('IsisId'),
                        productid=keywordsOBJ.getKeyword('ProductId'),
                        edr_source=EDRsource,
                        edr_detached_label='',
                        instrumentid=instrument_Qobj.instrumentid,
                        targetid=target_Qobj.targetid)
                    session.merge(test_input)
                    session.commit()
                Qobj = session.query(upc_models.DataFiles).filter(
                    upc_models.DataFiles.isisid == keywordsOBJ.getKeyword(
                        'IsisId')).first()
                UPCid = Qobj.upcid
                print(UPCid)

                # Block to add band information to meta_bands
                if isinstance(infile_bandlist, list):
                    index = 0
                    while index < len(infile_bandlist):
                        B_DBinput = upc_models.MetaBands(
                            upcid=UPCid,
                            filter=str(infile_bandlist[index]),
                            centerwave=infile_centerlist[index])
                        session.merge(B_DBinput)
                        index = index + 1
                else:
                    try:
                        # If infile_centerlist is in "Units" format, grab the value
                        f_centerlist = float(infile_centerlist[0])
                    except TypeError:
                        f_centerlist = float(infile_centerlist)
                    B_DBinput = upc_models.MetaBands(upcid=UPCid,
                                                     filter=infile_bandlist,
                                                     centerwave=f_centerlist)
                    session.merge(B_DBinput)
                session.commit()

                # Block to add common keywords
                testjson = json.load(open(keyword_def, 'r'))
                for element_1 in testjson['instrument']['COMMON']:
                    keyvalue = ""
                    keytype = testjson['instrument']['COMMON'][element_1]['type']
                    keyword = testjson['instrument']['COMMON'][element_1]['keyword']
                    keyword_Qobj = session.query(upc_models.Keywords).filter(
                        and_(upc_models.Keywords.typename == element_1,
                             upc_models.Keywords.instrumentid == 1)).first()
                    if keyword_Qobj is None:
                        continue
                    else:
                        keyvalue = keywordsOBJ.getKeyword(keyword)
                    if keyvalue is None:
                        continue
                    keyvalue = db2py(keytype, keyvalue)
                    try:
                        DBinput = upc_models.create_table(
                            keytype,
                            upcid=UPCid,
                            typeid=keyword_Qobj.typeid,
                            value=keyvalue)
                    except Exception as e:
                        logger.warn("Unable to enter %s into table\n\n%s",
                                    keytype, e)
                        continue
                    session.merge(DBinput)
                    try:
                        session.flush()
                    except:
                        logger.warn("Unable to flush database connection")
                session.commit()

                # Block to add archive-specific keywords
                for element_1 in testjson['instrument'][archive]:
                    keyvalue = ""
                    keytype = testjson['instrument'][archive][element_1]['type']
                    keyword = testjson['instrument'][archive][element_1]['keyword']
                    keyword_Qobj = session.query(upc_models.Keywords).filter(
                        and_(upc_models.Keywords.typename == element_1,
                             upc_models.Keywords.instrumentid.in_(
                                 (1, instrument_Qobj.instrumentid)))).first()
                    if keyword_Qobj is None:
                        continue
                    else:
                        keyvalue = keywordsOBJ.getKeyword(keyword)
                    if keyvalue is None:
                        logger.debug("Keyword %s not found", keyword)
                        continue
                    keyvalue = db2py(keytype, keyvalue)
                    try:
                        DBinput = upc_models.create_table(
                            keytype,
                            upcid=UPCid,
                            typeid=keyword_Qobj.typeid,
                            value=keyvalue)
                    except Exception as e:
                        logger.warn("Unable to enter %s into database\n\n%s",
                                    keytype, e)
                        continue
                    session.merge(DBinput)
                    try:
                        session.flush()
                    except:
                        logger.warn("Unable to flush database connection")
                session.commit()

                # Geometry keywords: centroid and footprint
                G_centroid = 'point ({} {})'.format(
                    str(keywordsOBJ.getKeyword('CentroidLongitude')),
                    str(keywordsOBJ.getKeyword('CentroidLatitude')))
                G_keyword_Qobj = session.query(
                    upc_models.Keywords.typeid).filter(
                        upc_models.Keywords.typename == 'isiscentroid').first()
                G_footprint_Qobj = session.query(
                    upc_models.Keywords.typeid).filter(
                        upc_models.Keywords.typename == 'isisfootprint').first()
                G_footprint = keywordsOBJ.getKeyword('GisFootprint')
                G_DBinput = upc_models.MetaGeometry(upcid=UPCid,
                                                    typeid=G_keyword_Qobj,
                                                    value=G_centroid)
                session.merge(G_DBinput)
                G_DBinput = upc_models.MetaGeometry(upcid=UPCid,
                                                    typeid=G_footprint_Qobj,
                                                    value=G_footprint)
                session.merge(G_DBinput)
                session.flush()
                session.commit()

                # Checksum the original input file in 4096-byte chunks
                f_hash = hashlib.md5()
                with open(inputfile, "rb") as f:
                    for chunk in iter(lambda: f.read(4096), b""):
                        f_hash.update(chunk)
                checksum = f_hash.hexdigest()

                DBinput = upc_models.MetaString(upcid=UPCid,
                                                typeid=checksum_tid,
                                                value=checksum)
                session.merge(DBinput)
                DBinput = upc_models.MetaBoolean(upcid=UPCid,
                                                 typeid=err_flag_tid,
                                                 value=False)
                session.merge(DBinput)
                session.commit()
                AddProcessDB(pds_session, fid, True)
                os.remove(infile)
                os.remove(caminfoOUT)

            elif status.lower() == 'error':
                try:
                    label = pvl.load(infile)
                except Exception as e:
                    logger.info('%s', e)
                    continue
                date = datetime.datetime.now(
                    pytz.utc).strftime("%Y-%m-%d %H:%M:%S")

                if '2isis' in processError or processError == 'thmproc':
                    if session.query(upc_models.DataFiles).filter(
                            upc_models.DataFiles.edr_source ==
                            EDRsource).first() is None:
                        target_Qobj = session.query(upc_models.Targets).filter(
                            upc_models.Targets.targetname == str(
                                label['IsisCube']['Instrument']
                                ['TargetName']).upper()).first()
                        instrument_Qobj = session.query(
                            upc_models.Instruments).filter(
                                upc_models.Instruments.instrument == str(
                                    label['IsisCube']['Instrument']
                                    ['InstrumentId'])).first()
                        error1_input = upc_models.DataFiles(
                            isisid='1', edr_source=EDRsource)
                        session.merge(error1_input)
                        session.commit()

                    EQ1obj = session.query(upc_models.DataFiles).filter(
                        upc_models.DataFiles.edr_source == EDRsource).first()
                    UPCid = EQ1obj.upcid
                    errorMSG = 'Error running {} on file {}'.format(
                        processError, inputfile)

                    DBinput = MetaTime(upcid=UPCid,
                                       typeid=proc_date_tid,
                                       value=date)
                    session.merge(DBinput)
                    DBinput = MetaString(upcid=UPCid,
                                         typeid=err_type_tid,
                                         value=processError)
                    session.merge(DBinput)
                    DBinput = MetaString(upcid=UPCid,
                                         typeid=err_msg_tid,
                                         value=errorMSG)
                    session.merge(DBinput)
                    DBinput = MetaBoolean(upcid=UPCid,
                                          typeid=err_flag_tid,
                                          value=True)
                    session.merge(DBinput)
                    # Out-of-range placeholder geometry marks the error condition
                    DBinput = MetaGeometry(upcid=UPCid,
                                           typeid=isis_footprint_tid,
                                           value='POINT(361 0)')
                    session.merge(DBinput)
                    DBinput = MetaGeometry(upcid=UPCid,
                                           typeid=isis_centroid_tid,
                                           value='POINT(361 0)')
                    session.merge(DBinput)
                    session.commit()
                else:
                    try:
                        label = pvl.load(infile)
                    except Exception as e:
                        logger.warn('%s', e)
                        continue
                    isisSerial = getISISid(infile)

                    if session.query(upc_models.DataFiles).filter(
                            upc_models.DataFiles.isisid ==
                            isisSerial).first() is None:
                        target_Qobj = session.query(upc_models.Targets).filter(
                            upc_models.Targets.targetname == str(
                                label['IsisCube']['Instrument']
                                ['TargetName']).upper()).first()
                        instrument_Qobj = session.query(
                            upc_models.Instruments).filter(
                                upc_models.Instruments.instrument == str(
                                    label['IsisCube']['Instrument']
                                    ['InstrumentId'])).first()
                        if target_Qobj is None or instrument_Qobj is None:
                            continue
                        error2_input = upc_models.DataFiles(
                            isisid=isisSerial,
                            productid=label['IsisCube']['Archive']['ProductId'],
                            edr_source=EDRsource,
                            instrumentid=instrument_Qobj.instrumentid,
                            targetid=target_Qobj.targetid)
                        session.merge(error2_input)
                        session.commit()

                    try:
                        EQ2obj = session.query(upc_models.DataFiles).filter(
                            upc_models.DataFiles.isisid == isisSerial).first()
                        UPCid = EQ2obj.upcid
                        errorMSG = 'Error running {} on file {}'.format(
                            processError, inputfile)
                        DBinput = MetaTime(upcid=UPCid,
                                           typeid=proc_date_tid,
                                           value=date)
                        session.merge(DBinput)
                        DBinput = MetaString(upcid=UPCid,
                                             typeid=err_type_tid,
                                             value=processError)
                        session.merge(DBinput)
                        DBinput = MetaString(upcid=UPCid,
                                             typeid=err_msg_tid,
                                             value=errorMSG)
                        session.merge(DBinput)
                        DBinput = MetaBoolean(upcid=UPCid,
                                              typeid=err_flag_tid,
                                              value=True)
                        session.merge(DBinput)
                        DBinput = MetaGeometry(upcid=UPCid,
                                               typeid=isis_footprint_tid,
                                               value='POINT(361 0)')
                        session.merge(DBinput)
                        DBinput = MetaGeometry(upcid=UPCid,
                                               typeid=isis_centroid_tid,
                                               value='POINT(361 0)')
                        session.merge(DBinput)
                    except:
                        pass

                    try:
                        v = label['IsisCube']['Instrument']['StartTime']
                    except KeyError:
                        v = None
                    except:
                        continue
                    try:
                        DBinput = MetaTime(upcid=UPCid,
                                           typeid=start_time_tid,
                                           value=v)
                        session.merge(DBinput)
                    except:
                        continue

                    try:
                        v = label['IsisCube']['Instrument']['StopTime']
                    except KeyError:
                        v = None
                    DBinput = MetaTime(upcid=UPCid,
                                       typeid=stop_time_tid,
                                       value=v)
                    session.merge(DBinput)
                    session.commit()
                AddProcessDB(pds_session, fid, False)
                os.remove(infile)

    # Disconnect from db sessions
    pds_session.close()
    session.close()
    # Disconnect from the engines
    pds_engine.dispose()
    upc_engine.dispose()
    logger.info("UPC processing exited successfully")
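
# The loop above hashes each input file in 4096-byte chunks before recording
# the checksum. A minimal standalone sketch of that pattern (hypothetical
# helper; not part of the pipeline and not called by main()):
def file_md5(path, chunk_size=4096):
    """Return the hex MD5 digest of a file, read in fixed-size chunks so
    arbitrarily large products are hashed without loading them into memory."""
    import hashlib
    md5 = hashlib.md5()
    with open(path, "rb") as f:
        # iter() with a sentinel yields chunks until read() returns b""
        for chunk in iter(lambda: f.read(chunk_size), b""):
            md5.update(chunk)
    return md5.hexdigest()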
def main():
    DBQO = PDS_DBquery('JOBS')
    Key = DBQO.jobKey()
    # Key = '2d7379497fed4c092046b2a06f5471a5'
    DBQO.setJobsQueued(Key)

    # *************** Set up logging ***************
    logger = logging.getLogger(Key)
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler('/usgs/cdev/PDS/logs/Service.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)
    logger.info('Starting Process')

    xmlOBJ = jobXML(DBQO.jobXML4Key(Key))

    # ********** Test if directory exists and make it if not **********
    directory = '/scratch/pds_services/' + Key
    if not os.path.exists(directory):
        os.makedirs(directory)
    logger.info('Working Area: %s', directory)

    # ********** Set up Redis hash for ground range **********
    RedisH = RedisHash(Key + '_info')
    RedisH.RemoveAll()
    RedisErrorH = RedisHash(Key + '_error')
    RedisErrorH.RemoveAll()
    RedisH_DICT = {}
    RedisH_DICT['service'] = xmlOBJ.getProcess()
    RedisH_DICT['fileformat'] = xmlOBJ.getOutFormat()
    RedisH_DICT['outbit'] = xmlOBJ.getOutBit()
    if xmlOBJ.getRangeType() is not None:
        RedisH_DICT['grtype'] = xmlOBJ.getRangeType()
        RedisH_DICT['minlat'] = xmlOBJ.getMinLat()
        RedisH_DICT['maxlat'] = xmlOBJ.getMaxLat()
        RedisH_DICT['minlon'] = xmlOBJ.getMinLon()
        RedisH_DICT['maxlon'] = xmlOBJ.getMaxLon()
    if not RedisH.IsInHash('service'):
        RedisH.AddHash(RedisH_DICT)
    if RedisH.IsInHash('service'):
        logger.info('Redis info Hash: Success')
    else:
        logger.error('Redis info Hash Not Found')
    # *** end ground range ***

    RQ_recipe = RedisQueue(Key + '_recipe')
    RQ_recipe.RemoveAll()
    RQ_file = RedisQueue(Key + '_FileQueue')
    RQ_file.RemoveAll()
    RQ_WorkQueue = RedisQueue(Key + '_WorkQueue')
    RQ_WorkQueue.RemoveAll()
    RQ_loggy = RedisQueue(Key + '_loggy')
    RQ_loggy.RemoveAll()
    RQ_zip = RedisQueue(Key + '_ZIP')
    RQ_zip.RemoveAll()

    if xmlOBJ.getProcess() == 'POW':
        fileList = xmlOBJ.getFileListWB()
    elif xmlOBJ.getProcess() == 'MAP2':
        fileList = xmlOBJ.getMFileListWB()

    for List_file in fileList:
        # ********** Input and output file naming and path stuff **********
        if xmlOBJ.getProcess() == 'POW':
            if xmlOBJ.getInst() == 'THEMIS_IR':
                Input_file = List_file.replace('odtie1_', 'odtir1_')
                Input_file = Input_file.replace('xxedr', 'xxrdr')
                Input_file = Input_file.replace('EDR.QUB', 'RDR.QUB')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
            elif xmlOBJ.getInst() == 'ISSNA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
            elif xmlOBJ.getInst() == 'ISSWA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
            elif xmlOBJ.getInst() == 'SOLID STATE IMAGING SYSTEM':
                Input_file = List_file.replace('.img', '.lbl')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
            else:
                Input_file = List_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
        elif xmlOBJ.getProcess() == 'MAP2':
            Input_file = List_file.replace('file://pds_san', '/pds_san')

        if '+' in Input_file:
            tempsplit = Input_file.split('+')
            tempFile = tempsplit[0]
        else:
            tempFile = Input_file
        label = pvl.load(tempFile)

        # ********** Output final file naming **********
        Tbasename = os.path.splitext(os.path.basename(tempFile))[0]
        splitBase = Tbasename.split('_')
        labP = xmlOBJ.getProjection()
        if labP == 'INPUT':
            lab_proj = label['IsisCube']['Mapping']['ProjectionName'][0:4]
        else:
            lab_proj = labP[0:4]

        if xmlOBJ.getClat() is None or xmlOBJ.getClon() is None:
            basefinal = splitBase[0] + splitBase[1] + \
                splitBase[2] + '_MAP2_' + lab_proj.upper()
        else:
            lab_clat = float(xmlOBJ.getClat())
            if lab_clat >= 0:
                labH = 'N'
            elif lab_clat < 0:
                labH = 'S'
            lab_clon = float(xmlOBJ.getClon())
            basefinal = splitBase[0] + splitBase[1] + splitBase[
                2] + '_MAP2_' + str(lab_clat) + labH + str(
                    lab_clon) + '_' + lab_proj.upper()
        RedisH.MAPname(basefinal)

        try:
            RQ_file.QueueAdd(Input_file)
            logger.info('File %s Added to Redis Queue', Input_file)
        except Exception as e:
            logger.warn('File %s NOT Added to Redis Queue', Input_file)
            print('Redis Queue Error', e)
    RedisH.FileCount(RQ_file.QueueSize())
    logger.info('Count of Files Queue: %s', str(RQ_file.QueueSize()))

    # ************* Map Template Stuff ******************
    logger.info('Making Map File')
    mapOBJ = MakeMap()
    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.getProjection() == 'INPUT':
        proj = label['IsisCube']['Mapping']['ProjectionName']
        mapOBJ.Projection(proj)
    else:
        mapOBJ.Projection(xmlOBJ.getProjection())
    if xmlOBJ.getClon() is not None:
        mapOBJ.CLon(float(xmlOBJ.getClon()))
    if xmlOBJ.getClat() is not None:
        mapOBJ.CLat(float(xmlOBJ.getClat()))
    if xmlOBJ.getFirstParallel() is not None:
        mapOBJ.FirstParallel(float(xmlOBJ.getFirstParallel()))
    if xmlOBJ.getSecondParallel() is not None:
        mapOBJ.SecondParallel(float(xmlOBJ.getSecondParallel()))
    if xmlOBJ.getResolution() is not None:
        mapOBJ.PixelRes(float(xmlOBJ.getResolution()))
    if xmlOBJ.getTargetName() is not None:
        mapOBJ.Target(xmlOBJ.getTargetName())
    if xmlOBJ.getERadius() is not None:
        mapOBJ.ERadius(float(xmlOBJ.getERadius()))
    if xmlOBJ.getPRadius() is not None:
        mapOBJ.PRadius(float(xmlOBJ.getPRadius()))
    if xmlOBJ.getLatType() is not None:
        mapOBJ.LatType(xmlOBJ.getLatType())
    if xmlOBJ.getLonDirection() is not None:
        mapOBJ.LonDirection(xmlOBJ.getLonDirection())
    if xmlOBJ.getLonDomain() is not None:
        mapOBJ.LonDomain(int(xmlOBJ.getLonDomain()))
    if xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getMinLat() is not None:
            mapOBJ.MinLat(float(xmlOBJ.getMinLat()))
        if xmlOBJ.getMaxLat() is not None:
            mapOBJ.MaxLat(float(xmlOBJ.getMaxLat()))
        if xmlOBJ.getMinLon() is not None:
            mapOBJ.MinLon(float(xmlOBJ.getMinLon()))
        if xmlOBJ.getMaxLon() is not None:
            mapOBJ.MaxLon(float(xmlOBJ.getMaxLon()))
    mapOBJ.Map2pvl()
    MAPfile = directory + "/" + Key + '.map'
    mapOBJ.Map2File(MAPfile)

    try:
        f = open(MAPfile)
        f.close()
        logger.info('Map File Creation: Success')
    except IOError as e:
        logger.error('Map File %s Not Found', MAPfile)
    # ** End Map Template Stuff **

    # *************************************************
    logger.info('Building Recipe')
    recipeOBJ = Recipe()
    if xmlOBJ.getProcess() == 'POW':
        recipeOBJ.AddJsonFile(recipe_dict[xmlOBJ.getInst()])
    elif xmlOBJ.getProcess() == 'MAP2':
        recipeOBJ.AddJsonFile(recipe_dict['MAP'])

    # ********** Test for stretch and add to recipe **********
    # If MAP2 and 8 or 16 bit, run stretch to set range
    if xmlOBJ.getOutBit() == 'input':
        testBitType = str(label['IsisCube']['Core']['Pixels']['Type']).upper()
    else:
        testBitType = xmlOBJ.getOutBit().upper()
    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.STR_Type() is None:
        if str(label['IsisCube']['Core']['Pixels']['Type']).upper(
        ) != xmlOBJ.getOutBit().upper() and str(
                label['IsisCube']['Core']['Pixels']['Type']).upper() != 'REAL':
            if str(label['IsisCube']['Core']['Pixels']
                   ['Type']).upper() == 'SIGNEDWORD':
                strpairs = '0:-32765 0:-32765 100:32765 100:32765'
            elif str(label['IsisCube']['Core']['Pixels']
                     ['Type']).upper() == 'UNSIGNEDBYTE':
                strpairs = '0:1 0:1 100:254 100:254'
            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    strType = xmlOBJ.STR_Type()
    if strType == 'StretchPercent' and xmlOBJ.STR_PercentMin() is not None \
            and xmlOBJ.STR_PercentMax() is not None and testBitType != 'REAL':
        if float(xmlOBJ.STR_PercentMin()) != 0 and float(
                xmlOBJ.STR_PercentMax()) != 100:
            if testBitType == 'UNSIGNEDBYTE':
                strpairs = '0:1 ' + xmlOBJ.STR_PercentMin() + ':1 ' + \
                    xmlOBJ.STR_PercentMax() + ':254 100:254'
            elif testBitType == 'SIGNEDWORD':
                strpairs = '0:-32765 ' + xmlOBJ.STR_PercentMin() + ':-32765 ' + \
                    xmlOBJ.STR_PercentMax() + ':32765 100:32765'
            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())
    elif strType == 'GaussStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('gaussstretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('gsigma', xmlOBJ.STR_GaussSigma())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())
    elif strType == 'HistogramEqualization':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('histeq')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        if xmlOBJ.STR_PercentMin() is None:
            STRprocessOBJ.AddParameter('minper', '0')
        else:
            STRprocessOBJ.AddParameter('minper', xmlOBJ.STR_PercentMin())
        if xmlOBJ.STR_PercentMax() is None:
            STRprocessOBJ.AddParameter('maxper', '100')
        else:
            STRprocessOBJ.AddParameter('maxper', xmlOBJ.STR_PercentMax())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())
    elif strType == 'SigmaStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('sigmastretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('variance', xmlOBJ.STR_SigmaVariance())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    # ********** Test for output bit type and add to recipe **********
    if xmlOBJ.getProcess() == 'POW':
        if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
            CAprocessOBJ = Process()
            CAprocessOBJ.newProcess('cubeatt-bit')
            CAprocessOBJ.AddParameter('from_', 'value')
            CAprocessOBJ.AddParameter('to', 'value')
            recipeOBJ.AddProcess(CAprocessOBJ.getProcess())
    elif xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getOutBit().upper() != 'INPUT':
            if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
                if str(label['IsisCube']['Core']['Pixels']
                       ['Type']).upper() != xmlOBJ.getOutBit().upper():
                    CAprocessOBJ = Process()
                    CAprocessOBJ.newProcess('cubeatt-bit')
                    CAprocessOBJ.AddParameter('from_', 'value')
                    CAprocessOBJ.AddParameter('to', 'value')
                    recipeOBJ.AddProcess(CAprocessOBJ.getProcess())

    # ********** Add Grid (MAP2) **********
    if xmlOBJ.getGridInterval() is not None:
        GprocessOBJ = Process()
        GprocessOBJ.newProcess('grid')
        GprocessOBJ.AddParameter('from_', 'value')
        GprocessOBJ.AddParameter('to', 'value')
        GprocessOBJ.AddParameter('latinc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('loninc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('outline', 'yes')
        GprocessOBJ.AddParameter('boundary', 'yes')
        GprocessOBJ.AddParameter('linewidth', '3')
        recipeOBJ.AddProcess(GprocessOBJ.getProcess())

    # ********** OUTPUT FORMAT **********
    # Test for GDAL and add to recipe
    Oformat = xmlOBJ.getOutFormat()
    if Oformat in ('GeoTiff-BigTiff', 'GeoJPEG-2000', 'JPEG', 'PNG'):
        if Oformat == 'GeoJPEG-2000':
            Oformat = 'JP2KAK'
        if Oformat == 'GeoTiff-BigTiff':
            Oformat = 'GTiff'
        GDALprocessOBJ = Process()
        GDALprocessOBJ.newProcess('/usgs/apps/anaconda/bin/gdal_translate')
        if xmlOBJ.getOutBit() != 'input':
            GDALprocessOBJ.AddParameter(
                '-ot', GDALprocessOBJ.GDAL_OBit(xmlOBJ.getOutBit()))
        GDALprocessOBJ.AddParameter('-of', Oformat)
        if Oformat in ('GTiff', 'JP2KAK', 'JPEG'):
            GDALprocessOBJ.AddParameter('-co',
                                        GDALprocessOBJ.GDAL_Creation(Oformat))
        recipeOBJ.AddProcess(GDALprocessOBJ.getProcess())
    # ********** Set up isis2pds and add to recipe **********
    elif Oformat == 'PDS':
        pdsProcessOBJ = Process()
        pdsProcessOBJ.newProcess('isis2pds')
        pdsProcessOBJ.AddParameter('from_', 'value')
        pdsProcessOBJ.AddParameter('to', 'value')
        if xmlOBJ.getOutBit() == 'unsignedbyte':
            pdsProcessOBJ.AddParameter('bittype', '8bit')
        elif xmlOBJ.getOutBit() == 'signedword':
            pdsProcessOBJ.AddParameter('bittype', 's16bit')
        recipeOBJ.AddProcess(pdsProcessOBJ.getProcess())

    for item in recipeOBJ.getProcesses():
        processOBJ = Process()
        processOBJ.ProcessFromRecipe(item, recipeOBJ.getRecipe())
        if item == 'cam2map':
            processOBJ.updateParameter('map', MAPfile)
            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'CAMERA')
            else:
                processOBJ.updateParameter('pixres', 'MAP')
            if xmlOBJ.getRangeType() is None:
                processOBJ.updateParameter('defaultrange', 'MINIMIZE')
            elif xmlOBJ.getRangeType() == 'smart' or xmlOBJ.getRangeType() == 'fill':
                processOBJ.updateParameter('defaultrange', 'CAMERA')
                processOBJ.AddParameter('trim', 'YES')
        elif item == 'map2map':
            processOBJ.updateParameter('map', MAPfile)
            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'FROM')
            else:
                processOBJ.updateParameter('pixres', 'MAP')
            if xmlOBJ.OutputGeometry() is not None:
                processOBJ.updateParameter('defaultrange', 'MAP')
                processOBJ.AddParameter('trim', 'YES')
            else:
                processOBJ.updateParameter('defaultrange', 'FROM')
        processJSON = processOBJ.Process2JSON()
        try:
            RQ_recipe.QueueAdd(processJSON)
            logger.info('Recipe Element Added to Redis: %s : Success', item)
        except Exception as e:
            logger.warn('Recipe Element NOT Added to Redis: %s', item)

    # ********** HPC job stuff **********
    logger.info('HPC Cluster job Submission Starting')
    jobOBJ = HPCjob()
    jobOBJ.setJobName(Key + '_Service')
    jobOBJ.setStdOut('/usgs/cdev/PDS/output/' + Key + '_%A_%a.out')
    jobOBJ.setStdError('/usgs/cdev/PDS/output/' + Key + '_%A_%a.err')
    jobOBJ.setWallClock('24:00:00')
    jobOBJ.setMemory('24576')
    jobOBJ.setPartition('pds')
    JAsize = RQ_file.QueueSize()
    jobOBJ.setJobArray(JAsize)
    logger.info('Job Array Size : %s', str(JAsize))
    jobOBJ.addPath('/usgs/apps/anaconda/bin')

    if xmlOBJ.getProcess() == 'POW':
        cmd = '/usgs/cdev/PDS/bin/POWprocess.py ' + Key
    elif xmlOBJ.getProcess() == 'MAP2':
        cmd = '/usgs/cdev/PDS/bin/MAPprocess.py ' + Key
    logger.info('HPC Command: %s', cmd)
    jobOBJ.setCommand(cmd)

    SBfile = directory + '/' + Key + '.sbatch'
    jobOBJ.MakeJobFile(SBfile)
    try:
        sb = open(SBfile)
        sb.close()
        logger.info('SBATCH File Creation: Success')
    except IOError as e:
        logger.error('SBATCH File %s Not Found', SBfile)
    try:
        jobOBJ.Run()
        logger.info('Job Submission to HPC: Success')
        DBQO.setJobsStarted(Key)
    except IOError as e:
        logger.error('Jobs NOT Submitted to HPC')
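
# The stretch setup above builds ISIS 'pairs' strings by string concatenation:
# 0% and 100% are pinned to the bit type's usable DN range, and the requested
# percentages are mapped onto the same endpoints. A hypothetical helper (not
# part of the pipeline) expressing the same rule:
def percent_stretch_pairs(bit_type, min_per, max_per):
    """Return an ISIS stretch 'pairs' string for UNSIGNEDBYTE or SIGNEDWORD,
    mirroring the concatenation logic used when building the recipe above."""
    ranges = {'UNSIGNEDBYTE': ('1', '254'), 'SIGNEDWORD': ('-32765', '32765')}
    low, high = ranges[bit_type.upper()]
    return '0:{0} {1}:{0} {2}:{3} 100:{3}'.format(low, min_per, max_per, high)

# e.g. percent_stretch_pairs('UNSIGNEDBYTE', '0.5', '99.5')
#   -> '0:1 0.5:1 99.5:254 100:254'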
def main():
    # ***************** Set up logging *****************
    logger = logging.getLogger('Browse_Process')
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Process.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    RQ_main = RedisQueue('Browse_ReadyQueue')
    PDSinfoDICT = json.load(open(pds_info, 'r'))

    pds_session, _ = db_connect(pds_db)
    upc_session, _ = db_connect(upc_db)

    tid = get_tid('fullimageurl', upc_session)

    while int(RQ_main.QueueSize()) > 0:
        item = literal_eval(RQ_main.QueueGet().decode("utf-8"))
        inputfile = item[0]
        fid = item[1]
        archive = item[2]
        if os.path.isfile(inputfile):
            logger.info('Starting Process: %s', inputfile)
            finalpath = makedir(inputfile)

            recipeOBJ = Recipe()
            recip_json = recipeOBJ.getRecipeJSON(archive)
            recipeOBJ.AddJsonFile(recip_json, 'reduced')
            infile = workarea + os.path.splitext(
                os.path.basename(inputfile))[0] + '.Binput.cub'
            outfile = workarea + os.path.splitext(
                os.path.basename(inputfile))[0] + '.Boutput.cub'
            status = 'success'
            for item in recipeOBJ.getProcesses():
                if status == 'error':
                    break
                elif status == 'success':
                    processOBJ = Process()
                    processR = processOBJ.ProcessFromRecipe(
                        item, recipeOBJ.getRecipe())
                    if '2isis' in item:
                        processOBJ.updateParameter('from_', inputfile)
                        processOBJ.updateParameter('to', outfile)
                    elif item == 'spiceinit':
                        processOBJ.updateParameter('from_', infile)
                    elif item == 'cubeatt':
                        label = pvl.load(infile)
                        bands = PDSinfoDICT[archive]['bandorder']
                        query_bands = label['IsisCube']['BandBin'][
                            PDSinfoDICT[archive]['bandbinQuery']]
                        # Create a set from the list / single value
                        try:
                            query_band_set = set(query_bands)
                        except:
                            query_band_set = set([query_bands])
                        # Iterate through 'bands' and grab the first value that
                        # is present in the set defined by 'bandbinquery' --
                        # if not present, default to 1
                        exband = next(
                            (band for band in bands if band in query_band_set),
                            1)
                        band_infile = infile + '+' + str(exband)
                        processOBJ.updateParameter('from_', band_infile)
                        processOBJ.updateParameter('to', outfile)
                    elif item == 'ctxevenodd':
                        label = pvl.load(infile)
                        SS = label['IsisCube']['Instrument']['SpatialSumming']
                        if SS != 1:
                            break
                        else:
                            processOBJ.updateParameter('from_', infile)
                            processOBJ.updateParameter('to', outfile)
                    elif item == 'reduce':
                        label = pvl.load(infile)
                        Nline = label['IsisCube']['Core']['Dimensions']['Lines']
                        Nsample = label['IsisCube']['Core']['Dimensions']['Samples']
                        Nline = int(Nline)
                        Nsample = int(Nsample)
                        Sfactor = scaleFactor(Nline, Nsample, recip_json)
                        processOBJ.updateParameter('lscale', Sfactor)
                        processOBJ.updateParameter('sscale', Sfactor)
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)
                    elif item == 'isis2std':
                        final_outfile = finalpath + '/' + os.path.splitext(
                            os.path.basename(inputfile))[0] + '.browse.jpg'
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', final_outfile)
                    else:
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)

                    for k, v in processOBJ.getProcess().items():
                        func = getattr(isis, k)
                        try:
                            func(**v)
                            logger.info('Process %s :: Success', k)
                            if os.path.isfile(outfile):
                                if '.cub' in outfile:
                                    os.rename(outfile, infile)
                            status = 'success'
                            if '2isis' in item:
                                isisSerial = getISISid(infile)
                        except ProcessError as e:
                            print(e)
                            logger.error('Process %s :: Error', k)
                            status = 'error'
            if status == 'success':
                DB_addURL(upc_session, isisSerial, final_outfile, tid)
                os.remove(infile)
                logger.info('Browse Process Success: %s', inputfile)
                AddProcessDB(pds_session, fid, 't')
        else:
            logger.error('File %s Not Found', inputfile)
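
# The cubeatt branch above picks which band to extract: the first entry of the
# archive's preferred 'bandorder' that actually appears in the label's band
# list, defaulting to band 1. A standalone sketch of that rule (hypothetical
# helper, not called by the pipeline):
def pick_band(preferred_bands, label_bands):
    """Return the first preferred band present in label_bands, which may be a
    single value or a sequence; fall back to band 1 when none match."""
    try:
        available = set(label_bands)
    except TypeError:
        # A single, non-iterable band value
        available = {label_bands}
    return next((band for band in preferred_bands if band in available), 1)

# e.g. pick_band(['RED', 'BG', 'IR'], ['IR', 'BG']) -> 'BG'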
def main():
    args = Args()
    args.parse_args()
    key = args.key
    namespace = args.namespace

    if namespace is None:
        namespace = default_namespace

    # Set up logging
    logger = logging.getLogger(key)
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Service.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    # Connect to database and access 'jobs' table
    DBQO = PDS_DBquery('JOBS')
    if key is None:
        # If no key is specified, grab the first key
        key = DBQO.jobKey()
    try:
        # Set the 'queued' column to current time i.e. prep for processing
        DBQO.setJobsQueued(key)
    except KeyError as e:
        logger.error('%s', e)
        exit(1)

    logger.info('Starting Process')
    xmlOBJ = jobXML(DBQO.jobXML4Key(key))

    # Make directory if it doesn't exist
    directory = scratch + key
    if not os.path.exists(directory):
        os.makedirs(directory)
    logger.info('Working Area: %s', directory)

    # Set up Redis Hash for ground range
    RedisH = RedisHash(key + '_info')
    RedisH.RemoveAll()
    RedisErrorH = RedisHash(key + '_error')
    RedisErrorH.RemoveAll()
    RedisH_DICT = {}
    RedisH_DICT['service'] = xmlOBJ.getProcess()
    RedisH_DICT['fileformat'] = xmlOBJ.getOutFormat()
    RedisH_DICT['outbit'] = xmlOBJ.getOutBit()
    if xmlOBJ.getRangeType() is not None:
        RedisH_DICT['grtype'] = xmlOBJ.getRangeType()
        RedisH_DICT['minlat'] = xmlOBJ.getMinLat()
        RedisH_DICT['maxlat'] = xmlOBJ.getMaxLat()
        RedisH_DICT['minlon'] = xmlOBJ.getMinLon()
        RedisH_DICT['maxlon'] = xmlOBJ.getMaxLon()
    if not RedisH.IsInHash('service'):
        RedisH.AddHash(RedisH_DICT)
    if RedisH.IsInHash('service'):
        logger.info('Redis info Hash: Success')
    else:
        logger.error('Redis info Hash Not Found')
    # End ground range

    RQ_recipe = RedisQueue(key + '_recipe', namespace)
    RQ_recipe.RemoveAll()
    RQ_file = RedisQueue(key + '_FileQueue', namespace)
    RQ_file.RemoveAll()
    RQ_WorkQueue = RedisQueue(key + '_WorkQueue', namespace)
    RQ_WorkQueue.RemoveAll()
    RQ_loggy = RedisQueue(key + '_loggy', namespace)
    RQ_loggy.RemoveAll()
    RQ_zip = RedisQueue(key + '_ZIP', namespace)
    RQ_zip.RemoveAll()

    if xmlOBJ.getProcess() == 'POW':
        fileList = xmlOBJ.getFileListWB()
    elif xmlOBJ.getProcess() == 'MAP2':
        fileList = xmlOBJ.getMFileListWB()

    for List_file in fileList:
        # Input and output file naming and path stuff
        if xmlOBJ.getProcess() == 'POW':
            if xmlOBJ.getInst() == 'THEMIS_IR':
                Input_file = List_file.replace('odtie1_', 'odtir1_')
                Input_file = Input_file.replace('xxedr', 'xxrdr')
                Input_file = Input_file.replace('EDR.QUB', 'RDR.QUB')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'ISSNA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'ISSWA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'SOLID STATE IMAGING SYSTEM':
                Input_file = List_file.replace('.img', '.lbl')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            else:
                Input_file = List_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
        elif xmlOBJ.getProcess() == 'MAP2':
            Input_file = List_file.replace('file://pds_san', '/pds_san')

        if '+' in Input_file:
            tempsplit = Input_file.split('+')
            tempFile = tempsplit[0]
        else:
            tempFile = Input_file
        label = pvl.load(tempFile)

        # Output final file naming
        Tbasename = os.path.splitext(os.path.basename(tempFile))[0]
        splitBase = Tbasename.split('_')
        labP = xmlOBJ.getProjection()
        if labP == 'INPUT':
            lab_proj = label['IsisCube']['Mapping']['ProjectionName'][0:4]
        else:
            lab_proj = labP[0:4]

        if xmlOBJ.getClat() is None or xmlOBJ.getClon() is None:
            basefinal = splitBase[0] + splitBase[1] + \
                splitBase[2] + '_MAP2_' + lab_proj.upper()
        else:
            lab_clat = float(xmlOBJ.getClat())
            if lab_clat >= 0:
                labH = 'N'
            elif lab_clat < 0:
                labH = 'S'
            lab_clon = float(xmlOBJ.getClon())
            basefinal = splitBase[0] + splitBase[1] + splitBase[
                2] + '_MAP2_' + str(lab_clat) + labH + str(
                    lab_clon) + '_' + lab_proj.upper()
        RedisH.MAPname(basefinal)

        try:
            RQ_file.QueueAdd(Input_file)
            logger.info('File %s Added to Redis Queue', Input_file)
        except Exception as e:
            logger.warn('File %s NOT Added to Redis Queue', Input_file)
            print('Redis Queue Error', e)
    RedisH.FileCount(RQ_file.QueueSize())
    logger.info('Count of Files Queue: %s', str(RQ_file.QueueSize()))

    # Map Template Stuff
    logger.info('Making Map File')
    mapOBJ = MakeMap()
    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.getProjection() == 'INPUT':
        proj = label['IsisCube']['Mapping']['ProjectionName']
        mapOBJ.Projection(proj)
    else:
        mapOBJ.Projection(xmlOBJ.getProjection())
    if xmlOBJ.getClon() is not None:
        mapOBJ.CLon(float(xmlOBJ.getClon()))
    if xmlOBJ.getClat() is not None:
        mapOBJ.CLat(float(xmlOBJ.getClat()))
    if xmlOBJ.getFirstParallel() is not None:
        mapOBJ.FirstParallel(float(xmlOBJ.getFirstParallel()))
    if xmlOBJ.getSecondParallel() is not None:
        mapOBJ.SecondParallel(float(xmlOBJ.getSecondParallel()))
    if xmlOBJ.getResolution() is not None:
        mapOBJ.PixelRes(float(xmlOBJ.getResolution()))
    if xmlOBJ.getTargetName() is not None:
        mapOBJ.Target(xmlOBJ.getTargetName())
    if xmlOBJ.getERadius() is not None:
        mapOBJ.ERadius(float(xmlOBJ.getERadius()))
    if xmlOBJ.getPRadius() is not None:
        mapOBJ.PRadius(float(xmlOBJ.getPRadius()))
    if xmlOBJ.getLatType() is not None:
        mapOBJ.LatType(xmlOBJ.getLatType())
    if xmlOBJ.getLonDirection() is not None:
        mapOBJ.LonDirection(xmlOBJ.getLonDirection())
    if xmlOBJ.getLonDomain() is not None:
        mapOBJ.LonDomain(int(xmlOBJ.getLonDomain()))
    if xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getMinLat() is not None:
            mapOBJ.MinLat(float(xmlOBJ.getMinLat()))
        if xmlOBJ.getMaxLat() is not None:
            mapOBJ.MaxLat(float(xmlOBJ.getMaxLat()))
        if xmlOBJ.getMinLon() is not None:
            mapOBJ.MinLon(float(xmlOBJ.getMinLon()))
        if xmlOBJ.getMaxLon() is not None:
            mapOBJ.MaxLon(float(xmlOBJ.getMaxLon()))
    mapOBJ.Map2pvl()
    MAPfile = directory + "/" + key + '.map'
    mapOBJ.Map2File(MAPfile)

    try:
        f = open(MAPfile)
        f.close()
        logger.info('Map File Creation: Success')
    except IOError as e:
        logger.error('Map File %s Not Found', MAPfile)
    # ** End Map Template Stuff **

    logger.info('Building Recipe')
    recipeOBJ = Recipe()
    if xmlOBJ.getProcess() == 'POW':
        recipeOBJ.AddJsonFile(recipe_base + xmlOBJ.getCleanName() + '.json', "pow")
    elif xmlOBJ.getProcess() == 'MAP2':
        recipeOBJ.AddJsonFile(recipe_base + "map2_process.json", "map")

    # Test for stretch and add to recipe
    # If MAP2 and 8 or 16 bit, run stretch to set range
    if xmlOBJ.getOutBit() == 'input':
        testBitType = str(label['IsisCube']['Core']['Pixels']['Type']).upper()
    else:
        testBitType = xmlOBJ.getOutBit().upper()
    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.STR_Type() is None:
        if str(label['IsisCube']['Core']['Pixels']['Type']).upper(
        ) != xmlOBJ.getOutBit().upper() and str(
                label['IsisCube']['Core']['Pixels']['Type']).upper() != 'REAL':
            if str(label['IsisCube']['Core']['Pixels']
                   ['Type']).upper() == 'SIGNEDWORD':
                strpairs = '0:-32765 0:-32765 100:32765 100:32765'
            elif str(label['IsisCube']['Core']['Pixels']
                     ['Type']).upper() == 'UNSIGNEDBYTE':
                strpairs = '0:1 0:1 100:254 100:254'
            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    strType = xmlOBJ.STR_Type()
    if strType == 'StretchPercent' and xmlOBJ.STR_PercentMin() is not None \
            and xmlOBJ.STR_PercentMax() is not None and testBitType != 'REAL':
        if float(xmlOBJ.STR_PercentMin()) != 0 and float(
                xmlOBJ.STR_PercentMax()) != 100:
            if testBitType == 'UNSIGNEDBYTE':
                strpairs = '0:1 ' + xmlOBJ.STR_PercentMin() + ':1 ' + \
                    xmlOBJ.STR_PercentMax() + ':254 100:254'
            elif testBitType == 'SIGNEDWORD':
                strpairs = '0:-32765 ' + xmlOBJ.STR_PercentMin() + ':-32765 ' + \
                    xmlOBJ.STR_PercentMax() + ':32765 100:32765'
            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())
    elif strType == 'GaussStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('gaussstretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('gsigma', xmlOBJ.STR_GaussSigma())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())
    elif strType == 'HistogramEqualization':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('histeq')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        if xmlOBJ.STR_PercentMin() is None:
            STRprocessOBJ.AddParameter('minper', '0')
        else:
            STRprocessOBJ.AddParameter('minper', xmlOBJ.STR_PercentMin())
        if xmlOBJ.STR_PercentMax() is None:
            STRprocessOBJ.AddParameter('maxper', '100')
        else:
            STRprocessOBJ.AddParameter('maxper', xmlOBJ.STR_PercentMax())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())
    elif strType == 'SigmaStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('sigmastretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('variance', xmlOBJ.STR_SigmaVariance())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    # Test for output bit type and add to recipe
    if xmlOBJ.getProcess() == 'POW':
        if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
            CAprocessOBJ = Process()
            CAprocessOBJ.newProcess('cubeatt-bit')
            CAprocessOBJ.AddParameter('from_', 'value')
            CAprocessOBJ.AddParameter('to', 'value')
            recipeOBJ.AddProcess(CAprocessOBJ.getProcess())
    elif xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getOutBit().upper() != 'INPUT':
            if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
                if str(label['IsisCube']['Core']['Pixels']
                       ['Type']).upper() != xmlOBJ.getOutBit().upper():
                    CAprocessOBJ = Process()
                    CAprocessOBJ.newProcess('cubeatt-bit')
                    CAprocessOBJ.AddParameter('from_', 'value')
                    CAprocessOBJ.AddParameter('to', 'value')
                    recipeOBJ.AddProcess(CAprocessOBJ.getProcess())

    # Add Grid (MAP2)
    if xmlOBJ.getGridInterval() is not None:
        GprocessOBJ = Process()
        GprocessOBJ.newProcess('grid')
        GprocessOBJ.AddParameter('from_', 'value')
        GprocessOBJ.AddParameter('to', 'value')
        GprocessOBJ.AddParameter('latinc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('loninc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('outline', 'yes')
        GprocessOBJ.AddParameter('boundary', 'yes')
        GprocessOBJ.AddParameter('linewidth', '3')
        recipeOBJ.AddProcess(GprocessOBJ.getProcess())

    # OUTPUT FORMAT
    # Test for GDAL and add to recipe
    Oformat = xmlOBJ.getOutFormat()
    if Oformat in ('GeoTiff-BigTiff', 'GeoJPEG-2000', 'JPEG', 'PNG'):
        if Oformat == 'GeoJPEG-2000':
            Oformat = 'JP2KAK'
        if Oformat == 'GeoTiff-BigTiff':
            Oformat = 'GTiff'
        GDALprocessOBJ = Process()
        # @TODO remove hard-coded path in favor of using whichever utilities
        # are found within the conda environment -- we need more information
        # here to ensure that whichever utilities are found are capable of
        # supporting GeoJPEG-2000.
        GDALprocessOBJ.newProcess('/usgs/apps/anaconda/bin/gdal_translate')
        if xmlOBJ.getOutBit() != 'input':
            GDALprocessOBJ.AddParameter(
                '-ot', GDALprocessOBJ.GDAL_OBit(xmlOBJ.getOutBit()))
        GDALprocessOBJ.AddParameter('-of', Oformat)
        if Oformat in ('GTiff', 'JP2KAK', 'JPEG'):
            GDALprocessOBJ.AddParameter('-co',
                                        GDALprocessOBJ.GDAL_Creation(Oformat))
        recipeOBJ.AddProcess(GDALprocessOBJ.getProcess())
    # Set up isis2pds and add to recipe
    elif Oformat == 'PDS':
        pdsProcessOBJ = Process()
        pdsProcessOBJ.newProcess('isis2pds')
        pdsProcessOBJ.AddParameter('from_', 'value')
        pdsProcessOBJ.AddParameter('to', 'value')
        if xmlOBJ.getOutBit() == 'unsignedbyte':
            pdsProcessOBJ.AddParameter('bittype', '8bit')
        elif xmlOBJ.getOutBit() == 'signedword':
            pdsProcessOBJ.AddParameter('bittype', 's16bit')
        recipeOBJ.AddProcess(pdsProcessOBJ.getProcess())

    for item in recipeOBJ.getProcesses():
        processOBJ = Process()
        processOBJ.ProcessFromRecipe(item, recipeOBJ.getRecipe())
        if item == 'cam2map':
            processOBJ.updateParameter('map', MAPfile)
            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'CAMERA')
            else:
                processOBJ.updateParameter('pixres', 'MAP')
            if xmlOBJ.getRangeType() is None:
                processOBJ.updateParameter('defaultrange', 'MINIMIZE')
            elif xmlOBJ.getRangeType() == 'smart' or xmlOBJ.getRangeType() == 'fill':
                processOBJ.updateParameter('defaultrange', 'CAMERA')
                processOBJ.AddParameter('trim', 'YES')
        elif item == 'map2map':
            processOBJ.updateParameter('map', MAPfile)
            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'FROM')
            else:
                processOBJ.updateParameter('pixres', 'MAP')
            if xmlOBJ.OutputGeometry() is not None:
                processOBJ.updateParameter('defaultrange', 'MAP')
                processOBJ.AddParameter('trim', 'YES')
            else:
                processOBJ.updateParameter('defaultrange', 'FROM')
        processJSON = processOBJ.Process2JSON()
        try:
            RQ_recipe.QueueAdd(processJSON)
            logger.info('Recipe Element Added to Redis: %s : Success', item)
        except Exception as e:
            logger.warn('Recipe Element NOT Added to Redis: %s', item)

    # HPC job stuff
    logger.info('HPC Cluster job Submission Starting')
    jobOBJ = HPCjob()
    jobOBJ.setJobName(key + '_Service')
    jobOBJ.setStdOut(slurm_log + key + '_%A_%a.out')
    jobOBJ.setStdError(slurm_log + key + '_%A_%a.err')
    jobOBJ.setWallClock('24:00:00')
    jobOBJ.setMemory('24576')
    jobOBJ.setPartition('pds')
    JAsize = RQ_file.QueueSize()
    jobOBJ.setJobArray(JAsize)
    logger.info('Job Array Size : %s', str(JAsize))

    # @TODO replace with source activate <env>
    #jobOBJ.addPath('/usgs/apps/anaconda/bin')

    # Whether or not we use the default namespace, this guarantees that the
    # POW/MAP queues will match the namespace used in the job manager.
    if xmlOBJ.getProcess() == 'POW':
        cmd = cmd_dir + "POWprocess.py -k {} -n {}".format(key, namespace)
    elif xmlOBJ.getProcess() == 'MAP2':
        cmd = cmd_dir + "MAPprocess.py -k {} -n {}".format(key, namespace)
    logger.info('HPC Command: %s', cmd)
    jobOBJ.setCommand(cmd)

    SBfile = directory + '/' + key + '.sbatch'
    jobOBJ.MakeJobFile(SBfile)
    try:
        sb = open(SBfile)
        sb.close()
        logger.info('SBATCH File Creation: Success')
    except IOError as e:
        logger.error('SBATCH File %s Not Found', SBfile)
    try:
        jobOBJ.Run()
        logger.info('Job Submission to HPC: Success')
        DBQO.setJobsStarted(key)
    except IOError as e:
        logger.error('Jobs NOT Submitted to HPC')
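
# A sketch of the consumer side of the namespace handshake above: the
# POWprocess/MAPprocess workers receive -k <key> -n <namespace> and are
# expected to rebuild queue names the same way the service does (this helper
# is illustrative, mirroring the RedisQueue(key + suffix, namespace) calls
# above; it is not part of the pipeline).
def worker_queues(key, namespace):
    """Recreate the per-job queue set exactly as the service defines it, so
    producer and consumer agree on every queue name."""
    suffixes = ('_recipe', '_FileQueue', '_WorkQueue', '_loggy', '_ZIP')
    return {suffix: RedisQueue(key + suffix, namespace) for suffix in suffixes}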