def main(user_args):
    """Finalize a completed POW/MAP2 job.

    Reads the per-job Redis queues/hashes populated by the processing
    stages, writes an error-XML report to the JOBS database when errors
    were recorded, builds a human-readable job log, zips the outputs,
    copies everything to the final download directory, removes the
    working area, and marks the job finished.
    """
    key = user_args.key
    namespace = user_args.namespace
    if namespace is None:
        namespace = default_namespace
    log_level = user_args.log_level

    #***************** Setup Logging **************
    logger = logging.getLogger('service_final' + key)
    level = logging.getLevelName(log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'Service.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)
    logger.info('Starting Final Process')

    #************Set up REDIS Queues ****************
    zipQueue = RedisQueue(key + '_ZIP', namespace)
    loggyQueue = RedisQueue(key + '_loggy', namespace)
    infoHash = RedisHash(key + '_info')
    recipeQueue = RedisQueue(key + '_recipe', namespace)
    errorHash = RedisHash(key + '_error')

    DBQO = PDS_DBquery('JOBS')

    if errorHash.HashCount() > 0:
        # Build an <errors> XML document from the error hash and store it
        # in the JOBS database.
        root = ET.Element('errors')
        test = errorHash.getKeys()
        for item in test:
            sub = ET.Element('error')
            root.append(sub)
            field1 = ET.SubElement(sub, 'file')
            # Redis may hand back bytes; normalize to str before use.
            if isinstance(item, bytes):
                item = item.decode('utf-8')
            field1.text = item
            Eval = errorHash.getError(item)
            if isinstance(Eval, bytes):
                Eval = Eval.decode('utf-8')
            field2 = ET.SubElement(sub, 'message')
            field2.text = Eval
        tree = ET.ElementTree(root)
        fh = BytesIO()
        tree.write(fh, encoding='utf-8', xml_declaration=True)
        errorxml = str(fh.getvalue(), 'utf-8').replace("\n", "")
        testval = DBQO.addErrors(key, errorxml)
        if testval == 'Success':
            logger.info('Error XML added to JOBS DB')
        elif testval == 'Error':
            logger.error('Adding Error XML to JOBS DB: Error')
        print(errorxml)

    Fdir = os.path.join(pow_map2_base, infoHash.Service(), key)
    Wpath = os.path.join(workarea, key)

    # Make final directory
    if not os.path.exists(Fdir):
        try:
            os.makedirs(Fdir)
            logger.info('Final Location Success: %s', Fdir)
        except OSError as e:
            # Was a bare `except`; narrowed so unrelated failures surface.
            logger.error('Error Making Final Directory')
            logger.error(e)

    # Block to build job log file
    outputLOG = Wpath + "/" + key + '.log'
    with open(outputLOG, "w") as logOBJ:
        logOBJ.write(" U.S. Geological Survey Cloud Processing Services\n")
        logOBJ.write(" http://astrocloud.wr.usgs.gov\n\n")
        if infoHash.Service() == 'POW':
            logOBJ.write(" Processing On the Web(POW)\n\n")
        logOBJ.write(" Processing Provided by ASTROGEOLOGY USGS Flagstaff\n")
        logOBJ.write(" Contact Information: [email protected]\n\n")
        logOBJ.write(
            "____________________________________________________________________\n\n"
        )
        logOBJ.write("JOB INFORMATION\n\n")
        logOBJ.write(" SERVICE: " + infoHash.Service() + "\n")
        logOBJ.write(" JOB KEY: " + key + "\n")
        logOBJ.write(" PROCESSING DATE: " +
                     datetime.datetime.now().strftime("%Y-%m-%d %H:%M") + "\n")
        logOBJ.write(" ISIS VERSION: " + isis_version)
        if infoHash.getStatus() == 'ERROR':
            logOBJ.write(" JOB STATUS: " + infoHash.getStatus() +
                         " See Details Below\n")
        else:
            logOBJ.write(" JOB STATUS: " + infoHash.getStatus() + "\n")
        logOBJ.write(" FILE COUNT: " + infoHash.getFileCount() + "\n\n")
        logOBJ.write(
            "_____________________________________________________________________\n\n"
        )
        logOBJ.write("PROCESSING INFORMATION\n\n")
        # Each loggy-queue element is a JSON document keyed by input image;
        # OrderedDict preserves the recorded process order.
        for element in loggyQueue.ListGet():
            procDICT = json.loads(element, object_pairs_hook=OrderedDict)
            for infile in procDICT:
                logOBJ.write(" IMAGE: " + infile + "\n")
                for proc, _ in procDICT[infile].items():
                    logOBJ.write(" PROCESS: " + str(proc) + "\n")
                    for k, val in procDICT[infile][proc].items():
                        if k == 'status':
                            logOBJ.write(" STATUS: " + val + "\n")
                        elif k == 'parameters':
                            logOBJ.write(" PARAMETERS: " + val + "\n")
                        elif k == 'helplink':
                            logOBJ.write(" HELP LINK: " + val + "\n\n")
                        elif k == 'error':
                            logOBJ.write(" ERROR: " + val + "\n\n")
        logOBJ.write("END-PROCESSING\n")

    #******** Block for to copy and zip files to final directory ******
    map_file = os.path.join(Wpath, key + '.map')
    Zfile = os.path.join(Wpath, key + '.zip')
    final_file_list = zipQueue.ListGet()
    final_file_list.append(outputLOG)
    final_file_list.append(map_file)

    logger.info('Making Zip File %s', Zfile)
    with zipfile.ZipFile(Zfile, 'w') as out_zip:
        for item in final_file_list:
            try:
                out_zip.write(item, os.path.basename(item))
                # Fix: success was originally logged BEFORE the write was
                # attempted; log only after it actually succeeds.
                logger.info('File %s added to zip file: success', item)
            except OSError as e:
                logger.error('File %s NOT added to zip file', item)
                logger.error(e)

    logger.info('Copying output files to %s', Fdir)
    for item in final_file_list:
        try:
            shutil.copyfile(item, os.path.join(Fdir, os.path.basename(item)))
            logger.info('Copied file %s to download directory: Success', item)
        except OSError as e:
            # Fix: the original bare `except` handler logged an undefined
            # name `e`, raising NameError on the failure path.
            logger.error('File %s NOT COPIED to download directory', item)
            logger.error(e)

    try:
        shutil.copy(Zfile, os.path.join(Fdir, os.path.basename(Zfile)))
        logger.info('Copied zip file %s to download directory', Zfile)
    except IOError as e:
        logger.error(
            'Error while attempting to copy zip file %s to download area',
            Zfile)
        logger.error(e)

    #************** Clean up *******************
    try:
        shutil.rmtree(Wpath)
        logger.info('Working Directory Removed: Success')
    except OSError as e:
        logger.error('Working Directory NOT Removed')
        logger.error(e)

    DBQO2 = PDS_DBquery('JOBS')
    DBQO2.setJobsFinished(key)
    errorHash.RemoveAll()
    infoHash.RemoveAll()
    loggyQueue.RemoveAll()
    zipQueue.RemoveAll()
    recipeQueue.RemoveAll()
    logger.info('Job %s is Complete', key)
def main(user_args):
    """Finalize a completed POW/MAP2 job (legacy shell-`zip` variant).

    Reports recorded errors to the JOBS database, builds the job log,
    zips the log/map/output files with the external `zip` utility,
    copies them to the final download area, and cleans up the working
    directory and Redis queues.
    """
    key = user_args.key
    namespace = user_args.namespace
    log_level = user_args.log_level

    #***************** Setup Logging **************
    logger = logging.getLogger('service_final' + key)
    level = logging.getLevelName(log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'Service.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)
    logger.info('Starting Final Process')

    #************Set up REDIS Queues ****************
    zipQueue = RedisQueue(key + '_ZIP', namespace)
    loggyQueue = RedisQueue(key + '_loggy', namespace)
    infoHash = RedisHash(key + '_info')
    recipeQueue = RedisQueue(key + '_recipe', namespace)
    errorHash = RedisHash(key + '_error')

    DBQO = PDS_DBquery('JOBS')

    if errorHash.HashCount() > 0:
        root = ET.Element('errors')
        test = errorHash.getKeys()
        # Fix: the loop variable was named `key`, shadowing and clobbering
        # the job key used by everything below (addErrors, Fdir, Wpath...).
        for err_item in test:
            sub = ET.Element('error')
            root.append(sub)
            field1 = ET.SubElement(sub, 'file')
            field1.text = err_item
            Eval = errorHash.getError(err_item)
            field2 = ET.SubElement(sub, 'message')
            field2.text = Eval
        tree = ET.ElementTree(root)
        fh = BytesIO()
        tree.write(fh, encoding='utf-8', xml_declaration=True)
        testval = DBQO.addErrors(key, fh.getvalue())
        if testval == 'Success':
            logger.info('Error XML add to JOBS DB')
        elif testval == 'Error':
            logger.error('Addin Error XML to JOBS DB: Error')
        print(fh.getvalue())

    Fdir = pow_map2_base + infoHash.Service() + '/' + key
    Wpath = scratch + key

    # Make final directory
    if not os.path.exists(Fdir):
        try:
            os.makedirs(Fdir)
            logger.info('Final Location Success: %s', Fdir)
        except OSError as e:
            logger.error('Error Making Final Directory')
            logger.error(e)

    # Block to build job log file
    outputLOG = Wpath + "/" + key + '.log'
    with open(outputLOG, "w") as logOBJ:
        logOBJ.write(" U.S. Geological Survey Cloud Processing Services\n")
        logOBJ.write(" http://astrocloud.wr.usgs.gov\n\n")
        if infoHash.Service() == 'POW':
            logOBJ.write(" Processing On the Web(POW)\n\n")
        logOBJ.write(" Processing Provided by ASTROGEOLOGY USGS Flagstaff\n")
        logOBJ.write(" Contact Information: [email protected]\n\n")
        logOBJ.write(
            "____________________________________________________________________\n\n")
        logOBJ.write("JOB INFORMATION\n\n")
        logOBJ.write(" SERVICE: " + infoHash.Service() + "\n")
        logOBJ.write(" JOB KEY: " + key + "\n")
        logOBJ.write(" PROCESSING DATE: " +
                     datetime.datetime.now().strftime("%Y-%m-%d %H:%M") + "\n")
        logOBJ.write(" ISIS VERSION: " + isis_version)
        if infoHash.getStatus() == 'ERROR':
            logOBJ.write(" JOB STATUS: " + infoHash.getStatus() +
                         " See Details Below\n")
        else:
            logOBJ.write(" JOB STATUS: " + infoHash.getStatus() + "\n")
        logOBJ.write(" FILE COUNT: " + infoHash.getFileCount() + "\n\n")
        logOBJ.write(
            "_____________________________________________________________________\n\n")
        logOBJ.write("PROCESSING INFORMATION\n\n")
        for element in loggyQueue.ListGet():
            procDICT = json.loads(element, object_pairs_hook=OrderedDict)
            for infile in procDICT:
                logOBJ.write(" IMAGE: " + infile + "\n")
                for proc, _ in procDICT[infile].items():
                    logOBJ.write(" PROCESS: " + str(proc) + "\n")
                    for k, val in procDICT[infile][proc].items():
                        if k == 'status':
                            logOBJ.write(" STATUS: " + val + "\n")
                        elif k == 'command':
                            logOBJ.write(" COMMAND: " + val + "\n")
                        elif k == 'helplink':
                            logOBJ.write(" HELP LINK: " + val + "\n\n")
                        elif k == 'error':
                            logOBJ.write(" ERROR: " + val + "\n\n")
        logOBJ.write("END-PROCESSING\n")

    def _zip_add(path):
        """Add one file to the job zip via the external `zip` utility.

        Fix: the command is now an argument list (no shell string
        concatenation, so paths with spaces are safe), and the bytes
        returned by the pipes are decoded before logging — the original
        `'...' + stdout` str+bytes concatenation raised TypeError.
        """
        process = subprocess.Popen(
            ['zip', '-j', Zfile, '-q', path],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdout, stderr) = process.communicate()
        logger.info('zip stdout: %s', stdout.decode('utf-8', 'replace'))
        logger.info('zip stderr: %s', stderr.decode('utf-8', 'replace'))

    #******** Block for to copy and zip files to final directory ******
    Zfile = Wpath + '/' + key + '.zip'
    logger.info('Making Zip File %s', Zfile)

    # log file stuff
    Lfile = key + '.log'
    try:
        _zip_add(outputLOG)
        logger.info('Log file %s Added to Zip File: Success', Lfile)
    except OSError as e:
        logger.error('Log File %s NOT Added to Zip File', Lfile)
        logger.error(e)

    try:
        shutil.copyfile(outputLOG, Fdir + "/" + Lfile)
        logger.info('Copied Log File %s to Final Area: Success', Lfile)
        os.remove(outputLOG)
    except IOError as e:
        logger.error('Log File %s NOT COPIED to Final Area', Lfile)
        logger.error(e)

    # Add map file to zip
    map_file = Wpath + "/" + key + '.map'
    try:
        _zip_add(map_file)
        logger.info('Map file %s added to zip file: Success', map_file)
    except OSError as e:
        logger.error('Map File %s NOT added to Zip File', map_file)
        logger.error(e)

    try:
        shutil.copyfile(map_file, Fdir + "/" + key + '.map')
        logger.info('Copied map file %s to final area: success', key + '.map')
    except IOError as e:
        logger.error('Map file %s NOT COPIED to final area', key + '.map')
        logger.error(e)

    # file stuff
    for Lelement in zipQueue.ListGet():
        Pfile = os.path.basename(Lelement)
        auxfile = Wpath + '/' + os.path.basename(Lelement) + '.aux.xml'
        try:
            _zip_add(Lelement)
            logger.info('File %s Added to Zip File: Success', Pfile)
            if os.path.isfile(auxfile):
                _zip_add(auxfile)
                logger.info('File %s Added to Zip File: Success',
                            os.path.basename(Lelement) + '.aux.xml')
        except OSError as e:
            logger.error('Error During Zip Operation')
            logger.error(e)

        try:
            shutil.copyfile(Wpath + '/' + Pfile, Fdir + '/' + Pfile)
            logger.info('Copy File %s : Success', Pfile)
            os.remove(Wpath + "/" + Pfile)
            if os.path.isfile(auxfile):
                shutil.copyfile(auxfile, Fdir + '/' +
                                os.path.basename(Lelement) + '.aux.xml')
                logger.info('Copy File %s : Success',
                            os.path.basename(Lelement) + '.aux.xml')
                os.remove(auxfile)
        except IOError as e:
            logger.error('Error During File Copy Operation')
            logger.error(e)

    try:
        shutil.copy(Zfile, Fdir + '/' + key + '.zip')
        os.remove(Zfile)
        logger.info('Zip File Copied to Final Directory')
    except IOError as e:
        logger.error('Error During Zip File Copy Operation')
        logger.error(e)

    #************** Clean up *******************
    # Fix: these removes were unguarded — a missing .map/.sbatch aborted
    # the whole cleanup and left the job unfinished.
    for leftover in (Wpath + '/' + key + '.map',
                     Wpath + '/' + key + '.sbatch'):
        try:
            os.remove(leftover)
        except OSError as e:
            logger.error('Could not remove %s', leftover)
            logger.error(e)
    try:
        shutil.rmtree(Wpath)
        logger.info('Working Directory Removed: Success')
    except OSError as e:
        logger.error('Working Directory NOT Removed')
        logger.error(e)

    DBQO2 = PDS_DBquery('JOBS')
    DBQO2.setJobsFinished(key)
    infoHash.RemoveAll()
    loggyQueue.RemoveAll()
    zipQueue.RemoveAll()
    recipeQueue.RemoveAll()
    logger.info('Job %s is Complete', key)
def main(user_args):
    """Queue a POW/MAP2 job for HPC processing.

    Pulls (or is given) a job key from the JOBS database, parses the job
    XML, populates the per-job Redis queues/hashes (file list, recipe,
    ground range), writes the ISIS map template, builds the processing
    recipe, and submits a Slurm job array unless --norun was given.
    """
    key = user_args.key
    norun = user_args.norun
    namespace = user_args.namespace
    if namespace is None:
        namespace = default_namespace

    # Set up logging
    logger = logging.getLogger(key)
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Service.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    # Only one Services instance may run at a time.
    RQ_lock = RedisLock(lock_obj)
    RQ_lock.add({'Services': '1'})
    if not RQ_lock.available('Services'):
        exit()

    # Connect to database and access 'jobs' table
    DBQO = PDS_DBquery('JOBS')
    if key is None:
        # If no key is specified, grab the first key
        try:
            key = DBQO.jobKey()
            try:
                key = key.decode('utf-8')
            except AttributeError:
                # Already a str — nothing to decode.
                pass
        # If the queue is empty, it'll throw a type error.
        except TypeError:
            logger.debug('No keys found in clusterjobs database')
            exit(1)
    try:
        # Set the 'queued' column to current time i.e. prep for processing
        DBQO.setJobsQueued(key)
    except KeyError as e:
        logger.error('%s', e)
        exit(1)

    logger.info('Starting Process')
    xmlOBJ = jobXML(DBQO.jobXML4Key(key))

    # Make directory if it doesn't exist
    directory = scratch + key
    if not os.path.exists(directory):
        os.makedirs(directory)
    logger.info('Working Area: %s', directory)

    # Set up Redis Hash for ground range
    RedisH = RedisHash(key + '_info')
    RedisH.RemoveAll()
    RedisErrorH = RedisHash(key + '_error')
    RedisErrorH.RemoveAll()
    RedisH_DICT = {}
    RedisH_DICT['service'] = xmlOBJ.getProcess()
    RedisH_DICT['fileformat'] = xmlOBJ.getOutFormat()
    RedisH_DICT['outbit'] = xmlOBJ.getOutBit()
    if xmlOBJ.getRangeType() is not None:
        RedisH_DICT['grtype'] = xmlOBJ.getRangeType()
        RedisH_DICT['minlat'] = xmlOBJ.getMinLat()
        RedisH_DICT['maxlat'] = xmlOBJ.getMaxLat()
        RedisH_DICT['minlon'] = xmlOBJ.getMinLon()
        RedisH_DICT['maxlon'] = xmlOBJ.getMaxLon()
    if not RedisH.IsInHash('service'):
        RedisH.AddHash(RedisH_DICT)
    if RedisH.IsInHash('service'):
        logger.info('Redis info Hash: Success')
    else:
        logger.error('Redis info Hash Not Found')
    # End ground range

    RQ_recipe = RedisQueue(key + '_recipe', namespace)
    RQ_recipe.RemoveAll()
    RQ_file = RedisQueue(key + '_FileQueue', namespace)
    RQ_file.RemoveAll()
    RQ_WorkQueue = RedisQueue(key + '_WorkQueue', namespace)
    RQ_WorkQueue.RemoveAll()
    RQ_loggy = RedisQueue(key + '_loggy', namespace)
    RQ_loggy.RemoveAll()
    RQ_zip = RedisQueue(key + '_ZIP', namespace)
    RQ_zip.RemoveAll()

    if xmlOBJ.getProcess() == 'POW':
        fileList = xmlOBJ.getFileListWB()
    elif xmlOBJ.getProcess() == 'MAP2':
        fileList = xmlOBJ.getMFileListWB()

    for List_file in fileList:
        # Input and output file naming and path stuff
        if xmlOBJ.getProcess() == 'POW':
            if xmlOBJ.getInst() == 'THEMIS_IR':
                Input_file = List_file.replace('odtie1_', 'odtir1_')
                Input_file = Input_file.replace('xxedr', 'xxrdr')
                Input_file = Input_file.replace('EDR.QUB', 'RDR.QUB')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'ISSNA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'ISSWA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'SOLID STATE IMAGING SYSTEM':
                Input_file = List_file.replace('.img', '.lbl')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            else:
                Input_file = List_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
        elif xmlOBJ.getProcess() == 'MAP2':
            Input_file = List_file.replace('file://pds_san', '/pds_san')
            if '+' in Input_file:
                tempsplit = Input_file.split('+')
                tempFile = tempsplit[0]
            else:
                tempFile = Input_file
            label = pvl.load(tempFile)
            # Output final file naming
            Tbasename = os.path.splitext(os.path.basename(tempFile))[0]
            splitBase = Tbasename.split('_')
            labP = xmlOBJ.getProjection()
            if labP == 'INPUT':
                lab_proj = label['IsisCube']['Mapping']['ProjectionName'][0:4]
            else:
                lab_proj = labP[0:4]
            if xmlOBJ.getClat() is None or xmlOBJ.getClon() is None:
                basefinal = splitBase[0] + splitBase[1] + \
                    splitBase[2] + '_MAP2_' + lab_proj.upper()
            else:
                lab_clat = float(xmlOBJ.getClat())
                if lab_clat >= 0:
                    labH = 'N'
                elif lab_clat < 0:
                    labH = 'S'
                lab_clon = float(xmlOBJ.getClon())
                basefinal = splitBase[0] + splitBase[1] + splitBase[
                    2] + '_MAP2_' + str(lab_clat) + labH + str(
                        lab_clon) + '_' + lab_proj.upper()
            RedisH.MAPname(basefinal)

        try:
            RQ_file.QueueAdd(Input_file)
            logger.info('File %s Added to Redis Queue', Input_file)
        except Exception as e:
            # logger.warn is a deprecated alias of logger.warning.
            logger.warning('File %s NOT Added to Redis Queue', Input_file)
            print('Redis Queue Error', e)
    RedisH.FileCount(RQ_file.QueueSize())
    logger.info('Count of Files Queue: %s', str(RQ_file.QueueSize()))

    # Map Template Stuff
    logger.info('Making Map File')
    mapOBJ = MakeMap()
    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.getProjection() == 'INPUT':
        proj = label['IsisCube']['Mapping']['ProjectionName']
        mapOBJ.Projection(proj)
    else:
        mapOBJ.Projection(xmlOBJ.getProjection())
    if xmlOBJ.getClon() is not None:
        mapOBJ.CLon(float(xmlOBJ.getClon()))
    if xmlOBJ.getClat() is not None:
        mapOBJ.CLat(float(xmlOBJ.getClat()))
    if xmlOBJ.getFirstParallel() is not None:
        mapOBJ.FirstParallel(float(xmlOBJ.getFirstParallel()))
    if xmlOBJ.getSecondParallel() is not None:
        mapOBJ.SecondParallel(float(xmlOBJ.getSecondParallel()))
    if xmlOBJ.getResolution() is not None:
        mapOBJ.PixelRes(float(xmlOBJ.getResolution()))
    if xmlOBJ.getTargetName() is not None:
        mapOBJ.Target(xmlOBJ.getTargetName())
    if xmlOBJ.getERadius() is not None:
        mapOBJ.ERadius(float(xmlOBJ.getERadius()))
    if xmlOBJ.getPRadius() is not None:
        mapOBJ.PRadius(float(xmlOBJ.getPRadius()))
    if xmlOBJ.getLatType() is not None:
        mapOBJ.LatType(xmlOBJ.getLatType())
    if xmlOBJ.getLonDirection() is not None:
        mapOBJ.LonDirection(xmlOBJ.getLonDirection())
    if xmlOBJ.getLonDomain() is not None:
        mapOBJ.LonDomain(int(xmlOBJ.getLonDomain()))
    if xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getMinLat() is not None:
            mapOBJ.MinLat(float(xmlOBJ.getMinLat()))
        if xmlOBJ.getMaxLat() is not None:
            mapOBJ.MaxLat(float(xmlOBJ.getMaxLat()))
        if xmlOBJ.getMinLon() is not None:
            mapOBJ.MinLon(float(xmlOBJ.getMinLon()))
        if xmlOBJ.getMaxLon() is not None:
            mapOBJ.MaxLon(float(xmlOBJ.getMaxLon()))
    mapOBJ.Map2pvl()
    MAPfile = directory + "/" + key + '.map'
    mapOBJ.Map2File(MAPfile)

    try:
        # Fix: the original `f = open(MAPfile); f.close` never called
        # close() (missing parentheses), leaking the file handle.
        with open(MAPfile):
            pass
        logger.info('Map File Creation: Success')
    except IOError as e:
        logger.error('Map File %s Not Found', MAPfile)
    # ** End Map Template Stuff **

    logger.info('Building Recipe')
    recipeOBJ = Recipe()
    if xmlOBJ.getProcess() == 'POW':
        recipeOBJ.AddJsonFile(recipe_base + xmlOBJ.getCleanName() + '.json',
                              "pow")
    elif xmlOBJ.getProcess() == 'MAP2':
        recipeOBJ.AddJsonFile(recipe_base + "map2_process.json", "map")

    # Test for stretch and add to recipe
    # if MAP2 and 8 or 16 bit run stretch to set range
    if xmlOBJ.getOutBit() == 'input':
        testBitType = str(label['IsisCube']['Core']['Pixels']['Type']).upper()
    else:
        testBitType = xmlOBJ.getOutBit().upper()
    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.STR_Type() is None:
        if str(label['IsisCube']['Core']['Pixels']['Type']).upper(
        ) != xmlOBJ.getOutBit().upper() and str(
                label['IsisCube']['Core']['Pixels']['Type']).upper() != 'REAL':
            if str(label['IsisCube']['Core']['Pixels']
                   ['Type']).upper() == 'SIGNEDWORD':
                strpairs = '0:-32765 0:-32765 100:32765 100:32765'
            elif str(label['IsisCube']['Core']['Pixels']
                     ['Type']).upper() == 'UNSIGNEDBYTE':
                strpairs = '0:1 0:1 100:254 100:254'
            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    strType = xmlOBJ.STR_Type()
    if strType == 'StretchPercent' and xmlOBJ.STR_PercentMin(
    ) is not None and xmlOBJ.STR_PercentMax(
    ) is not None and testBitType != 'REAL':
        if float(xmlOBJ.STR_PercentMin()) != 0 and float(
                xmlOBJ.STR_PercentMax()) != 100:
            if testBitType == 'UNSIGNEDBYTE':
                strpairs = '0:1 ' + xmlOBJ.STR_PercentMin() + ':1 ' + \
                    xmlOBJ.STR_PercentMax() + ':254 100:254'
            elif testBitType == 'SIGNEDWORD':
                strpairs = '0:-32765 ' + xmlOBJ.STR_PercentMin() + ':-32765 ' + \
                    xmlOBJ.STR_PercentMax() + ':32765 100:32765'
            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())
    elif strType == 'GaussStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('gaussstretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('gsigma', xmlOBJ.STR_GaussSigma())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())
    elif strType == 'HistogramEqualization':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('histeq')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        if xmlOBJ.STR_PercentMin() is None:
            STRprocessOBJ.AddParameter('minper', '0')
        else:
            STRprocessOBJ.AddParameter('minper', xmlOBJ.STR_PercentMin())
        if xmlOBJ.STR_PercentMax() is None:
            STRprocessOBJ.AddParameter('maxper', '100')
        else:
            STRprocessOBJ.AddParameter('maxper', xmlOBJ.STR_PercentMax())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())
    elif strType == 'SigmaStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('sigmastretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('variance', xmlOBJ.STR_SigmaVariance())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    # Test for output bit type and add to recipe
    if xmlOBJ.getProcess() == 'POW':
        if xmlOBJ.getOutBit().upper() == 'UNSIGNEDBYTE' or xmlOBJ.getOutBit(
        ).upper() == 'SIGNEDWORD':
            CAprocessOBJ = Process()
            CAprocessOBJ.newProcess('cubeatt-bit')
            CAprocessOBJ.AddParameter('from_', 'value')
            CAprocessOBJ.AddParameter('to', 'value')
            recipeOBJ.AddProcess(CAprocessOBJ.getProcess())
    elif xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getOutBit().upper() != 'INPUT':
            if xmlOBJ.getOutBit().upper(
            ) == 'UNSIGNEDBYTE' or xmlOBJ.getOutBit().upper() == 'SIGNEDWORD':
                if str(label['IsisCube']['Core']['Pixels']
                       ['Type']).upper() != xmlOBJ.getOutBit().upper():
                    CAprocessOBJ = Process()
                    CAprocessOBJ.newProcess('cubeatt-bit')
                    CAprocessOBJ.AddParameter('from_', 'value')
                    CAprocessOBJ.AddParameter('to', 'value')
                    recipeOBJ.AddProcess(CAprocessOBJ.getProcess())

    # Add Grid(MAP2)
    if xmlOBJ.getGridInterval() is not None:
        GprocessOBJ = Process()
        GprocessOBJ.newProcess('grid')
        GprocessOBJ.AddParameter('from_', 'value')
        GprocessOBJ.AddParameter('to', 'value')
        GprocessOBJ.AddParameter('latinc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('loninc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('outline', 'yes')
        GprocessOBJ.AddParameter('boundary', 'yes')
        GprocessOBJ.AddParameter('linewidth', '3')
        recipeOBJ.AddProcess(GprocessOBJ.getProcess())

    # OUTPUT FORMAT
    # Test for GDAL and add to recipe
    Oformat = xmlOBJ.getOutFormat()
    if Oformat == 'GeoTiff-BigTiff' or Oformat == 'GeoJPEG-2000' or Oformat == 'JPEG' or Oformat == 'PNG':
        if Oformat == 'GeoJPEG-2000':
            Oformat = 'JP2KAK'
        if Oformat == 'GeoTiff-BigTiff':
            Oformat = 'GTiff'
        GDALprocessOBJ = Process()
        GDALprocessOBJ.newProcess('gdal_translate')
        if xmlOBJ.getOutBit() != 'input':
            GDALprocessOBJ.AddParameter(
                '-ot', GDALprocessOBJ.GDAL_OBit(xmlOBJ.getOutBit()))
        GDALprocessOBJ.AddParameter('-of', Oformat)
        if Oformat == 'GTiff' or Oformat == 'JP2KAK' or Oformat == 'JPEG':
            GDALprocessOBJ.AddParameter('-co',
                                        GDALprocessOBJ.GDAL_Creation(Oformat))
        recipeOBJ.AddProcess(GDALprocessOBJ.getProcess())
    # set up pds2isis and add to recipe
    elif Oformat == 'PDS':
        pdsProcessOBJ = Process()
        pdsProcessOBJ.newProcess('isis2pds')
        pdsProcessOBJ.AddParameter('from_', 'value')
        pdsProcessOBJ.AddParameter('to', 'value')
        if xmlOBJ.getOutBit() == 'unsignedbyte':
            pdsProcessOBJ.AddParameter('bittype', '8bit')
        elif xmlOBJ.getOutBit() == 'signedword':
            pdsProcessOBJ.AddParameter('bittype', 's16bit')
        recipeOBJ.AddProcess(pdsProcessOBJ.getProcess())

    for item in recipeOBJ.getProcesses():
        processOBJ = Process()
        processOBJ.ProcessFromRecipe(item, recipeOBJ.getRecipe())
        if item == 'cam2map':
            processOBJ.updateParameter('map', MAPfile)
            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'CAMERA')
            else:
                processOBJ.updateParameter('pixres', 'MAP')
            if xmlOBJ.getRangeType() is None:
                processOBJ.updateParameter('defaultrange', 'MINIMIZE')
            elif xmlOBJ.getRangeType() == 'smart' or xmlOBJ.getRangeType(
            ) == 'fill':
                processOBJ.updateParameter('defaultrange', 'CAMERA')
                processOBJ.AddParameter('trim', 'YES')
        elif item == 'map2map':
            processOBJ.updateParameter('map', MAPfile)
            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'FROM')
            else:
                processOBJ.updateParameter('pixres', 'MAP')
            if xmlOBJ.OutputGeometry() is not None:
                processOBJ.updateParameter('defaultrange', 'MAP')
                processOBJ.AddParameter('trim', 'YES')
            else:
                processOBJ.updateParameter('defaultrange', 'FROM')
        processJSON = processOBJ.Process2JSON()
        try:
            RQ_recipe.QueueAdd(processJSON)
            logger.info('Recipe Element Added to Redis: %s : Success', item)
        except Exception as e:
            logger.warning('Recipe Element NOT Added to Redis: %s', item)

    # HPC job stuff
    logger.info('HPC Cluster job Submission Starting')
    jobOBJ = HPCjob()
    jobOBJ.setJobName(key + '_Service')
    jobOBJ.setStdOut(slurm_log + key + '_%A_%a.out')
    jobOBJ.setStdError(slurm_log + key + '_%A_%a.err')
    jobOBJ.setWallClock('24:00:00')
    jobOBJ.setMemory('24576')
    jobOBJ.setPartition('pds')
    JAsize = RQ_file.QueueSize()
    jobOBJ.setJobArray(JAsize)
    logger.info('Job Array Size : %s', str(JAsize))

    # @TODO replace with source activate <env>
    #jobOBJ.addPath('/usgs/apps/anaconda/bin')

    # Whether or not we use the default namespace, this guarantees that the
    # POW/MAP queues will match the namespace used in the job manager.
    if xmlOBJ.getProcess() == 'POW':
        cmd = cmd_dir + "pow_process.py -k {} -n {}".format(key, namespace)
    elif xmlOBJ.getProcess() == 'MAP2':
        cmd = cmd_dir + "map_process.py -k {} -n {}".format(key, namespace)
    logger.info('HPC Command: %s', cmd)
    jobOBJ.setCommand(cmd)

    SBfile = directory + '/' + key + '.sbatch'
    jobOBJ.MakeJobFile(SBfile)

    try:
        # Fix: `sb = open(SBfile); sb.close` never closed the file
        # (missing call parentheses).
        with open(SBfile):
            pass
        logger.info('SBATCH File Creation: Success')
    except IOError as e:
        logger.error('SBATCH File %s Not Found', SBfile)

    if norun:
        logger.info('No-run mode, will not submit HPC job.')
    else:
        try:
            jobOBJ.Run()
            logger.info('Job Submission to HPC: Success')
            DBQO.setJobsStarted(key)
        except IOError as e:
            logger.error('Jobs NOT Submitted to HPC')
def main(user_args):
    """Queue a POW/MAP2 processing job and submit its HPC array job.

    Reads the job XML for ``user_args.key`` (or the next queued key) from the
    JOBS database, populates the per-key Redis queues/hashes, writes the ISIS
    map template and processing recipe, and submits a SLURM job array unless
    ``user_args.norun`` is set.

    Parameters
    ----------
    user_args : argparse.Namespace-like
        Must provide ``key`` (job key or None), ``norun`` (bool), and
        ``namespace`` (Redis namespace or None for the default).

    Side effects: writes to the Service log, the JOBS DB, Redis, the work
    area on disk, and (unless norun) the HPC scheduler.
    """
    key = user_args.key
    norun = user_args.norun
    namespace = user_args.namespace

    if namespace is None:
        namespace = default_namespace

    # Set up logging
    logger = logging.getLogger(key)
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Service.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    # Bail out quietly if the Services lock is held elsewhere.
    RQ_lock = RedisLock(lock_obj)
    RQ_lock.add({'Services': '1'})
    if not RQ_lock.available('Services'):
        exit()

    # Connect to database and access 'jobs' table
    DBQO = PDS_DBquery('JOBS')

    if key is None:
        # If no key is specified, grab the first key
        try:
            key = DBQO.jobKey()
            try:
                # jobKey() may return bytes; normalize to str. If it is
                # already a str (no .decode) or the bytes are not UTF-8,
                # keep the value as-is.
                key = key.decode('utf-8')
            except (AttributeError, UnicodeDecodeError):
                pass
        # If the queue is empty, it'll throw a type error.
        except TypeError:
            logger.debug('No keys found in clusterjobs database')
            exit(1)

    try:
        # Set the 'queued' column to current time i.e. prep for processing
        DBQO.setJobsQueued(key)
    except KeyError as e:
        logger.error('%s', e)
        exit(1)

    logger.info('Starting Process')

    xmlOBJ = jobXML(DBQO.jobXML4Key(key))

    # Make directory if it doesn't exist
    directory = os.path.join(workarea, key)
    if not os.path.exists(directory):
        os.makedirs(directory)

    logger.info('Working Area: %s', directory)

    # Set up Redis Hash for ground range
    RedisH = RedisHash(key + '_info')
    RedisH.RemoveAll()
    RedisErrorH = RedisHash(key + '_error')
    RedisErrorH.RemoveAll()
    RedisH_DICT = {}
    RedisH_DICT['service'] = xmlOBJ.getProcess()
    RedisH_DICT['fileformat'] = xmlOBJ.getOutFormat()
    RedisH_DICT['outbit'] = xmlOBJ.getOutBit()
    if xmlOBJ.getRangeType() is not None:
        RedisH_DICT['grtype'] = xmlOBJ.getRangeType()
        RedisH_DICT['minlat'] = xmlOBJ.getMinLat()
        RedisH_DICT['maxlat'] = xmlOBJ.getMaxLat()
        RedisH_DICT['minlon'] = xmlOBJ.getMinLon()
        RedisH_DICT['maxlon'] = xmlOBJ.getMaxLon()

    # Only seed the hash if it hasn't been populated already.
    if not RedisH.IsInHash('service'):
        RedisH.AddHash(RedisH_DICT)

    if RedisH.IsInHash('service'):
        logger.info('Redis info Hash: Success')
    else:
        logger.error('Redis info Hash Not Found')
    # End ground range

    # Fresh per-key queues: clear any residue from a previous run of this key.
    RQ_recipe = RedisQueue(key + '_recipe', namespace)
    RQ_recipe.RemoveAll()
    RQ_file = RedisQueue(key + '_FileQueue', namespace)
    RQ_file.RemoveAll()
    RQ_WorkQueue = RedisQueue(key + '_WorkQueue', namespace)
    RQ_WorkQueue.RemoveAll()
    RQ_loggy = RedisQueue(key + '_loggy', namespace)
    RQ_loggy.RemoveAll()
    RQ_zip = RedisQueue(key + '_ZIP', namespace)
    RQ_zip.RemoveAll()

    # NOTE(review): if the process is neither POW nor MAP2, fileList is
    # never bound and the loop below raises NameError — presumably only
    # these two services reach this script; confirm with callers.
    if xmlOBJ.getProcess() == 'POW':
        fileList = xmlOBJ.getFileListWB()
    elif xmlOBJ.getProcess() == 'MAP2':
        fileList = xmlOBJ.getMFileListWB()

    for List_file in fileList:

        # Input and output file naming and path stuff.
        # POW inputs: rewrite instrument-specific EDR/label names and map the
        # public HTTP URL prefix onto the local archive mount.
        if xmlOBJ.getProcess() == 'POW':
            if xmlOBJ.getInst() == 'THEMIS_IR':
                Input_file = List_file.replace('odtie1_', 'odtir1_')
                Input_file = Input_file.replace('xxedr', 'xxrdr')
                Input_file = Input_file.replace('EDR.QUB', 'RDR.QUB')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'ISSNA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'ISSWA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'SOLID STATE IMAGING SYSTEM':
                Input_file = List_file.replace('.img', '.lbl')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            else:
                Input_file = List_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)

        # MAP2 inputs: translate the SAN URL to a filesystem path and derive
        # the final output base name from the label projection / center.
        elif xmlOBJ.getProcess() == 'MAP2':
            Input_file = List_file.replace('file://pds_san', '/pds_san')

            # A '+' suffix (e.g. band selection) is not part of the path.
            if '+' in Input_file:
                tempsplit = Input_file.split('+')
                tempFile = tempsplit[0]
            else:
                tempFile = Input_file

            # Output final file naming
            Tbasename = os.path.splitext(os.path.basename(tempFile))[0]
            splitBase = Tbasename.split('_')

            labP = xmlOBJ.getProjection()
            isis_label = pvl.load(tempFile)
            # Only the first four characters of the projection name are used.
            if labP == 'INPUT':
                lab_proj = isis_label['IsisCube']['Mapping']['ProjectionName'][0:4]
            else:
                lab_proj = labP[0:4]

            if xmlOBJ.getClat() is None or xmlOBJ.getClon() is None:
                basefinal = splitBase[0] + splitBase[1] + \
                    splitBase[2] + '_MAP2_' + lab_proj.upper()
            else:
                lab_clat = float(xmlOBJ.getClat())
                # Hemisphere letter from the sign of the center latitude.
                if lab_clat >= 0:
                    labH = 'N'
                else:
                    labH = 'S'
                lab_clon = float(xmlOBJ.getClon())
                basefinal = splitBase[0] + splitBase[1] + splitBase[2] + \
                    '_MAP2_' + str(lab_clat) + labH + str(lab_clon) + \
                    '_' + lab_proj.upper()
            RedisH.MAPname(basefinal)

        try:
            RQ_file.QueueAdd(Input_file)
            logger.info('File %s Added to Redis Queue', Input_file)
        except Exception as e:
            logger.warning('File %s NOT Added to Redis Queue', Input_file)
            print('Redis Queue Error', e)

    RedisH.FileCount(RQ_file.QueueSize())
    logger.info('Count of Files Queue: %s', str(RQ_file.QueueSize()))

    # Map Template Stuff
    logger.info('Making Map File')
    mapOBJ = MakeMap()

    # 'INPUT' means inherit the projection from the (last-loaded) ISIS label.
    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.getProjection() == 'INPUT':
        proj = isis_label['IsisCube']['Mapping']['ProjectionName']
        mapOBJ.Projection(proj)
    else:
        mapOBJ.Projection(xmlOBJ.getProjection())

    # Optional map parameters: set only what the job XML specifies.
    if xmlOBJ.getClon() is not None:
        mapOBJ.CLon(float(xmlOBJ.getClon()))
    if xmlOBJ.getClat() is not None:
        mapOBJ.CLat(float(xmlOBJ.getClat()))
    if xmlOBJ.getFirstParallel() is not None:
        mapOBJ.FirstParallel(float(xmlOBJ.getFirstParallel()))
    if xmlOBJ.getSecondParallel() is not None:
        mapOBJ.SecondParallel(float(xmlOBJ.getSecondParallel()))
    if xmlOBJ.getResolution() is not None:
        mapOBJ.PixelRes(float(xmlOBJ.getResolution()))
    if xmlOBJ.getTargetName() is not None:
        mapOBJ.Target(xmlOBJ.getTargetName())
    if xmlOBJ.getERadius() is not None:
        mapOBJ.ERadius(float(xmlOBJ.getERadius()))
    if xmlOBJ.getPRadius() is not None:
        mapOBJ.PRadius(float(xmlOBJ.getPRadius()))
    if xmlOBJ.getLatType() is not None:
        mapOBJ.LatType(xmlOBJ.getLatType())
    if xmlOBJ.getLonDirection() is not None:
        mapOBJ.LonDirection(xmlOBJ.getLonDirection())
    if xmlOBJ.getLonDomain() is not None:
        mapOBJ.LonDomain(int(xmlOBJ.getLonDomain()))

    # Ground-range bounds apply only to MAP2 jobs.
    if xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getMinLat() is not None:
            mapOBJ.MinLat(float(xmlOBJ.getMinLat()))
        if xmlOBJ.getMaxLat() is not None:
            mapOBJ.MaxLat(float(xmlOBJ.getMaxLat()))
        if xmlOBJ.getMinLon() is not None:
            mapOBJ.MinLon(float(xmlOBJ.getMinLon()))
        if xmlOBJ.getMaxLon() is not None:
            mapOBJ.MaxLon(float(xmlOBJ.getMaxLon()))

    mapOBJ.Map2pvl()

    MAPfile = directory + "/" + key + '.map'
    mapOBJ.Map2File(MAPfile)

    # Sanity check: the map file must exist and be openable.
    try:
        with open(MAPfile):
            pass
        logger.info('Map File Creation: Success')
    except IOError:
        logger.error('Map File %s Not Found', MAPfile)
    # ** End Map Template Stuff **

    logger.info('Building Recipe')
    if xmlOBJ.getProcess() == 'POW':
        try:
            pds_label = pvl.load(Input_file.split('+')[0])
        except Exception:
            # If pvl fails to load a label, chances are that is because the
            # input file is in a pre-PDS format, in which case
            # generate_pow_process() doesn't need the label anyway.
            # Catch all exceptions because pvl can throw many different types
            # of errors when attempting to read such files.
            logger.warning('PVL was unable to parse PDS label for %s',
                           Input_file.split('+')[0])
            pds_label = None
        recipeOBJ = generate_pow_recipe(xmlOBJ, pds_label, MAPfile)
    elif xmlOBJ.getProcess() == 'MAP2':
        recipeOBJ = generate_map2_recipe(xmlOBJ, isis_label, MAPfile)

    # OUTPUT FORMAT
    # Test for GDAL and add to recipe
    Oformat = xmlOBJ.getOutFormat()
    if Oformat == 'GeoTiff-BigTiff' or Oformat == 'GeoJPEG-2000' or Oformat == 'JPEG' or Oformat == 'PNG':
        # Map service format names onto GDAL driver names.
        if Oformat == 'GeoJPEG-2000':
            Oformat = 'JP2KAK'
        if Oformat == 'GeoTiff-BigTiff':
            Oformat = 'GTiff'
        gdal_translate_dict = {}

        def GDAL_OBit(ibit):
            """Map a service bit-type name to a GDAL output type name."""
            bitDICT = {
                'unsignedbyte': 'Byte',
                'signedword': 'Int16',
                'real': 'Float32'
            }
            try:
                return bitDICT[ibit]
            except KeyError:
                raise Exception(
                    f"Unsupported ibit type given {ibit}. " +
                    f"Currently supported bit types are {list(bitDICT.keys())}"
                )

        def GDAL_Creation(oformat):
            """Return the GDAL creation option string for a driver name."""
            cDICT = {
                'JPEG': 'quality=100',
                'JP2KAK': 'quality=100',
                'GTiff': 'bigtiff=if_safer'
            }
            try:
                return cDICT[oformat]
            except KeyError:
                raise Exception(
                    f"Unsupported format {oformat}. " +
                    f"Currently supported bit types are {list(cDICT.keys())}")

        if xmlOBJ.getOutBit() != 'input':
            gdal_translate_dict['outputType'] = GDAL_OBit(xmlOBJ.getOutBit())
        gdal_translate_dict['format'] = Oformat

        if Oformat == 'GTiff' or Oformat == 'JP2KAK' or Oformat == 'JPEG':
            gdal_translate_dict['creationOptions'] = [GDAL_Creation(Oformat)]

        frmt = xmlOBJ.getOutFormat()
        if frmt == 'GeoTiff-BigTiff':
            fileext = 'tif'
        elif frmt == 'GeoJPEG-2000':
            fileext = 'jp2'
        elif frmt == 'JPEG':
            fileext = 'jpg'
        elif frmt == 'PNG':
            fileext = 'png'
        elif frmt == 'GIF':
            fileext = 'gif'

        # The source of gdal_translate is the 'to' of the last recipe step.
        gdal_translate_dict['src'] = list(
            recipeOBJ.items())[-1][-1]['to'].split('+')[0]
        gdal_translate_dict[
            'dest'] = "{{no_extension_inputfile}}_final." + fileext

        recipeOBJ['gdal_translate'] = gdal_translate_dict
    # set up pds2isis and add to recipe
    elif Oformat == 'PDS':
        isis2pds_dict = {}
        isis2pds_dict['from_'] = list(recipeOBJ.items())[-1][-1]['to']
        isis2pds_dict['to'] = "{{no_extension_inputfile}}_final.img"
        if xmlOBJ.getOutBit() == 'unsignedbyte':
            isis2pds_dict['bittype'] = '8bit'
        elif xmlOBJ.getOutBit() == 'signedword':
            isis2pds_dict['bittype'] = 's16bit'

        recipeOBJ['isis.isis2pds'] = isis2pds_dict

    try:
        RQ_recipe.QueueAdd(json.dumps(recipeOBJ))
        logger.info('Recipe Added to Redis')
    except Exception:
        logger.warning('Recipe NOT Added to Redis: %s', recipeOBJ)

    # HPC job stuff
    logger.info('HPC Cluster job Submission Starting')
    jobOBJ = HPCjob()
    jobOBJ.setJobName(key + '_Service')
    jobOBJ.setStdOut(slurm_log + key + '_%A_%a.out')
    jobOBJ.setStdError(slurm_log + key + '_%A_%a.err')
    jobOBJ.setWallClock('24:00:00')
    jobOBJ.setMemory('24576')
    jobOBJ.setPartition('pds')
    # One array task per queued input file.
    JAsize = RQ_file.QueueSize()
    jobOBJ.setJobArray(JAsize)
    logger.info('Job Array Size : %s', str(JAsize))

    # Whether or not we use the default namespace, this guarantees that the
    # POW/MAP queues will match the namespace used in the job manager.
    if xmlOBJ.getProcess() == 'POW':
        cmd = cmd_dir + "pow_process.py -k {} -n {}".format(key, namespace)
    elif xmlOBJ.getProcess() == 'MAP2':
        cmd = cmd_dir + "map_process.py -k {} -n {}".format(key, namespace)

    logger.info('HPC Command: %s', cmd)
    jobOBJ.setCommand(cmd)

    SBfile = directory + '/' + key + '.sbatch'
    jobOBJ.MakeJobFile(SBfile)

    # Sanity check: the sbatch file must exist and be openable.
    try:
        with open(SBfile):
            pass
        logger.info('SBATCH File Creation: Success')
    except IOError:
        logger.error('SBATCH File %s Not Found', SBfile)

    if norun:
        logger.info('No-run mode, will not submit HPC job.')
    else:
        try:
            jobOBJ.Run()
            logger.info('Job Submission to HPC: Success')
            DBQO.setJobsStarted(key)
        except IOError:
            logger.error('Jobs NOT Submitted to HPC')