Example #1
def main():
    PDS_info = json.load(open(pds_info, 'r'))
    reddis_queue = RedisQueue('DI_ReadyQueue')

    try:
        session, _ = db_connect(pds_db)
    except Exception as e:
        print(e)
        return 1

    for target in PDS_info:
        archive_id = PDS_info[target]['archiveid']
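        # The DI check threshold is 30 days before now (UTC); the
        # strftime/strptime round trip drops tzinfo and sub-second precision
        # so it compares cleanly against the dates stored in the database.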
        td = (datetime.datetime.now(pytz.utc) -
              datetime.timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S")
        testing_date = datetime.datetime.strptime(str(td), "%Y-%m-%d %H:%M:%S")
        expired = archive_expired(session, archive_id, testing_date)
        # If any files within the archive are expired, send them to the queue
        if expired.count():
            # @TODO get rid of print statements or enable with --verbose?
            for f in expired:
                reddis_queue.QueueAdd((f.filename, target))
            print('Archive {} DI Ready: {} Files'.format(
                target, str(expired.count())))
        else:
            print('Archive {} DI Current'.format(target))
    return 0
def main():

    #    pdb.set_trace()

    args = Args()
    args.parse_args()

    RQ = RedisQueue('ChecksumUpdate_Queue')

    # @TODO Remove/replace "archiveID"
    archiveID = {
        'cassiniISS': 'cassini_iss_edr',
        'mroCTX': 16,
        'mroHIRISE_EDR': '124',
        'LROLRC_EDR': 74
    }

    # ********* Set up logging *************
    logger = logging.getLogger('ChecksumUpdate_Queueing.' + args.archive)
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'DI.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting %s Checksum update Queueing', args.archive)
    if args.volume:
        logger.info('Queueing %s Volume', args.volume)

    try:
        # Throws away engine information
        session, _ = db_connect(pds_db)
        logger.info('Database Connection: Success')
    except:
        logger.error('Database Connection: Error')
        return 1

    if args.volume:
        volstr = '%' + args.volume + '%'
        QueryOBJ = session.query(Files).filter(
            Files.archiveid == archiveID[args.archive],
            Files.filename.like(volstr))
    else:
        QueryOBJ = session.query(Files).filter(
            Files.archiveid == archiveID[args.archive])
    addcount = 0
    for element in QueryOBJ:
        try:
            RQ.QueueAdd(element.filename)
            addcount = addcount + 1
        except:
            logger.error('File %s Not Added to ChecksumUpdate_Queue',
                         element.filename)

    logger.info('Files Added to Queue %s', addcount)

    logger.info('Checksum Update Queueing Complete')
def main():

    #    pdb.set_trace()
    args = Args()
    args.parse_args()

    logger = logging.getLogger('UPC_Queueing.' + args.archive)
    logger.setLevel(logging.INFO)
    # logFileHandle = logging.FileHandler('/usgs/cdev/PDS/logs/Process.log')
    logFileHandle = logging.FileHandler(pds_log + 'Process.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting Process')

    PDSinfoDICT = json.load(open(pds_info, 'r'))
    try:
        archiveID = PDSinfoDICT[args.archive]['archiveid']
    except KeyError:
        print("\nArchive '{}' not found in {}\n".format(
            args.archive, pds_info))
        print("The following archives are available:")
        for k in PDSinfoDICT.keys():
            print("\t{}".format(k))
        exit()

    RQ = RedisQueue('UPC_ReadyQueue')

    try:
        session, _ = db_connect(pds_db)
        print('Database Connection Success')
    except Exception as e:
        print(e)
        print('Database Connection Error')

    if args.volume:
        volstr = '%' + args.volume + '%'
        qOBJ = session.query(Files).filter(Files.archiveid == archiveID,
                                           Files.filename.like(volstr),
                                           Files.upc_required == 't')
    else:
        qOBJ = session.query(Files).filter(Files.archiveid == archiveID,
                                           Files.upc_required == 't')
    if qOBJ:
        addcount = 0
        for element in qOBJ:
            fname = PDSinfoDICT[args.archive]['path'] + element.filename
            fid = element.fileid
            RQ.QueueAdd((fname, fid, args.archive))
            addcount = addcount + 1

        logger.info('Files Added to UPC Queue: %s', addcount)

    print("Done")
Example #4
def main():
    args = Args()
    args.parse_args()

    PDS_info = json.load(open(pds_info, 'r'))
    reddis_queue = RedisQueue('UPC_ReadyQueue')
    logger = logging.getLogger('UPC_Queueing')
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'Process.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info("UPC Queue: %s", reddis_queue.id_name)

    try:
        session, _ = db_connect(pds_db)
    except Exception as e:
        logger.error("%s", e)
        return 1

    # For each archive in the db, test if there are files that are ready to
    #  process
    for archive_id in session.query(Files.archiveid).distinct():
        result = session.query(Files).filter(Files.archiveid == archive_id,
                                             Files.upc_required == 't')

        # Get filepath from archive id
        archive_name = session.query(Archives.archive_name).filter(
            Archives.archiveid == archive_id).first()

        # No archive name = no path.  Skip these values.
        if (archive_name is None):
            logger.warn("No archive name found for archive id: %s", archive_id)
            continue
        try:
            # Since results are returned as lists, we have to access the 0th
            #  element to pull out the string archive name.
            fpath = PDS_info[archive_name[0]]['path']
        except KeyError:
            logger.warn("Unable to locate file path for archive id %s",
                        archive_id)
            continue

        # Add each file in the archive to the redis queue.
        for element in result:
            fname = fpath + element.filename
            fid = element.fileid
            reddis_queue.QueueAdd((fname, fid, archive_name[0]))

        logger.info("Added %s files from %s", result.count(), archive_name)
    return 0
def main():

    #    pdb.set_trace()
    args = Args()
    args.parse_args()

    logger = logging.getLogger('Browse_Queueing.' + args.archive)
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Process.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting Process')

    PDSinfoDICT = json.load(open(pds_info, 'r'))
    archiveID = PDSinfoDICT[args.archive]['archiveid']

    RQ = RedisQueue('Browse_ReadyQueue')

    try:
        session, _ = db_connect(pds_db)
        logger.info('Database Connection Success')
    except:
        logger.error('Database Connection Error')

    if args.volume:
        volstr = '%' + args.volume + '%'
        qOBJ = session.query(Files).filter(Files.archiveid == archiveID,
                                           Files.filename.like(volstr),
                                           Files.upc_required == 't')
    else:
        qOBJ = session.query(Files).filter(Files.archiveid == archiveID,
                                           Files.upc_required == 't')
    if qOBJ:
        addcount = 0
        for element in qOBJ:
            fname = PDSinfoDICT[args.archive]['path'] + element.filename
            fid = element.fileid
            RQ.QueueAdd((fname, fid, args.archive))
            addcount = addcount + 1

        logger.info('Files Added to Browse Queue: %s', addcount)
Example #6
def main():
    PDS_info = json.load(open(pds_info, 'r'))
    reddis_queue = RedisQueue('UPC_ReadyQueue')

    try:
        # Safe to use prd database here because there are no writes/edits.
        session, _ = db_connect(pds_db)

    # @TODO Catch exceptions by type.  Bad practice to 'except Exception,' but
    #   I don't know what exception could happen here.
    except Exception as e:
        print(e)
        return 1

    # For each archive in the db, test if there are files that are ready to
    #  process
    for archive_id in session.query(Files.archiveid).distinct():
        result = session.query(Files).filter(Files.archiveid == archive_id,
                                             Files.upc_required == 't')

        # Get filepath from archive id
        archive_name = session.query(Archives.archive_name).filter(
            Archives.archiveid == archive_id).first()

        # No archive name = no path.  Skip these values.
        if (archive_name is None):
            # @TODO log an error
            continue

        try:
            # Since results are returned as lists, we have to access the 0th
            #  element to pull out the string archive name.
            fpath = PDS_info[archive_name[0]]['path']
        except KeyError:
            continue

        # Add each file in the archive to the redis queue.
        for element in result:
            fname = fpath + element.filename
            fid = element.fileid
            reddis_queue.QueueAdd((fname, fid, archive_name[0]))
    return 0
Example #7
def main():
    args = Args()
    args.parse_args()

    logger = logging.getLogger('DI_Queueing')
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'DI.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    PDS_info = json.load(open(pds_info, 'r'))
    reddis_queue = RedisQueue('DI_ReadyQueue')

    logger.info("DI Queue: %s", reddis_queue.id_name)

    try:
        session, _ = db_connect(pds_db)
    except Exception as e:
        logger.error("%s", e)
        return 1

    for target in PDS_info:
        archive_id = PDS_info[target]['archiveid']
        td = (datetime.datetime.now(pytz.utc) -
              datetime.timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S")
        testing_date = datetime.datetime.strptime(str(td), "%Y-%m-%d %H:%M:%S")
        expired = archive_expired(session, archive_id, testing_date)
        # If any files within the archive are expired, send them to the queue
        if expired.count():
            for f in expired:
                reddis_queue.QueueAdd((f.filename, target))
            logger.info('Archive %s DI Ready: %s Files', target,
                        str(expired.count()))
        else:
            logger.info('Archive %s DI Current', target)
    return 0
def main():
    args = Args()
    args.parse_args()

    RQ = RedisQueue('DI_ReadyQueue')

    PDSinfoDICT = json.load(open(pds_info, 'r'))
    try:
        archiveID = PDSinfoDICT[args.archive]['archiveid']
    except KeyError:
        print("\nArchive '{}' not found in {}\n".format(
            args.archive, pds_info))
        print("The following archives are available:")
        for k in PDSinfoDICT.keys():
            print("\t{}".format(k))
        exit()

    logger = logging.getLogger('DI_Queueing.' + args.archive)
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'DI.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info("DI Queue: %s", RQ.id_name)

    logger.info('Starting %s DI Queueing', args.archive)
    if args.volume:
        logger.info('Queueing %s Volume', args.volume)

    try:
        session, _ = db_connect(pds_db)
        logger.info('Database Connection: Success')
    except:
        logger.error('Database Connection: Error')
        return 1

    td = (datetime.datetime.now(pytz.utc) -
          datetime.timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S")
    testing_date = datetime.datetime.strptime(str(td), "%Y-%m-%d %H:%M:%S")

    if args.volume:
        volstr = '%' + args.volume + '%'
        testQ = session.query(Files).filter(
            Files.archiveid == archiveID, Files.filename.like(volstr)).filter(
                or_(
                    cast(Files.di_date, Date) < testing_date,
                    cast(Files.di_date, Date).is_(None)))
    else:
        testQ = session.query(Files).filter(
            Files.archiveid == archiveID).filter(
                or_(
                    cast(Files.di_date, Date) < testing_date,
                    cast(Files.di_date, Date).is_(None)))

    addcount = 0
    for element in testQ:
        try:
            RQ.QueueAdd((element.filename, args.archive))
            addcount = addcount + 1
        except:
            logger.warn('File %s Not Added to DI_ReadyQueue', element.filename)

    logger.info('Files Added to Queue %s', addcount)
    logger.info('DI Queueing Complete')
def main():

    #    pdb.set_trace()

    Key = sys.argv[-1]

    workarea = '/scratch/pds_services/' + Key + '/'

    RQ_file = RedisQueue(Key + '_FileQueue')
    RQ_work = RedisQueue(Key + '_WorkQueue')
    RQ_zip = RedisQueue(Key + '_ZIP')
    RQ_loggy = RedisQueue(Key + '_loggy')
    RQ_final = RedisQueue('FinalQueue')
    RHash = RedisHash(Key + '_info')
    RHerror = RedisHash(Key + '_error')

    if int(RQ_file.QueueSize()) == 0:

        print "No Files Found in Redis Queue"

    else:
        print(RQ_file.getQueueName())
        jobFile = RQ_file.Qfile2Qwork(
            RQ_file.getQueueName(), RQ_work.getQueueName())

        # Setup system logging
        basename = os.path.splitext(os.path.basename(jobFile))[0]
        logger = logging.getLogger(Key + '.' + basename)
        logger.setLevel(logging.INFO)

        logFileHandle = logging.FileHandler('/usgs/cdev/PDS/logs/Service.log')
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
        logFileHandle.setFormatter(formatter)
        logger.addHandler(logFileHandle)

        logger.info('Starting POW Processing')

        # set up loggy
        loggyOBJ = Loggy(basename)


        # File Naming 
        if '+' in jobFile:
            bandSplit = jobFile.split('+')
            inputFile = bandSplit[0]
        else:
            inputFile = jobFile

        infile = workarea + \
            os.path.splitext(os.path.basename(jobFile))[0] + '.input.cub'
        outfile = workarea + \
            os.path.splitext(os.path.basename(jobFile))[0] + '.output.cub'

        RQ_recipe = RedisQueue(Key + '_recipe')

        status = 'success'
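        # Walk the recipe one step at a time; the loop stops at the first
        # step that sets status to 'error'.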
        for element in RQ_recipe.RecipeGet():
            if status == 'error':
                break
            elif status == 'success':
                processOBJ = Process()
                process = processOBJ.JSON2Process(element)

                if 'gdal_translate' not in processOBJ.getProcessName():
                    print(processOBJ.getProcessName())
                    if '2isis' in processOBJ.getProcessName():
                        processOBJ.updateParameter('from_', inputFile)
                        processOBJ.updateParameter('to', outfile)
                    elif 'cubeatt-band' in processOBJ.getProcessName():
                        if '+' in jobFile:
                            infileB = infile + '+' + bandSplit[1]
                            processOBJ.updateParameter('from_', infileB)
                            processOBJ.updateParameter('to', outfile)
                            processOBJ.ChangeProcess('cubeatt')
                        else:
                            continue
                    elif 'cubeatt-bit' in processOBJ.getProcessName():
                        if RHash.OutBit() == 'unsignedbyte':
                            temp_outfile = outfile + '+lsb+tile+attached+unsignedbyte+1:254'
                        elif RHash.OutBit() == 'signedword':
                            temp_outfile = outfile + '+lsb+tile+attached+signedword+-32765:32765'
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', temp_outfile)
                        processOBJ.ChangeProcess('cubeatt')

                    elif 'spice' in processOBJ.getProcessName():
                        processOBJ.updateParameter('from_', infile)

                    elif 'ctxevenodd' in processOBJ.getProcessName():
                        label = pvl.load(infile)
                        SS = label['IsisCube']['Instrument']['SpatialSumming']
                        print(SS)
                        if SS != 1:
                            continue
                        else:
                            processOBJ.updateParameter('from_', infile)
                            processOBJ.updateParameter('to', outfile)

                    elif 'mocevenodd' in processOBJ.getProcessName():
                        label = pvl.load(infile)
                        CTS = label['IsisCube']['Instrument']['CrosstrackSumming']
                        print(CTS)
                        if CTS != 1:
                            continue
                        else:
                            processOBJ.updateParameter('from_', infile)
                            processOBJ.updateParameter('to', outfile)
                    elif 'mocnoise50' in processOBJ.getProcessName():
                        label = pvl.load(infile)
                        CTS = label['IsisCube']['Instrument']['CrosstrackSumming']
                        if CTS != 1:
                            continue
                        else:
                            processOBJ.updateParameter('from_', infile)
                            processOBJ.updateParameter('to', outfile)
                    elif 'cam2map' in processOBJ.getProcessName():
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)

                        if RHash.getGRtype() == 'smart' or RHash.getGRtype() == 'fill':
                            subloggyOBJ = SubLoggy('cam2map')
                            camrangeOUT = workarea + basename + '_camrange.txt'
                            isis.camrange(from_=infile,
                                          to=camrangeOUT)

                            cam = pvl.load(camrangeOUT)

                            if cam['UniversalGroundRange']['MaximumLatitude'] < float(RHash.getMinLat()) or \
                               cam['UniversalGroundRange']['MinimumLatitude'] > float(RHash.getMaxLat()) or \
                               cam['UniversalGroundRange']['MaximumLongitude'] < float(RHash.getMinLon()) or \
                               cam['UniversalGroundRange']['MinimumLongitude'] > float(RHash.getMaxLon()):

                                status = 'error'
                                eSTR = "Error Ground Range Outside Extent Range"
                                RHerror.addError(os.path.splitext(
                                    os.path.basename(jobFile))[0], eSTR)
                                subloggyOBJ.setStatus('ERROR')
                                subloggyOBJ.errorOut(eSTR)
                                loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())
                                break

                            elif RHash.getGRtype() == 'smart':
                                if cam['UniversalGroundRange']['MinimumLatitude'] > float(RHash.getMinLat()):
                                    minlat = cam['UniversalGroundRange']['MinimumLatitude']
                                else:
                                    minlat = RHash.getMinLat()

                                if cam['UniversalGroundRange']['MaximumLatitude'] < float(RHash.getMaxLat()):
                                    maxlat = cam['UniversalGroundRange']['MaximumLatitude']
                                else:
                                    maxlat = RHash.getMaxLat()

                                if cam['UniversalGroundRange']['MinimumLongitude'] > float(RHash.getMinLon()):
                                    minlon = cam['UniversalGroundRange']['MinimumLongitude']
                                else:
                                    minlon = RHash.getMinLon()

                                if cam['UniversalGroundRange']['MaximumLongitude'] < float(RHash.getMaxLon()):
                                    maxlon = cam['UniversalGroundRange']['MaximumLongitude']
                                else:
                                    maxlon = RHash.getMaxLon()
                            elif RHash.getGRtype() == 'fill':
                                minlat = RHash.getMinLat()
                                maxlat = RHash.getMaxLat()
                                minlon = RHash.getMinLon()
                                maxlon = RHash.getMaxLon()

                            processOBJ.AddParameter('minlat', minlat)
                            processOBJ.AddParameter('maxlat', maxlat)
                            processOBJ.AddParameter('minlon', minlon)
                            processOBJ.AddParameter('maxlon', maxlon)

                            os.remove(camrangeOUT)

                    elif 'isis2pds' in processOBJ.getProcessName():
                        finalfile = infile.replace('.input.cub', '_final.img')
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', finalfile)

                    else:
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)

                    print(processOBJ.getProcess())

                    for k, v in processOBJ.getProcess().items():
                        func = getattr(isis, k)
                        subloggyOBJ = SubLoggy(k)
                        try:
                            func(**v)
                            logger.info('Process %s :: Success', k)
                            subloggyOBJ.setStatus('SUCCESS')
                            subloggyOBJ.setCommand(processOBJ.LogCommandline())
                            subloggyOBJ.setHelpLink(processOBJ.LogHelpLink())
                            loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

                            if os.path.isfile(outfile):
                                os.rename(outfile, infile)
                            status = 'success'

                        except ProcessError as e:
                            logger.error('Process %s :: Error', k)
                            logger.error(e)
                            status = 'error'
                            eSTR = 'Error Executing ' + k + \
                                ' Standard Error: ' + str(e)
                            RHerror.addError(os.path.splitext(
                                os.path.basename(jobFile))[0], eSTR)
                            subloggyOBJ.setStatus('ERROR')
                            subloggyOBJ.setCommand(processOBJ.LogCommandline())
                            subloggyOBJ.setHelpLink(processOBJ.LogHelpLink())
                            subloggyOBJ.errorOut(eSTR)
                            loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

                else:
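                    # gdal_translate is not an ISIS program, so assemble a
                    # shell command string from the process parameters instead
                    # of dispatching through the isis module.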
                    GDALcmd = ""
                    for process, v, in processOBJ.getProcess().items():
                        subloggyOBJ = SubLoggy(process)
                        GDALcmd += process
                        for key, value in v.items():
                            GDALcmd += ' ' + key + ' ' + value

                    if RHash.Format() == 'GeoTiff-BigTiff':
                        fileext = 'tif'
                    elif RHash.Format() == 'GeoJPEG-2000':
                        fileext = 'jp2'
                    elif RHash.Format() == 'JPEG':
                        fileext = 'jpg'
                    elif RHash.Format() == 'PNG':
                        fileext = 'png'
                    elif RHash.Format() == 'GIF':
                        fileext = 'gif'

                    logGDALcmd = GDALcmd + ' ' + basename + \
                        '.input.cub ' + basename + '_final.' + fileext
                    finalfile = infile.replace(
                        '.input.cub', '_final.' + fileext)
                    GDALcmd += ' ' + infile + ' ' + finalfile
                    print(GDALcmd)

                    result = subprocess.call(GDALcmd, shell=True)
                    if result == 0:
                        logger.info('Process GDAL translate :: Success')
                        status = 'success'
                        subloggyOBJ.setStatus('SUCCESS')
                        subloggyOBJ.setCommand(logGDALcmd)
                        subloggyOBJ.setHelpLink(
                            'http://www.gdal.org/gdal_translate.html')
                        loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())
                        os.remove(infile)
                    else:
                        errmsg = 'Error Executing GDAL translate: Error'
                        logger.error(errmsg)
                        status = 'error'
                        RHerror.addError(os.path.splitext(
                            os.path.basename(jobFile))[0], errmsg)
                        subloggyOBJ.setStatus('ERROR')
                        subloggyOBJ.setCommand(logGDALcmd)
                        subloggyOBJ.setHelpLink(
                            'http://www.gdal.org/gdal_translate.html')
                        subloggyOBJ.errorOut('Process GDAL translate :: Error')
                        loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

        if status == 'success':

            if RHash.Format() == 'ISIS3':
                finalfile = infile.replace('.input.cub', '_final.cub')
                shutil.move(infile, finalfile)
            if RHash.getStatus() != 'ERROR':
                RHash.Status('SUCCESS')

            try:
                RQ_zip.QueueAdd(finalfile)
                logger.info('File Added to ZIP Queue')
            except:
                logger.error('File NOT Added to ZIP Queue')

        elif status == 'error':
            RHash.Status('ERROR')
            if os.path.isfile(infile):
                os.remove(infile)

        try:
            RQ_loggy.QueueAdd(loggyOBJ.Loggy2json())
            RQ_work.QueueRemove(jobFile)
            logger.info('JSON Added to Loggy Queue')
        except:
            logger.error('JSON NOT Added to Loggy Queue')

        if RQ_file.QueueSize() == 0 and RQ_work.QueueSize() == 0:
            try:
                RQ_final.QueueAdd(Key)
                logger.info('Key %s Added to Final Queue: Success', Key)
                logger.info('Both Queues Empty: filequeue = %s  work queue = %s', str(
                    RQ_file.QueueSize()), str(RQ_work.QueueSize()))
                logger.info('JOB Complete')
            except:
                logger.error('Key NOT Added to Final Queue')
        elif RQ_file.QueueSize() == 0 and RQ_work.QueueSize() != 0:
            logger.warning('Work Queue Not Empty: filequeue = %s  work queue = %s', str(
                RQ_file.QueueSize()), str(RQ_work.QueueSize()))
def main():

    # pdb.set_trace()

    args = Args()
    args.parse_args()
    logging.basicConfig(level=args.loglevel)
    RQ_ingest = RedisQueue('Ingest_ReadyQueue')
    RQ_linking = RedisQueue('LinkQueue')

    # Set up logging

    logger = logging.getLogger(args.archive + '_INGEST')
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Ingest.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    if args.loglevel == logging.INFO:
        print("Log File: {}Ingest.log".format(pds_log))

    PDSinfoDICT = json.load(open(pds_info, 'r'))
    try:
        archivepath = PDSinfoDICT[args.archive]['path'][:-1]
    except KeyError:
        print("\nArchive '{}' not found in {}\n".format(
            args.archive, pds_info))
        print("The following archives are available:")
        for k in PDSinfoDICT.keys():
            print("\t{}".format(k))
        logging.error("Unable to locate {}".format(args.archive))
        exit()

    if args.volume:
        archivepath = archivepath + '/' + args.volume

    logger.info('Starting Ingest for: %s', archivepath)
    logger.info('Ingest Queue: {}'.format(str(RQ_ingest.id_name)))
    logger.info('Linking Queue: {}'.format(str(RQ_linking.id_name)))

    # Possible bug in RQ?  Can't add to queue in "if fname == voldesc"
    queue_size = RQ_ingest.QueueSize()
    voldescs = []
    for dirpath, dirs, files in os.walk(archivepath):
        for filename in files:
            fname = os.path.join(dirpath, filename)
            if args.search:
                if args.search in fname:
                    try:
                        if os.path.basename(fname) == "voldesc.cat":
                            voldescs.append(fname)
                        if args.ingest:
                            RQ_ingest.QueueAdd((fname, args.archive))
                    except:
                        logger.error('File %s NOT added to Ingest Queue',
                                     fname)
                else:
                    continue
            else:
                try:
                    if os.path.basename(fname) == "voldesc.cat":
                        voldescs.append(fname)
                    if args.ingest:
                        RQ_ingest.QueueAdd((fname, args.archive))
                except:
                    logger.error('File %s NOT added to Ingest Queue', fname)

    n_added = RQ_ingest.QueueSize() - queue_size
    for fpath in voldescs:
        RQ_linking.QueueAdd((fpath, args.archive))
    logger.info('Files added to Ingest Queue: %s', n_added)
Example #11
def main():

    args = Args()
    args.parse_args()
    override = args.override
    logger = logging.getLogger('Ingest_Process')
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'Ingest.log')
    print("Log File: {}Ingest.log".format(pds_log))
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info("Starting Ingest Process")
    PDSinfoDICT = json.load(open(pds_info, 'r'))

    RQ_main = RedisQueue('Ingest_ReadyQueue')
    RQ_lock = RedisLock(lock_obj)
    RQ_lock.add({RQ_main.id_name: '1'})
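    # The lock entry acts as an enable flag for this service; the ingest loop
    # below only runs while RQ_lock.available() reports the queue as available.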
    RQ_work = RedisQueue('Ingest_WorkQueue')

    RQ_upc = RedisQueue('UPC_ReadyQueue')
    RQ_thumb = RedisQueue('Thumbnail_ReadyQueue')
    RQ_browse = RedisQueue('Browse_ReadyQueue')

    logger.info("UPC Queue: %s", RQ_upc.id_name)
    logger.info("Thumbnail Queue: %s", RQ_thumb.id_name)
    logger.info("Browse Queue: %s", RQ_browse.id_name)

    try:
        session, engine = db_connect(pds_db)
        logger.info('Database Connection: Success')
    except:
        logger.error('Database Connection: Error')
        return 1

    index = 1
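    # Entries are merged/flushed per file and committed in batches of
    # roughly 250 (see the index >= 250 check below).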

    while int(RQ_main.QueueSize()) > 0 and RQ_lock.available(RQ_main.id_name):

        item = literal_eval(RQ_main.QueueGet().decode("utf-8"))
        inputfile = item[0]
        archive = item[1]
        RQ_work.QueueAdd(inputfile)

        subfile = inputfile.replace(PDSinfoDICT[archive]['path'], '')
        # Calculate checksum in chunks of 4096
        f_hash = hashlib.md5()
        with open(inputfile, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                f_hash.update(chunk)
        filechecksum = f_hash.hexdigest()

        QOBJ = session.query(Files).filter_by(filename=subfile).first()
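        # Re-ingest only when the file is new to the database or its
        # checksum has changed (or when the override flag is set).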

        runflag = False
        if QOBJ is None or filechecksum != QOBJ.checksum:
            runflag = True

        if runflag or override:
            date = datetime.datetime.now(
                pytz.utc).strftime("%Y-%m-%d %H:%M:%S")
            fileURL = inputfile.replace(archive_base, web_base)

            # If all upc requirements are in 'inputfile,' flag for upc
            upcflag = all(x in inputfile
                          for x in PDSinfoDICT[archive]['upc_reqs'])
            filesize = os.path.getsize(inputfile)

            try:
                # If we found an existing file and want to overwrite the data
                if QOBJ is not None and override:
                    ingest_entry = QOBJ
                # If the file was not found, create a new entry
                else:
                    ingest_entry = Files()
                    ingest_entry.archiveid = PDSinfoDICT[archive]['archiveid']
                    ingest_entry.filename = subfile
                    ingest_entry.entry_date = date
                    ingest_entry.checksum = filechecksum
                    ingest_entry.upc_required = upcflag
                    ingest_entry.validation_required = True
                    ingest_entry.header_only = False
                    ingest_entry.release_date = date
                    ingest_entry.file_url = fileURL
                    ingest_entry.file_size = filesize
                    ingest_entry.di_pass = True
                    ingest_entry.di_date = date

                session.merge(ingest_entry)
                session.flush()

                if upcflag:
                    RQ_upc.QueueAdd((inputfile, ingest_entry.fileid, archive))
                    RQ_thumb.QueueAdd(
                        (inputfile, ingest_entry.fileid, archive))
                    RQ_browse.QueueAdd(
                        (inputfile, ingest_entry.fileid, archive))
                    #RQ_pilotB.QueueAdd((inputfile,ingest_entry.fileid, archive))

                RQ_work.QueueRemove(inputfile)

                index = index + 1

            except Exception as e:
                logger.error("Error During File Insert %s : %s", str(subfile),
                             str(e))

        elif not runflag and not override:
            RQ_work.QueueRemove(inputfile)
            logger.warn(
                "Not running ingest: file %s already present"
                " in database and no override flag supplied", inputfile)

        if index >= 250:
            try:
                session.commit()
                logger.info("Commit 250 files to Database: Success")
                index = 1
            except Exception as e:
                session.rollback()
                logger.warn("Unable to commit to database: %s", str(e))
    else:
        logger.info("No Files Found in Ingest Queue")
        try:
            session.commit()
            logger.info("Commit to Database: Success")
        except Exception as e:
            logger.error("Unable to commit to database: %s", str(e))
            session.rollback()

    # Close connection to database
    session.close()
    engine.dispose()

    if RQ_main.QueueSize() == 0 and RQ_work.QueueSize() == 0:
        logger.info("Process Complete All Queues Empty")
    elif RQ_main.QueueSize() == 0 and RQ_work.QueueSize() != 0:
        logger.warning("Process Done Work Queue NOT Empty Contains %s Files",
                       str(RQ_work.QueueSize()))

    logger.info("Ingest Complete")
def main():
    args = Args()
    args.parse_args()
    key = args.key
    namespace = args.namespace

    if namespace is None:
        namespace = default_namespace

    workarea = scratch + args.key + '/'
    RQ_file = RedisQueue(key + '_FileQueue', namespace)
    RQ_work = RedisQueue(key + '_WorkQueue', namespace)
    RQ_zip = RedisQueue(key + '_ZIP', namespace)
    RQ_loggy = RedisQueue(key + '_loggy', namespace)
    RQ_final = RedisQueue('FinalQueue', namespace)
    RHash = RedisHash(key + '_info')
    RHerror = RedisHash(key + '_error')
    RQ_lock = RedisLock(lock_obj)
    RQ_lock.add({'MAP':'1'})
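    # The 'MAP' lock entry acts as an enable flag; work is only pulled from
    # the file queue while RQ_lock.available('MAP') is true.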

    if int(RQ_file.QueueSize()) == 0 and RQ_lock.available('MAP'):
        print("No Files Found in Redis Queue")
    else:
        jobFile = RQ_file.Qfile2Qwork(
            RQ_file.getQueueName(), RQ_work.getQueueName()).decode('utf-8')

        # Setup system logging
        basename = os.path.splitext(os.path.basename(jobFile))[0]
        logger = logging.getLogger(key + '.' + basename)
        logger.setLevel(logging.INFO)

        logFileHandle = logging.FileHandler(pds_log + '/Service.log')

        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
        logFileHandle.setFormatter(formatter)
        logger.addHandler(logFileHandle)

        logger.info('Starting MAP Processing')

        loggyOBJ = Loggy(basename)

        # File Naming
        infile = workarea + \
            os.path.splitext(os.path.basename(jobFile))[0] + '.input.cub'
        outfile = workarea + \
            os.path.splitext(os.path.basename(jobFile))[0] + '.output.cub'

        # Recipe Stuff

        RQ_recipe = RedisQueue(key + '_recipe')

        status = 'success'
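        # Walk the recipe in order; processing stops at the first step that
        # sets status to 'error'.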

        for element in RQ_recipe.RecipeGet():

            if status == 'error':
                break
            elif status == 'success':
                processOBJ = Process()
                process = processOBJ.JSON2Process(element)
                if 'gdal_translate' not in processOBJ.getProcessName():
                    if 'cubeatt-band' in processOBJ.getProcessName():
                        if '+' in jobFile:
                            processOBJ.updateParameter('from_', jobFile)
                            processOBJ.updateParameter('to', outfile)
                            processOBJ.ChangeProcess('cubeatt')
                        else:
                            continue

                    elif 'map2map' in processOBJ.getProcessName():
                        if '+' in jobFile:
                            processOBJ.updateParameter('from_', infile)
                        else:
                            processOBJ.updateParameter('from_', jobFile)
                        processOBJ.updateParameter('to', outfile)

                    elif 'cubeatt-bit' in processOBJ.getProcessName():
                        if RHash.OutBit().decode('utf-8') == 'unsignedbyte':
                            temp_outfile = outfile + '+lsb+tile+attached+unsignedbyte+1:254'
                        elif RHash.OutBit().decode('utf-8') == 'signedword':
                            temp_outfile = outfile + '+lsb+tile+attached+signedword+-32765:32765'
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', temp_outfile)
                        processOBJ.ChangeProcess('cubeatt')

                    elif 'isis2pds' in processOBJ.getProcessName():
                        # finalfile = infile.replace('.input.cub', '_final.img')
                        finalfile = workarea + RHash.getMAPname().decode('utf-8') + '.img'
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', finalfile)

                    else:
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)

                    print(processOBJ.getProcess())

                    for k, v in processOBJ.getProcess().items():
                        func = getattr(isis, k)
                        subloggyOBJ = SubLoggy(k)
                        try:
                            func(**v)
                            logger.info('Process %s :: Success', k)
                            subloggyOBJ.setStatus('SUCCESS')
                            subloggyOBJ.setCommand(processOBJ.LogCommandline())
                            subloggyOBJ.setHelpLink(processOBJ.LogHelpLink())
                            loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

                            if os.path.isfile(outfile):
                                os.rename(outfile, infile)
                            status = 'success'

                        except ProcessError as e:
                            logger.error('Process %s :: Error', k)
                            logger.error(e)
                            status = 'error'
                            eSTR = 'Error Executing ' + k + \
                                ' Standard Error: ' + str(e)
                            RHerror.addError(os.path.splitext(
                                os.path.basename(jobFile))[0], eSTR)
                            subloggyOBJ.setStatus('ERROR')
                            subloggyOBJ.setCommand(processOBJ.LogCommandline())
                            subloggyOBJ.setHelpLink(processOBJ.LogHelpLink())
                            subloggyOBJ.errorOut(eSTR)
                            loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

                else:
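                    # As above, gdal_translate runs as a shell command built
                    # from the process parameters rather than through isis.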

                    GDALcmd = ""
                    for process, v, in processOBJ.getProcess().items():
                        subloggyOBJ = SubLoggy(process)
                        GDALcmd += process
                        for key, value in v.items():
                            GDALcmd += ' ' + key + ' ' + value

                    img_format = RHash.Format().decode('utf-8')

                    if img_format == 'GeoTiff-BigTiff':
                        fileext = 'tif'
                    elif img_format == 'GeoJPEG-2000':
                        fileext = 'jp2'
                    elif img_format == 'JPEG':
                        fileext = 'jpg'
                    elif img_format == 'PNG':
                        fileext = 'png'
                    elif img_format == 'GIF':
                        fileext = 'gif'

                    logGDALcmd = GDALcmd + ' ' + basename + '.input.cub ' + RHash.getMAPname().decode('utf-8') + '.' + fileext
                    finalfile = workarea + RHash.getMAPname().decode('utf-8') + '.' + fileext
                    GDALcmd += ' ' + infile + ' ' + finalfile
                    print(GDALcmd)
                    try:
                        subprocess.call(GDALcmd, shell=True)
                        logger.info('Process GDAL translate :: Success')
                        status = 'success'
                        subloggyOBJ.setStatus('SUCCESS')
                        subloggyOBJ.setCommand(logGDALcmd)
                        subloggyOBJ.setHelpLink(
                            'www.gdal.org/gdal_translate.html')
                        loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())
                        os.remove(infile)
                    except OSError as e:
                        logger.error('Process GDAL translate :: Error')
                        logger.error(e)
                        status = 'error'
                        RHerror.addError(os.path.splitext(os.path.basename(jobFile))[0],
                                         'Process GDAL translate :: Error')
                        subloggyOBJ.setStatus('ERROR')
                        subloggyOBJ.setCommand(logGDALcmd)
                        subloggyOBJ.setHelpLink(
                            'http://www.gdal.org/gdal_translate.html')
                        subloggyOBJ.errorOut(e)
                        loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

        if status == 'success':
            if RHash.Format().decode('utf-8') == 'ISIS3':
                finalfile = workarea + RHash.getMAPname().decode('utf-8') + '.cub'
                shutil.move(infile, finalfile)
            if RHash.getStatus() != b'ERROR':
                RHash.Status('SUCCESS')

            try:
                RQ_zip.QueueAdd(finalfile)
                logger.info('File Added to ZIP Queue')
            except:
                logger.error('File NOT Added to ZIP Queue')

            try:
                RQ_loggy.QueueAdd(loggyOBJ.Loggy2json())
                logger.info('JSON Added to Loggy Queue')
            except:
                logger.error('JSON NOT Added to Loggy Queue')

            RQ_work.QueueRemove(jobFile)
        elif status == 'error':
            RHash.Status('ERROR')
            if os.path.isfile(infile):
                os.remove(infile)

        if RQ_file.QueueSize() == 0 and RQ_work.QueueSize() == 0:
            try:
                RQ_final.QueueAdd(key)
                logger.info('Key %s Added to Final Queue: Success', key)
                logger.info('Job Complete')
            except:
                logger.error('Key NOT Added to Final Queue')
        else:
            logger.warning('Queues Not Empty: filequeue = %s  work queue = %s', str(
                RQ_file.QueueSize()), str(RQ_work.QueueSize()))
def main():

    #    pdb.set_trace()

    DBQO = PDS_DBquery('JOBS')
    Key = DBQO.jobKey()
    #    Key = '2d7379497fed4c092046b2a06f5471a5'
    DBQO.setJobsQueued(Key)

    #*************** Setup logging ******************
    logger = logging.getLogger(Key)
    logger.setLevel(logging.INFO)

    logFileHandle = logging.FileHandler('/usgs/cdev/PDS/logs/Service.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting Process')

    xmlOBJ = jobXML(DBQO.jobXML4Key(Key))

    # ********** Test if Directory exists and make it if not *******

    directory = '/scratch/pds_services/' + Key
    if not os.path.exists(directory):
        os.makedirs(directory)

    logger.info('Working Area: %s', directory)

    # ******************** Setup Redis Hash for ground range *********

    RedisH = RedisHash(Key + '_info')
    RedisH.RemoveAll()
    RedisErrorH = RedisHash(Key + '_error')
    RedisErrorH.RemoveAll()
    RedisH_DICT = {}
    RedisH_DICT['service'] = xmlOBJ.getProcess()
    RedisH_DICT['fileformat'] = xmlOBJ.getOutFormat()
    RedisH_DICT['outbit'] = xmlOBJ.getOutBit()
    if xmlOBJ.getRangeType() is not None:
        RedisH_DICT['grtype'] = xmlOBJ.getRangeType()
        RedisH_DICT['minlat'] = xmlOBJ.getMinLat()
        RedisH_DICT['maxlat'] = xmlOBJ.getMaxLat()
        RedisH_DICT['minlon'] = xmlOBJ.getMinLon()
        RedisH_DICT['maxlon'] = xmlOBJ.getMaxLon()

    if RedisH.IsInHash('service'):
        pass
    else:
        RedisH.AddHash(RedisH_DICT)
    if RedisH.IsInHash('service'):
        logger.info('Redis info Hash: Success')
    else:
        logger.error('Redis info Hash Not Found')

# ***end ground range **

    RQ_recipe = RedisQueue(Key + '_recipe')
    RQ_recipe.RemoveAll()
    RQ_file = RedisQueue(Key + '_FileQueue')
    RQ_file.RemoveAll()
    RQ_WorkQueue = RedisQueue(Key + '_WorkQueue')
    RQ_WorkQueue.RemoveAll()
    RQ_loggy = RedisQueue(Key + '_loggy')
    RQ_loggy.RemoveAll()
    RQ_zip = RedisQueue(Key + '_ZIP')
    RQ_zip.RemoveAll()

    if xmlOBJ.getProcess() == 'POW':
        fileList = xmlOBJ.getFileListWB()
    elif xmlOBJ.getProcess() == 'MAP2':
        fileList = xmlOBJ.getMFileListWB()

    for List_file in fileList:

        ######### Input and output file naming and path stuff ############

        if xmlOBJ.getProcess() == 'POW':
            if xmlOBJ.getInst() == 'THEMIS_IR':
                Input_file = List_file.replace('odtie1_', 'odtir1_')
                Input_file = Input_file.replace('xxedr', 'xxrdr')
                Input_file = Input_file.replace('EDR.QUB', 'RDR.QUB')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
            elif xmlOBJ.getInst() == 'ISSNA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
            elif xmlOBJ.getInst() == 'ISSWA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
            elif xmlOBJ.getInst() == 'SOLID STATE IMAGING SYSTEM':
                Input_file = List_file.replace('.img', '.lbl')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
            else:
                Input_file = List_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')

        elif xmlOBJ.getProcess() == 'MAP2':
            Input_file = List_file.replace('file://pds_san', '/pds_san')

            if '+' in Input_file:
                tempsplit = Input_file.split('+')
                tempFile = tempsplit[0]
            else:
                tempFile = Input_file
            label = pvl.load(tempFile)
            #*********Output final file naming **************
            Tbasename = os.path.splitext(os.path.basename(tempFile))[0]
            splitBase = Tbasename.split('_')

            labP = xmlOBJ.getProjection()
            if labP == 'INPUT':
                lab_proj = label['IsisCube']['Mapping']['ProjectionName'][0:4]
            else:
                lab_proj = labP[0:4]

            if xmlOBJ.getClat() is None or xmlOBJ.getClon() is None:
                basefinal = splitBase[0] + splitBase[1] + \
                    splitBase[2] + '_MAP2_' + lab_proj.upper()
            else:
                lab_clat = float(xmlOBJ.getClat())
                if lab_clat >= 0:
                    labH = 'N'
                elif lab_clat < 0:
                    labH = 'S'
                lab_clon = float(xmlOBJ.getClon())

                basefinal = splitBase[0] + splitBase[1] + splitBase[
                    2] + '_MAP2_' + str(lab_clat) + labH + str(
                        lab_clon) + '_' + lab_proj.upper()
            RedisH.MAPname(basefinal)

        try:
            RQ_file.QueueAdd(Input_file)
            logger.info('File %s Added to Redis Queue', Input_file)
        except Exception as e:
            logger.warn('File %s NOT Added to Redis Queue', Input_file)
            print('Redis Queue Error', e)
    RedisH.FileCount(RQ_file.QueueSize())
    logger.info('Count of Files Queue: %s', str(RQ_file.QueueSize()))

    # ************* Map Template Stuff ******************
    logger.info('Making Map File')
    mapOBJ = MakeMap()

    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.getProjection() == 'INPUT':
        proj = label['IsisCube']['Mapping']['ProjectionName']
        mapOBJ.Projection(proj)
    else:
        mapOBJ.Projection(xmlOBJ.getProjection())

    if xmlOBJ.getClon() is not None:
        mapOBJ.CLon(float(xmlOBJ.getClon()))
    if xmlOBJ.getClat() is not None:
        mapOBJ.CLat(float(xmlOBJ.getClat()))
    if xmlOBJ.getFirstParallel() is not None:
        mapOBJ.FirstParallel(float(xmlOBJ.getFirstParallel()))
    if xmlOBJ.getSecondParallel() is not None:
        mapOBJ.SecondParallel(float(xmlOBJ.getSecondParallel()))
    if xmlOBJ.getResolution() is not None:
        mapOBJ.PixelRes(float(xmlOBJ.getResolution()))
    if xmlOBJ.getTargetName() is not None:
        mapOBJ.Target(xmlOBJ.getTargetName())
    if xmlOBJ.getERadius() is not None:
        mapOBJ.ERadius(float(xmlOBJ.getERadius()))
    if xmlOBJ.getPRadius() is not None:
        mapOBJ.PRadius(float(xmlOBJ.getPRadius()))
    if xmlOBJ.getLatType() is not None:
        mapOBJ.LatType(xmlOBJ.getLatType())
    if xmlOBJ.getLonDirection() is not None:
        mapOBJ.LonDirection(xmlOBJ.getLonDirection())
    if xmlOBJ.getLonDomain() is not None:
        mapOBJ.LonDomain(int(xmlOBJ.getLonDomain()))

    if xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getMinLat() is not None:
            mapOBJ.MinLat(float(xmlOBJ.getMinLat()))
        if xmlOBJ.getMaxLat() is not None:
            mapOBJ.MaxLat(float(xmlOBJ.getMaxLat()))
        if xmlOBJ.getMinLon() is not None:
            mapOBJ.MinLon(float(xmlOBJ.getMinLon()))
        if xmlOBJ.getMaxLon() is not None:
            mapOBJ.MaxLon(float(xmlOBJ.getMaxLon()))

    mapOBJ.Map2pvl()

    MAPfile = directory + "/" + Key + '.map'
    mapOBJ.Map2File(MAPfile)

    try:
        f = open(MAPfile)
        f.close()
        logger.info('Map File Creation: Success')
    except IOError as e:
        logger.error('Map File %s Not Found', MAPfile)

# ** End Map Template Stuff **

# *************************************************
    logger.info('Building Recipe')
    recipeOBJ = Recipe()
    if xmlOBJ.getProcess() == 'POW':
        recipeOBJ.AddJsonFile(recipe_dict[xmlOBJ.getInst()])
    elif xmlOBJ.getProcess() == 'MAP2':
        recipeOBJ.AddJsonFile(recipe_dict['MAP'])
# ************** Test for stretch and add to recipe **********************
# if MAP2 and 8 or 16 bit run stretch to set range

    if xmlOBJ.getOutBit() == 'input':
        testBitType = str(label['IsisCube']['Core']['Pixels']['Type']).upper()
    else:
        testBitType = xmlOBJ.getOutBit().upper()

    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.STR_Type() is None:
        if str(label['IsisCube']['Core']['Pixels']['Type']).upper(
        ) != xmlOBJ.getOutBit().upper() and str(
                label['IsisCube']['Core']['Pixels']['Type']).upper() != 'REAL':
            if str(label['IsisCube']['Core']['Pixels']
                   ['Type']).upper() == 'SIGNEDWORD':
                strpairs = '0:-32765 0:-32765 100:32765 100:32765'
            elif str(label['IsisCube']['Core']['Pixels']
                     ['Type']).upper() == 'UNSIGNEDBYTE':
                strpairs = '0:1 0:1 100:254 100:254'

            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    strType = xmlOBJ.STR_Type()
    if strType == 'StretchPercent' and xmlOBJ.STR_PercentMin(
    ) is not None and xmlOBJ.STR_PercentMax(
    ) is not None and testBitType != 'REAL':
        if float(xmlOBJ.STR_PercentMin()) != 0 and float(
                xmlOBJ.STR_PercentMax()) != 100:
            if testBitType == 'UNSIGNEDBYTE':
                strpairs = '0:1 ' + xmlOBJ.STR_PercentMin() + ':1 ' + \
                    xmlOBJ.STR_PercentMax() + ':254 100:254'
            elif testBitType == 'SIGNEDWORD':
                strpairs = '0:-32765 ' + xmlOBJ.STR_PercentMin() + ':-32765 ' + \
                    xmlOBJ.STR_PercentMax() + ':32765 100:32765'

            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'GaussStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('gaussstretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('gsigma', xmlOBJ.STR_GaussSigma())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'HistogramEqualization':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('histeq')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        if xmlOBJ.STR_PercentMin() is None:
            STRprocessOBJ.AddParameter('minper', '0')
        else:
            STRprocessOBJ.AddParameter('minper', xmlOBJ.STR_PercentMin())
        if xmlOBJ.STR_PercentMax() is None:
            STRprocessOBJ.AddParameter('maxper', '100')
        else:
            STRprocessOBJ.AddParameter('maxper', xmlOBJ.STR_PercentMax())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'SigmaStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('sigmastretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('variance', xmlOBJ.STR_SigmaVariance())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

# ************* Test for output bit type and add to recipe *************
    if xmlOBJ.getProcess() == 'POW':
        if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
            CAprocessOBJ = Process()
            CAprocessOBJ.newProcess('cubeatt-bit')
            CAprocessOBJ.AddParameter('from_', 'value')
            CAprocessOBJ.AddParameter('to', 'value')
            recipeOBJ.AddProcess(CAprocessOBJ.getProcess())
    elif xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getOutBit().upper() != 'INPUT':
            if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
                if (str(label['IsisCube']['Core']['Pixels']['Type']).upper()
                        != xmlOBJ.getOutBit().upper()):
                    CAprocessOBJ = Process()
                    CAprocessOBJ.newProcess('cubeatt-bit')
                    CAprocessOBJ.AddParameter('from_', 'value')
                    CAprocessOBJ.AddParameter('to', 'value')
                    recipeOBJ.AddProcess(CAprocessOBJ.getProcess())

# **************** Add Grid(MAP2) *************
    if xmlOBJ.getGridInterval() is not None:
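        # Overlay an ISIS 'grid' on the output: latinc/loninc set the grid
        # spacing (in degrees) from the requested interval, with the image
        # outline and lat/lon boundary lines drawn at a 3-pixel line width.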
        GprocessOBJ = Process()
        GprocessOBJ.newProcess('grid')
        GprocessOBJ.AddParameter('from_', 'value')
        GprocessOBJ.AddParameter('to', 'value')
        GprocessOBJ.AddParameter('latinc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('loninc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('outline', 'yes')
        GprocessOBJ.AddParameter('boundary', 'yes')
        GprocessOBJ.AddParameter('linewidth', '3')
        recipeOBJ.AddProcess(GprocessOBJ.getProcess())

# ********OUTPUT FORMAT ***************
# ************* Test for GDAL and add to recipe *************************
    Oformat = xmlOBJ.getOutFormat()
    if Oformat in ('GeoTiff-BigTiff', 'GeoJPEG-2000', 'JPEG', 'PNG'):
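        # Map the user-facing format names onto GDAL driver names.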
        if Oformat == 'GeoJPEG-2000':
            Oformat = 'JP2KAK'
        if Oformat == 'GeoTiff-BigTiff':
            Oformat = 'GTiff'
        GDALprocessOBJ = Process()
        #        GDALprocessOBJ.newProcess('/usgs/dev/contrib/bin/FWTools-linux-x86_64-3.0.3/bin_safe/gdal_translate')
        GDALprocessOBJ.newProcess('/usgs/apps/anaconda/bin/gdal_translate')
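        # gdal_translate options: -ot sets the output pixel type, -of selects
        # the output driver, and -co passes driver-specific creation options.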
        if xmlOBJ.getOutBit() != 'input':
            GDALprocessOBJ.AddParameter(
                '-ot', GDALprocessOBJ.GDAL_OBit(xmlOBJ.getOutBit()))
        GDALprocessOBJ.AddParameter('-of', Oformat)

        if Oformat == 'GTiff' or Oformat == 'JP2KAK' or Oformat == 'JPEG':
            GDALprocessOBJ.AddParameter('-co',
                                        GDALprocessOBJ.GDAL_Creation(Oformat))

        recipeOBJ.AddProcess(GDALprocessOBJ.getProcess())
# **************** set up isis2pds and add to recipe
    elif Oformat == 'PDS':
        pdsProcessOBJ = Process()
        pdsProcessOBJ.newProcess('isis2pds')
        pdsProcessOBJ.AddParameter('from_', 'value')
        pdsProcessOBJ.AddParameter('to', 'value')
        if xmlOBJ.getOutBit() == 'unsignedbyte':
            pdsProcessOBJ.AddParameter('bittype', '8bit')
        elif xmlOBJ.getOutBit() == 'signedword':
            pdsProcessOBJ.AddParameter('bittype', 's16bit')

        recipeOBJ.AddProcess(pdsProcessOBJ.getProcess())

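    # Walk the assembled recipe: rebuild each step as a Process, fill in the
    # job-specific parameters (map template, resolution, ground range), and
    # queue the step as JSON on the per-job recipe queue for the workers.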
    for item in recipeOBJ.getProcesses():
        processOBJ = Process()
        processOBJ.ProcessFromRecipe(item, recipeOBJ.getRecipe())

        if item == 'cam2map':

            processOBJ.updateParameter('map', MAPfile)

            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'CAMERA')
            else:
                processOBJ.updateParameter('pixres', 'MAP')

            if xmlOBJ.getRangeType() is None:
                processOBJ.updateParameter('defaultrange', 'MINIMIZE')
            elif xmlOBJ.getRangeType() in ('smart', 'fill'):
                processOBJ.updateParameter('defaultrange', 'CAMERA')
                processOBJ.AddParameter('trim', 'YES')

        elif item == 'map2map':
            processOBJ.updateParameter('map', MAPfile)
            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'FROM')
            else:
                processOBJ.updateParameter('pixres', 'MAP')

            if xmlOBJ.OutputGeometry() is not None:
                processOBJ.updateParameter('defaultrange', 'MAP')
                processOBJ.AddParameter('trim', 'YES')
            else:
                processOBJ.updateParameter('defaultrange', 'FROM')

        processJSON = processOBJ.Process2JSON()
        try:
            RQ_recipe.QueueAdd(processJSON)
            logger.info('Recipe Element Added to Redis: %s : Success', item)
        except Exception as e:
            logger.warning('Recipe Element NOT Added to Redis: %s (%s)', item, e)


# *************** HPC job stuff ***********************
    logger.info('HPC Cluster job Submission Starting')
    jobOBJ = HPCjob()
    jobOBJ.setJobName(Key + '_Service')
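    # Slurm filename patterns: %A is the array master job ID, %a the task index.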
    jobOBJ.setStdOut('/usgs/cdev/PDS/output/' + Key + '_%A_%a.out')
    jobOBJ.setStdError('/usgs/cdev/PDS/output/' + Key + '_%A_%a.err')
    jobOBJ.setWallClock('24:00:00')
    #    jobOBJ.setMemory('8192')
    #    jobOBJ.setMemory('16384')
    jobOBJ.setMemory('24576')
    jobOBJ.setPartition('pds')
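    # Size the job array to the number of files waiting in the Redis file queue.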
    JAsize = RQ_file.QueueSize()
    jobOBJ.setJobArray(JAsize)
    logger.info('Job Array Size : %s', str(JAsize))

    jobOBJ.addPath('/usgs/apps/anaconda/bin')

    if xmlOBJ.getProcess() == 'POW':
        cmd = '/usgs/cdev/PDS/bin/POWprocess.py ' + Key
    elif xmlOBJ.getProcess() == 'MAP2':
        cmd = '/usgs/cdev/PDS/bin/MAPprocess.py ' + Key

    logger.info('HPC Command: %s', cmd)
    jobOBJ.setCommand(cmd)

    SBfile = directory + '/' + Key + '.sbatch'
    jobOBJ.MakeJobFile(SBfile)

    try:
        sb = open(SBfile)
        sb.close()
        logger.info('SBATCH File Creation: Success')
    except IOError as e:
        logger.error('SBATCH File %s Not Found', SBfile)

    try:
        jobOBJ.Run()
        logger.info('Job Submission to HPC: Success')
        DBQO.setJobsStarted(Key)
    except IOError as e:
        logger.error('Jobs NOT Submitted to HPC')
def main():
    args = Args()
    args.parse_args()
    key = args.key
    namespace = args.namespace

    if namespace is None:
        namespace = default_namespace

    # Set up logging
    logger = logging.getLogger(key)
    logger.setLevel(logging.INFO)

    logFileHandle = logging.FileHandler(pds_log + 'Service.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    # Connect to database and access 'jobs' table
    DBQO = PDS_DBquery('JOBS')
    if key is None:
        # If no key is specified, grab the first key
        key = DBQO.jobKey()
    try:
        # Set the 'queued' column to current time i.e. prep for processing
        DBQO.setJobsQueued(key)
    except KeyError as e:
        logger.error('%s', e)
        exit(1)

    logger.info('Starting Process')

    xmlOBJ = jobXML(DBQO.jobXML4Key(key))

    # Make directory if it doesn't exist
    directory = scratch + key
    if not os.path.exists(directory):
        os.makedirs(directory)

    logger.info('Working Area: %s', directory)

    # Set up Redis Hash for ground range
    RedisH = RedisHash(key + '_info')
    RedisH.RemoveAll()
    RedisErrorH = RedisHash(key + '_error')
    RedisErrorH.RemoveAll()
    RedisH_DICT = {}
    RedisH_DICT['service'] = xmlOBJ.getProcess()
    RedisH_DICT['fileformat'] = xmlOBJ.getOutFormat()
    RedisH_DICT['outbit'] = xmlOBJ.getOutBit()
    if xmlOBJ.getRangeType() is not None:
        RedisH_DICT['grtype'] = xmlOBJ.getRangeType()
        RedisH_DICT['minlat'] = xmlOBJ.getMinLat()
        RedisH_DICT['maxlat'] = xmlOBJ.getMaxLat()
        RedisH_DICT['minlon'] = xmlOBJ.getMinLon()
        RedisH_DICT['maxlon'] = xmlOBJ.getMaxLon()

    if not RedisH.IsInHash('service'):
        RedisH.AddHash(RedisH_DICT)
    if RedisH.IsInHash('service'):
        logger.info('Redis info Hash: Success')
    else:
        logger.error('Redis info Hash Not Found')

    # End ground range

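    # Per-job Redis queues: recipe steps, input files, in-flight work, worker
    # log messages, and finished products to zip. Clear any leftovers from a
    # previous run under the same key.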
    RQ_recipe = RedisQueue(key + '_recipe', namespace)
    RQ_recipe.RemoveAll()
    RQ_file = RedisQueue(key + '_FileQueue', namespace)
    RQ_file.RemoveAll()
    RQ_WorkQueue = RedisQueue(key + '_WorkQueue', namespace)
    RQ_WorkQueue.RemoveAll()
    RQ_loggy = RedisQueue(key + '_loggy', namespace)
    RQ_loggy.RemoveAll()
    RQ_zip = RedisQueue(key + '_ZIP', namespace)
    RQ_zip.RemoveAll()

    if xmlOBJ.getProcess() == 'POW':
        fileList = xmlOBJ.getFileListWB()
    elif xmlOBJ.getProcess() == 'MAP2':
        fileList = xmlOBJ.getMFileListWB()

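    # Resolve each listed file to an on-disk input path: POW entries are PDS
    # archive URLs rewritten onto archive_base (with instrument-specific
    # filename substitutions), MAP2 entries are file:// paths on the PDS SAN.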
    for List_file in fileList:

        # Input and output file naming and path stuff
        if xmlOBJ.getProcess() == 'POW':
            if xmlOBJ.getInst() == 'THEMIS_IR':
                Input_file = List_file.replace('odtie1_', 'odtir1_')
                Input_file = Input_file.replace('xxedr', 'xxrdr')
                Input_file = Input_file.replace('EDR.QUB', 'RDR.QUB')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'ISSNA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'ISSWA':
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'SOLID STATE IMAGING SYSTEM':
                Input_file = List_file.replace('.img', '.lbl')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            else:
                Input_file = List_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)

        elif xmlOBJ.getProcess() == 'MAP2':
            Input_file = List_file.replace('file://pds_san', '/pds_san')

            if '+' in Input_file:
                tempsplit = Input_file.split('+')
                tempFile = tempsplit[0]
            else:
                tempFile = Input_file
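            # Read the ISIS label; its Mapping and Pixels groups drive the
            # output naming, the map template, and the stretch choices below.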
            label = pvl.load(tempFile)
            # Output final file naming
            Tbasename = os.path.splitext(os.path.basename(tempFile))[0]
            splitBase = Tbasename.split('_')

            labP = xmlOBJ.getProjection()
            if labP == 'INPUT':
                lab_proj = label['IsisCube']['Mapping']['ProjectionName'][0:4]
            else:
                lab_proj = labP[0:4]

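            # Final output basename: the concatenated input basename pieces,
            # '_MAP2_', the center lat/lon (with an N/S hemisphere letter) when
            # one was given, and the first four letters of the projection.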
            if xmlOBJ.getClat() is None or xmlOBJ.getClon() is None:
                basefinal = splitBase[0] + splitBase[1] + \
                    splitBase[2] + '_MAP2_' + lab_proj.upper()
            else:
                lab_clat = float(xmlOBJ.getClat())
                if lab_clat >= 0:
                    labH = 'N'
                elif lab_clat < 0:
                    labH = 'S'
                lab_clon = float(xmlOBJ.getClon())

                basefinal = (splitBase[0] + splitBase[1] + splitBase[2] +
                             '_MAP2_' + str(lab_clat) + labH +
                             str(lab_clon) + '_' + lab_proj.upper())
            RedisH.MAPname(basefinal)

        try:
            RQ_file.QueueAdd(Input_file)
            logger.info('File %s Added to Redis Queue', Input_file)
        except Exception as e:
            logger.warning('File %s NOT Added to Redis Queue', Input_file)
            print('Redis Queue Error', e)
    RedisH.FileCount(RQ_file.QueueSize())
    logger.info('Count of Files Queue: %s', str(RQ_file.QueueSize()))

    # Map Template Stuff
    logger.info('Making Map File')
    mapOBJ = MakeMap()

    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.getProjection() == 'INPUT':
        proj = label['IsisCube']['Mapping']['ProjectionName']
        mapOBJ.Projection(proj)
    else:
        mapOBJ.Projection(xmlOBJ.getProjection())

    if xmlOBJ.getClon() is not None:
        mapOBJ.CLon(float(xmlOBJ.getClon()))
    if xmlOBJ.getClat() is not None:
        mapOBJ.CLat(float(xmlOBJ.getClat()))
    if xmlOBJ.getFirstParallel() is not None:
        mapOBJ.FirstParallel(float(xmlOBJ.getFirstParallel()))
    if xmlOBJ.getSecondParallel() is not None:
        mapOBJ.SecondParallel(float(xmlOBJ.getSecondParallel()))
    if xmlOBJ.getResolution() is not None:
        mapOBJ.PixelRes(float(xmlOBJ.getResolution()))
    if xmlOBJ.getTargetName() is not None:
        mapOBJ.Target(xmlOBJ.getTargetName())
    if xmlOBJ.getERadius() is not None:
        mapOBJ.ERadius(float(xmlOBJ.getERadius()))
    if xmlOBJ.getPRadius() is not None:
        mapOBJ.PRadius(float(xmlOBJ.getPRadius()))
    if xmlOBJ.getLatType() is not None:
        mapOBJ.LatType(xmlOBJ.getLatType())
    if xmlOBJ.getLonDirection() is not None:
        mapOBJ.LonDirection(xmlOBJ.getLonDirection())
    if xmlOBJ.getLonDomain() is not None:
        mapOBJ.LonDomain(int(xmlOBJ.getLonDomain()))

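    # MAP2 only: carry the requested ground range into the map template.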
    if xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getMinLat() is not None:
            mapOBJ.MinLat(float(xmlOBJ.getMinLat()))
        if xmlOBJ.getMaxLat() is not None:
            mapOBJ.MaxLat(float(xmlOBJ.getMaxLat()))
        if xmlOBJ.getMinLon() is not None:
            mapOBJ.MinLon(float(xmlOBJ.getMinLon()))
        if xmlOBJ.getMaxLon() is not None:
            mapOBJ.MaxLon(float(xmlOBJ.getMaxLon()))

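    # Render the map template as PVL and write it into the job's working area.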
    mapOBJ.Map2pvl()

    MAPfile = directory + "/" + key + '.map'
    mapOBJ.Map2File(MAPfile)

    try:
        f = open(MAPfile)
        f.close()
        logger.info('Map File Creation: Success')
    except IOError as e:
        logger.error('Map File %s Not Found', MAPfile)

    # ** End Map Template Stuff **

    logger.info('Building Recipe')
    recipeOBJ = Recipe()
    if xmlOBJ.getProcess() == 'POW':
        recipeOBJ.AddJsonFile(recipe_base + xmlOBJ.getCleanName() + '.json',
                              "pow")
    elif xmlOBJ.getProcess() == 'MAP2':
        recipeOBJ.AddJsonFile(recipe_base + "map2_process.json", "map")
    # Test for stretch and add to recipe
    # if MAP2 and 8 or 16 bit run stretch to set range

    if xmlOBJ.getOutBit() == 'input':
        testBitType = str(label['IsisCube']['Core']['Pixels']['Type']).upper()
    else:
        testBitType = xmlOBJ.getOutBit().upper()

    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.STR_Type() is None:
        pixelType = str(label['IsisCube']['Core']['Pixels']['Type']).upper()
        if pixelType != xmlOBJ.getOutBit().upper() and pixelType != 'REAL':
            if pixelType == 'SIGNEDWORD':
                strpairs = '0:-32765 0:-32765 100:32765 100:32765'
            elif pixelType == 'UNSIGNEDBYTE':
                strpairs = '0:1 0:1 100:254 100:254'

            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    strType = xmlOBJ.STR_Type()
    if (strType == 'StretchPercent' and xmlOBJ.STR_PercentMin() is not None
            and xmlOBJ.STR_PercentMax() is not None and testBitType != 'REAL'):
        if float(xmlOBJ.STR_PercentMin()) != 0 and float(
                xmlOBJ.STR_PercentMax()) != 100:
            if testBitType == 'UNSIGNEDBYTE':
                strpairs = '0:1 ' + xmlOBJ.STR_PercentMin() + ':1 ' + \
                    xmlOBJ.STR_PercentMax() + ':254 100:254'
            elif testBitType == 'SIGNEDWORD':
                strpairs = '0:-32765 ' + xmlOBJ.STR_PercentMin() + ':-32765 ' + \
                    xmlOBJ.STR_PercentMax() + ':32765 100:32765'

            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'GaussStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('gaussstretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('gsigma', xmlOBJ.STR_GaussSigma())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'HistogramEqualization':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('histeq')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        if xmlOBJ.STR_PercentMin() is None:
            STRprocessOBJ.AddParameter('minper', '0')
        else:
            STRprocessOBJ.AddParameter('minper', xmlOBJ.STR_PercentMin())
        if xmlOBJ.STR_PercentMax() is None:
            STRprocessOBJ.AddParameter('maxper', '100')
        else:
            STRprocessOBJ.AddParameter('maxper', xmlOBJ.STR_PercentMax())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'SigmaStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('sigmastretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('variance', xmlOBJ.STR_SigmaVariance())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    # Test for output bit type and add to recipe
    if xmlOBJ.getProcess() == 'POW':
        if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
            CAprocessOBJ = Process()
            CAprocessOBJ.newProcess('cubeatt-bit')
            CAprocessOBJ.AddParameter('from_', 'value')
            CAprocessOBJ.AddParameter('to', 'value')
            recipeOBJ.AddProcess(CAprocessOBJ.getProcess())
    elif xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getOutBit().upper() != 'INPUT':
            if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
                if (str(label['IsisCube']['Core']['Pixels']['Type']).upper()
                        != xmlOBJ.getOutBit().upper()):
                    CAprocessOBJ = Process()
                    CAprocessOBJ.newProcess('cubeatt-bit')
                    CAprocessOBJ.AddParameter('from_', 'value')
                    CAprocessOBJ.AddParameter('to', 'value')
                    recipeOBJ.AddProcess(CAprocessOBJ.getProcess())

    # Add Grid(MAP2)
    if xmlOBJ.getGridInterval() is not None:
        GprocessOBJ = Process()
        GprocessOBJ.newProcess('grid')
        GprocessOBJ.AddParameter('from_', 'value')
        GprocessOBJ.AddParameter('to', 'value')
        GprocessOBJ.AddParameter('latinc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('loninc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('outline', 'yes')
        GprocessOBJ.AddParameter('boundary', 'yes')
        GprocessOBJ.AddParameter('linewidth', '3')
        recipeOBJ.AddProcess(GprocessOBJ.getProcess())

    # OUTPUT FORMAT
    # Test for GDAL and add to recipe
    Oformat = xmlOBJ.getOutFormat()
    if Oformat in ('GeoTiff-BigTiff', 'GeoJPEG-2000', 'JPEG', 'PNG'):
        if Oformat == 'GeoJPEG-2000':
            Oformat = 'JP2KAK'
        if Oformat == 'GeoTiff-BigTiff':
            Oformat = 'GTiff'
        GDALprocessOBJ = Process()
        # @TODO remove hard-coded path in favor of using whichever utilities are found within the conda environment --
        #  we need more information here to ensure that whichever utilities are found are capable of supporting GeoJPEG-2000.
        GDALprocessOBJ.newProcess('/usgs/apps/anaconda/bin/gdal_translate')
        if xmlOBJ.getOutBit() != 'input':
            GDALprocessOBJ.AddParameter(
                '-ot', GDALprocessOBJ.GDAL_OBit(xmlOBJ.getOutBit()))
        GDALprocessOBJ.AddParameter('-of', Oformat)

        if Oformat == 'GTiff' or Oformat == 'JP2KAK' or Oformat == 'JPEG':
            GDALprocessOBJ.AddParameter('-co',
                                        GDALprocessOBJ.GDAL_Creation(Oformat))

        recipeOBJ.AddProcess(GDALprocessOBJ.getProcess())
    # set up isis2pds and add to recipe
    elif Oformat == 'PDS':
        pdsProcessOBJ = Process()
        pdsProcessOBJ.newProcess('isis2pds')
        pdsProcessOBJ.AddParameter('from_', 'value')
        pdsProcessOBJ.AddParameter('to', 'value')
        if xmlOBJ.getOutBit() == 'unsignedbyte':
            pdsProcessOBJ.AddParameter('bittype', '8bit')
        elif xmlOBJ.getOutBit() == 'signedword':
            pdsProcessOBJ.AddParameter('bittype', 's16bit')

        recipeOBJ.AddProcess(pdsProcessOBJ.getProcess())

    for item in recipeOBJ.getProcesses():
        processOBJ = Process()
        processOBJ.ProcessFromRecipe(item, recipeOBJ.getRecipe())

        if item == 'cam2map':

            processOBJ.updateParameter('map', MAPfile)

            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'CAMERA')
            else:
                processOBJ.updateParameter('pixres', 'MAP')

            if xmlOBJ.getRangeType() is None:
                processOBJ.updateParameter('defaultrange', 'MINIMIZE')
            elif xmlOBJ.getRangeType() in ('smart', 'fill'):
                processOBJ.updateParameter('defaultrange', 'CAMERA')
                processOBJ.AddParameter('trim', 'YES')

        elif item == 'map2map':
            processOBJ.updateParameter('map', MAPfile)
            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'FROM')
            else:
                processOBJ.updateParameter('pixres', 'MAP')

            if xmlOBJ.OutputGeometry() is not None:
                processOBJ.updateParameter('defaultrange', 'MAP')
                processOBJ.AddParameter('trim', 'YES')
            else:
                processOBJ.updateParameter('defaultrange', 'FROM')

        processJSON = processOBJ.Process2JSON()
        try:
            RQ_recipe.QueueAdd(processJSON)
            logger.info('Recipe Element Added to Redis: %s : Success', item)
        except Exception as e:
            logger.warning('Recipe Element NOT Added to Redis: %s (%s)', item, e)

    # HPC job stuff
    logger.info('HPC Cluster job Submission Starting')
    jobOBJ = HPCjob()
    jobOBJ.setJobName(key + '_Service')
    jobOBJ.setStdOut(slurm_log + key + '_%A_%a.out')
    jobOBJ.setStdError(slurm_log + key + '_%A_%a.err')
    jobOBJ.setWallClock('24:00:00')
    jobOBJ.setMemory('24576')
    jobOBJ.setPartition('pds')
    JAsize = RQ_file.QueueSize()
    jobOBJ.setJobArray(JAsize)
    logger.info('Job Array Size : %s', str(JAsize))

    # @TODO replace with source activate <env>
    #jobOBJ.addPath('/usgs/apps/anaconda/bin')

    # Whether or not we use the default namespace, this guarantees that the POW/MAP queues will match the namespace
    #  used in the job manager.
    if xmlOBJ.getProcess() == 'POW':
        cmd = cmd_dir + "POWprocess.py -k {} -n {}".format(key, namespace)
    elif xmlOBJ.getProcess() == 'MAP2':
        cmd = cmd_dir + "MAPprocess.py -k {} -n {}".format(key, namespace)

    logger.info('HPC Command: %s', cmd)
    jobOBJ.setCommand(cmd)

    SBfile = directory + '/' + key + '.sbatch'
    jobOBJ.MakeJobFile(SBfile)

    try:
        sb = open(SBfile)
        sb.close()
        logger.info('SBATCH File Creation: Success')
    except IOError as e:
        logger.error('SBATCH File %s Not Found', SBfile)

    try:
        jobOBJ.Run()
        logger.info('Job Submission to HPC: Success')
        DBQO.setJobsStarted(key)
    except IOError as e:
        logger.error('Jobs NOT Submitted to HPC')