Example 1
def main():
    with open(pds_info, 'r') as f:
        PDS_info = json.load(f)
    redis_queue = RedisQueue('DI_ReadyQueue')

    try:
        session, _ = db_connect(pds_db)
    except Exception as e:
        print(e)
        return 1

    for target in PDS_info:
        archive_id = PDS_info[target]['archiveid']
        # Compute a naive datetime 30 days in the past; the string round trip
        # strips the timezone so it compares cleanly with DB timestamps
        td = (datetime.datetime.now(pytz.utc) -
              datetime.timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S")
        testing_date = datetime.datetime.strptime(td, "%Y-%m-%d %H:%M:%S")
        expired = archive_expired(session, archive_id, testing_date)
        # If any files within the archive are expired, send them to the queue
        if expired.count():
            # @TODO get rid of print statements or enable with --verbose?
            for f in expired:
                redis_queue.QueueAdd((f.filename, target))
            print('Archive {} DI Ready: {} Files'.format(
                target, expired.count()))
        else:
            print('Archive {} DI Current'.format(target))
    return 0
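Note: these examples all lean on a small RedisQueue helper (QueueAdd, QueueGet, QueueSize, id_name) whose implementation lives elsewhere in the pipeline. The following is a minimal stand-in sketch built on redis-py list operations, assuming a default localhost server; it matches how the examples call the class but is not the pipeline's actual code.

import redis

class RedisQueue:
    """Minimal FIFO queue over a Redis list (a sketch, not the pipeline's class)."""

    def __init__(self, name, namespace='queue', host='localhost', port=6379):
        self._db = redis.StrictRedis(host=host, port=port)
        self.id_name = '{}:{}'.format(namespace, name)

    def QueueSize(self):
        return self._db.llen(self.id_name)

    def QueueAdd(self, item):
        # Items are stored via str()/repr(), so tuples survive the round trip
        self._db.rpush(self.id_name, str(item))

    def QueueGet(self):
        # Returns bytes, matching the .decode('utf-8') calls in later examples
        return self._db.lpop(self.id_name)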
Example 2
def main():
    RQ = RedisQueue('LinkQueue')
    while int(RQ.QueueSize()) > 0:
        # Grab a tuple of values from the redis queue
        item = literal_eval(RQ.QueueGet().decode('utf-8'))
        # Unpack the tuple into its two values
        inputfile, archive = item

        json_file_path = recipe_base + archive + '.json'
        try:
            with open(json_file_path, 'r') as f:
                json_dict = json.load(f)
        except ValueError:
            # Skip archives whose recipe JSON is malformed
            continue
        link_src_path = json_dict['src']

        voldesc = load_pvl(inputfile)
        dataset_id = voldesc['VOLUME']['DATA_SET_ID']
        volume_id = voldesc['VOLUME']['VOLUME_ID']
        # if more than one dataset id exists, link each of them
        if isinstance(dataset_id, (list, tuple, set)):
            for ds_id in dataset_id:
                link(link_src_path, link_dest, volume_id, ds_id)
        else:
            # Not a container type; link the single dataset id
            link(link_src_path, link_dest, volume_id, dataset_id)
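The decode/literal_eval pair at the top of the loop is the inverse of how tuples are enqueued: QueueAdd stores the tuple's repr() string, and ast.literal_eval safely parses it back into a tuple. A minimal round trip, assuming the repr-based storage shown in the RedisQueue sketch above:

from ast import literal_eval

item = ('volumes/vol001/file.img', 'mroCTX')
stored = str(item).encode('utf-8')    # what lands in Redis
restored = literal_eval(stored.decode('utf-8'))
assert restored == item               # tuples survive the round trip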
Example 3
def main():

    args = Args()
    args.parse_args()

    RQ = RedisQueue('ChecksumUpdate_Queue')

    # @TODO Remove/replace "archiveID"
    archiveID = {
        'cassiniISS': 'cassini_iss_edr',
        'mroCTX': 16,
        'mroHIRISE_EDR': '124',
        'LROLRC_EDR': 74
    }

    # ********* Set up logging *************
    logger = logging.getLogger('ChecksumUpdate_Queueing.' + args.archive)
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'DI.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting %s Checksum Update Queueing', args.archive)
    if args.volume:
        logger.info('Queueing %s Volume', args.volume)

    try:
        # Throws away engine information
        session, _ = db_connect(pds_db)
        logger.info('Database Connection: Success')
    except Exception:
        logger.error('Database Connection: Error')
        return 1

    if args.volume:
        volstr = '%' + args.volume + '%'
        QueryOBJ = session.query(Files).filter(
            Files.archiveid == archiveID[args.archive],
            Files.filename.like(volstr))
    else:
        QueryOBJ = session.query(Files).filter(
            Files.archiveid == archiveID[args.archive])
    addcount = 0
    for element in QueryOBJ:
        try:
            RQ.QueueAdd(element.filename)
            addcount = addcount + 1
        except Exception:
            logger.error('File %s Not Added to ChecksumUpdate_Queue',
                         element.filename)

    logger.info('Files Added to Queue: %s', addcount)

    logger.info('Checksum Update Queueing Complete')
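The same logger setup block (named logger, level, FileHandler, formatter) is repeated inline in nearly every main() in this collection. If you adapt these examples, it factors out naturally; a sketch using the same pds_log configuration variable the examples assume:

import logging

def setup_logger(name, log_file, level='INFO'):
    """Build the file logger that these examples construct inline."""
    logger = logging.getLogger(name)
    logger.setLevel(logging.getLevelName(level))
    handler = logging.FileHandler(log_file)
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s'))
    logger.addHandler(handler)
    return logger

# e.g.: logger = setup_logger('ChecksumUpdate_Queueing.' + args.archive,
#                             pds_log + 'DI.log', args.log_level)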
Example 4
def main():

    args = Args()
    args.parse_args()

    logger = logging.getLogger('UPC_Queueing.' + args.archive)
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Process.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting Process')

    with open(pds_info, 'r') as f:
        PDSinfoDICT = json.load(f)
    try:
        archiveID = PDSinfoDICT[args.archive]['archiveid']
    except KeyError:
        print("\nArchive '{}' not found in {}\n".format(
            args.archive, pds_info))
        print("The following archives are available:")
        for k in PDSinfoDICT:
            print("\t{}".format(k))
        return 1

    RQ = RedisQueue('UPC_ReadyQueue')

    try:
        session, _ = db_connect(pds_db)
        print('Database Connection Success')
    except Exception as e:
        print(e)
        print('Database Connection Error')
        return 1

    if args.volume:
        volstr = '%' + args.volume + '%'
        qOBJ = session.query(Files).filter(Files.archiveid == archiveID,
                                           Files.filename.like(volstr),
                                           Files.upc_required == 't')
    else:
        qOBJ = session.query(Files).filter(Files.archiveid == archiveID,
                                           Files.upc_required == 't')
    if qOBJ:
        addcount = 0
        for element in qOBJ:
            fname = PDSinfoDICT[args.archive]['path'] + element.filename
            fid = element.fileid
            RQ.QueueAdd((fname, fid, args.archive))
            addcount = addcount + 1

        logger.info('Files Added to UPC Queue: %s', addcount)

    print("Done")
Example 5
def main():
    args = Args()
    args.parse_args()

    logger = logging.getLogger('FinalJobber')
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'Service.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    #***************Look at Final queue for work************
    RQ_final = RedisQueue('FinalQueue')
    logger.info("Reddis Queue: %s", RQ_final.id_name)

    if int(RQ_final.QueueSize()) == 0:
        logger.info('Nothing Found in Final Queue')
    else:
        FKey = RQ_final.QueueGet()
        logger.info('Found %s in Final Queue', str(FKey))

        # ** *************** HPC job stuff ***********************

        logger.info('HPC Cluster job Submission Starting')
        jobOBJ = HPCjob()
        jobOBJ.setJobName(FKey + '_Final')
        jobOBJ.setStdOut(slurm_log + FKey + '_%A_%a.out')
        jobOBJ.setStdError(slurm_log + FKey + '_%A_%a.err')
        jobOBJ.setWallClock('24:00:00')
        jobOBJ.setMemory('8192')
        jobOBJ.setPartition('pds')

        cmd = cmd_dir + 'ServiceFinal.py ' + FKey
        jobOBJ.setCommand(cmd)
        logger.info('HPC Command: %s', cmd)

        SBfile = scratch + FKey + '/' + FKey + '_final.sbatch'
        jobOBJ.MakeJobFile(SBfile)

        try:
            with open(SBfile):
                pass
            logger.info('SBATCH File Creation: Success')
        except IOError:
            logger.error('SBATCH File %s Not Found', SBfile)

        try:
            jobOBJ.Run()
            logger.info('Job Submission to HPC: Success')
        except IOError as e:
            logger.error('Jobs NOT Submitted to HPC\n%s', e)
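HPCjob is pipeline-specific, but its setters map one-to-one onto SLURM sbatch directives. Under that assumption, a sketch of what MakeJobFile plausibly writes; the helper below is illustrative, not the pipeline's implementation.

def make_job_file(path, name, stdout, stderr, wallclock, memory, partition, command):
    """Write a minimal SLURM batch file from the setters used above (a sketch)."""
    with open(path, 'w') as f:
        f.write('#!/bin/bash\n')
        f.write('#SBATCH --job-name={}\n'.format(name))
        f.write('#SBATCH --output={}\n'.format(stdout))
        f.write('#SBATCH --error={}\n'.format(stderr))
        f.write('#SBATCH --time={}\n'.format(wallclock))
        f.write('#SBATCH --mem={}\n'.format(memory))
        f.write('#SBATCH --partition={}\n'.format(partition))
        f.write(command + '\n')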
Example 6
def main():
    args = Args()
    args.parse_args()

    with open(pds_info, 'r') as f:
        PDS_info = json.load(f)
    redis_queue = RedisQueue('UPC_ReadyQueue')
    logger = logging.getLogger('UPC_Queueing')
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'Process.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info("UPC Queue: %s", reddis_queue.id_name)

    try:
        session, _ = db_connect(pds_db)
    except Exception as e:
        logger.error("%s", e)
        return 1

    # For each archive in the db, test if there are files that are ready to
    #  process
    for archive_id in session.query(Files.archiveid).distinct():
        result = session.query(Files).filter(Files.archiveid == archive_id,
                                             Files.upc_required == 't')

        # Get filepath from archive id
        archive_name = session.query(Archives.archive_name).filter(
            Archives.archiveid == archive_id).first()

        # No archive name = no path.  Skip these values.
        if archive_name is None:
            logger.warning("No archive name found for archive id: %s",
                           archive_id)
            continue
        try:
            # Since results are returned as lists, we have to access the 0th
            #  element to pull out the string archive name.
            fpath = PDS_info[archive_name[0]]['path']
        except KeyError:
            logger.warn("Unable to locate file path for archive id %s",
                        archive_id)
            continue

        # Add each file in the archive to the redis queue.
        for element in result:
            fname = fpath + element.filename
            fid = element.fileid
            redis_queue.QueueAdd((fname, fid, archive_name[0]))

        logger.info("Added %s files from %s", result.count(), archive_name)
    return 0
Example 7
def main():

    args = Args()
    args.parse_args()

    logger = logging.getLogger('Browse_Queueing.' + args.archive)
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Process.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting Process')

    with open(pds_info, 'r') as f:
        PDSinfoDICT = json.load(f)
    archiveID = PDSinfoDICT[args.archive]['archiveid']

    RQ = RedisQueue('Browse_ReadyQueue')

    try:
        session, _ = db_connect(pds_db)
        logger.info('Database Connection Success')
    except Exception:
        logger.error('Database Connection Error')
        return 1

    if args.volume:
        volstr = '%' + args.volume + '%'
        qOBJ = session.query(Files).filter(Files.archiveid == archiveID,
                                           Files.filename.like(volstr),
                                           Files.upc_required == 't')
    else:
        qOBJ = session.query(Files).filter(Files.archiveid == archiveID,
                                           Files.upc_required == 't')
    if qOBJ:
        addcount = 0
        for element in qOBJ:
            fname = PDSinfoDICT[args.archive]['path'] + element.filename
            fid = element.fileid
            RQ.QueueAdd((fname, fid, args.archive))
            addcount = addcount + 1

        logger.info('Files Added to Browse Queue: %s', addcount)
Example 8
def main():
    with open(pds_info, 'r') as f:
        PDS_info = json.load(f)
    redis_queue = RedisQueue('UPC_ReadyQueue')

    try:
        # Safe to use prd database here because there are no writes/edits.
        session, _ = db_connect(pds_db)

    # @TODO Catch exceptions by type.  Bad practice to 'except Exception,' but
    #   I don't know what exception could happen here.
    except Exception as e:
        print(e)
        return 1

    # For each archive in the db, test if there are files that are ready to
    #  process
    for archive_id in session.query(Files.archiveid).distinct():
        result = session.query(Files).filter(Files.archiveid == archive_id,
                                             Files.upc_required == 't')

        # Get filepath from archive id
        archive_name = session.query(Archives.archive_name).filter(
            Archives.archiveid == archive_id).first()

        # No archive name = no path.  Skip these values.
        if archive_name is None:
            # @TODO log an error
            continue

        try:
            # Since results are returned as lists, we have to access the 0th
            #  element to pull out the string archive name.
            fpath = PDS_info[archive_name[0]]['path']
        except KeyError:
            continue

        # Add each file in the archive to the redis queue.
        for element in result:
            fname = fpath + element.filename
            fid = element.fileid
            redis_queue.QueueAdd((fname, fid, archive_name[0]))
    return 0
Example 9
def main():
    args = Args()
    args.parse_args()

    logger = logging.getLogger('DI_Queueing')
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'DI.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    with open(pds_info, 'r') as f:
        PDS_info = json.load(f)
    redis_queue = RedisQueue('DI_ReadyQueue')

    logger.info("DI Queue: %s", reddis_queue.id_name)

    try:
        session, _ = db_connect(pds_db)
    except Exception as e:
        logger.error("%s", e)
        return 1

    for target in PDS_info:
        archive_id = PDS_info[target]['archiveid']
        # Compute a naive datetime 30 days in the past; the string round trip
        # strips the timezone so it compares cleanly with DB timestamps
        td = (datetime.datetime.now(pytz.utc) -
              datetime.timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S")
        testing_date = datetime.datetime.strptime(td, "%Y-%m-%d %H:%M:%S")
        expired = archive_expired(session, archive_id, testing_date)
        # If any files within the archive are expired, send them to the queue
        if expired.count():
            for f in expired:
                redis_queue.QueueAdd((f.filename, target))
            logger.info('Archive %s DI Ready: %s Files', target,
                        expired.count())
        else:
            logger.info('Archive %s DI Current', target)
    return 0
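The strftime/strptime round trip above has one purpose: converting an aware UTC datetime into a naive one (and dropping microseconds) so it compares against naive database timestamps. A direct equivalent, shown only to make that intent explicit:

import datetime
import pytz

cutoff = datetime.datetime.now(pytz.utc) - datetime.timedelta(days=30)
naive_cutoff = cutoff.replace(tzinfo=None, microsecond=0)
# naive_cutoff is what the string round trip produces: a naive datetime
# 30 days in the past, safe to compare with naive DB datetimes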
Example 10
def main():
    RQ = RedisQueue('LinkQueue')
    args = Args()
    args.parse_args()

    logger = logging.getLogger('LINK_Process')
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'Link.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    while int(RQ.QueueSize()) > 0:
        # Grab a tuple of values from the redis queue
        item = literal_eval(RQ.QueueGet().decode('utf-8'))
        # Unpack the tuple into its two values
        inputfile, archive = item

        json_file_path = recipe_base + archive + '.json'
        try:
            with open(json_file_path, 'r') as f:
                json_dict = json.load(f)
        except ValueError as e:
            logger.warning(e)
            continue
        link_src_path = json_dict['src']

        voldesc = load_pvl(inputfile)
        dataset_id = voldesc['VOLUME']['DATA_SET_ID']
        volume_id = voldesc['VOLUME']['VOLUME_ID']
        # if more than one dataset id exists, link each of them
        if isinstance(dataset_id, (list, tuple, set)):
            for ds_id in dataset_id:
                link(link_src_path, link_dest, volume_id, ds_id)
        else:
            # Not a container type; link the single dataset id
            link(link_src_path, link_dest, volume_id, dataset_id)
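The link() helper is defined elsewhere in the pipeline; only its call signature (source path, destination root, volume id, dataset id) is visible here. A purely hypothetical sketch consistent with that signature, exposing a volume under a per-dataset symlink; the directory layout is a guess, not the pipeline's actual behavior:

import os

def link(src_path, dest_root, volume_id, dataset_id):
    """Hypothetical: symlink a volume into a per-dataset directory."""
    dest_dir = os.path.join(dest_root, dataset_id)
    os.makedirs(dest_dir, exist_ok=True)
    link_name = os.path.join(dest_dir, volume_id)
    if not os.path.islink(link_name):
        os.symlink(os.path.join(src_path, volume_id), link_name)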
Example 11
def main():

    DBQO = PDS_DBquery('JOBS')
    Key = DBQO.jobKey()
    DBQO.setJobsQueued(Key)

    #*************** Setup logging ******************
    logger = logging.getLogger(Key)
    logger.setLevel(logging.INFO)

    logFileHandle = logging.FileHandler('/usgs/cdev/PDS/logs/Service.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting Process')

    xmlOBJ = jobXML(DBQO.jobXML4Key(Key))

    # ********** Test if Directory exists and make it if not *******

    directory = '/scratch/pds_services/' + Key
    if not os.path.exists(directory):
        os.makedirs(directory)

    logger.info('Working Area: %s', directory)

    # ******************** Setup Redis Hash for ground range *********

    RedisH = RedisHash(Key + '_info')
    RedisH.RemoveAll()
    RedisErrorH = RedisHash(Key + '_error')
    RedisErrorH.RemoveAll()
    RedisH_DICT = {}
    RedisH_DICT['service'] = xmlOBJ.getProcess()
    RedisH_DICT['fileformat'] = xmlOBJ.getOutFormat()
    RedisH_DICT['outbit'] = xmlOBJ.getOutBit()
    if xmlOBJ.getRangeType() is not None:
        RedisH_DICT['grtype'] = xmlOBJ.getRangeType()
        RedisH_DICT['minlat'] = xmlOBJ.getMinLat()
        RedisH_DICT['maxlat'] = xmlOBJ.getMaxLat()
        RedisH_DICT['minlon'] = xmlOBJ.getMinLon()
        RedisH_DICT['maxlon'] = xmlOBJ.getMaxLon()

    if not RedisH.IsInHash('service'):
        RedisH.AddHash(RedisH_DICT)
    if RedisH.IsInHash('service'):
        logger.info('Redis info Hash: Success')
    else:
        logger.error('Redis info Hash Not Found')

    # ** End ground range **

    RQ_recipe = RedisQueue(Key + '_recipe')
    RQ_recipe.RemoveAll()
    RQ_file = RedisQueue(Key + '_FileQueue')
    RQ_file.RemoveAll()
    RQ_WorkQueue = RedisQueue(Key + '_WorkQueue')
    RQ_WorkQueue.RemoveAll()
    RQ_loggy = RedisQueue(Key + '_loggy')
    RQ_loggy.RemoveAll()
    RQ_zip = RedisQueue(Key + '_ZIP')
    RQ_zip.RemoveAll()

    if xmlOBJ.getProcess() == 'POW':
        fileList = xmlOBJ.getFileListWB()
    elif xmlOBJ.getProcess() == 'MAP2':
        fileList = xmlOBJ.getMFileListWB()

    for List_file in fileList:

        ######### Input and output file naming and path stuff ############

        if xmlOBJ.getProcess() == 'POW':
            if xmlOBJ.getInst() == 'THEMIS_IR':
                Input_file = List_file.replace('odtie1_', 'odtir1_')
                Input_file = Input_file.replace('xxedr', 'xxrdr')
                Input_file = Input_file.replace('EDR.QUB', 'RDR.QUB')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
            elif xmlOBJ.getInst() in ('ISSNA', 'ISSWA'):
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
            elif xmlOBJ.getInst() == 'SOLID STATE IMAGING SYSTEM':
                Input_file = List_file.replace('.img', '.lbl')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')
            else:
                Input_file = List_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/',
                    '/pds_san/PDS_Archive/')

        elif xmlOBJ.getProcess() == 'MAP2':
            Input_file = List_file.replace('file://pds_san', '/pds_san')

            if '+' in Input_file:
                tempsplit = Input_file.split('+')
                tempFile = tempsplit[0]
            else:
                tempFile = Input_file
            label = pvl.load(tempFile)
            #*********Output final file naming **************
            Tbasename = os.path.splitext(os.path.basename(tempFile))[0]
            splitBase = Tbasename.split('_')

            labP = xmlOBJ.getProjection()
            if labP == 'INPUT':
                lab_proj = label['IsisCube']['Mapping']['ProjectionName'][0:4]
            else:
                lab_proj = labP[0:4]

            if xmlOBJ.getClat() is None or xmlOBJ.getClon() is None:
                basefinal = splitBase[0] + splitBase[1] + \
                    splitBase[2] + '_MAP2_' + lab_proj.upper()
            else:
                lab_clat = float(xmlOBJ.getClat())
                if lab_clat >= 0:
                    labH = 'N'
                elif lab_clat < 0:
                    labH = 'S'
                lab_clon = float(xmlOBJ.getClon())

                basefinal = (splitBase[0] + splitBase[1] + splitBase[2] +
                             '_MAP2_' + str(lab_clat) + labH +
                             str(lab_clon) + '_' + lab_proj.upper())
            RedisH.MAPname(basefinal)

        try:
            RQ_file.QueueAdd(Input_file)
            logger.info('File %s Added to Redis Queue', Input_file)
        except Exception as e:
            logger.warning('File %s NOT Added to Redis Queue: %s',
                           Input_file, e)
    RedisH.FileCount(RQ_file.QueueSize())
    logger.info('Count of Files Queue: %s', str(RQ_file.QueueSize()))

    # ************* Map Template Stuff ******************
    logger.info('Making Map File')
    mapOBJ = MakeMap()

    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.getProjection() == 'INPUT':
        proj = label['IsisCube']['Mapping']['ProjectionName']
        mapOBJ.Projection(proj)
    else:
        mapOBJ.Projection(xmlOBJ.getProjection())

    if xmlOBJ.getClon() is not None:
        mapOBJ.CLon(float(xmlOBJ.getClon()))
    if xmlOBJ.getClat() is not None:
        mapOBJ.CLat(float(xmlOBJ.getClat()))
    if xmlOBJ.getFirstParallel() is not None:
        mapOBJ.FirstParallel(float(xmlOBJ.getFirstParallel()))
    if xmlOBJ.getSecondParallel() is not None:
        mapOBJ.SecondParallel(float(xmlOBJ.getSecondParallel()))
    if xmlOBJ.getResolution() is not None:
        mapOBJ.PixelRes(float(xmlOBJ.getResolution()))
    if xmlOBJ.getTargetName() is not None:
        mapOBJ.Target(xmlOBJ.getTargetName())
    if xmlOBJ.getERadius() is not None:
        mapOBJ.ERadius(float(xmlOBJ.getERadius()))
    if xmlOBJ.getPRadius() is not None:
        mapOBJ.PRadius(float(xmlOBJ.getPRadius()))
    if xmlOBJ.getLatType() is not None:
        mapOBJ.LatType(xmlOBJ.getLatType())
    if xmlOBJ.getLonDirection() is not None:
        mapOBJ.LonDirection(xmlOBJ.getLonDirection())
    if xmlOBJ.getLonDomain() is not None:
        mapOBJ.LonDomain(int(xmlOBJ.getLonDomain()))

    if xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getMinLat() is not None:
            mapOBJ.MinLat(float(xmlOBJ.getMinLat()))
        if xmlOBJ.getMaxLat() is not None:
            mapOBJ.MaxLat(float(xmlOBJ.getMaxLat()))
        if xmlOBJ.getMinLon() is not None:
            mapOBJ.MinLon(float(xmlOBJ.getMinLon()))
        if xmlOBJ.getMaxLon() is not None:
            mapOBJ.MaxLon(float(xmlOBJ.getMaxLon()))

    mapOBJ.Map2pvl()

    MAPfile = directory + "/" + Key + '.map'
    mapOBJ.Map2File(MAPfile)

    try:
        with open(MAPfile):
            pass
        logger.info('Map File Creation: Success')
    except IOError:
        logger.error('Map File %s Not Found', MAPfile)

# ** End Map Template Stuff **

# *************************************************
    logger.info('Building Recipe')
    recipeOBJ = Recipe()
    if xmlOBJ.getProcess() == 'POW':
        recipeOBJ.AddJsonFile(recipe_dict[xmlOBJ.getInst()])
    elif xmlOBJ.getProcess() == 'MAP2':
        recipeOBJ.AddJsonFile(recipe_dict['MAP'])
# ************** Test for stretch and add to recipe **********************
# if MAP2 and 8 or 16 bit run stretch to set range

    if xmlOBJ.getOutBit() == 'input':
        testBitType = str(label['IsisCube']['Core']['Pixels']['Type']).upper()
    else:
        testBitType = xmlOBJ.getOutBit().upper()

    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.STR_Type() is None:
        pixel_type = str(label['IsisCube']['Core']['Pixels']['Type']).upper()
        if pixel_type != xmlOBJ.getOutBit().upper() and pixel_type != 'REAL':
            if pixel_type == 'SIGNEDWORD':
                strpairs = '0:-32765 0:-32765 100:32765 100:32765'
            elif pixel_type == 'UNSIGNEDBYTE':
                strpairs = '0:1 0:1 100:254 100:254'

            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    strType = xmlOBJ.STR_Type()
    if (strType == 'StretchPercent' and xmlOBJ.STR_PercentMin() is not None
            and xmlOBJ.STR_PercentMax() is not None and testBitType != 'REAL'):
        if float(xmlOBJ.STR_PercentMin()) != 0 and float(
                xmlOBJ.STR_PercentMax()) != 100:
            if testBitType == 'UNSIGNEDBYTE':
                strpairs = '0:1 ' + xmlOBJ.STR_PercentMin() + ':1 ' + \
                    xmlOBJ.STR_PercentMax() + ':254 100:254'
            elif testBitType == 'SIGNEDWORD':
                strpairs = '0:-32765 ' + xmlOBJ.STR_PercentMin() + ':-32765 ' + \
                    xmlOBJ.STR_PercentMax() + ':32765 100:32765'

            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'GaussStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('gaussstretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('gsigma', xmlOBJ.STR_GaussSigma())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'HistogramEqualization':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('histeq')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        if xmlOBJ.STR_PercentMin() is None:
            STRprocessOBJ.AddParameter('minper', '0')
        else:
            STRprocessOBJ.AddParameter('minper', xmlOBJ.STR_PercentMin())
        if xmlOBJ.STR_PercentMax() is None:
            STRprocessOBJ.AddParameter('maxper', '100')
        else:
            STRprocessOBJ.AddParameter('maxper', xmlOBJ.STR_PercentMax())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'SigmaStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('sigmastretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('variance', xmlOBJ.STR_SigmaVariance())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())
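The pairs strings built in these stretch branches use the ISIS stretch input:output pair syntax; with usepercentages set to yes, the inputs are histogram percentages rather than raw DNs. A worked example of the StretchPercent case, assuming STR_PercentMin() returns '2' and STR_PercentMax() returns '98' on an UNSIGNEDBYTE cube:

str_min, str_max = '2', '98'   # assumed return values, for illustration
strpairs = '0:1 ' + str_min + ':1 ' + str_max + ':254 100:254'
print(strpairs)  # -> '0:1 2:1 98:254 100:254'
# i.e. everything below the 2nd percentile maps to DN 1 and everything
# above the 98th percentile maps to DN 254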

# ************* Test for output bit type and add to recipe *************
    if xmlOBJ.getProcess() == 'POW':
        if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
            CAprocessOBJ = Process()
            CAprocessOBJ.newProcess('cubeatt-bit')
            CAprocessOBJ.AddParameter('from_', 'value')
            CAprocessOBJ.AddParameter('to', 'value')
            recipeOBJ.AddProcess(CAprocessOBJ.getProcess())
    elif xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getOutBit().upper() != 'INPUT':
            if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
                if str(label['IsisCube']['Core']['Pixels']
                       ['Type']).upper() != xmlOBJ.getOutBit().upper():
                    CAprocessOBJ = Process()
                    CAprocessOBJ.newProcess('cubeatt-bit')
                    CAprocessOBJ.AddParameter('from_', 'value')
                    CAprocessOBJ.AddParameter('to', 'value')
                    recipeOBJ.AddProcess(CAprocessOBJ.getProcess())

# **************** Add Grid(MAP2) *************
    if xmlOBJ.getGridInterval() is not None:
        GprocessOBJ = Process()
        GprocessOBJ.newProcess('grid')
        GprocessOBJ.AddParameter('from_', 'value')
        GprocessOBJ.AddParameter('to', 'value')
        GprocessOBJ.AddParameter('latinc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('loninc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('outline', 'yes')
        GprocessOBJ.AddParameter('boundary', 'yes')
        GprocessOBJ.AddParameter('linewidth', '3')
        recipeOBJ.AddProcess(GprocessOBJ.getProcess())

# ********OUTPUT FORMAT ***************
# ************* Test for GDAL and add to recipe *************************
    Oformat = xmlOBJ.getOutFormat()
    if Oformat in ('GeoTiff-BigTiff', 'GeoJPEG-2000', 'JPEG', 'PNG'):
        if Oformat == 'GeoJPEG-2000':
            Oformat = 'JP2KAK'
        if Oformat == 'GeoTiff-BigTiff':
            Oformat = 'GTiff'
        GDALprocessOBJ = Process()
        GDALprocessOBJ.newProcess('/usgs/apps/anaconda/bin/gdal_translate')
        if xmlOBJ.getOutBit() != 'input':
            GDALprocessOBJ.AddParameter(
                '-ot', GDALprocessOBJ.GDAL_OBit(xmlOBJ.getOutBit()))
        GDALprocessOBJ.AddParameter('-of', Oformat)

        if Oformat in ('GTiff', 'JP2KAK', 'JPEG'):
            GDALprocessOBJ.AddParameter('-co',
                                        GDALprocessOBJ.GDAL_Creation(Oformat))

        recipeOBJ.AddProcess(GDALprocessOBJ.getProcess())
# **************** set up pds2isis and add to recipe
    elif Oformat == 'PDS':
        pdsProcessOBJ = Process()
        pdsProcessOBJ.newProcess('isis2pds')
        pdsProcessOBJ.AddParameter('from_', 'value')
        pdsProcessOBJ.AddParameter('to', 'value')
        if xmlOBJ.getOutBit() == 'unsignedbyte':
            pdsProcessOBJ.AddParameter('bittype', '8bit')
        elif xmlOBJ.getOutBit() == 'signedword':
            pdsProcessOBJ.AddParameter('bittype', 's16bit')

        recipeOBJ.AddProcess(pdsProcessOBJ.getProcess())

    for item in recipeOBJ.getProcesses():
        processOBJ = Process()
        processOBJ.ProcessFromRecipe(item, recipeOBJ.getRecipe())

        if item == 'cam2map':

            processOBJ.updateParameter('map', MAPfile)

            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'CAMERA')
            else:
                processOBJ.updateParameter('pixres', 'MAP')

            if xmlOBJ.getRangeType() is None:
                processOBJ.updateParameter('defaultrange', 'MINIMIZE')
            elif xmlOBJ.getRangeType() in ('smart', 'fill'):
                processOBJ.updateParameter('defaultrange', 'CAMERA')
                processOBJ.AddParameter('trim', 'YES')

        elif item == 'map2map':
            processOBJ.updateParameter('map', MAPfile)
            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'FROM')
            else:
                processOBJ.updateParameter('pixres', 'MAP')

            if xmlOBJ.OutputGeometry() is not None:
                processOBJ.updateParameter('defaultrange', 'MAP')
                processOBJ.AddParameter('trim', 'YES')
            else:
                processOBJ.updateParameter('defaultrange', 'FROM')

        processJSON = processOBJ.Process2JSON()
        try:
            RQ_recipe.QueueAdd(processJSON)
            logger.info('Recipe Element Added to Redis: %s : Success', item)
        except Exception as e:
            logger.warning('Recipe Element NOT Added to Redis: %s : %s',
                           item, e)


# ** *************** HPC job stuff ***********************
    logger.info('HPC Cluster job Submission Starting')
    jobOBJ = HPCjob()
    jobOBJ.setJobName(Key + '_Service')
    jobOBJ.setStdOut('/usgs/cdev/PDS/output/' + Key + '_%A_%a.out')
    jobOBJ.setStdError('/usgs/cdev/PDS/output/' + Key + '_%A_%a.err')
    jobOBJ.setWallClock('24:00:00')
    jobOBJ.setMemory('24576')
    jobOBJ.setPartition('pds')
    JAsize = RQ_file.QueueSize()
    jobOBJ.setJobArray(JAsize)
    logger.info('Job Array Size : %s', str(JAsize))

    jobOBJ.addPath('/usgs/apps/anaconda/bin')

    if xmlOBJ.getProcess() == 'POW':
        cmd = '/usgs/cdev/PDS/bin/POWprocess.py ' + Key
    elif xmlOBJ.getProcess() == 'MAP2':
        cmd = '/usgs/cdev/PDS/bin/MAPprocess.py ' + Key

    logger.info('HPC Command: %s', cmd)
    jobOBJ.setCommand(cmd)

    SBfile = directory + '/' + Key + '.sbatch'
    jobOBJ.MakeJobFile(SBfile)

    try:
        with open(SBfile):
            pass
        logger.info('SBATCH File Creation: Success')
    except IOError:
        logger.error('SBATCH File %s Not Found', SBfile)

    try:
        jobOBJ.Run()
        logger.info('Job Submission to HPC: Success')
        DBQO.setJobsStarted(Key)
    except IOError as e:
        logger.error('Jobs NOT Submitted to HPC\n%s', e)
Example 12
def main():

    Key = sys.argv[-1]

    workarea = '/scratch/pds_services/' + Key + '/'

    RQ_file = RedisQueue(Key + '_FileQueue')
    RQ_work = RedisQueue(Key + '_WorkQueue')
    RQ_zip = RedisQueue(Key + '_ZIP')
    RQ_loggy = RedisQueue(Key + '_loggy')
    RQ_final = RedisQueue('FinalQueue')
    RHash = RedisHash(Key + '_info')
    RHerror = RedisHash(Key + '_error')

    if int(RQ_file.QueueSize()) == 0:
        print("No Files Found in Redis Queue")
    else:
        print(RQ_file.getQueueName())
        jobFile = RQ_file.Qfile2Qwork(
            RQ_file.getQueueName(), RQ_work.getQueueName())

        # Setup system logging
        basename = os.path.splitext(os.path.basename(jobFile))[0]
        logger = logging.getLogger(Key + '.' + basename)
        logger.setLevel(logging.INFO)

        logFileHandle = logging.FileHandler('/usgs/cdev/PDS/logs/Service.log')
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
        logFileHandle.setFormatter(formatter)
        logger.addHandler(logFileHandle)

        logger.info('Starting POW Processing')

        # set up loggy
        loggyOBJ = Loggy(basename)


        # File Naming 
        if '+' in jobFile:
            bandSplit = jobFile.split('+')
            inputFile = bandSplit[0]
        else:
            inputFile = jobFile

        infile = workarea + \
            os.path.splitext(os.path.basename(jobFile))[0] + '.input.cub'
        outfile = workarea + \
            os.path.splitext(os.path.basename(jobFile))[0] + '.output.cub'

        RQ_recipe = RedisQueue(Key + '_recipe')

        status = 'success'
        for element in RQ_recipe.RecipeGet():
            if status == 'error':
                break
            elif status == 'success':
                processOBJ = Process()
                process = processOBJ.JSON2Process(element)

                if 'gdal_translate' not in processOBJ.getProcessName():
                    print(processOBJ.getProcessName())
                    if '2isis' in processOBJ.getProcessName():
                        processOBJ.updateParameter('from_', inputFile)
                        processOBJ.updateParameter('to', outfile)
                    elif 'cubeatt-band' in processOBJ.getProcessName():
                        if '+' in jobFile:
                            infileB = infile + '+' + bandSplit[1]
                            processOBJ.updateParameter('from_', infileB)
                            processOBJ.updateParameter('to', outfile)
                            processOBJ.ChangeProcess('cubeatt')
                        else:
                            continue
                    elif 'cubeatt-bit' in processOBJ.getProcessName():
                        if RHash.OutBit() == 'unsignedbyte':
                            temp_outfile = outfile + '+lsb+tile+attached+unsignedbyte+1:254'
                        elif RHash.OutBit() == 'signedword':
                            temp_outfile = outfile + '+lsb+tile+attached+signedword+-32765:32765'
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', temp_outfile)
                        processOBJ.ChangeProcess('cubeatt')

                    elif 'spice' in processOBJ.getProcessName():
                        processOBJ.updateParameter('from_', infile)

                    elif 'ctxevenodd' in processOBJ.getProcessName():
                        label = pvl.load(infile)
                        SS = label['IsisCube']['Instrument']['SpatialSumming']
                        print(SS)
                        if SS != 1:
                            continue
                        else:
                            processOBJ.updateParameter('from_', infile)
                            processOBJ.updateParameter('to', outfile)

                    elif 'mocevenodd' in processOBJ.getProcessName():
                        label = pvl.load(infile)
                        CTS = label['IsisCube']['Instrument']['CrosstrackSumming']
                        print(CTS)
                        if CTS != 1:
                            continue
                        else:
                            processOBJ.updateParameter('from_', infile)
                            processOBJ.updateParameter('to', outfile)
                    elif 'mocnoise50' in processOBJ.getProcessName():
                        label = pvl.load(infile)
                        CTS = label['IsisCube']['Instrument']['CrosstrackSumming']
                        if CTS != 1:
                            continue
                        else:
                            processOBJ.updateParameter('from_', infile)
                            processOBJ.updateParameter('to', outfile)
                    elif 'cam2map' in processOBJ.getProcessName():
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)

                        if RHash.getGRtype() == 'smart' or RHash.getGRtype() == 'fill':
                            subloggyOBJ = SubLoggy('cam2map')
                            camrangeOUT = workarea + basename + '_camrange.txt'
                            isis.camrange(from_=infile,
                                          to=camrangeOUT)

                            cam = pvl.load(camrangeOUT)

                            if cam['UniversalGroundRange']['MaximumLatitude'] < float(RHash.getMinLat()) or \
                               cam['UniversalGroundRange']['MinimumLatitude'] > float(RHash.getMaxLat()) or \
                               cam['UniversalGroundRange']['MaximumLongitude'] < float(RHash.getMinLon()) or \
                               cam['UniversalGroundRange']['MinimumLongitude'] > float(RHash.getMaxLon()):

                                status = 'error'
                                eSTR = "Error Ground Range Outside Extent Range"
                                RHerror.addError(os.path.splitext(
                                    os.path.basename(jobFile))[0], eSTR)
                                subloggyOBJ.setStatus('ERROR')
                                subloggyOBJ.errorOut(eSTR)
                                loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())
                                break

                            elif RHash.getGRtype() == 'smart':
                                if cam['UniversalGroundRange']['MinimumLatitude'] > float(RHash.getMinLat()):
                                    minlat = cam['UniversalGroundRange']['MinimumLatitude']
                                else:
                                    minlat = RHash.getMinLat()

                                if cam['UniversalGroundRange']['MaximumLatitude'] < float(RHash.getMaxLat()):
                                    maxlat = cam['UniversalGroundRange']['MaximumLatitude']
                                else:
                                    maxlat = RHash.getMaxLat()

                                if cam['UniversalGroundRange']['MinimumLongitude'] > float(RHash.getMinLon()):
                                    minlon = cam['UniversalGroundRange']['MinimumLongitude']
                                else:
                                    minlon = RHash.getMinLon()

                                if cam['UniversalGroundRange']['MaximumLongitude'] < float(RHash.getMaxLon()):
                                    maxlon = cam['UniversalGroundRange']['MaximumLongitude']
                                else:
                                    maxlon = RHash.getMaxLon()
                            elif RHash.getGRtype() == 'fill':
                                minlat = RHash.getMinLat()
                                maxlat = RHash.getMaxLat()
                                minlon = RHash.getMinLon()
                                maxlon = RHash.getMaxLon()

                            processOBJ.AddParameter('minlat', minlat)
                            processOBJ.AddParameter('maxlat', maxlat)
                            processOBJ.AddParameter('minlon', minlon)
                            processOBJ.AddParameter('maxlon', maxlon)

                            os.remove(camrangeOUT)

                    elif 'isis2pds' in processOBJ.getProcessName():
                        finalfile = infile.replace('.input.cub', '_final.img')
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', finalfile)

                    else:
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)

                    print(processOBJ.getProcess())

                    for k, v in processOBJ.getProcess().items():
                        func = getattr(isis, k)
                        subloggyOBJ = SubLoggy(k)
                        try:
                            func(**v)
                            logger.info('Process %s :: Success', k)
                            subloggyOBJ.setStatus('SUCCESS')
                            subloggyOBJ.setCommand(processOBJ.LogCommandline())
                            subloggyOBJ.setHelpLink(processOBJ.LogHelpLink())
                            loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

                            if os.path.isfile(outfile):
                                os.rename(outfile, infile)
                            status = 'success'

                        except ProcessError as e:
                            logger.error('Process %s :: Error', k)
                            logger.error(e)
                            status = 'error'
                            eSTR = 'Error Executing ' + k + \
                                ' Standard Error: ' + str(e)
                            RHerror.addError(os.path.splitext(
                                os.path.basename(jobFile))[0], eSTR)
                            subloggyOBJ.setStatus('ERROR')
                            subloggyOBJ.setCommand(processOBJ.LogCommandline())
                            subloggyOBJ.setHelpLink(processOBJ.LogHelpLink())
                            subloggyOBJ.errorOut(eSTR)
                            loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

                else:
                    GDALcmd = ""
                    for process, v in processOBJ.getProcess().items():
                        subloggyOBJ = SubLoggy(process)
                        GDALcmd += process
                        for key, value in v.items():
                            GDALcmd += ' ' + key + ' ' + value

                    if RHash.Format() == 'GeoTiff-BigTiff':
                        fileext = 'tif'
                    elif RHash.Format() == 'GeoJPEG-2000':
                        fileext = 'jp2'
                    elif RHash.Format() == 'JPEG':
                        fileext = 'jpg'
                    elif RHash.Format() == 'PNG':
                        fileext = 'png'
                    elif RHash.Format() == 'GIF':
                        fileext = 'gif'

                    logGDALcmd = GDALcmd + ' ' + basename + \
                        '.input.cub ' + basename + '_final.' + fileext
                    finalfile = infile.replace(
                        '.input.cub', '_final.' + fileext)
                    GDALcmd += ' ' + infile + ' ' + finalfile
                    print(GDALcmd)

                    result = subprocess.call(GDALcmd, shell=True)
                    if result == 0:
                        logger.info('Process GDAL translate :: Success')
                        status = 'success'
                        subloggyOBJ.setStatus('SUCCESS')
                        subloggyOBJ.setCommand(logGDALcmd)
                        subloggyOBJ.setHelpLink(
                            'http://www.gdal.org/gdal_translate.html')
                        loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())
                        os.remove(infile)
                    else:
                        errmsg = 'Error Executing GDAL translate: Error'
                        logger.error(errmsg)
                        status = 'error'
                        RHerror.addError(os.path.splitext(
                            os.path.basename(jobFile))[0], errmsg)
                        subloggyOBJ.setStatus('ERROR')
                        subloggyOBJ.setCommand(logGDALcmd)
                        subloggyOBJ.setHelpLink(
                            'http://www.gdal.org/gdal_translate.html')
                        subloggyOBJ.errorOut('Process GDAL translate :: Error')
                        loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

        if status == 'success':

            if RHash.Format() == 'ISIS3':
                finalfile = infile.replace('.input.cub', '_final.cub')
                shutil.move(infile, finalfile)
            if RHash.getStatus() != 'ERROR':
                RHash.Status('SUCCESS')

            try:
                RQ_zip.QueueAdd(finalfile)
                logger.info('File Added to ZIP Queue')
            except Exception:
                logger.error('File NOT Added to ZIP Queue')

        elif status == 'error':
            RHash.Status('ERROR')
            if os.path.isfile(infile):
                os.remove(infile)

        try:
            RQ_loggy.QueueAdd(loggyOBJ.Loggy2json())
            RQ_work.QueueRemove(jobFile)
            logger.info('JSON Added to Loggy Queue')
        except Exception:
            logger.error('JSON NOT Added to Loggy Queue')

        if RQ_file.QueueSize() == 0 and RQ_work.QueueSize() == 0:
            try:
                RQ_final.QueueAdd(Key)
                logger.info('Key %s Added to Final Queue: Success', Key)
                logger.info('Both Queues Empty: filequeue = %s  work queue = %s', str(
                    RQ_file.QueueSize()), str(RQ_work.QueueSize()))
                logger.info('JOB Complete')
            except Exception:
                logger.error('Key NOT Added to Final Queue')
        elif RQ_file.QueueSize() == 0 and RQ_work.QueueSize() != 0:
            logger.warning('Work Queue Not Empty: filequeue = %s  work queue = %s', str(
                RQ_file.QueueSize()), str(RQ_work.QueueSize()))
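Qfile2Qwork moves the job from the file queue to a work queue in a single step, so a worker that dies mid-job leaves the item parked in the work queue instead of lost; the final-queue bookkeeping at the end relies on that. Redis supports this pattern natively via RPOPLPUSH; a sketch of the idea with redis-py (the pipeline's actual method may differ):

import redis

db = redis.StrictRedis()

def qfile_to_qwork(src_queue, work_queue):
    """Atomically pop from src_queue and push onto work_queue (RPOPLPUSH)."""
    item = db.rpoplpush(src_queue, work_queue)
    return item.decode('utf-8') if item is not None else None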
Example 13
def main():
    args = Args()
    args.parse_args()
    key = args.key
    namespace = args.namespace

    if namespace is None:
        namespace = default_namespace

    # Set up logging
    logger = logging.getLogger(key)
    logger.setLevel(logging.INFO)

    logFileHandle = logging.FileHandler(pds_log + 'Service.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    # Connect to database and access 'jobs' table
    DBQO = PDS_DBquery('JOBS')
    if key is None:
        # If no key is specified, grab the first key
        key = DBQO.jobKey()
    try:
        # Set the 'queued' column to current time i.e. prep for processing
        DBQO.setJobsQueued(key)
    except KeyError as e:
        logger.error('%s', e)
        exit(1)

    logger.info('Starting Process')

    xmlOBJ = jobXML(DBQO.jobXML4Key(key))

    # Make directory if it doesn't exist
    directory = scratch + key
    if not os.path.exists(directory):
        os.makedirs(directory)

    logger.info('Working Area: %s', directory)

    # Set up Redis Hash for ground range
    RedisH = RedisHash(key + '_info')
    RedisH.RemoveAll()
    RedisErrorH = RedisHash(key + '_error')
    RedisErrorH.RemoveAll()
    RedisH_DICT = {}
    RedisH_DICT['service'] = xmlOBJ.getProcess()
    RedisH_DICT['fileformat'] = xmlOBJ.getOutFormat()
    RedisH_DICT['outbit'] = xmlOBJ.getOutBit()
    if xmlOBJ.getRangeType() is not None:
        RedisH_DICT['grtype'] = xmlOBJ.getRangeType()
        RedisH_DICT['minlat'] = xmlOBJ.getMinLat()
        RedisH_DICT['maxlat'] = xmlOBJ.getMaxLat()
        RedisH_DICT['minlon'] = xmlOBJ.getMinLon()
        RedisH_DICT['maxlon'] = xmlOBJ.getMaxLon()

    if not RedisH.IsInHash('service'):
        RedisH.AddHash(RedisH_DICT)
    if RedisH.IsInHash('service'):
        logger.info('Redis info Hash: Success')
    else:
        logger.error('Redis info Hash Not Found')

    # End ground range

    RQ_recipe = RedisQueue(key + '_recipe', namespace)
    RQ_recipe.RemoveAll()
    RQ_file = RedisQueue(key + '_FileQueue', namespace)
    RQ_file.RemoveAll()
    RQ_WorkQueue = RedisQueue(key + '_WorkQueue', namespace)
    RQ_WorkQueue.RemoveAll()
    RQ_loggy = RedisQueue(key + '_loggy', namespace)
    RQ_loggy.RemoveAll()
    RQ_zip = RedisQueue(key + '_ZIP', namespace)
    RQ_zip.RemoveAll()

    if xmlOBJ.getProcess() == 'POW':
        fileList = xmlOBJ.getFileListWB()
    elif xmlOBJ.getProcess() == 'MAP2':
        fileList = xmlOBJ.getMFileListWB()

    for List_file in fileList:

        # Input and output file naming and path stuff
        if xmlOBJ.getProcess() == 'POW':
            if xmlOBJ.getInst() == 'THEMIS_IR':
                Input_file = List_file.replace('odtie1_', 'odtir1_')
                Input_file = Input_file.replace('xxedr', 'xxrdr')
                Input_file = Input_file.replace('EDR.QUB', 'RDR.QUB')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() in ('ISSNA', 'ISSWA'):
                Input_file = List_file.replace('.IMG', '.LBL')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            elif xmlOBJ.getInst() == 'SOLID STATE IMAGING SYSTEM':
                Input_file = List_file.replace('.img', '.lbl')
                Input_file = Input_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)
            else:
                Input_file = List_file.replace(
                    'http://pdsimage.wr.usgs.gov/Missions/', archive_base)

        elif xmlOBJ.getProcess() == 'MAP2':
            Input_file = List_file.replace('file://pds_san', '/pds_san')

            if '+' in Input_file:
                tempsplit = Input_file.split('+')
                tempFile = tempsplit[0]
            else:
                tempFile = Input_file
            label = pvl.load(tempFile)
            # Output final file naming
            Tbasename = os.path.splitext(os.path.basename(tempFile))[0]
            splitBase = Tbasename.split('_')

            labP = xmlOBJ.getProjection()
            if labP == 'INPUT':
                lab_proj = label['IsisCube']['Mapping']['ProjectionName'][0:4]
            else:
                lab_proj = labP[0:4]

            if xmlOBJ.getClat() is None or xmlOBJ.getClon() is None:
                basefinal = splitBase[0] + splitBase[1] + \
                    splitBase[2] + '_MAP2_' + lab_proj.upper()
            else:
                lab_clat = float(xmlOBJ.getClat())
                if lab_clat >= 0:
                    labH = 'N'
                elif lab_clat < 0:
                    labH = 'S'
                lab_clon = float(xmlOBJ.getClon())

                basefinal = (splitBase[0] + splitBase[1] + splitBase[2] +
                             '_MAP2_' + str(lab_clat) + labH +
                             str(lab_clon) + '_' + lab_proj.upper())
            RedisH.MAPname(basefinal)

        try:
            RQ_file.QueueAdd(Input_file)
            logger.info('File %s Added to Redis Queue', Input_file)
        except Exception as e:
            logger.warning('File %s NOT Added to Redis Queue: %s',
                           Input_file, e)
    RedisH.FileCount(RQ_file.QueueSize())
    logger.info('Count of Files Queue: %s', str(RQ_file.QueueSize()))

    # Map Template Stuff
    logger.info('Making Map File')
    mapOBJ = MakeMap()

    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.getProjection() == 'INPUT':
        proj = label['IsisCube']['Mapping']['ProjectionName']
        mapOBJ.Projection(proj)
    else:
        mapOBJ.Projection(xmlOBJ.getProjection())

    if xmlOBJ.getClon() is not None:
        mapOBJ.CLon(float(xmlOBJ.getClon()))
    if xmlOBJ.getClat() is not None:
        mapOBJ.CLat(float(xmlOBJ.getClat()))
    if xmlOBJ.getFirstParallel() is not None:
        mapOBJ.FirstParallel(float(xmlOBJ.getFirstParallel()))
    if xmlOBJ.getSecondParallel() is not None:
        mapOBJ.SecondParallel(float(xmlOBJ.getSecondParallel()))
    if xmlOBJ.getResolution() is not None:
        mapOBJ.PixelRes(float(xmlOBJ.getResolution()))
    if xmlOBJ.getTargetName() is not None:
        mapOBJ.Target(xmlOBJ.getTargetName())
    if xmlOBJ.getERadius() is not None:
        mapOBJ.ERadius(float(xmlOBJ.getERadius()))
    if xmlOBJ.getPRadius() is not None:
        mapOBJ.PRadius(float(xmlOBJ.getPRadius()))
    if xmlOBJ.getLatType() is not None:
        mapOBJ.LatType(xmlOBJ.getLatType())
    if xmlOBJ.getLonDirection() is not None:
        mapOBJ.LonDirection(xmlOBJ.getLonDirection())
    if xmlOBJ.getLonDomain() is not None:
        mapOBJ.LonDomain(int(xmlOBJ.getLonDomain()))

    if xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getMinLat() is not None:
            mapOBJ.MinLat(float(xmlOBJ.getMinLat()))
        if xmlOBJ.getMaxLat() is not None:
            mapOBJ.MaxLat(float(xmlOBJ.getMaxLat()))
        if xmlOBJ.getMinLon() is not None:
            mapOBJ.MinLon(float(xmlOBJ.getMinLon()))
        if xmlOBJ.getMaxLon() is not None:
            mapOBJ.MaxLon(float(xmlOBJ.getMaxLon()))

    mapOBJ.Map2pvl()

    MAPfile = directory + "/" + key + '.map'
    mapOBJ.Map2File(MAPfile)

    try:
        with open(MAPfile):
            pass
        logger.info('Map File Creation: Success')
    except IOError:
        logger.error('Map File %s Not Found', MAPfile)

    # ** End Map Template Stuff **

    logger.info('Building Recipe')
    recipeOBJ = Recipe()
    if xmlOBJ.getProcess() == 'POW':
        recipeOBJ.AddJsonFile(recipe_base + xmlOBJ.getCleanName() + '.json',
                              "pow")
    elif xmlOBJ.getProcess() == 'MAP2':
        recipeOBJ.AddJsonFile(recipe_base + "map2_process.json", "map")
    # Test for stretch and add to recipe
    # if MAP2 and 8 or 16 bit run stretch to set range

    if xmlOBJ.getOutBit() == 'input':
        testBitType = str(label['IsisCube']['Core']['Pixels']['Type']).upper()
    else:
        testBitType = xmlOBJ.getOutBit().upper()

    if xmlOBJ.getProcess() == 'MAP2' and xmlOBJ.STR_Type() is None:
        pixel_type = str(label['IsisCube']['Core']['Pixels']['Type']).upper()
        if pixel_type != xmlOBJ.getOutBit().upper() and pixel_type != 'REAL':
            if pixel_type == 'SIGNEDWORD':
                strpairs = '0:-32765 0:-32765 100:32765 100:32765'
            elif pixel_type == 'UNSIGNEDBYTE':
                strpairs = '0:1 0:1 100:254 100:254'

            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    strType = xmlOBJ.STR_Type()
    if (strType == 'StretchPercent' and xmlOBJ.STR_PercentMin() is not None
            and xmlOBJ.STR_PercentMax() is not None and testBitType != 'REAL'):
        if float(xmlOBJ.STR_PercentMin()) != 0 and float(
                xmlOBJ.STR_PercentMax()) != 100:
            if testBitType == 'UNSIGNEDBYTE':
                strpairs = '0:1 ' + xmlOBJ.STR_PercentMin() + ':1 ' + \
                    xmlOBJ.STR_PercentMax() + ':254 100:254'
            elif testBitType == 'SIGNEDWORD':
                strpairs = '0:-32765 ' + xmlOBJ.STR_PercentMin() + ':-32765 ' + \
                    xmlOBJ.STR_PercentMax() + ':32765 100:32765'

            STRprocessOBJ = Process()
            STRprocessOBJ.newProcess('stretch')
            STRprocessOBJ.AddParameter('from_', 'value')
            STRprocessOBJ.AddParameter('to', 'value')
            STRprocessOBJ.AddParameter('usepercentages', 'yes')
            STRprocessOBJ.AddParameter('pairs', strpairs)
            recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'GaussStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('gaussstretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('gsigma', xmlOBJ.STR_GaussSigma())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'HistogramEqualization':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('histeq')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        if xmlOBJ.STR_PercentMin() is None:
            STRprocessOBJ.AddParameter('minper', '0')
        else:
            STRprocessOBJ.AddParameter('minper', xmlOBJ.STR_PercentMin())
        if xmlOBJ.STR_PercentMax() is None:
            STRprocessOBJ.AddParameter('maxper', '100')
        else:
            STRprocessOBJ.AddParameter('maxper', xmlOBJ.STR_PercentMax())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    elif strType == 'SigmaStretch':
        STRprocessOBJ = Process()
        STRprocessOBJ.newProcess('sigmastretch')
        STRprocessOBJ.AddParameter('from_', 'value')
        STRprocessOBJ.AddParameter('to', 'value')
        STRprocessOBJ.AddParameter('variance', xmlOBJ.STR_SigmaVariance())
        recipeOBJ.AddProcess(STRprocessOBJ.getProcess())

    # Test for output bit type and add to recipe
    if xmlOBJ.getProcess() == 'POW':
        if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
            CAprocessOBJ = Process()
            CAprocessOBJ.newProcess('cubeatt-bit')
            CAprocessOBJ.AddParameter('from_', 'value')
            CAprocessOBJ.AddParameter('to', 'value')
            recipeOBJ.AddProcess(CAprocessOBJ.getProcess())
    elif xmlOBJ.getProcess() == 'MAP2':
        if xmlOBJ.getOutBit().upper() != 'INPUT':
            if xmlOBJ.getOutBit().upper() in ('UNSIGNEDBYTE', 'SIGNEDWORD'):
                if str(label['IsisCube']['Core']['Pixels']
                       ['Type']).upper() != xmlOBJ.getOutBit().upper():
                    CAprocessOBJ = Process()
                    CAprocessOBJ.newProcess('cubeatt-bit')
                    CAprocessOBJ.AddParameter('from_', 'value')
                    CAprocessOBJ.AddParameter('to', 'value')
                    recipeOBJ.AddProcess(CAprocessOBJ.getProcess())

    # Add Grid(MAP2)
    if xmlOBJ.getGridInterval() is not None:
        GprocessOBJ = Process()
        GprocessOBJ.newProcess('grid')
        GprocessOBJ.AddParameter('from_', 'value')
        GprocessOBJ.AddParameter('to', 'value')
        GprocessOBJ.AddParameter('latinc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('loninc', xmlOBJ.getGridInterval())
        GprocessOBJ.AddParameter('outline', 'yes')
        GprocessOBJ.AddParameter('boundary', 'yes')
        GprocessOBJ.AddParameter('linewidth', '3')
        recipeOBJ.AddProcess(GprocessOBJ.getProcess())

    # OUTPUT FORMAT
    # Test for GDAL and add to recipe
    Oformat = xmlOBJ.getOutFormat()
    if Oformat in ('GeoTiff-BigTiff', 'GeoJPEG-2000', 'JPEG', 'PNG'):
        if Oformat == 'GeoJPEG-2000':
            Oformat = 'JP2KAK'
        if Oformat == 'GeoTiff-BigTiff':
            Oformat = 'GTiff'
        GDALprocessOBJ = Process()
        # @TODO remove hard-coded path in favor of using whichever utilities are found within the conda environment --
        #  we need more information here to ensure that whichever utilities are found are capable of supporting GeoJPEG-2000.
        GDALprocessOBJ.newProcess('/usgs/apps/anaconda/bin/gdal_translate')
        if xmlOBJ.getOutBit() != 'input':
            GDALprocessOBJ.AddParameter(
                '-ot', GDALprocessOBJ.GDAL_OBit(xmlOBJ.getOutBit()))
        GDALprocessOBJ.AddParameter('-of', Oformat)

        if Oformat in ('GTiff', 'JP2KAK', 'JPEG'):
            GDALprocessOBJ.AddParameter('-co',
                                        GDALprocessOBJ.GDAL_Creation(Oformat))

        recipeOBJ.AddProcess(GDALprocessOBJ.getProcess())
    # set up pds2isis and add to recipe
    elif Oformat == 'PDS':
        pdsProcessOBJ = Process()
        pdsProcessOBJ.newProcess('isis2pds')
        pdsProcessOBJ.AddParameter('from_', 'value')
        pdsProcessOBJ.AddParameter('to', 'value')
        if xmlOBJ.getOutBit() == 'unsignedbyte':
            pdsProcessOBJ.AddParameter('bittype', '8bit')
        elif xmlOBJ.getOutBit() == 'signedword':
            pdsProcessOBJ.AddParameter('bittype', 's16bit')

        recipeOBJ.AddProcess(pdsProcessOBJ.getProcess())

    for item in recipeOBJ.getProcesses():
        processOBJ = Process()
        processOBJ.ProcessFromRecipe(item, recipeOBJ.getRecipe())

        if item == 'cam2map':

            processOBJ.updateParameter('map', MAPfile)

            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'CAMERA')
            else:
                processOBJ.updateParameter('pixres', 'MAP')

            if xmlOBJ.getRangeType() is None:
                processOBJ.updateParameter('defaultrange', 'MINIMIZE')
            elif xmlOBJ.getRangeType() in ('smart', 'fill'):
                processOBJ.updateParameter('defaultrange', 'CAMERA')
                processOBJ.AddParameter('trim', 'YES')

        elif item == 'map2map':
            processOBJ.updateParameter('map', MAPfile)
            if xmlOBJ.getResolution() is None:
                processOBJ.updateParameter('pixres', 'FROM')
            else:
                processOBJ.updateParameter('pixres', 'MAP')

            if xmlOBJ.OutputGeometry() is not None:
                processOBJ.updateParameter('defaultrange', 'MAP')
                processOBJ.AddParameter('trim', 'YES')
            else:
                processOBJ.updateParameter('defaultrange', 'FROM')

        processJSON = processOBJ.Process2JSON()
        try:
            RQ_recipe.QueueAdd(processJSON)
            logger.info('Recipe Element Added to Redis: %s : Success', item)
        except Exception as e:
            logger.warning('Recipe Element NOT Added to Redis: %s : %s',
                           item, str(e))

    # HPC job stuff
    logger.info('HPC Cluster job Submission Starting')
    jobOBJ = HPCjob()
    jobOBJ.setJobName(key + '_Service')
    jobOBJ.setStdOut(slurm_log + key + '_%A_%a.out')
    jobOBJ.setStdError(slurm_log + key + '_%A_%a.err')
    jobOBJ.setWallClock('24:00:00')
    jobOBJ.setMemory('24576')
    jobOBJ.setPartition('pds')
    JAsize = RQ_file.QueueSize()
    jobOBJ.setJobArray(JAsize)
    logger.info('Job Array Size : %s', str(JAsize))
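    # One SLURM array task per queued file: each array index is expected to
    # pop a single entry from the Redis file queue at run time.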

    # @TODO replace with source activate <env>
    #jobOBJ.addPath('/usgs/apps/anaconda/bin')

    # Whether or not we use the default namespace, this guarantees that the POW/MAP queues will match the namespace
    #  used in the job manager.
    if xmlOBJ.getProcess() == 'POW':
        cmd = cmd_dir + "POWprocess.py -k {} -n {}".format(key, namespace)
    elif xmlOBJ.getProcess() == 'MAP2':
        cmd = cmd_dir + "MAPprocess.py -k {} -n {}".format(key, namespace)

    logger.info('HPC Command: %s', cmd)
    jobOBJ.setCommand(cmd)

    SBfile = directory + '/' + key + '.sbatch'
    jobOBJ.MakeJobFile(SBfile)

    if os.path.isfile(SBfile):
        logger.info('SBATCH File Creation: Success')
    else:
        logger.error('SBATCH File %s Not Found', SBfile)

    try:
        jobOBJ.Run()
        logger.info('Job Submission to HPC: Success')
        DBQO.setJobsStarted(key)
    except IOError as e:
        logger.error('Jobs NOT Submitted to HPC')
Esempio n. 14
0
def main():

    args = Args()
    args.parse_args()
    override = args.override
    logger = logging.getLogger('Ingest_Process')
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'Ingest.log')
    print("Log File: {}Ingest.log".format(pds_log))
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info("Starting Ingest Process")
    PDSinfoDICT = json.load(open(pds_info, 'r'))

    RQ_main = RedisQueue('Ingest_ReadyQueue')
    RQ_lock = RedisLock(lock_obj)
    RQ_lock.add({RQ_main.id_name: '1'})
    RQ_work = RedisQueue('Ingest_WorkQueue')

    RQ_upc = RedisQueue('UPC_ReadyQueue')
    RQ_thumb = RedisQueue('Thumbnail_ReadyQueue')
    RQ_browse = RedisQueue('Browse_ReadyQueue')

    logger.info("UPC Queue: %s", RQ_upc.id_name)
    logger.info("Thumbnail Queue: %s", RQ_thumb.id_name)
    logger.info("Browse Queue: %s", RQ_browse.id_name)

    try:
        session, engine = db_connect(pds_db)
        logger.info('DataBase Connection: Success')
    except:
        logger.error('DataBase Connection: Error')
        return 1

    index = 1

    while int(RQ_main.QueueSize()) > 0 and RQ_lock.available(RQ_main.id_name):

        item = literal_eval(RQ_main.QueueGet().decode("utf-8"))
        inputfile = item[0]
        archive = item[1]
        RQ_work.QueueAdd(inputfile)

        subfile = inputfile.replace(PDSinfoDICT[archive]['path'], '')
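        # The archive root is stripped so the database stores archive-relative
        # paths; downstream processes re-join them with the archive path.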
        # Calculate checksum in chunks of 4096
        f_hash = hashlib.md5()
        with open(inputfile, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                f_hash.update(chunk)
        filechecksum = f_hash.hexdigest()

        QOBJ = session.query(Files).filter_by(filename=subfile).first()

        runflag = False
        if QOBJ is None or filechecksum != QOBJ.checksum:
            runflag = True

        if runflag or override:
            date = datetime.datetime.now(
                pytz.utc).strftime("%Y-%m-%d %H:%M:%S")
            fileURL = inputfile.replace(archive_base, web_base)

            # If all UPC requirements appear in the input file's path, flag it for UPC
            upcflag = all(x in inputfile
                          for x in PDSinfoDICT[archive]['upc_reqs'])
            filesize = os.path.getsize(inputfile)

            try:
                # If we found an existing file and want to overwrite the data
                if QOBJ is not None and override:
                    ingest_entry = QOBJ
                # If the file was not found, create a new entry
                else:
                    ingest_entry = Files()
                    ingest_entry.archiveid = PDSinfoDICT[archive]['archiveid']
                    ingest_entry.filename = subfile
                    ingest_entry.entry_date = date
                    ingest_entry.checksum = filechecksum
                    ingest_entry.upc_required = upcflag
                    ingest_entry.validation_required = True
                    ingest_entry.header_only = False
                    ingest_entry.release_date = date
                    ingest_entry.file_url = fileURL
                    ingest_entry.file_size = filesize
                    ingest_entry.di_pass = True
                    ingest_entry.di_date = date

                session.merge(ingest_entry)
                session.flush()

                if upcflag:
                    RQ_upc.QueueAdd((inputfile, ingest_entry.fileid, archive))
                    RQ_thumb.QueueAdd(
                        (inputfile, ingest_entry.fileid, archive))
                    RQ_browse.QueueAdd(
                        (inputfile, ingest_entry.fileid, archive))
                    #RQ_pilotB.QueueAdd((inputfile,ingest_entry.fileid, archive))

                RQ_work.QueueRemove(inputfile)

                index = index + 1

            except Exception as e:
                logger.error("Error During File Insert %s : %s", str(subfile),
                             str(e))

        elif not runflag and not override:
            RQ_work.QueueRemove(inputfile)
            logger.warning(
                "Not running ingest: file %s already present"
                " in database and no override flag supplied", inputfile)

        if index >= 250:
            try:
                session.commit()
                logger.info("Commit 250 files to Database: Success")
                index = 1
            except Exception as e:
                session.rollback()
                logger.warn("Unable to commit to database: %s", str(e))
    else:
        # while/else: this branch runs once the loop exits normally, i.e. the
        # ready queue has drained (or was empty to begin with)
        logger.info("No Files Remaining in Ingest Queue")
        try:
            session.commit()
            logger.info("Commit to Database: Success")
        except Exception as e:
            logger.error("Unable to commit to database: %s", str(e))
            session.rollback()

    # Close connection to database
    session.close()
    engine.dispose()

    if RQ_main.QueueSize() == 0 and RQ_work.QueueSize() == 0:
        logger.info("Process Complete All Queues Empty")
    elif RQ_main.QueueSize() == 0 and RQ_work.QueueSize() != 0:
        logger.warning("Process Done Work Queue NOT Empty Contains %s Files",
                       str(RQ_work.QueueSize()))

    logger.info("Ingest Complete")
Esempio n. 15
0
def main():

    # pdb.set_trace()

    args = Args()
    args.parse_args()
    logging.basicConfig(level=args.loglevel)
    RQ_ingest = RedisQueue('Ingest_ReadyQueue')
    RQ_linking = RedisQueue('LinkQueue')

    # Set up logging

    logger = logging.getLogger(args.archive + '_INGEST')
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Ingest.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    if args.loglevel == logging.INFO:
        print("Log File: {}Ingest.log".format(pds_log))

    PDSinfoDICT = json.load(open(pds_info, 'r'))
    try:
        archivepath = PDSinfoDICT[args.archive]['path'][:-1]
    except KeyError:
        print("\nArchive '{}' not found in {}\n".format(
            args.archive, pds_info))
        print("The following archives are available:")
        for k in PDSinfoDICT.keys():
            print("\t{}".format(k))
        logging.error("Unable to locate {}".format(args.archive))
        exit()

    if args.volume:
        archivepath = archivepath + '/' + args.volume

    logger.info('Starting Ingest for: %s', archivepath)
    logger.info('Ingest Queue: {}'.format(str(RQ_ingest.id_name)))
    logger.info('Linking Queue: {}'.format(str(RQ_linking.id_name)))

    # Possible bug in RQ?  Can't add to queue in "if fname == voldesc"
    queue_size = RQ_ingest.QueueSize()
    voldescs = []
    for dirpath, dirs, files in os.walk(archivepath):
        for filename in files:
            fname = os.path.join(dirpath, filename)
            if args.search:
                if args.search in fname:
                    try:
                        if os.path.basename(fname) == "voldesc.cat":
                            voldescs.append(fname)
                        if args.ingest:
                            RQ_ingest.QueueAdd((fname, args.archive))
                    except:
                        logger.error('File %s NOT added to Ingest Queue',
                                     fname)
                else:
                    continue
            else:
                try:
                    if os.path.basename(fname) == "voldesc.cat":
                        voldescs.append(fname)
                    if args.ingest:
                        RQ_ingest.QueueAdd((fname, args.archive))
                except:
                    logger.error('File %s NOT added to Ingest Queue', fname)

    n_added = RQ_ingest.QueueSize() - queue_size
    for fpath in voldescs:
        RQ_linking.QueueAdd((fpath, args.archive))
    logger.info('Files added to Ingest Queue: %s', n_added)
Esempio n. 16
0
            dag=dag)

        get_items_operator >> file_lookup_operator >> hash_file_operator >> cmp_checksum_operator
    return dag


def repeat_dag(context, dag_run_obj):
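    # Returning dag_run_obj tells TriggerDagRunOperator to fire 'di_process'
    # again; returning None (queue empty) ends the loop.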
    rq = context['params']['rq']
    if rq.QueueSize() > 0:
        return dag_run_obj


# @TODO find a way to make these separate tasks.  Difficult because they
#  can't be pickled, therefore they can't be returned via a task.
session, _ = db_connect('pdsdi_dev')
rq = RedisQueue('DI_ReadyQueue')
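
# A minimal sketch of the @TODO above, assuming the same db_connect/RedisQueue
# helpers already imported here: Airflow tasks cannot exchange these live
# objects via XCom (they are not picklable), so a separate-task refactor would
# rebuild them inside each task from plain strings passed through params.
def di_checksum_task(**context):
    # Hypothetical task body: reconstruct resources per-task, do one unit of
    # work, then dispose of them before the task returns.
    task_session, _ = db_connect(context['params']['db'])
    task_rq = RedisQueue(context['params']['queue'])
    item = task_rq.QueueGet()
    # ... checksum / comparison work would go here ...
    task_session.close()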

process_operator = SubDagOperator(subdag=process_subdag('di_process',
                                                        'di_checksum',
                                                        session=session,
                                                        archiveID=archiveID,
                                                        n_procs=5,
                                                        rq=rq),
                                  task_id='di_checksum',
                                  dag=dag)

loop_operator = TriggerDagRunOperator(task_id='loop',
                                      provide_context=True,
                                      params={'rq': rq},
                                      trigger_dag_id='di_process',
                                      python_callable=repeat_dag,
Esempio n. 17
0
def main():
    # Connect to database - ignore engine information
    pds_session, pds_engine = db_connect(pds_db)

    # Connect to database - ignore engine information
    session, upc_engine = db_connect(upc_db)

    # ***************** Set up logging *****************
    logger = logging.getLogger('UPC_Process')
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Process.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    PDSinfoDICT = json.load(open(pds_info, 'r'))

    # Redis Queue Objects
    RQ_main = RedisQueue('UPC_ReadyQueue')
    logger.info("UPC Processing Queue: %s", RQ_main.id_name)
    RQ_lock = RedisLock(lock_obj)
    # If the queue isn't registered, add it and set it to "running"
    RQ_lock.add({RQ_main.id_name: '1'})

    proc_date_tid = get_tid('processdate', session)
    err_type_tid = get_tid('errortype', session)
    err_msg_tid = get_tid('errormessage', session)
    err_flag_tid = get_tid('error', session)
    isis_footprint_tid = get_tid('isisfootprint', session)
    isis_centroid_tid = get_tid('isiscentroid', session)
    start_time_tid = get_tid('starttime', session)
    stop_time_tid = get_tid('stoptime', session)
    checksum_tid = get_tid('checksum', session)

    # while there are items in the redis queue
    while int(RQ_main.QueueSize()) > 0 and RQ_lock.available(RQ_main.id_name):
        # get a file from the queue
        item = literal_eval(RQ_main.QueueGet().decode("utf-8"))
        inputfile = item[0]
        fid = item[1]
        archive = item[2]
        #inputfile = (RQ_main.QueueGet()).decode('utf-8')
        if not os.path.isfile(inputfile):
            print("{} is not a file\n".format(inputfile))
        if os.path.isfile(inputfile):
            logger.info('Starting Process: %s', inputfile)

            # @TODO refactor this logic.  We're using an object to find a path, returning it,
            #  then passing it back to the object so that the object can use it.
            recipeOBJ = Recipe()
            recipe_json = recipeOBJ.getRecipeJSON(archive)
            #recipe_json = recipeOBJ.getRecipeJSON(getMission(str(inputfile)))
            recipeOBJ.AddJsonFile(recipe_json, 'upc')
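            # One possible shape for the @TODO refactor (hypothetical method
            # name, not in the source): let the Recipe object resolve and load
            # its own JSON internally, e.g.
            #   recipeOBJ = Recipe()
            #   recipeOBJ.load_for_archive(archive, 'upc')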

            infile = workarea + os.path.splitext(
                str(os.path.basename(inputfile)))[0] + '.UPCinput.cub'
            outfile = workarea + os.path.splitext(
                str(os.path.basename(inputfile)))[0] + '.UPCoutput.cub'
            caminfoOUT = workarea + os.path.splitext(
                str(os.path.basename(inputfile)))[0] + '_caminfo.pvl'
            EDRsource = inputfile.replace(
                '/pds_san/PDS_Archive/',
                'https://pdsimage.wr.usgs.gov/Missions/')

            status = 'success'
            # Iterate through each process listed in the recipe
            for item in recipeOBJ.getProcesses():
                # If any of the processes failed, discontinue processing
                if status.lower() == 'error':
                    break
                elif status.lower() == 'success':
                    processOBJ = Process()
                    processOBJ.ProcessFromRecipe(item, recipeOBJ.getRecipe())
                    # Handle processing based on string description.
                    if '2isis' in item:
                        processOBJ.updateParameter('from_', inputfile)
                        processOBJ.updateParameter('to', outfile)
                    elif item == 'thmproc':
                        processOBJ.updateParameter('from_', inputfile)
                        processOBJ.updateParameter('to', outfile)
                        thmproc_odd = str(workarea) + str(
                            os.path.splitext(os.path.basename(inputfile))
                            [0]) + '.UPCoutput.raw.odd.cub'
                        thmproc_even = str(workarea) + str(
                            os.path.splitext(os.path.basename(inputfile))
                            [0]) + '.UPCoutput.raw.even.cub'
                    elif item == 'handmos':
                        processOBJ.updateParameter('from_', thmproc_even)
                        processOBJ.updateParameter('mosaic', thmproc_odd)
                    elif item == 'spiceinit':
                        processOBJ.updateParameter('from_', infile)
                    elif item == 'cubeatt':
                        band_infile = infile + '+' + str(1)
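                        # '+1' is an ISIS cube-attribute suffix: operate on
                        # band 1 of the cube only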
                        processOBJ.updateParameter('from_', band_infile)
                        processOBJ.updateParameter('to', outfile)
                    elif item == 'footprintinit':
                        processOBJ.updateParameter('from_', infile)
                    elif item == 'caminfo':
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', caminfoOUT)
                    else:
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)

                    pwd = os.getcwd()
                    # iterate through functions listed in process obj
                    for k, v in processOBJ.getProcess().items():
                        # load a function into func
                        func = getattr(isis, k)
                        try:
                            os.chdir(workarea)
                            # execute function
                            func(**v)
                            os.chdir(pwd)
                            if item == 'handmos':
                                if os.path.isfile(thmproc_odd):
                                    os.rename(thmproc_odd, infile)
                            else:
                                if os.path.isfile(outfile):
                                    os.rename(outfile, infile)
                            status = 'success'
                            if '2isis' in item:
                                label = pvl.load(infile)
                                infile_bandlist = label['IsisCube']['BandBin'][
                                    PDSinfoDICT[archive]['bandbinQuery']]
                                infile_centerlist = label['IsisCube'][
                                    'BandBin']['Center']
                            elif item == 'thmproc':
                                pass
                            elif item == 'handmos':
                                label = pvl.load(infile)
                                infile_bandlist = label['IsisCube']['BandBin'][
                                    PDSinfoDICT[archive]['bandbinQuery']]
                                infile_centerlist = label['IsisCube'][
                                    'BandBin']['Center']

                        except ProcessError as e:
                            print(e)
                            status = 'error'
                            processError = item

            # keyword definitions
            keywordsOBJ = None
            if status.lower() == 'success':
                try:
                    keywordsOBJ = UPCkeywords(caminfoOUT)
                except:
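                    # caminfo output occasionally contains characters the PVL
                    # parser rejects; scrub them and retry below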
                    with open(caminfoOUT, 'r') as f:
                        filedata = f.read()

                    filedata = filedata.replace(';', '-').replace('&', '-')
                    filedata = re.sub(r'\-\s+', r'', filedata, flags=re.M)

                    with open(caminfoOUT, 'w') as f:
                        f.write(filedata)

                    keywordsOBJ = UPCkeywords(caminfoOUT)
                target_Qobj = session.query(upc_models.Targets).filter(
                    upc_models.Targets.targetname == keywordsOBJ.getKeyword(
                        'TargetName').upper()).first()

                instrument_Qobj = session.query(upc_models.Instruments).filter(
                    upc_models.Instruments.instrument ==
                    keywordsOBJ.getKeyword('InstrumentId')).first()

                if session.query(upc_models.DataFiles).filter(
                        upc_models.DataFiles.isisid == keywordsOBJ.getKeyword(
                            'IsisId')).first() is None:

                    test_input = upc_models.DataFiles(
                        isisid=keywordsOBJ.getKeyword('IsisId'),
                        productid=keywordsOBJ.getKeyword('ProductId'),
                        edr_source=EDRsource,
                        edr_detached_label='',
                        instrumentid=instrument_Qobj.instrumentid,
                        targetid=target_Qobj.targetid)

                    session.merge(test_input)
                    session.commit()

                Qobj = session.query(upc_models.DataFiles).filter(
                    upc_models.DataFiles.isisid == keywordsOBJ.getKeyword(
                        'IsisId')).first()

                UPCid = Qobj.upcid
                print(UPCid)
                # block to add band information to meta_bands
                if isinstance(infile_bandlist, list):
                    for band, center in zip(infile_bandlist,
                                            infile_centerlist):
                        B_DBinput = upc_models.MetaBands(
                            upcid=UPCid,
                            filter=str(band),
                            centerwave=center)
                        session.merge(B_DBinput)
                else:
                    try:
                        # If infile_centerlist is in "Units" format, grab the value
                        f_centerlist = float(infile_centerlist[0])
                    except TypeError:
                        f_centerlist = float(infile_centerlist)
                    B_DBinput = upc_models.MetaBands(upcid=UPCid,
                                                     filter=infile_bandlist,
                                                     centerwave=f_centerlist)
                    session.merge(B_DBinput)
                session.commit()

                # Block to add common keywords
                testjson = json.load(open(keyword_def, 'r'))
                for element_1 in testjson['instrument']['COMMON']:
                    keyvalue = ""
                    keytype = testjson['instrument']['COMMON'][element_1][
                        'type']
                    keyword = testjson['instrument']['COMMON'][element_1][
                        'keyword']
                    keyword_Qobj = session.query(upc_models.Keywords).filter(
                        and_(upc_models.Keywords.typename == element_1,
                             upc_models.Keywords.instrumentid == 1)).first()

                    if keyword_Qobj is None:
                        continue
                    else:
                        keyvalue = keywordsOBJ.getKeyword(keyword)
                    if keyvalue is None:
                        continue
                    keyvalue = db2py(keytype, keyvalue)
                    try:
                        DBinput = upc_models.create_table(
                            keytype,
                            upcid=UPCid,
                            typeid=keyword_Qobj.typeid,
                            value=keyvalue)
                    except Exception as e:
                        logger.warn("Unable to enter %s into table\n\n%s",
                                    keytype, e)
                        continue
                    session.merge(DBinput)
                    try:
                        session.flush()
                    except:
                        logger.warn("Unable to flush database connection")
                session.commit()

                for element_1 in testjson['instrument'][archive]:
                    keyvalue = ""
                    keytype = testjson['instrument'][archive][element_1][
                        'type']
                    keyword = testjson['instrument'][archive][element_1][
                        'keyword']
                    keyword_Qobj = session.query(upc_models.Keywords).filter(
                        and_(
                            upc_models.Keywords.typename == element_1,
                            upc_models.Keywords.instrumentid.in_(
                                (1, instrument_Qobj.instrumentid)))).first()

                    if keyword_Qobj is None:
                        continue
                    else:
                        keyvalue = keywordsOBJ.getKeyword(keyword)
                    if keyvalue is None:
                        logger.debug("Keyword %s not found", keyword)
                        continue
                    keyvalue = db2py(keytype, keyvalue)
                    try:
                        DBinput = upc_models.create_table(
                            keytype,
                            upcid=UPCid,
                            typeid=keyword_Qobj.typeid,
                            value=keyvalue)
                    except Exception as e:
                        logger.warn("Unable to enter %s into database\n\n%s",
                                    keytype, e)
                        continue
                    session.merge(DBinput)
                    try:
                        session.flush()
                    except:
                        logger.warn("Unable to flush database connection")
                session.commit()

                # geometry stuff
                G_centroid = 'point ({} {})'.format(
                    str(keywordsOBJ.getKeyword('CentroidLongitude')),
                    str(keywordsOBJ.getKeyword('CentroidLatitude')))

                G_keyword_Qobj = session.query(
                    upc_models.Keywords.typeid).filter(
                        upc_models.Keywords.typename ==
                        'isiscentroid').first()
                G_footprint_Qobj = session.query(
                    upc_models.Keywords.typeid).filter(
                        upc_models.Keywords.typename ==
                        'isisfootprint').first()
                G_footprint = keywordsOBJ.getKeyword('GisFootprint')
                G_DBinput = upc_models.MetaGeometry(upcid=UPCid,
                                                    typeid=G_keyword_Qobj,
                                                    value=G_centroid)
                session.merge(G_DBinput)
                G_DBinput = upc_models.MetaGeometry(upcid=UPCid,
                                                    typeid=G_footprint_Qobj,
                                                    value=G_footprint)
                session.merge(G_DBinput)
                session.flush()
                session.commit()

                f_hash = hashlib.md5()
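                # Hash the source product in 4096-byte chunks, mirroring the
                # ingest-side checksum so DI comparisons line up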
                with open(inputfile, "rb") as f:
                    for chunk in iter(lambda: f.read(4096), b""):
                        f_hash.update(chunk)
                checksum = f_hash.hexdigest()

                DBinput = upc_models.MetaString(upcid=UPCid,
                                                typeid=checksum_tid,
                                                value=checksum)
                session.merge(DBinput)
                DBinput = upc_models.MetaBoolean(upcid=UPCid,
                                                 typeid=err_flag_tid,
                                                 value=False)
                session.merge(DBinput)
                session.commit()
                AddProcessDB(pds_session, fid, True)
                os.remove(infile)
                os.remove(caminfoOUT)

            elif status.lower() == 'error':
                try:
                    label = pvl.load(infile)
                except Exception as e:
                    logger.error('%s', e)
                    continue
                date = datetime.datetime.now(
                    pytz.utc).strftime("%Y-%m-%d %H:%M:%S")

                if '2isis' in processError or processError == 'thmproc':
                    # EDRsource is already a str in Python 3; no .decode() needed
                    if session.query(upc_models.DataFiles).filter(
                            upc_models.DataFiles.edr_source ==
                            EDRsource).first() is None:

                        target_Qobj = session.query(upc_models.Targets).filter(
                            upc_models.Targets.targetname == str(
                                label['IsisCube']['Instrument']
                                ['TargetName']).upper()).first()

                        instrument_Qobj = session.query(
                            upc_models.Instruments).filter(
                                upc_models.Instruments.instrument == str(
                                    label['IsisCube']['Instrument']
                                    ['InstrumentId'])).first()

                        error1_input = upc_models.DataFiles(
                            isisid='1', edr_source=EDRsource)
                        session.merge(error1_input)
                        session.commit()

                    EQ1obj = session.query(upc_models.DataFiles).filter(
                        upc_models.DataFiles.edr_source == EDRsource).first()
                    UPCid = EQ1obj.upcid

                    errorMSG = 'Error running {} on file {}'.format(
                        processError, inputfile)

                    DBinput = MetaTime(upcid=UPCid,
                                       typeid=proc_date_tid,
                                       value=date)
                    session.merge(DBinput)

                    DBinput = MetaString(upcid=UPCid,
                                         typeid=err_type_tid,
                                         value=processError)
                    session.merge(DBinput)

                    DBinput = MetaString(upcid=UPCid,
                                         typeid=err_msg_tid,
                                         value=errorMSG)
                    session.merge(DBinput)

                    DBinput = MetaBoolean(upcid=UPCid,
                                          typeid=err_flag_tid,
                                          value=True)
                    session.merge(DBinput)

                    DBinput = MetaGeometry(upcid=UPCid,
                                           typeid=isis_footprint_tid,
                                           value='POINT(361 0)')
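                    # POINT(361 0) is an off-globe sentinel (longitude > 360)
                    # used when no real footprint could be computed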
                    session.merge(DBinput)

                    DBinput = MetaGeometry(upcid=UPCid,
                                           typeid=isis_centroid_tid,
                                           value='POINT(361 0)')
                    session.merge(DBinput)

                    session.commit()
                else:
                    try:
                        label = pvl.load(infile)
                    except Exception as e:
                        logger.warning('%s', e)
                        continue

                    isisSerial = getISISid(infile)

                    if session.query(upc_models.DataFiles).filter(
                            upc_models.DataFiles.isisid ==
                            isisSerial).first() is None:
                        target_Qobj = session.query(upc_models.Targets).filter(
                            upc_models.Targets.targetname == str(
                                label['IsisCube']['Instrument']
                                ['TargetName']).upper()).first()
                        instrument_Qobj = session.query(
                            upc_models.Instruments).filter(
                                upc_models.Instruments.instrument == str(
                                    label['IsisCube']['Instrument']
                                    ['InstrumentId'])).first()

                        if target_Qobj is None or instrument_Qobj is None:
                            continue

                        error2_input = upc_models.DataFiles(
                            isisid=isisSerial,
                            productid=label['IsisCube']['Archive']
                            ['ProductId'],
                            edr_source=EDRsource,
                            instrumentid=instrument_Qobj.instrumentid,
                            targetid=target_Qobj.targetid)
                        # Only merge/commit when the row was just created;
                        # otherwise error2_input would be undefined here
                        session.merge(error2_input)
                        session.commit()

                    try:
                        EQ2obj = session.query(upc_models.DataFiles).filter(
                            upc_models.DataFiles.isisid == isisSerial).first()
                        UPCid = EQ2obj.upcid
                        errorMSG = 'Error running {} on file {}'.format(
                            processError, inputfile)

                        DBinput = MetaTime(upcid=UPCid,
                                           typeid=proc_date_tid,
                                           value=date)
                        session.merge(DBinput)

                        DBinput = MetaString(upcid=UPCid,
                                             typeid=err_type_tid,
                                             value=processError)
                        session.merge(DBinput)

                        DBinput = MetaString(upcid=UPCid,
                                             typeid=err_msg_tid,
                                             value=errorMSG)
                        session.merge(DBinput)

                        DBinput = MetaBoolean(upcid=UPCid,
                                              typeid=err_flag_tid,
                                              value=True)
                        session.merge(DBinput)

                        DBinput = MetaGeometry(upcid=UPCid,
                                               typeid=isis_footprint_tid,
                                               value='POINT(361 0)')
                        session.merge(DBinput)

                        DBinput = MetaGeometry(upcid=UPCid,
                                               typeid=isis_centroid_tid,
                                               value='POINT(361 0)')
                        session.merge(DBinput)
                    except:
                        pass

                    try:
                        v = label['IsisCube']['Instrument']['StartTime']
                    except KeyError:
                        v = None
                    except:
                        continue

                    try:
                        DBinput = MetaTime(upcid=UPCid,
                                           typeid=start_time_tid,
                                           value=v)
                        session.merge(DBinput)
                    except:
                        continue

                    try:
                        v = label['IsisCube']['Instrument']['StopTime']
                    except KeyError:
                        v = None
                    DBinput = MetaTime(upcid=UPCid,
                                       typeid=stop_time_tid,
                                       value=v)
                    session.merge(DBinput)

                    session.commit()

                AddProcessDB(pds_session, fid, False)
                os.remove(infile)

    # Disconnect from db sessions
    pds_session.close()
    session.close()
    # Disconnect from the engines
    pds_engine.dispose()
    upc_engine.dispose()
    logger.info("UPC processing exited successfully")
Esempio n. 18
0
def main():
    args = Args()
    args.parse_args()
    key = args.key
    namespace = args.namespace

    if namespace is None:
        namespace = default_namespace

    workarea = scratch + args.key + '/'
    RQ_file = RedisQueue(key + '_FileQueue', namespace)
    RQ_work = RedisQueue(key + '_WorkQueue', namespace)
    RQ_zip = RedisQueue(key + '_ZIP', namespace)
    RQ_loggy = RedisQueue(key + '_loggy', namespace)
    RQ_final = RedisQueue('FinalQueue', namespace)
    RHash = RedisHash(key + '_info')
    RHerror = RedisHash(key + '_error')
    RQ_lock = RedisLock(lock_obj)
    RQ_lock.add({'MAP':'1'})

    if int(RQ_file.QueueSize()) == 0 and RQ_lock.available('MAP'):
        print("No Files Found in Redis Queue")
    else:
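        # Qfile2Qwork moves one item from the file queue onto the work queue;
        # if the job dies mid-run, the leftover work-queue entry flags it for
        # recovery.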
        jobFile = RQ_file.Qfile2Qwork(
            RQ_file.getQueueName(), RQ_work.getQueueName()).decode('utf-8')

        # Setup system logging
        basename = os.path.splitext(os.path.basename(jobFile))[0]
        logger = logging.getLogger(key + '.' + basename)
        logger.setLevel(logging.INFO)

        logFileHandle = logging.FileHandler(pds_log + '/Service.log')

        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
        logFileHandle.setFormatter(formatter)
        logger.addHandler(logFileHandle)

        logger.info('Starting MAP Processing')

        loggyOBJ = Loggy(basename)

        # File Naming
        infile = workarea + \
            os.path.splitext(os.path.basename(jobFile))[0] + '.input.cub'
        outfile = workarea + \
            os.path.splitext(os.path.basename(jobFile))[0] + '.output.cub'

        # Recipe Stuff

        RQ_recipe = RedisQueue(key + '_recipe')

        status = 'success'

        for element in RQ_recipe.RecipeGet():

            if status == 'error':
                break
            elif status == 'success':
                processOBJ = Process()
                process = processOBJ.JSON2Process(element)
                if 'gdal_translate' not in processOBJ.getProcessName():
                    if 'cubeatt-band' in processOBJ.getProcessName():
                        if '+' in jobFile:
                            processOBJ.updateParameter('from_', jobFile)
                            processOBJ.updateParameter('to', outfile)
                            processOBJ.ChangeProcess('cubeatt')
                        else:
                            continue

                    elif 'map2map' in processOBJ.getProcessName():
                        if '+' in jobFile:
                            processOBJ.updateParameter('from_', infile)
                        else:
                            processOBJ.updateParameter('from_', jobFile)
                        processOBJ.updateParameter('to', outfile)

                    elif 'cubeatt-bit' in processOBJ.getProcessName():
                        if RHash.OutBit().decode('utf-8') == 'unsignedbyte':
                            temp_outfile = outfile + '+lsb+tile+attached+unsignedbyte+1:254'
                        elif RHash.OutBit().decode('utf-8') == 'signedword':
                            temp_outfile = outfile + '+lsb+tile+attached+signedword+-32765:32765'
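                        # The '+lsb+tile+attached+<type>+<range>' suffix is an
                        # ISIS output-attribute string: byte order, tile
                        # format, attached labels, pixel type, and valid DN
                        # range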
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', temp_outfile)
                        processOBJ.ChangeProcess('cubeatt')

                    elif 'isis2pds' in processOBJ.getProcessName():
                        # finalfile = infile.replace('.input.cub', '_final.img')
                        finalfile = workarea + RHash.getMAPname().decode('utf-8') + '.img'
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', finalfile)

                    else:
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)

                    print(processOBJ.getProcess())

                    for k, v in processOBJ.getProcess().items():
                        func = getattr(isis, k)
                        subloggyOBJ = SubLoggy(k)
                        try:
                            func(**v)
                            logger.info('Process %s :: Success', k)
                            subloggyOBJ.setStatus('SUCCESS')
                            subloggyOBJ.setCommand(processOBJ.LogCommandline())
                            subloggyOBJ.setHelpLink(processOBJ.LogHelpLink())
                            loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

                            if os.path.isfile(outfile):
                                os.rename(outfile, infile)
                            status = 'success'

                        except ProcessError as e:
                            logger.error('Process %s :: Error', k)
                            logger.error(e)
                            status = 'error'
                            eSTR = 'Error Executing ' + k + \
                                ' Standard Error: ' + str(e)
                            RHerror.addError(os.path.splitext(
                                os.path.basename(jobFile))[0], eSTR)
                            subloggyOBJ.setStatus('ERROR')
                            subloggyOBJ.setCommand(processOBJ.LogCommandline())
                            subloggyOBJ.setHelpLink(processOBJ.LogHelpLink())
                            subloggyOBJ.errorOut(eSTR)
                            loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

                else:

                    GDALcmd = ""
                    for process, v, in processOBJ.getProcess().items():
                        subloggyOBJ = SubLoggy(process)
                        GDALcmd += process
                        for key, value in v.items():
                            GDALcmd += ' ' + key + ' ' + value

                    img_format = RHash.Format().decode('utf-8')

                    if img_format == 'GeoTiff-BigTiff':
                        fileext = 'tif'
                    elif img_format == 'GeoJPEG-2000':
                        fileext = 'jp2'
                    elif img_format == 'JPEG':
                        fileext = 'jpg'
                    elif img_format == 'PNG':
                        fileext = 'png'
                    elif img_format == 'GIF':
                        fileext = 'gif'

                    logGDALcmd = GDALcmd + ' ' + basename + '.input.cub ' + RHash.getMAPname().decode('utf-8') + '.' + fileext
                    finalfile = workarea + RHash.getMAPname().decode('utf-8') + '.' + fileext
                    GDALcmd += ' ' + infile + ' ' + finalfile
                    print(GDALcmd)
                    try:
                        subprocess.call(GDALcmd, shell=True)
                        logger.info('Process GDAL translate :: Success')
                        status = 'success'
                        subloggyOBJ.setStatus('SUCCESS')
                        subloggyOBJ.setCommand(logGDALcmd)
                        subloggyOBJ.setHelpLink(
                            'www.gdal.org/gdal_translate.html')
                        loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())
                        os.remove(infile)
                    except OSError as e:
                        logger.error('Process GDAL translate :: Error')
                        logger.error(e)
                        status = 'error'
                        RHerror.addError(os.path.splitext(os.path.basename(jobFile))[0],
                                         'Process GDAL translate :: Error')
                        subloggyOBJ.setStatus('ERROR')
                        subloggyOBJ.setCommand(logGDALcmd)
                        subloggyOBJ.setHelpLink(
                            'http://www.gdal.org/gdal_translate.html')
                        subloggyOBJ.errorOut(e)
                        loggyOBJ.AddProcess(subloggyOBJ.getSLprocess())

        if status == 'success':
            if RHash.Format().decode('utf-8') == 'ISIS3':
                finalfile = workarea + RHash.getMAPname().decode('utf-8') + '.cub'
                shutil.move(infile, finalfile)
            if RHash.getStatus() != b'ERROR':
                RHash.Status('SUCCESS')

            try:
                RQ_zip.QueueAdd(finalfile)
                logger.info('File Added to ZIP Queue')
            except:
                logger.error('File NOT Added to ZIP Queue')

            try:
                RQ_loggy.QueueAdd(loggyOBJ.Loggy2json())
                logger.info('JSON Added to Loggy Queue')
            except:
                logger.error('JSON NOT Added to Loggy Queue')

            RQ_work.QueueRemove(jobFile)
        elif status == 'error':
            RHash.Status('ERROR')
            if os.path.isfile(infile):
                os.remove(infile)

        if RQ_file.QueueSize() == 0 and RQ_work.QueueSize() == 0:
            try:
                RQ_final.QueueAdd(key)
                logger.info('Key %s Added to Final Queue: Success', key)
                logger.info('Job Complete')
            except:
                logger.error('Key NOT Added to Final Queue')
        else:
            logger.warning('Queues Not Empty: filequeue = %s  work queue = %s', str(
                RQ_file.QueueSize()), str(RQ_work.QueueSize()))
Esempio n. 19
0
def main():
    # pdb.set_trace()

    PDSinfoDICT = json.load(open(pds_info, 'r'))

    # ********* Set up logging *************
    logger = logging.getLogger('DI_Process')
    logger.setLevel(logging.INFO)
    #logFileHandle = logging.FileHandler('/usgs/cdev/PDS/logs/DI.log')
    logFileHandle = logging.FileHandler(pds_log + 'DI.log')
    #logFileHandle = logging.FileHandler('/home/arsanders/PDS-Pipelines/logs/DI.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting DI Process')

    try:
        # ignores engine information
        session, _ = db_connect(pds_db)
        logger.info('DataBase Connection: Success')
    except:
        logger.error('DataBase Connection: Error')
        return 1

    RQ = RedisQueue('DI_ReadyQueue')
    index = 0

    while int(RQ.QueueSize()) > 0:
        item = literal_eval(RQ.QueueGet().decode("utf-8"))
        inputfile = item[0]
        archive = item[1]
        try:
            Qelement = session.query(Files).filter(
                Files.filename == inputfile).one()
        except:
            logger.error('Query for File: %s', inputfile)
            continue

        archive_path = PDSinfoDICT[archive]['path']

        cpfile = archive_path + Qelement.filename
        if os.path.isfile(cpfile):
            f_hash = hashlib.md5()
            with open(cpfile, "rb") as f:
                for chunk in iter(lambda: f.read(4096), b""):
                    f_hash.update(chunk)
            checksum = f_hash.hexdigest()

            if checksum == Qelement.checksum:
                Qelement.di_pass = True
            else:
                Qelement.di_pass = False
            Qelement.di_date = datetime.datetime.now(
                pytz.utc).strftime("%Y-%m-%d %H:%M:%S")
            session.flush()
            index = index + 1
            if index > 50:
                session.commit()
                logger.info('Session Commit for 50 Records: Success')
                index = 0
        else:
            logger.error('File %s Not Found', cpfile)
    try:
        session.commit()
        logger.info("End Commit DI process to Database: Success")
        index = 1
    except:
        session.rollback()
        logger.error('Error during final commit')
Esempio n. 20
0
def main():
    #    pdb.set_trace()

    archiveID = {16: '/pds_san/PDS_Archive/Mars_Reconnaissance_Orbiter/CTX/',
                 74: '/pds_san/PDS_Archive/Lunar_Reconnaissance_Orbiter/LROC/EDR/',
                 124: '/pds_san/PDS_Archive/Mars_Reconnaissance_Orbiter/HiRISE/',
                 101: '/pds_san/PDS_Archive/Apollo/Rock_Sample_Images/'
                 }

    args = Args()
    args.parse_args()

    logger = logging.getLogger('DI_Process')
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'DI.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting DI Process')

    try:
        # Throws away engine information
        session, _ = db_connect(pds_db)
        logger.info('DataBase Connection: Success')
    except:
        logger.error('DataBase Connection: Error')
        return 1
    RQ = RedisQueue('ChecksumUpdate_Queue')
    index = 0
    count = 0

    while int(RQ.QueueSize()) > 0:
        inputfile = RQ.QueueGet()
        Qelement = session.query(Files).filter(
            Files.filename == inputfile).one()
        cpfile = archiveID[Qelement.archiveid] + Qelement.filename
        if os.path.isfile(cpfile):
            # Calculate checksum in chunks of 4096
            f_hash = hashlib.md5()
            with open(cpfile, "rb") as f:
                for chunk in iter(lambda: f.read(4096), b""):
                    f_hash.update(chunk)
            checksum = f_hash.hexdigest()

            if checksum != Qelement.checksum:
                Qelement.checksum = checksum
                Qelement.di_pass = True
                Qelement.di_date = datetime.datetime.now(
                    pytz.utc).strftime("%Y-%m-%d %H:%M:%S")
                session.flush()
                index = index + 1
                count = count + 1
                logger.info('Update Checksum %s: Success', inputfile)

            if count > 25:
                session.commit()
                logger.info('Session Commit for 25 Records: Success')
                count = 0

        else:
            logger.error('File %s Not Found', cpfile)

    try:
        session.commit()
        logger.info("End Commit DI process to Database: Success")
    except:
        session.rollback()
        logger.error('Error during commit')
    logger.info('Checksum for %s Files Updated', str(index))
Esempio n. 21
0
def main():
    args = Args()
    args.parse_args()

    RQ = RedisQueue('DI_ReadyQueue')

    PDSinfoDICT = json.load(open(pds_info, 'r'))
    try:
        archiveID = PDSinfoDICT[args.archive]['archiveid']
    except KeyError:
        print("\nArchive '{}' not found in {}\n".format(
            args.archive, pds_info))
        print("The following archives are available:")
        for k in PDSinfoDICT.keys():
            print("\t{}".format(k))
        sys.exit()

    logger = logging.getLogger('DI_Queueing.' + args.archive)
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'DI.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info("DI Queue: %s", RQ.id_name)

    logger.info('Starting %s DI Queueing', args.archive)
    if args.volume:
        logger.info('Queueing %s Volume', args.volume)

    try:
        session, _ = db_connect(pds_db)
        logger.info('Database Connection: Success')
    except Exception:
        logger.error('Database Connection: Error')
        return 1

    td = (datetime.datetime.now(pytz.utc) -
          datetime.timedelta(days=30)).strftime("%Y-%m-%d %H:%M:%S")
    testing_date = datetime.datetime.strptime(str(td), "%Y-%m-%d %H:%M:%S")

    if args.volume:
        volstr = '%' + args.volume + '%'
        testQ = session.query(Files).filter(
            Files.archiveid == archiveID, Files.filename.like(volstr)).filter(
                or_(
                    cast(Files.di_date, Date) < testing_date,
                    # .is_(None) emits SQL "IS NULL"; a bare "is None" is
                    # evaluated by Python and silently drops the clause
                    cast(Files.di_date, Date).is_(None)))
    else:
        testQ = session.query(Files).filter(
            Files.archiveid == archiveID).filter(
                or_(
                    cast(Files.di_date, Date) < testing_date,
                    cast(Files.di_date, Date).is_(None)))

    addcount = 0
    for element in testQ:
        try:
            RQ.QueueAdd((element.filename, args.archive))
            addcount = addcount + 1
        except Exception:
            logger.warning('File %s Not Added to DI_ReadyQueue',
                           element.filename)

    logger.info('Files Added to Queue %s', addcount)
    logger.info('DI Queueing Complete')
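
The staleness filter above re-queues any file whose last DI check is older than 30 days or was never run at all. A self-contained sketch of the same query, assuming the pipeline's Files model is importable:

import datetime

import pytz
from sqlalchemy import Date, cast, or_

def stale_files(session, archive_id, max_age_days=30):
    """Files last DI-checked before the cutoff, or never checked."""
    cutoff = (datetime.datetime.now(pytz.utc) -
              datetime.timedelta(days=max_age_days))
    return session.query(Files).filter(
        Files.archiveid == archive_id).filter(
            or_(cast(Files.di_date, Date) < cutoff,
                cast(Files.di_date, Date).is_(None)))
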
Example n. 22
def main():
    PDSinfoDICT = json.load(open(pds_info, 'r'))
    args = Args()
    args.parse_args()

    # Set up logging
    logger = logging.getLogger('DI_Process')
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log + 'DI.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting DI Process')

    try:
        session, engine = db_connect(pds_db)
        logger.info('Database Connection: Success')
    except Exception as e:
        logger.error('DataBase Connection Error: %s', str(e))
        return 1

    RQ = RedisQueue('DI_ReadyQueue')
    RQ_lock = RedisLock(lock_obj)
    RQ_lock.add({RQ.id_name: '1'})
    index = 0

    logger.info("DI Queue: %s", RQ.id_name)

    while int(RQ.QueueSize()) > 0 and RQ_lock.available(RQ.id_name):
        item = literal_eval(RQ.QueueGet().decode("utf-8"))
        inputfile = item[0]
        archive = item[1]
        try:
            Qelement = session.query(Files).filter(
                Files.filename == inputfile).one()
        except Exception as e:
            logger.warning('Filename query failed for inputfile %s: %s',
                           inputfile, str(e))
            continue

        archive_path = PDSinfoDICT[archive]['path']

        cpfile = archive_path + Qelement.filename
        if os.path.isfile(cpfile):
            f_hash = hashlib.md5()
            with open(cpfile, "rb") as f:
                for chunk in iter(lambda: f.read(4096), b""):
                    f_hash.update(chunk)
            checksum = f_hash.hexdigest()

            Qelement.di_pass = checksum == Qelement.checksum

            Qelement.di_date = datetime.datetime.now(
                pytz.utc).strftime("%Y-%m-%d %H:%M:%S")
            session.flush()
            index = index + 1
            if index > 50:
                session.commit()
                logger.info('Session Commit for 50 Records: Success')
                index = 0
        else:
            logger.warning('File %s Not Found', cpfile)
    try:
        session.commit()
        logger.info("End Commit DI process to Database: Success")
    except Exception as e:
        logger.warning("Unable to commit changes to database\n\n%s", e)
        session.rollback()

    # Close connection to database
    session.close()
    engine.dispose()
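
Example n. 22 also gates its consumer loop on a RedisLock entry, so an operator can pause the queue without killing the worker: every iteration re-checks available() before popping. The loop shape in isolation, using the pipeline's RedisQueue/RedisLock wrappers as shown above (the handle callable is illustrative):

from ast import literal_eval

def drain_queue(RQ, RQ_lock, handle):
    """Pop (filename, archive) tuples until the queue empties or the lock closes."""
    while int(RQ.QueueSize()) > 0 and RQ_lock.available(RQ.id_name):
        filename, archive = literal_eval(RQ.QueueGet().decode("utf-8"))
        handle(filename, archive)
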
Example n. 23
def main():

    args = Args()
    args.parse_args()
    FKey = sys.argv[-1]

#***************** Setup Logging **************
    logger = logging.getLogger('ServiceFinal.' + FKey)
    level = logging.getLevelName(args.log_level)
    logger.setLevel(level)
    logFileHandle = logging.FileHandler(pds_log+'Service.log')

    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    logger.info('Starting Final Process')
#************Set up REDIS Queues ****************
    zipQueue = RedisQueue(FKey + '_ZIP')
    loggyQueue = RedisQueue(FKey + '_loggy')
    infoHash = RedisHash(FKey + '_info')
    recipeQueue = RedisQueue(FKey + '_recipe')
    errorHash = RedisHash(FKey + '_error')

    DBQO = PDS_DBquery('JOBS')

    if errorHash.HashCount() > 0:
        root = ET.Element('errors')

        test = errorHash.getKeys()
        for key in test:
            sub = ET.Element('error')
            root.append(sub)

            field1 = ET.SubElement(sub, 'file')
            field1.text = key

            Eval = errorHash.getError(key)

            field2 = ET.SubElement(sub, 'message')
            field2.text = Eval

        tree = ET.ElementTree(root)

        fh = BytesIO()
        tree.write(fh, encoding='utf-8', xml_declaration=True)
        testval = DBQO.addErrors(FKey, fh.getvalue())
        if testval == 'Success':
            logger.info('Error XML Added to JOBS DB: Success')
        elif testval == 'Error':
            logger.error('Adding Error XML to JOBS DB: Error')
        print(fh.getvalue())

    Fdir = pow_map2_base + infoHash.Service() + '/' + FKey
    Wpath = scratch + FKey
#********* Make final directory ************
    if not os.path.exists(Fdir):
        try:
            os.makedirs(Fdir)
            logger.info('Final Location Success: %s', Fdir)
        except OSError as e:
            logger.error('Error Making Final Directory: %s', e)

#********** Block to build job log file **************

    outputLOG = Wpath + "/" + FKey + '.log'
    logOBJ = open(outputLOG, "w")

    logOBJ.write("       U.S. Geological Survey Cloud Processing Services\n")
    logOBJ.write("                http://astrocloud.wr.usgs.gov\n\n")

    if infoHash.Service() == 'POW':
        logOBJ.write("                 Processing On the Web(POW)\n\n")

    logOBJ.write("       Processing Provided by ASTROGEOLOGY USGS Flagstaff\n")
    logOBJ.write("              Contact Information: [email protected]\n\n")
    logOBJ.write(
        "____________________________________________________________________\n\n")

    logOBJ.write("JOB INFORMATION\n\n")
    logOBJ.write("     SERVICE:         " + infoHash.Service() + "\n")
    logOBJ.write("     JOB KEY:         " + FKey + "\n")
    logOBJ.write("     PROCESSING DATE: " +
                 datetime.datetime.now().strftime("%Y-%m-%d %H:%M") + "\n")

    isisV = subprocess.check_output(['ls', '-la', '/usgs/pkgs/isis3'])
    # check_output returns bytes; decode before splitting on the symlink arrow
    isisA = isisV.decode('utf-8').split('>')
    logOBJ.write("     ISIS VERSION:   " + isisA[-1])
    if infoHash.getStatus() == 'ERROR':
        logOBJ.write("     JOB STATUS:      " +
                     infoHash.getStatus() + " See Details Below\n")
    else:
        logOBJ.write("     JOB STATUS:      " + infoHash.getStatus() + "\n")
    logOBJ.write("     FILE COUNT:      " + infoHash.getFileCount() + "\n\n")
    logOBJ.write(
        "_____________________________________________________________________\n\n")

    logOBJ.write("PROCESSING INFORMATION\n\n")
    for element in loggyQueue.ListGet():
        procDICT = json.loads(element, object_pairs_hook=OrderedDict)
        for infile in procDICT:
            logOBJ.write("     IMAGE: " + infile + "\n")
            for proc, _ in procDICT[infile].items():
                logOBJ.write("          PROCESS:  " + str(proc) + "\n")
                for k, val in procDICT[infile][proc].items():
                    if k == 'status':
                        logOBJ.write(
                            "               STATUS:     " + val + "\n")
                    elif k == 'command':
                        logOBJ.write(
                            "               COMMAND:    " + val + "\n")
                    elif k == 'helplink':
                        logOBJ.write(
                            "               HELP LINK:  " + val + "\n\n")
                    elif k == 'error':
                        logOBJ.write(
                            "               ERROR:      " + val + "\n\n")

    logOBJ.write("END-PROCESSING\n")
    logOBJ.close()

#******** Block for to copy and zip files to final directory ******
    Zfile = Wpath + '/' + FKey + '.zip'
    logger.info('Making Zip File %s', Zfile)

# log file stuff
    try:
        Lfile = FKey + '.log'
        Zcmd = 'zip -j ' + Zfile + " -q " + outputLOG
        process = subprocess.Popen(
            Zcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        (stdout, stderr) = process.communicate()
        logger.info('Log file %s Added to Zip File: Success', Lfile)
        # communicate() returns bytes; decode before logging
        logger.info('zip stdout: %s', stdout.decode('utf-8'))
        logger.info('zip stderr: %s', stderr.decode('utf-8'))
    except Exception:
        logger.error('Log File %s NOT Added to Zip File', Lfile)

    try:
        shutil.copyfile(outputLOG, Fdir + "/" + Lfile)
        logger.info('Copied Log File %s to Final Area: Success', Lfile)
        os.remove(outputLOG)
    except IOError as e:
        logger.error('Log File %s NOT COPIED to Final Area', Lfile)
        logger.error(e)

# file stuff
    for Lelement in zipQueue.ListGet():
        Pfile = os.path.basename(Lelement)
        auxfile = Wpath + '/' + os.path.basename(Lelement) + '.aux.xml'

        try:
            Zcmd = 'zip -j ' + Zfile + " -q " + Lelement
            process = subprocess.Popen(
                Zcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            (stdout, stderr) = process.communicate()
            logger.info('File %s Added to Zip File: Success', Pfile)
            if os.path.isfile(auxfile):
                Zcmd = 'zip -j ' + Zfile + " -q " + auxfile
                process = subprocess.Popen(
                    Zcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
                (stdout, stderr) = process.communicate()
                logger.info('File %s Added to Zip File: Success',
                            os.path.basename(Lelement) + '.aux.xml')
        except Exception:
            logger.error('Error During Zip Operation')

        try:
            shutil.copyfile(Wpath + '/' + Pfile, Fdir + '/' + Pfile)
            logger.info('Copy File %s : Success', Pfile)
            os.remove(Wpath + "/" + Pfile)
            if os.path.isfile(auxfile):
                shutil.copyfile(auxfile, Fdir + '/' +
                                os.path.basename(Lelement) + '.aux.xml')
                logger.info('Copy File %s : Success',
                            os.path.basename(Lelement) + '.aux.xml')
                os.remove(auxfile)
        except IOError as e:
            logger.error('Error During File Copy Operation')
            logger.error(e)

    try:
        shutil.copy(Zfile, Fdir + '/' + FKey + '.zip')
        os.remove(Zfile)
        logger.info('Zip File Copied to Final Directory')
    except IOError as e:
        logger.error('Error During Zip File Copy Operation')
        logger.error(e)

#************** Clean up *******************
    os.remove(Wpath + '/' + FKey + '.map')
    os.remove(Wpath + '/' + FKey + '.sbatch')
    try:
        shutil.rmtree(Wpath)
        logger.info('Working Directory Removed: Success')
    except Exception:
        logger.error('Working Directory NOT Removed')

    DBQO2 = PDS_DBquery('JOBS')
    DBQO2.setJobsFinished(FKey)

    infoHash.RemoveAll()
    loggyQueue.RemoveAll()
    zipQueue.RemoveAll()
    recipeQueue.RemoveAll()

    logger.info('Job %s is Complete', FKey)
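
Shelling out to `zip -j` via Popen works but inherits shell quoting problems when a path contains spaces. A hedged alternative sketch using the standard-library zipfile module (the function name is ours); arcname=basename reproduces what the -j flag does:

import os
import zipfile

def add_to_zip(zip_path, file_path):
    """Append file_path to zip_path, storing only the basename (like zip -j)."""
    with zipfile.ZipFile(zip_path, 'a', zipfile.ZIP_DEFLATED) as zf:
        zf.write(file_path, arcname=os.path.basename(file_path))
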
Example n. 24
def main():

    #***************** Set up logging *****************
    logger = logging.getLogger('Browse_Process')
    logger.setLevel(logging.INFO)
    logFileHandle = logging.FileHandler(pds_log + 'Process.log')
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s, %(message)s')
    logFileHandle.setFormatter(formatter)
    logger.addHandler(logFileHandle)

    RQ_main = RedisQueue('Browse_ReadyQueue')

    PDSinfoDICT = json.load(open(pds_info, 'r'))

    pds_session, _ = db_connect(pds_db)
    upc_session, _ = db_connect(upc_db)

    tid = get_tid('fullimageurl', upc_session)

    while int(RQ_main.QueueSize()) > 0:
        item = literal_eval(RQ_main.QueueGet().decode("utf-8"))
        inputfile = item[0]
        fid = item[1]
        archive = item[2]
        if os.path.isfile(inputfile):
            logger.info('Starting Process: %s', inputfile)
            finalpath = makedir(inputfile)

            recipeOBJ = Recipe()
            recip_json = recipeOBJ.getRecipeJSON(archive)
            recipeOBJ.AddJsonFile(recip_json, 'reduced')
            infile = workarea + os.path.splitext(
                os.path.basename(inputfile))[0] + '.Binput.cub'
            outfile = workarea + os.path.splitext(
                os.path.basename(inputfile))[0] + '.Boutput.cub'
            status = 'success'
            for item in recipeOBJ.getProcesses():
                if status == 'error':
                    break
                elif status == 'success':
                    processOBJ = Process()
                    processR = processOBJ.ProcessFromRecipe(
                        item, recipeOBJ.getRecipe())

                    if '2isis' in item:
                        processOBJ.updateParameter('from_', inputfile)
                        processOBJ.updateParameter('to', outfile)
                    elif item == 'spiceinit':
                        processOBJ.updateParameter('from_', infile)
                    elif item == 'cubeatt':
                        label = pvl.load(infile)
                        bands = PDSinfoDICT[archive]['bandorder']
                        query_bands = label['IsisCube']['BandBin'][
                            PDSinfoDICT[archive]['bandbinQuery']]
                        # Create a set from the list / single value
                        try:
                            query_band_set = set(query_bands)
                        except TypeError:
                            # a single scalar band value is not iterable
                            query_band_set = set([query_bands])

                        # Iterate through 'bands' and grab the first value that is present in the
                        #  set defined by 'bandbinquery' -- if not present, default to 1
                        exband = next(
                            (band for band in bands if band in query_band_set),
                            1)

                        band_infile = infile + '+' + str(exband)
                        processOBJ.updateParameter('from_', band_infile)
                        processOBJ.updateParameter('to', outfile)

                    elif item == 'ctxevenodd':
                        label = pvl.load(infile)
                        SS = label['IsisCube']['Instrument']['SpatialSumming']
                        if SS != 1:
                            break
                        else:
                            processOBJ.updateParameter('from_', infile)
                            processOBJ.updateParameter('to', outfile)

                    elif item == 'reduce':
                        label = pvl.load(infile)
                        Nline = label['IsisCube']['Core']['Dimensions'][
                            'Lines']
                        Nsample = label['IsisCube']['Core']['Dimensions'][
                            'Samples']
                        Nline = int(Nline)
                        Nsample = int(Nsample)
                        Sfactor = scaleFactor(Nline, Nsample, recip_json)
                        processOBJ.updateParameter('lscale', Sfactor)
                        processOBJ.updateParameter('sscale', Sfactor)
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)

                    elif item == 'isis2std':
                        final_outfile = finalpath + '/' + os.path.splitext(
                            os.path.basename(inputfile))[0] + '.browse.jpg'
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', final_outfile)

                    else:
                        processOBJ.updateParameter('from_', infile)
                        processOBJ.updateParameter('to', outfile)

                    for k, v in processOBJ.getProcess().items():
                        func = getattr(isis, k)
                        try:
                            func(**v)
                            logger.info('Process %s :: Success', k)
                            if os.path.isfile(outfile):
                                if '.cub' in outfile:
                                    os.rename(outfile, infile)
                            status = 'success'
                            if '2isis' in item:
                                isisSerial = getISISid(infile)
                        except ProcessError as e:
                            logger.error('Process %s :: Error', k)
                            logger.error(str(e))
                            status = 'error'
            if status == 'success':
                DB_addURL(upc_session, isisSerial, final_outfile, tid)
                os.remove(infile)
                logger.info('Browse Process Success: %s', inputfile)

                AddProcessDB(pds_session, fid, 't')
        else:
            logger.error('File %s Not Found', inputfile)
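
The cubeatt branch above selects which band to extract: it walks the archive's preferred bandorder and takes the first band that actually appears in the label's BandBin values, defaulting to band 1. The same logic pulled out as a pure function (the name is ours), keeping the original's try/except for scalar label values:

def pick_band(preferred_bands, label_bands, default=1):
    """Return the first preferred band present in the label, else `default`."""
    try:
        available = set(label_bands)   # list/tuple of band values
    except TypeError:
        available = {label_bands}      # a single scalar value
    return next((band for band in preferred_bands if band in available),
                default)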