Example #1
def hyp3_process(cfg, n):
    try:
        log.info('Processing hello_world')
        if not cfg['skip_processing']:
            log.info(f'Process starting at {datetime.now()}')
            launch_dir = os.getcwd()
            os.chdir(cfg['workdir'])

            hyp3proclib.process(cfg, 'proc_ci', ["--hello-world"])

            os.chdir(launch_dir)
        else:
            log.info('Processing skipped!')
            cfg['log'] += "(debug mode)"

        cfg['success'] = True
        hyp3proclib.update_completed_time(cfg)

        product_dir = os.path.join(cfg['workdir'], 'PRODUCT')
        if not os.path.isdir(product_dir):
            log.info(f'PRODUCT directory not found: {product_dir}')
            log.error('Processing failed')
            raise Exception('Processing failed: PRODUCT directory not found')

        # TODO: final product cleanup and upload to HyP3 DB

    except Exception as e:
        log.exception('ci processing failed!')
        log.info('Notifying user')
        hyp3proclib.failure(cfg, str(e))

    hyp3proclib.file_system.cleanup_workdir(cfg)

    log.info('ci done')
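A minimal sketch of invoking hyp3_process directly; only the cfg keys read in the function above are set, the values and job number are hypothetical, and in practice hyp3proclib assembles cfg from its own configuration and job queue.

def _demo_hyp3_process():
    # Hypothetical cfg values; only keys used by hyp3_process above are included.
    cfg = {
        'skip_processing': False,       # set True to skip the proc_ci run
        'workdir': '/tmp/hyp3-ci-job',  # hypothetical scratch directory
        'log': '',                      # appended to when processing is skipped
    }
    hyp3_process(cfg, 0)
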
def add_instance_record(cfg, conn):
    log.debug('Adding instance record')

    instance_record = {}
    if cfg['proc_node_type'] != 'CLOUD':
        instance_record['instance_id'] = socket.gethostname()
        instance_record['instance_type'] = 'on-prem'
    else:
        instance_record['instance_id'] = get_instance_id()
        instance_record['instance_type'] = get_instance_type()
    instance_record['local_queue_id'] = cfg['id']
    cfg['instance_record'] = instance_record

    try:
        instance_record_sql = '''
            insert into instance_records (instance_id, local_queue_id, start_time, instance_type)
            values (%(instance_id)s, %(local_queue_id)s, current_timestamp, %(instance_type)s)
        '''
        query_database(conn=conn,
                       query=instance_record_sql,
                       params=instance_record,
                       commit=True)

    except Exception:
        log.exception("Instance record could not be inserted")
    else:
        log.info("Instance record of instance %s and job %s inserted",
                 instance_record['instance_id'],
                 instance_record['local_queue_id'])
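A hedged usage sketch for add_instance_record; the cfg keys are the ones read above, the values are made up, and conn is assumed to be a psycopg2 connection such as the one returned by get_db_connection in Example #3.

def _demo_add_instance_record(conn):
    # Hypothetical job entry; 'proc_node_type' picks between the local hostname
    # and the cloud instance helpers used above.
    cfg = {'proc_node_type': 'CLOUD', 'id': 12345}
    add_instance_record(cfg, conn)
    # cfg['instance_record'] now carries instance_id, instance_type and local_queue_id.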
Example #3
def get_db_connection(s, tries=0):
    connection_string = \
        "host='" + get_config(s, 'host') + "' " + \
        "dbname='" + get_config(s, 'db') + "' " + \
        "user='" + get_config(s, 'user') + "' " + \
        "password='" + get_config(s, 'pass') + "'"
    log.info("Connecting to db: {0}".format(get_config(s, 'host')))
    try:
        conn = psycopg2.connect(connection_string)
    except Exception as e:
        if tries > 4:
            log.exception('DB connection problem: ' + str(e))
            raise
        else:
            log.warning("Problem connecting to DB: " + str(e))
            log.info("Retrying in {0} seconds...".format(30 * (tries + 1)))
            time.sleep(30 * (tries + 1))
            return get_db_connection(s, tries=tries + 1)

    return conn
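Each failed attempt above waits 30 * (tries + 1) seconds, i.e. 30, 60, 90, 120 and 150 seconds, and the exception is only re-raised after the sixth attempt fails. A small usage sketch; the 'hyp3-db' section name matches the call in Example #5.

def _demo_get_db_connection():
    conn = get_db_connection('hyp3-db')
    try:
        pass  # run queries against the HyP3 database here
    finally:
        conn.close()
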
def update_instance_record(cfg, conn):
    if 'instance_record' in cfg:
        instance_record = cfg['instance_record']
        try:
            instance_record_sql = '''
                update instance_records set end_time=current_timestamp
                where (instance_id=%(instance_id)s and local_queue_id=%(local_queue_id)s);
            '''
            query_database(conn=conn,
                           query=instance_record_sql,
                           params=instance_record,
                           commit=True)
        except Exception:
            log.exception(
                "Instance record for instance %s and job %s could not be updated with job completion time",
                instance_record['instance_id'],
                instance_record['local_queue_id'])
        else:
            log.info(
                "Instance record for instance %s and job %s had end_time updated with job completion time",
                instance_record['instance_id'],
                instance_record['local_queue_id'])
    else:
        log.debug('No instance record found to update')
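Paired with add_instance_record from Example #1, the apparent lifecycle is: insert a row with start_time when the job begins, then stamp end_time on that same row when it finishes. A sketch, with run_job standing in for the actual processing step:

def _demo_instance_record_lifecycle(cfg, conn):
    add_instance_record(cfg, conn)      # writes start_time, stores cfg['instance_record']
    run_job(cfg)                        # hypothetical processing step
    update_instance_record(cfg, conn)   # stamps end_time on the same row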
Example #5
def process_insar(cfg, n):
    try:
        log.info('Processing ISCE InSAR pair "{0}" for "{1}"'.format(cfg['sub_name'], cfg['username']))

        g1, g2 = earlier_granule_first(cfg['granule'], cfg['other_granules'][0])

        list_file = 'list.csv'
        write_list_file(os.path.join(cfg['workdir'], list_file), g1, g2)

        d1 = g1[17:25]
        d2 = g2[17:25]
        delta = (datetime.datetime.strptime(d2, '%Y%m%d')-datetime.datetime.strptime(d1, '%Y%m%d')).days
        ifm_dir = d1 + '_' + d2
        cfg['ifm'] = ifm_dir
        log.debug('IFM dir is: ' + ifm_dir)

        sd1 = d1[0:4]+'-'+d1[4:6]+'-'+d1[6:8]
        sd2 = d2[0:4]+'-'+d2[4:6]+'-'+d2[6:8]
        cfg["email_text"] = "This is a {0}-day InSAR pair from {1} to {2}.".format(delta, sd1, sd2)

        subswath = get_extra_arg(cfg, "subswath", "0")
        if subswath == "0":
            process(cfg, 'procAllS1StackISCE.py', ["-90", "90", "-180", "180", "-f", list_file, "-d"])
        else:
            process(cfg, 'procS1StackISCE.py', ["-f", list_file, "-d", "-s", subswath])

        subdir = os.path.join(cfg['workdir'], 'PRODUCT')
        if not os.path.isdir(subdir):
            log.info('PRODUCT directory not found: ' + subdir)
            log.error('Processing failed')
            raise Exception("Processing failed: PRODUCT directory not found")
        else:
            looks = get_looks(subdir)
            out_name = build_output_name_pair(g1, g2, cfg['workdir'], looks + "-iw" + subswath + cfg['suffix'])
            log.info('Output name: ' + out_name)

            out_path = os.path.join(cfg['workdir'], out_name)
            zip_file = out_path + '.zip'
            if os.path.isdir(out_path):
                shutil.rmtree(out_path)
            if os.path.isfile(zip_file):
                os.unlink(zip_file)
            cfg['out_path'] = out_path

            # clip_tiffs_to_roi(cfg, conn, product)

            log.debug('Renaming '+subdir+' to '+out_path)
            os.rename(subdir, out_path)

            find_browses(cfg, out_path)

            cfg['attachment'] = find_phase_png(out_path)
            add_esa_citation(g1, out_path)
            zip_dir(out_path, zip_file)

            cfg['final_product_size'] = [os.stat(zip_file).st_size, ]
            cfg['original_product_size'] = 0

            with get_db_connection('hyp3-db') as conn:
                record_metrics(cfg, conn)
                upload_product(zip_file, cfg, conn)
                success(conn, cfg)

    except Exception as e:
        log.exception('Processing failed')
        log.info('Notifying user')
        failure(cfg, str(e))

    cleanup_workdir(cfg)

    log.info('Done')
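The g[17:25] slices above rely on the Sentinel-1 granule naming convention, in which the acquisition start date occupies characters 17-24 of the granule name. A minimal sketch of the date handling, using hypothetical granule names:

import datetime

g1 = 'S1A_IW_SLC__1SDV_20180101T120000_20180101T120027_020000_02222A_ABCD'
g2 = 'S1A_IW_SLC__1SDV_20180113T120000_20180113T120027_020175_022ABC_DCBA'

d1, d2 = g1[17:25], g2[17:25]                 # '20180101', '20180113'
delta = (datetime.datetime.strptime(d2, '%Y%m%d')
         - datetime.datetime.strptime(d1, '%Y%m%d')).days   # 12-day pair
ifm_dir = d1 + '_' + d2                       # '20180101_20180113'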