Example #1
def add_citation(cfg, dir_):

    if not cfg['granule'].startswith('S1'):
        return

    y = datetime.datetime.now().year
    ay = None
    for subdir, dirs, files in os.walk(dir_):
        for f in files:
            try:
                # Look for a timestamp token such as 20180605T120000 and take its year
                for item in f.split("_"):
                    if len(item) >= 15 and item[0:8].isdigit() and item[8] == "T" and item[9:15].isdigit():
                        ay = item[0:4]
                        break
            except Exception:
                log.error("Unable to determine acquisition year from filename {f}".format(f=f))
            if ay:
                break
        if ay:
            break

    if ay is None:
        ay = y

    with open(os.path.join(dir_, 'ESA_citation.txt'), 'w') as f:
        f.write('ASF DAAC {0}, contains modified Copernicus Sentinel data {1}, processed by ESA.'.format(y, ay))
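
# A minimal usage sketch (hypothetical values, not from the source above).
# add_citation() scans the filenames under dir_ for a timestamp token such as
# 20180605T120000 and takes its first four digits as the acquisition year;
# if no such token is found, the current year is used instead.
cfg = {'granule': 'S1A_IW_SLC__1SDV_20180605T120000'}  # anything starting with 'S1'
add_citation(cfg, '/tmp/product_dir')  # writes /tmp/product_dir/ESA_citation.txt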
Example #2
def hyp3_process(cfg, n):
    try:
        log.info('Processing hello_world')
        if not cfg['skip_processing']:
            log.info(f'Process starting at {datetime.now()}')
            launch_dir = os.getcwd()
            os.chdir(cfg['workdir'])

            hyp3proclib.process(cfg, 'proc_ci', ["--hello-world"])

            os.chdir(launch_dir)
        else:
            log.info('Processing skipped!')
            cfg['log'] += "(debug mode)"

        cfg['success'] = True
        hyp3proclib.update_completed_time(cfg)

        product_dir = os.path.join(cfg['workdir'], 'PRODUCT')
        if not os.path.isdir(product_dir):
            log.info(f'PRODUCT directory not found: {product_dir}')
            log.error('Processing failed')
            raise Exception('Processing failed: PRODUCT directory not found')

        # TODO: final product cleanup and upload to HyP3 DB

    except Exception as e:
        log.exception('ci processing failed!')
        log.info('Notifying user')
        hyp3proclib.failure(cfg, str(e))

    hyp3proclib.file_system.cleanup_workdir(cfg)

    log.info('ci done')
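
# For illustration only (hypothetical values, not from the source above): the
# cfg mapping read and written by hyp3_process() is assumed to carry at least
# the keys used above, e.g.
#   cfg = {'skip_processing': False, 'workdir': '/tmp/work', 'log': ''}
# with cfg['success'] set to True once processing completes.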
Example #3
def send_email(
        to_address, subject, body, from_address="no-reply@asf-hyp3", retries=0,
        maximum_retries=0, mime_type="plain"):
    """Send an email and return whether the email was successfully sent.

    We also retry sending the email if something went wrong the first
    time, with the maximum number of retries configurable in the
    arguments. This method only supports sending plain text emails.
    """
    if retries > maximum_retries:
        log.critical(
            "Notification failed permanently (maximum retries reached)",
        )
        return False, None
    if retries == 0:
        log.info("Sending email")
    else:
        log.info("Retrying email")

    smtp = smtplib.SMTP("localhost")

    msg = MIMEMultipart('related')
    msg["Subject"] = subject
    msg["From"] = from_address
    msg["To"] = to_address
    msg.preamble = 'This is a multi-part message in MIME format.'

    msgAlt = MIMEMultipart('alternative')
    msg.attach(msgAlt)

    msgText = MIMEText('HyP3 product notification email')
    msgAlt.attach(msgText)

    msgText = MIMEText(body)
    msgText.replace_header('Content-Type', 'text/html')
    msgAlt.attach(msgText)

    log.debug("Sending email from {0} to {1}".format(from_address, to_address))

    bcc_address = []
    bcc = get_config('general', 'bcc', default='')
    if len(bcc) > 0:
        bcc_address += bcc.split(',')
        log.debug("Bcc: " + str(bcc_address))

    try:
        smtp.sendmail(from_address, [to_address] + bcc_address, msg.as_string())
    except smtplib.SMTPException as e:
        msg = str(e)
        log.error("Failed to notify user: "******"Notification failed permanently (maximum retries reached)")
            return False, msg

        return send_email(to_address, subject, body, from_address, retries + 1, maximum_retries)

    smtp.quit()
    return True, None
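
# A minimal usage sketch (hypothetical addresses, not from the source above):
# send_email() returns a (success, error) tuple and calls itself again on an
# SMTPException until maximum_retries is exceeded.
ok, error = send_email(
    to_address='user@example.com',
    subject='HyP3 product notification',
    body='<p>Your product is ready.</p>',
    maximum_retries=3,
)
if not ok:
    log.error('Email could not be delivered: %s', error)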
Example #4
def get_config(section, key, default=None, config_file=None):
    if hyp3proclib.default_cfg is None:
        log.debug('Config keys requested from uninitialized config file!')
        init_config(config_file=config_file)
    if hyp3proclib.default_cfg.has_section(section) \
            and hyp3proclib.default_cfg.has_option(section, key):
        return hyp3proclib.default_cfg.get(section, key)
    else:
        if default is None:
            log.error('No config value for: ' + section + '/' + key)
        return default
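
# A minimal usage sketch: the call below mirrors how Example #3 reads its bcc
# setting; the ('general', 'bcc') section/key pair comes from that example, and
# the default is returned when the key is absent from the config file.
bcc = get_config('general', 'bcc', default='')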
Example #5
def log_instance_shutdown_in_hyp3_db(cfg):
    if cfg['proc_node_type'] != 'CLOUD':
        return

    instance = get_instance_info(cfg)
    try:
        with get_db_connection('hyp3-db') as hyp3db_conn:
            sql = "update instances set shutdown_time = current_timestamp where id = (%(instance_id)s)"
            query_database(hyp3db_conn, sql, instance, commit=True)
    except Exception as e:
        log.error("Instance %s could not be updated with shutdown time",
                  instance["instance_id"])
        log.error("Error was: %s", str(e))
    else:
        log.info("Instance %s was updated with shutdown time",
                 instance["instance_id"])
Example #6
def add_instance_to_hyp3_db(cfg):
    if cfg['proc_node_type'] != 'CLOUD':
        return
    instance = get_instance_info(cfg)
    try:
        with get_db_connection('hyp3-db') as hyp3db_conn:
            sql = 'insert into instances (id, start_time, process_id) values (%(instance_id)s, current_timestamp, %(process_id)s);'
            query_database(conn=hyp3db_conn,
                           query=sql,
                           params=instance,
                           commit=True)
    except Exception as e:
        log.error("Instance %s could not be inserted into instances",
                  instance['instance_id'])
        log.error("Error was: %s", str(e))
    else:
        log.info("Instance %s was inserted into instances",
                 instance['instance_id'])
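
# For illustration only (hypothetical values, not from the source above):
# get_instance_info(cfg) is assumed to return a mapping with the keys named by
# the SQL placeholders, e.g.
#   instance = {'instance_id': 'i-0abc123def4567890', 'process_id': 42}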
Example #7
def update_instance_with_specific_gamma_id(cfg):
    if cfg['proc_node_type'] != 'CLOUD':
        return

    instance = get_instance_info(cfg)
    try:
        with get_db_connection('hyp3-db') as hyp3db_conn:
            sql = 'update instances set process_id = %(process_id)s where id = %(instance_id)s'
            query_database(conn=hyp3db_conn,
                           query=sql,
                           params=instance,
                           commit=True)
    except Exception as e:
        log.error(
            "any_gamma instance %s could not be updated with specific gamma process id, %s",
            instance['instance_id'], cfg['proc_name'])
        log.error("Error was: %s", str(e))
    else:
        log.info(
            "any_gamma instance %s was update with specific gamma process id, %s",
            instance['instance_id'], cfg['proc_name'])
Example #8
def check_lockfile(cfg):
    lock_file = os.path.join(cfg['lock_dir'], cfg['proc_name'] + '.lock')
    cfg['lock_file'] = lock_file

    if os.path.isfile(lock_file):
        log.info('Lock file exists: ' + lock_file)
        log.info('Exiting -- already running.')
        sys.exit(0)

    # Use os.open with O_CREAT | O_EXCL so that two ingests can't both pass the
    # check above and then both create the lock file: with O_EXCL, creation
    # fails for whichever process arrives second.
    try:
        o = os.open(lock_file, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
        fd = os.fdopen(o, 'w')
    except Exception as e:
        log.warning('Failed to open lock file: ' + str(e))
        fd = None

    if not fd:
        log.error('Could not open lock file: ' + lock_file)
        sys.exit(1)

    pid = str(os.getpid())
    fd.write(pid)
    fd.close()

    # Now check the file just in case...
    with open(lock_file, 'r') as fd:
        s = fd.read()

    if s != pid:
        log.error('Failed to correctly initialize lock file')
        sys.exit(1)
    else:
        log.info('Acquired lock file, PID is ' + pid)
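
# A standalone sketch of the same locking idea (hypothetical helper, not part
# of the source above): os.O_CREAT | os.O_EXCL makes creation fail if the file
# already exists, so only one of two racing processes can acquire the lock.
def try_acquire_lock(path):
    try:
        fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
    except FileExistsError:
        return False  # another process already holds the lock
    with os.fdopen(fd, 'w') as f:
        f.write(str(os.getpid()))  # record the owner's PID for later inspection
    return True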
Example #9
def process_insar(cfg, n):
    try:
        log.info('Processing ISCE InSAR pair "{0}" for "{1}"'.format(cfg['sub_name'], cfg['username']))

        g1, g2 = earlier_granule_first(cfg['granule'], cfg['other_granules'][0])

        list_file = 'list.csv'
        write_list_file(os.path.join(cfg['workdir'], list_file), g1, g2)

        d1 = g1[17:25]
        d2 = g2[17:25]
        delta = (datetime.datetime.strptime(d2, '%Y%m%d')-datetime.datetime.strptime(d1, '%Y%m%d')).days
        ifm_dir = d1 + '_' + d2
        cfg['ifm'] = ifm_dir
        log.debug('IFM dir is: ' + ifm_dir)

        sd1 = d1[0:4]+'-'+d1[4:6]+'-'+d1[6:8]
        sd2 = d2[0:4]+'-'+d2[4:6]+'-'+d2[6:8]
        cfg["email_text"] = "This is a {0}-day InSAR pair from {1} to {2}.".format(delta, sd1, sd2)

        subswath = get_extra_arg(cfg, "subswath", "0")
        if subswath == "0":
            process(cfg, 'procAllS1StackISCE.py', ["-90", "90", "-180", "180", "-f", list_file, "-d"])
        else:
            process(cfg, 'procS1StackISCE.py', ["-f", list_file, "-d", "-s", subswath])

        subdir = os.path.join(cfg['workdir'], 'PRODUCT')
        if not os.path.isdir(subdir):
            log.info('PRODUCT directory not found: ' + subdir)
            log.error('Processing failed')
            raise Exception("Processing failed: PRODUCT directory not found")
        else:
            looks = get_looks(subdir)
            out_name = build_output_name_pair(g1, g2, cfg['workdir'], looks + "-iw" + subswath + cfg['suffix'])
            log.info('Output name: ' + out_name)

            out_path = os.path.join(cfg['workdir'], out_name)
            zip_file = out_path + '.zip'
            if os.path.isdir(out_path):
                shutil.rmtree(out_path)
            if os.path.isfile(zip_file):
                os.unlink(zip_file)
            cfg['out_path'] = out_path

            # clip_tiffs_to_roi(cfg, conn, product)

            log.debug('Renaming '+subdir+' to '+out_path)
            os.rename(subdir, out_path)

            find_browses(cfg, out_path)

            cfg['attachment'] = find_phase_png(out_path)
            add_esa_citation(g1, out_path)
            zip_dir(out_path, zip_file)

            cfg['final_product_size'] = [os.stat(zip_file).st_size, ]
            cfg['original_product_size'] = 0

            with get_db_connection('hyp3-db') as conn:
                record_metrics(cfg, conn)
                upload_product(zip_file, cfg, conn)
                success(conn, cfg)

    except Exception as e:
        log.exception('Processing failed')
        log.info('Notifying user')
        failure(cfg, str(e))

    cleanup_workdir(cfg)

    log.info('Done')
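
# A small illustration (hypothetical granule names, not from the source above)
# of the date handling in process_insar(): characters 17-24 of a Sentinel-1
# granule name hold the acquisition date, and the pair separation in days is
# the difference between the two dates.
g1 = 'S1A_IW_SLC__1SDV_20180605T120000'
g2 = 'S1A_IW_SLC__1SDV_20180617T120000'
d1, d2 = g1[17:25], g2[17:25]  # '20180605', '20180617'
delta = (datetime.datetime.strptime(d2, '%Y%m%d')
         - datetime.datetime.strptime(d1, '%Y%m%d')).days  # 12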