Example #1
import math
import os


def gen_html(root_path, chunk_size=20):
    urls = list(get_urls())
    chunk_count = math.ceil(len(urls) / chunk_size)
    for i in range(chunk_count):
        with open(os.path.join(root_path, "%d.html" % i), "w") as f:
            # Slicing past the end of the list is safe in Python, so the
            # short final chunk needs no special case.
            subset = urls[i * chunk_size:(i + 1) * chunk_size]
            print(len(subset))

            f.write('<html lang="en"><head><meta charset="UTF-8">'
                    '<title>No.%d file</title>' % i)

            # template and _get_uid are assumed to be defined in the
            # surrounding module.
            for url in subset:
                f.write(template % _get_uid(url))
            f.write('</head><body></body></html>')
Example #3
import subprocess
import sys

from util import get_urls


def open_urls(urls):
    # Pass the URLs as an argument list so that shell metacharacters
    # (&, ?, ...) inside a URL cannot be misinterpreted.
    subprocess.call(["google-chrome"] + list(urls))


if __name__ == "__main__":
    chunk_size = 20
    chunk_num = int(sys.argv[1])  # which chunk of URLs to open
    urls = list(get_urls())       # materialize in case get_urls() yields lazily
    # Slicing past the end of the list yields the short final chunk,
    # so no explicit boundary check is needed.
    url_subset = urls[chunk_num * chunk_size:(chunk_num + 1) * chunk_size]
    open_urls(url_subset)
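A quick check of the chunk arithmetic with hypothetical data (45 URLs, the default chunk_size of 20): chunks 0 and 1 hold 20 URLs each and chunk 2 holds the remaining 5, because Python truncates a slice at the end of the list.

urls = ["https://example.com/%d" % n for n in range(45)]  # hypothetical data
chunk_size = 20
for chunk_num in range(3):
    subset = urls[chunk_num * chunk_size:(chunk_num + 1) * chunk_size]
    print(chunk_num, len(subset))  # -> 0 20 / 1 20 / 2 5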
Example #4
def gen_js():
    # Render the collected URLs as a JavaScript array literal.
    url_str_list = ['"%s"' % url.strip() for url in get_urls()]
    return "var urls = [%s];" % ','.join(url_str_list)
Example #5
def publish_data(acq_info,
                 project,
                 job_priority,
                 dem_type,
                 track,
                 tags,
                 starttime,
                 endtime,
                 master_scene,
                 slave_scene,
                 master_acqs,
                 slave_acqs,
                 orbitNumber,
                 direction,
                 platform,
                 union_geojson,
                 bbox,
                 ifg_hash,
                 in_master_orbit_file,
                 in_slave_orbit_file,
                 wuid=None,
                 job_num=None):
    """Map function for create interferogram job json creation."""

    logger.info("\n\n\n PUBLISH IFG JOB!!!")
    logger.info("project : %s " % project)
    logger.info("dem type : %s " % dem_type)
    logger.info("track : %s" % track)
    logger.info("tags : %s" % tags)
    logger.info("starttime, endtime, : %s : %s " % (starttime, endtime))
    logger.info("master_scene, slave_scene : %s, %s" %
                (master_scene, slave_scene))
    logger.info("union_geojson : %s, bbox : %s " % (union_geojson, bbox))
    logger.info("publish_data : orbitNumber : %s" % orbitNumber)
    #version = get_version()
    version = "v2.0.0"

    if isinstance(project, list):
        project = project[0]
    logger.info("project : %s" % project)

    # set job type and disk space reqs
    disk_usage = "100GB"

    # set job queue based on project
    job_queue = "coseismic_product_s1ifg-slc_localizer"
    #job_type = "job-coseismic-product-ifg:%s" %coseismic_product_ifg_version

    # get metadata
    master_md = {i: query_es(GRQ_ES_ENDPOINT, i) for i in master_scene}
    #logger.info("master_md: {}".format(json.dumps(master_md, indent=2)))
    slave_md = {i: query_es(GRQ_ES_ENDPOINT, i) for i in slave_scene}
    #logger.info("slave_md: {}".format(json.dumps(slave_md, indent=2)))

    # get urls (prefer s3)
    logger.info("master_md : %s" % master_md)
    logger.info("slave_md : %s" % slave_md)
    master_zip_url = util.get_urls(master_md)
    logger.info("master_zip_url: {}".format(master_zip_url))
    slave_zip_url = util.get_urls(slave_md)
    logger.info("slave_ids: {}".format(slave_zip_url))

    # get orbits
    master_orbit_file = None
    master_orbit_url = get_orbit_from_metadata(master_md)
    logger.info("master_orbit_url: {}".format(master_orbit_url))
    if master_orbit_url:
        master_orbit_file = os.path.basename(master_orbit_url)
    else:
        raise RuntimeError("Reference Orbit File NOT Found")
        '''
        master_orbit_file = in_master_orbit_file
        master_orbit_url = get_orbit_from_orbit_file(in_master_orbit_file)
        '''

    slave_orbit_file = None
    slave_orbit_url = get_orbit_from_metadata(slave_md)
    if slave_orbit_url:
        slave_orbit_file = os.path.basename(slave_orbit_url)
    else:
        raise RuntimeError("Secondery Orbit File NOT Found")
    '''
    else:
        slave_orbit_file = in_slave_orbit_file
        slave_orbit_url = get_orbit_from_orbit_file(in_slave_orbit_file)
    '''
    logger.info("slave_orbit_url: {}".format(slave_orbit_url))

    try:
        dem_type = get_dem_type(master_md)
    except Exception:
        # keep the caller-supplied dem_type if the metadata lookup fails
        pass

    slc_master_dt, slc_slave_dt = util.get_scene_dates_from_metadata(
        master_md, slave_md)

    # set localize urls
    localize_urls = [
        {'url': master_orbit_url},
        {'url': slave_orbit_url},
    ]
    for m in master_zip_url:
        localize_urls.append({'url': m})
    for s in slave_zip_url:
        localize_urls.append({'url': s})

    logger.info(" master_scene : %s slave_slcs : %s" %
                (master_scene, slave_scene))
    orbit_type = 'poeorb'
    logger.info("Publish IFG job: direction : %s, platform : %s" %
                (direction, platform))

    id = IFG_CFG_ID_TMPL.format(
        'M', len(master_scene), len(slave_scene), track,
        parser.parse(slc_master_dt.strftime('%Y%m%dT%H%M%S')),
        parser.parse(slc_slave_dt.strftime('%Y%m%dT%H%M%S')), orbit_type,
        ifg_hash[0:4])

    #id = "coseismic-product-ifg-cfg-%s" %id_hash[0:4]
    prod_dir = id
    os.makedirs(prod_dir, 0o755)

    met_file = os.path.join(prod_dir, "{}.met.json".format(id))
    ds_file = os.path.join(prod_dir, "{}.dataset.json".format(id))

    logger.info("master_orbit_file : %s" % master_orbit_file)
    logger.info("slave_orbit_file : %s" % slave_orbit_file)

    #with open(met_file) as f: md = json.load(f)
    md = {}
    md['id'] = id
    md['project'] = project
    md['priority'] = job_priority
    md['azimuth_looks'] = 7
    md['range_looks'] = 19
    md['filter_strength'] = 0.5
    md['precise_orbit_only'] = 'true'
    md['auto_bbox'] = 'true'
    md['_disk_usage'] = disk_usage
    md['soft_time_limit'] = 86400
    md['time_limit'] = 86700
    md['dem_type'] = dem_type
    md['track_number'] = track
    md['starttime'] = starttime
    md['endtime'] = endtime
    md['union_geojson'] = union_geojson
    md['master_scenes'] = master_scene
    md['slave_scenes'] = slave_scene
    md["master_acquisitions"] = master_acqs
    md["slave_acquisitions"] = slave_acqs
    md['orbitNumber'] = orbitNumber
    md['direction'] = direction
    md['platform'] = platform
    md['master_orbit_url'] = master_orbit_url
    md['slave_orbit_url'] = slave_orbit_url
    md['master_zip_url'] = master_zip_url
    md['slave_zip_url'] = slave_zip_url
    md['localize_urls'] = localize_urls
    md['slc_master_dt'] = slc_master_dt.strftime('%Y%m%dT%H%M%S')
    md['slc_slave_dt'] = slc_slave_dt.strftime('%Y%m%dT%H%M%S')
    md["master_zip_file"] = [os.path.basename(i) for i in master_zip_url]
    md["master_orbit_file"] = os.path.basename(master_orbit_url)
    md["slave_zip_file"] = [os.path.basename(i) for i in slave_zip_url]
    md["slave_orbit_file"] = os.path.basename(slave_orbit_url)
    md["full_id_hash"] = ifg_hash
    md["id_hash"] = ifg_hash[0:4]
    md["tags"] = tags

    if bbox:
        md['bbox'] = bbox

    with open(met_file, 'w') as f:
        json.dump(md, f, indent=2)

    print("creating dataset file : %s" % ds_file)
    create_dataset_json(id, version, met_file, ds_file)

    return prod_dir
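Because publish_data names the product directory after the dataset id and writes {id}.met.json inside it, the metadata can be read back with a small helper. load_met is a hypothetical sketch, not part of the original module:

import json
import os

def load_met(prod_dir):
    # prod_dir doubles as the dataset id, so the met.json path can be
    # reconstructed from the directory name alone.
    dataset_id = os.path.basename(os.path.normpath(prod_dir))
    with open(os.path.join(prod_dir, "%s.met.json" % dataset_id)) as f:
        return json.load(f)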