def main():
    """Smoke test: fetch the partial GRQ record for a sample SLC and dump its key fields."""
    # Sample master/slave acquisition ids (currently unused by the body below;
    # kept for parity with resolve_source testing).
    master_acqs = ["acquisition-S1A_IW_ACQ__1SDV_20180807T135955_20180807T140022_023141_02837E_DA79"]
    slave_acqs = [
        "acquisition-S1A_IW_ACQ__1SDV_20180714T140019_20180714T140046_022791_027880_AFD3",
        "acquisition-S1A_IW_ACQ__1SDV_20180714T135954_20180714T140021_022791_027880_D224",
        "acquisition-S1A_IW_ACQ__1SDV_20180714T135929_20180714T135956_022791_027880_9FCA",
    ]

    # Pull the partial record for a known SLC and print the whole thing first.
    acq_data = util.get_partial_grq_data(
        "acquisition-S1A_IW_SLC__1SSV_20160630T135949_20160630T140017_011941_01266D_C62F"
    )['fields']['partial'][0]
    print(acq_data)

    # Then the individual fields of interest.
    print(acq_data["dataset_type"])
    print(acq_data["dataset"])
    print(acq_data["metadata"]["identifier"])
    print(acq_data["metadata"]["download_url"])
    print(acq_data["metadata"]["archive_filename"])
def get_acq_data_from_list(acq_list):
    """For each acquisition id, fetch its partial GRQ record, check whether the
    corresponding SLC already exists, and return a dict of acq id -> acq object
    (status flag 1 = SLC exists, 0 = it does not)."""
    logger.info("get_acq_data_from_list")
    acq_info = {}
    for acq in acq_list:
        acq_data = util.get_partial_grq_data(acq)['fields']['partial'][0]
        identifier = acq_data['metadata']['identifier']
        if check_slc_status(identifier):
            logger.info("%s exists" % identifier)
            acq_info[acq] = get_acq_object(acq, acq_data, 1)
        else:
            logger.info("%s does NOT exist" % identifier)
            acq_info[acq] = get_acq_object(acq, acq_data, 0)
    return acq_info
def get_output_data(acq_info):
    """Assemble per-acquisition localization records.

    Returns None as soon as any acquisition is not yet localized; otherwise
    a dict of acq id -> record with identifier, urls, and the chosen
    localize_url (first s3:// URL if present, else the last URL seen).
    """
    localized_data = {}
    for acq_id, info in acq_info.items():
        if not info['localized']:
            return None
        acq_data = info['acq_data']
        identifier = acq_data['metadata']['identifier']
        slc_data = util.get_partial_grq_data(identifier)['fields']['partial'][0]
        urls = slc_data['urls']

        # Prefer an s3:// URL; fall back to whatever URL was iterated last.
        localize_url = ""
        for candidate in urls:
            localize_url = candidate
            if candidate.startswith('s3://'):
                break

        localized_data[acq_id] = {
            'acquisition': acq_id,
            'identifier': identifier,
            'localized': info['localized'],
            'urls': urls,
            'localize_url': localize_url,
        }
    return localized_data
# 예제 #4 ("Example #4" -- snippet-boundary artifact left over from scraping;
# commented out: as a bare statement it raised NameError at import time)
# 0
def resolve_source(ctx_file):
    """Resolve best URL from acquisition.

    Reads the job context from *ctx_file*, classifies every master/slave
    acquisition by whether its SLC product already exists, and returns the
    job parameters needed to localize the acquisitions.

    NOTE(review): this function references several names that are not
    defined anywhere visible (slave_acqs, master_scene, slave_scene,
    starttime, endtime, union_geojson, bbox, dem_type, track) -- they
    presumably should come from ctx["input_metadata"]; confirm against the
    caller before exercising this code path. The original body also mixed
    tabs and spaces, which is a TabError under Python 3; indentation has
    been normalized to spaces without changing the logic.
    """
    # read in context
    with open(ctx_file) as f:
        ctx = json.load(f)

    sleep_seconds = 30

    # build args
    project = ctx["input_metadata"]["project"]
    if isinstance(project, list):
        project = project[0]

    acq_list = ctx["input_metadata"]["acq_list"]

    spyddder_extract_version = ctx["spyddder_extract_version"]
    acquisition_localizer_version = ctx["acquisition_localizer_version"]
    job_priority = ctx["input_metadata"]["job_priority"]
    job_type, job_version = ctx['job_specification']['id'].split(':')

    queues = []  # where should we get the queue value
    identifiers = []
    prod_dates = []

    acq_info = {}

    index_suffix = "S1-IW_ACQ"

    # Find out status of all master ACQs, create an ACQ object for each and
    # record it in acq_info.
    for acq in acq_list:
        acq_type = "master"
        acq_data = util.get_partial_grq_data(acq)['fields']['partial'][0]
        status = check_slc_status(acq_data['metadata']['identifier'])
        if status:
            logger.info("%s exists" % acq_data['metadata']['identifier'])
            acq_info[acq] = get_acq_object(acq, acq_type, acq_data, 1)
        else:
            logger.info("%s does NOT exist" % acq_data['metadata']['identifier'])
            acq_info[acq] = get_acq_object(acq, acq_type, acq_data, 0)

    # Same classification for all slave ACQs.
    # NOTE(review): slave_acqs is undefined in this scope -- see docstring.
    for acq in slave_acqs:
        acq_type = "slave"
        acq_data = util.get_partial_grq_data(acq)['fields']['partial'][0]
        status = check_slc_status(acq_data['metadata']['identifier'])
        if status:
            logger.info("%s exists" % acq_data['metadata']['identifier'])
            acq_info[acq] = get_acq_object(acq, acq_type, acq_data, 1)
        else:
            logger.info("%s does NOT exist" % acq_data['metadata']['identifier'])
            acq_info[acq] = get_acq_object(acq, acq_type, acq_data, 0)

    # Accumulator lists kept for parity with the multi-job variant of this
    # resolver (see the commented-out return below); only the scalar values
    # are actually returned.
    acq_infoes = []
    projects = []
    job_priorities = []
    job_types = []
    job_versions = []
    spyddder_extract_versions = []
    acquisition_localizer_versions = []
    starttimes = []
    endtimes = []
    bboxes = []
    union_geojsons = []
    master_scenes = []
    slave_scenes = []
    # NOTE(review): master_scene / slave_scene are undefined here -- see docstring.
    master_scenes.append(master_scene)
    slave_scenes.append(slave_scene)

    acq_infoes.append(acq_info)
    projects.append(project)
    job_priorities.append(job_priority)
    job_types.append(job_type)
    job_versions.append(job_version)
    spyddder_extract_versions.append(spyddder_extract_version)
    acquisition_localizer_versions.append(acquisition_localizer_version)
    # NOTE(review): starttime / endtime / union_geojson / bbox undefined here.
    starttimes.append(starttime)
    endtimes.append(endtime)
    union_geojsons.append(union_geojson)
    if bbox:
        bboxes.append(bbox)

    # NOTE(review): dem_type and track are also undefined in this scope.
    return acq_info, spyddder_extract_version, acquisition_localizer_version, project, job_priority, job_type, job_version, dem_type, track, starttime, endtime, master_scenes, slave_scenes, union_geojson, bbox
# 예제 #5 ("Example #5" -- snippet-boundary artifact left over from scraping;
# commented out: as a bare statement it raised NameError at import time)
# 0
        # NOTE(review): orphaned fragment -- the enclosing function's header was
        # lost at the snippet boundary above. From what is visible this looks
        # like the tail of a poll-Elasticsearch-until-status-updates loop with
        # exponential backoff ('r' and the loop header are not in view --
        # confirm against the original source before relying on this).
        result = r.json()
        sleep_seconds = sleep_seconds * 2

    logging.info("Job status updated on ES to %s"%str(result["hits"]["hits"][0]["_source"]["status"]))
    return True
    

def main():
    """Smoke test (duplicate of the copy at the top of this file): fetch the
    partial GRQ record for a sample SLC and dump its key fields."""
    # Sample master/slave acquisition ids (unused below; kept for reference).
    master_acqs = ["acquisition-S1A_IW_ACQ__1SDV_20180807T135955_20180807T140022_023141_02837E_DA79"]
    slave_acqs = [
        "acquisition-S1A_IW_ACQ__1SDV_20180714T140019_20180714T140046_022791_027880_AFD3",
        "acquisition-S1A_IW_ACQ__1SDV_20180714T135954_20180714T140021_022791_027880_D224",
        "acquisition-S1A_IW_ACQ__1SDV_20180714T135929_20180714T135956_022791_027880_9FCA",
    ]

    # Pull the partial record for a known SLC and print the whole thing first.
    acq_data = util.get_partial_grq_data(
        "acquisition-S1A_IW_SLC__1SSV_20160630T135949_20160630T140017_011941_01266D_C62F"
    )['fields']['partial'][0]
    print(acq_data)

    # Then the individual fields of interest.
    print(acq_data["dataset_type"])
    print(acq_data["dataset"])
    print(acq_data["metadata"]["identifier"])
    print(acq_data["metadata"]["download_url"])
    print(acq_data["metadata"]["archive_filename"])


if __name__ == "__main__":
    main()