# Build a candidate time interval, count how many of its contexts already have
# an 'out' product in storage (SPC.exists), and queue the interval for
# submission only when exactly one context is missing; then submit each queued
# interval's sorted contexts via safe_submit_order with HIRS_TPW_DAILY() as a
# download-only input.
# NOTE(review): this fragment has been flattened onto a single physical line,
# so the original loop nesting is not visible; `intervals`, `dt_start`,
# `dt_end`, `wedge`, `comp`, `SPC`, `platform` and the *_version variables are
# defined outside this view. The leading `interval = ...` block presumably sat
# inside a loop that advanced dt_start/dt_end -- TODO confirm.
# NOTE(review): `contexts[0]` / `contexts[-1]` are read with no emptiness
# guard, unlike sibling fragments in this file that check `if contexts != []`
# first; an interval with no contexts would raise IndexError here -- verify
# upstream guarantees non-empty intervals.
# NOTE(review): the re-submission threshold here is `missing_contexts == 1`,
# while the parallel fragment below uses `missing_contexts > 3` -- presumably
# different backfill policies; confirm the difference is intentional.
interval = TimeInterval(dt_start, dt_end - wedge) contexts = comp.find_contexts(platform, hirs_version, collo_version, csrb_version, ctp_version, tpw_version, interval) num_contexts_exist = 0 for context in contexts: num_contexts_exist += SPC.exists(comp.dataset('out').product(context)) LOG.info("Interval {} has {}/{} contexts existing".format( interval, num_contexts_exist, len(contexts))) missing_contexts = len(contexts) - num_contexts_exist if missing_contexts == 1: intervals.append(interval) LOG.info("Submitting intervals...") for interval in intervals: LOG.info("Submitting interval {} -> {}".format(interval.left, interval.right)) contexts = comp.find_contexts(platform, hirs_version, collo_version, csrb_version, ctp_version, tpw_version, interval) LOG.info("\tThere are {} contexts in this interval".format(len(contexts))) contexts.sort() for context in contexts: LOG.debug(context) LOG.info("\tFirst context: {}".format(contexts[0])) LOG.info("\tLast context: {}".format(contexts[-1])) LOG.info("\t{}".format( safe_submit_order(comp, [comp.dataset('out')], contexts, download_onlies=[HIRS_TPW_DAILY()])))
# Submit one interval's sorted contexts for the 'out' dataset with
# hirs_ctp_orbital_comp as a download-only input, then record the resulting
# job-number range both to `file_obj` and to the logger.
# NOTE(review): this fragment is flattened onto one line AND truncated
# mid-statement (the final LOG.info("job numbers: ...".format( call is cut
# off), so the code is left byte-identical; do not reformat without the
# missing tail.
# NOTE(review): `job_nums = []` is immediately reassigned from
# safe_submit_order(...); the initializer only matters so `job_nums` stays
# bound if that call raises inside the `try:` -- presumably an (unseen)
# except clause follows, as in the sibling fragments.
# NOTE(review): commented-out debug lines (#for context ... #job_nums =
# range...) are retained from the original; candidates for deletion.
LOG.info("\tThere are {} contexts in this interval".format( len(contexts))) contexts.sort() if contexts != []: #for context in contexts: #LOG.info(context) LOG.info("\tFirst context: {}".format(contexts[0])) LOG.info("\tLast context: {}".format(contexts[-1])) try: job_nums = [] job_nums = safe_submit_order( comp, [comp.dataset('out')], contexts, download_onlies=[hirs_ctp_orbital_comp]) if job_nums != []: #job_nums = range(len(contexts)) #LOG.info("\t{}".format(job_nums)) file_obj.write( "contexts: [{}, {}]; job numbers: {{{}..{}}}\n".format( contexts[0], contexts[-1], job_nums[0], job_nums[-1])) LOG.info( "contexts: [{}, {}]; job numbers: {{{},{}}}".format( contexts[0], contexts[-1], job_nums[0], job_nums[-1])) LOG.info("job numbers: {{{}..{}}}\n".format(
# Open (append) the run log file, then submit one interval's sorted contexts
# for the 'out' dataset (no download_onlies here, unlike the sibling
# fragments) and record the job-number range to the file and the logger.
# NOTE(review): flattened onto one line and truncated mid-statement (the
# trailing "--> no jobs" LOG.info(...format( call is cut off); left
# byte-identical.
# NOTE(review): `file_obj` is opened here but not closed within the visible
# span -- presumably closed later outside this view; a `with` block would be
# safer. TODO confirm.
# NOTE(review): this variant formats job numbers with square brackets
# "[{}..{}]" while the sibling fragments use braces "{{{}..{}}}" -- cosmetic
# inconsistency in the emitted log text; confirm which form downstream log
# parsing expects before unifying.
file_obj = open(log_name, 'a') LOG.info("\tThere are {} contexts in this interval".format( len(contexts))) contexts.sort() if contexts != []: #for context in contexts: #LOG.info(context) LOG.info("\tFirst context: {}".format(contexts[0])) LOG.info("\tLast context: {}".format(contexts[-1])) try: job_nums = [] job_nums = safe_submit_order(comp, [comp.dataset('out')], contexts) if job_nums != []: #job_nums = range(len(contexts)) #LOG.info("\t{}".format(job_nums)) file_obj.write( "contexts: [{}, {}]; job numbers: [{}..{}]\n".format( contexts[0], contexts[-1], job_nums[0], job_nums[-1])) LOG.info("contexts: [{}, {}]; job numbers: [{},{}]".format( contexts[0], contexts[-1], job_nums[0], job_nums[-1])) LOG.info("job numbers: [{}..{}]\n".format( job_nums[0], job_nums[-1])) else: LOG.info("contexts: [{}, {}]; --> no jobs\n".format(
# Submit one interval's sorted contexts for the 'zonal_means' dataset with
# hirs_csrb_daily_comp as a download-only input, then record the job-number
# range to `file_obj` and the logger.
# NOTE(review): flattened onto one line and truncated mid-statement (the
# final LOG.info("job numbers: ...".format( call is cut off); code left
# byte-identical -- do not reformat without the missing tail.
# NOTE(review): `job_nums = []` is immediately reassigned; it only serves to
# keep the name bound if safe_submit_order raises inside the `try:` whose
# except clause lies beyond this view.
LOG.info("\tThere are {} contexts in this interval".format( len(contexts))) contexts.sort() if contexts != []: #for context in contexts: #LOG.info(context) LOG.info("\tFirst context: {}".format(contexts[0])) LOG.info("\tLast context: {}".format(contexts[-1])) try: job_nums = [] job_nums = safe_submit_order( comp, [comp.dataset('zonal_means')], contexts, download_onlies=[hirs_csrb_daily_comp]) if job_nums != []: #job_nums = range(len(contexts)) #LOG.info("\t{}".format(job_nums)) file_obj.write( "contexts: [{}, {}]; job numbers: {{{}..{}}}\n".format( contexts[0], contexts[-1], job_nums[0], job_nums[-1])) LOG.info( "contexts: [{}, {}]; job numbers: {{{},{}}}".format( contexts[0], contexts[-1], job_nums[0], job_nums[-1])) LOG.info("job numbers: {{{}..{}}}\n".format(
# Open (append) the run log file, submit one interval's sorted contexts for
# the 'out' dataset with hirs2nc_comp / hirs_avhrr_comp /
# hirs_csrb_monthly_comp as download-only inputs, and record either the
# job-number range or a "no jobs" marker to the file and the logger.
# NOTE(review): flattened onto one physical line; the original nesting
# (try/except around the submit, if/else on job_nums) cannot be reconstructed
# with certainty, so the code is left byte-identical.
# NOTE(review): `file_obj` is opened but never closed within this span --
# presumably closed elsewhere; a `with` block would guarantee cleanup. TODO
# confirm.
# NOTE(review): the broad `except Exception` only logs the traceback
# (LOG.warning) and continues -- apparently deliberate best-effort submission
# so one bad interval does not abort the batch; confirm this is intended.
# NOTE(review): `#sleep(10.)` and other commented-out debug lines are
# retained from the original; candidates for removal.
LOG.info("Opening log file {}".format(log_name)) file_obj = open(log_name,'a') LOG.info("\tThere are {} contexts in this interval".format(len(contexts))) contexts.sort() if contexts != []: #for context in contexts: #LOG.info(context) LOG.info("\tFirst context: {}".format(contexts[0])) LOG.info("\tLast context: {}".format(contexts[-1])) try: job_nums = [] job_nums = safe_submit_order(comp, [comp.dataset('out')], contexts, download_onlies=[hirs2nc_comp, hirs_avhrr_comp, hirs_csrb_monthly_comp]) if job_nums != []: #job_nums = range(len(contexts)) #LOG.info("\t{}".format(job_nums)) file_obj.write("contexts: [{}, {}]; job numbers: {{{}..{}}}\n".format(contexts[0], contexts[-1], job_nums[0],job_nums[-1])) LOG.info("contexts: [{}, {}]; job numbers: {{{},{}}}".format(contexts[0], contexts[-1], job_nums[0],job_nums[-1])) LOG.info("job numbers: {{{}..{}}}\n".format(job_nums[0],job_nums[-1])) else: LOG.info("contexts: {{{}, {}}}; --> no jobs".format(contexts[0], contexts[-1])) file_obj.write("contexts: {{{}, {}}}; --> no jobs\n".format(contexts[0], contexts[-1])) except Exception: LOG.warning(traceback.format_exc()) #sleep(10.)
# Submit one interval's sorted contexts for the 'means' dataset with
# hirs2nc_comp and hirs_avhrr_comp as download-only inputs, then record the
# job-number range to `file_obj` and the logger.
# NOTE(review): flattened onto one line and truncated mid-statement (the
# final LOG.info("job numbers: ...".format( call is cut off); code left
# byte-identical -- do not reformat without the missing tail.
# NOTE(review): `job_nums = []` is immediately reassigned; it only keeps the
# name bound if safe_submit_order raises inside the `try:` whose except
# clause lies beyond this view.
LOG.info("\tThere are {} contexts in this interval".format( len(contexts))) contexts.sort() if contexts != []: #for context in contexts: #LOG.info(context) LOG.info("\tFirst context: {}".format(contexts[0])) LOG.info("\tLast context: {}".format(contexts[-1])) try: job_nums = [] job_nums = safe_submit_order( comp, [comp.dataset('means')], contexts, download_onlies=[hirs2nc_comp, hirs_avhrr_comp]) if job_nums != []: #job_nums = range(len(contexts)) #LOG.info("\t{}".format(job_nums)) file_obj.write( "contexts: [{}, {}]; job numbers: {{{}..{}}}\n".format( contexts[0], contexts[-1], job_nums[0], job_nums[-1])) LOG.info( "contexts: [{}, {}]; job numbers: {{{},{}}}".format( contexts[0], contexts[-1], job_nums[0], job_nums[-1])) LOG.info("job numbers: {{{}..{}}}\n".format(
# Parallel variant of the first fragment: build a candidate interval, count
# existing 'out' products, and queue the interval for submission when MORE
# THAN THREE contexts are missing; then submit each queued interval's sorted
# contexts with HIRS() and HIRS_CTP_ORBITAL() as download-only inputs.
# NOTE(review): flattened onto a single physical line; original loop nesting
# is not visible. `intervals`, `dt_start`, `dt_end`, `wedge`, `comp`, `SPC`
# and the *_version variables come from outside this view.
# NOTE(review): threshold here is `missing_contexts > 3` vs `== 1` in the
# parallel fragment above -- presumably different backfill policies; confirm
# intentional.
# NOTE(review): `contexts[0]` / `contexts[-1]` are read with no emptiness
# guard (siblings check `if contexts != []`); an empty interval would raise
# IndexError -- verify upstream guarantees non-empty intervals.
interval = TimeInterval(dt_start, dt_end - wedge) contexts = comp.find_contexts(platform, hirs_version, collo_version, csrb_version, ctp_version, tpw_version, interval) num_contexts_exist = 0 for context in contexts: num_contexts_exist += SPC.exists(comp.dataset('out').product(context)) LOG.info("Interval {} has {}/{} contexts existing".format( interval, num_contexts_exist, len(contexts))) missing_contexts = len(contexts) - num_contexts_exist if missing_contexts > 3: intervals.append(interval) LOG.info("Submitting intervals...") for interval in intervals: LOG.info("Submitting interval {} -> {}".format(interval.left, interval.right)) contexts = comp.find_contexts(platform, hirs_version, collo_version, csrb_version, ctp_version, tpw_version, interval) LOG.info("\tThere are {} contexts in this interval".format(len(contexts))) contexts.sort() for context in contexts: LOG.debug(context) LOG.info("\tFirst context: {}".format(contexts[0])) LOG.info("\tLast context: {}".format(contexts[-1])) LOG.info("\t{}".format( safe_submit_order(comp, [comp.dataset('out')], contexts, download_onlies=[HIRS(), HIRS_CTP_ORBITAL()])))