def run(pair, ymd, is_monthly):
    """
    pair: sat1+sensor1_sat2+sensor2
    ymd: str YYYYMMDD
    """
    # Extract the satellite and sensor information from the pair argument
    part1, part2 = pair.split("_")
    sat1, sensor1 = part1.split("+")
    sat2, sensor2 = part2.split("+")

    # Decide whether the monitored satellite is geostationary (GEO) or polar-orbiting (LEO)
    if "FY2" in part1 or "FY4" in part1:
        Type = "GEOLEO"
    elif "FY3" in part1:
        Type = "LEOLEO"
    else:
        Log.error("Can't distinguish the satellite type")
        return

    # Load the plotting configuration file
    plt_cfg_file = os.path.join(MainPath, "%s_%s_3d.yaml" % (sensor1, sensor2))
    plt_cfg = loadYamlCfg(plt_cfg_file)
    if plt_cfg is None:
        Log.error("Config file not found: {}".format(plt_cfg_file))
        return

    Log.info(u"----- Start Drawing Regression-Pic, PAIR: {}, YMD: {} -----".format(pair, ymd))

    for each in plt_cfg["regression"]:
        dict_cabr = {}
        dict_cabr_d = {}
        dict_cabr_n = {}
        dict_bias = {}
        dict_bias_d = {}
        dict_bias_n = {}

        # Number of days to roll back
        if is_monthly:
            PERIOD = calendar.monthrange(int(ymd[:4]), int(ymd[4:6]))[1]  # days in the month
            ymd = ymd[:6] + "%02d" % PERIOD  # last day of the month
        else:
            PERIOD = plt_cfg[each]["days"]

        # must be in "all", "day", "night"
        Day_Night = ["all", "day", "night"]
        if "time" in plt_cfg[each].keys():
            Day_Night = plt_cfg[each]["time"]
            Day_Night = [t for t in Day_Night if t in ("all", "day", "night")]

        for idx, chan in enumerate(plt_cfg[each]["chan"]):
            Log.info(u"Start Drawing {} Channel {}".format(each, chan))
            oneHDF5 = ReadHDF5()
            num_file = PERIOD
            for daydelta in xrange(PERIOD):
                cur_ymd = pb_time.ymd_plus(ymd, -daydelta)
                hdf5_name = "COLLOC+%sIR,%s_C_BABJ_%s.hdf5" % (Type, pair, cur_ymd)
                filefullpath = os.path.join(MATCH_DIR, pair, hdf5_name)
                if not os.path.isfile(filefullpath):
                    Log.info(u"File not found: {}".format(filefullpath))
                    num_file -= 1
                    continue
                if not oneHDF5.LoadData(filefullpath, chan):
                    Log.error("Error occurred when reading %s of %s" % (chan, filefullpath))
            if num_file == 0:
                Log.error(u"No file found.")
                continue
            elif num_file != PERIOD:
                Log.error(u"{} of {} file(s) found.".format(num_file, PERIOD))

            if is_monthly:
                str_time = ymd[:6]
                cur_path = os.path.join(MRA_DIR, pair, str_time)
            else:
                str_time = ymd
                cur_path = os.path.join(DRA_DIR, pair, str_time)

            # delete near-zero values in std
            # TODO: some very small std values may be outliers that get an extremely
            # large weight, so the filter was changed from std > 0 to std > 0.0001
            if len(oneHDF5.rad1_std) > 0:
                deletezeros = np.where(oneHDF5.rad1_std > 0.0001)
                oneHDF5.rad1_std = oneHDF5.rad1_std[deletezeros]
                oneHDF5.rad1 = oneHDF5.rad1[deletezeros] if len(
                    oneHDF5.rad1) > 0 else oneHDF5.rad1
                oneHDF5.rad2 = oneHDF5.rad2[deletezeros] if len(
                    oneHDF5.rad2) > 0 else oneHDF5.rad2
                oneHDF5.tbb1 = oneHDF5.tbb1[deletezeros] if len(
                    oneHDF5.tbb1) > 0 else oneHDF5.tbb1
                oneHDF5.tbb2 = oneHDF5.tbb2[deletezeros] if len(
                    oneHDF5.tbb2) > 0 else oneHDF5.tbb2
                oneHDF5.time = oneHDF5.time[deletezeros] if len(
                    oneHDF5.time) > 0 else oneHDF5.time
                oneHDF5.lon1 = oneHDF5.lon1[deletezeros] if len(
                    oneHDF5.lon1) > 0 else oneHDF5.lon1
                oneHDF5.lon2 = oneHDF5.lon2[deletezeros] if len(
                    oneHDF5.lon2) > 0 else oneHDF5.lon2
            if len(oneHDF5.ref1_std) > 0:
                deletezeros = np.where(oneHDF5.ref1_std > 0.0001)
                oneHDF5.ref1_std = oneHDF5.ref1_std[deletezeros]
                oneHDF5.ref1 = oneHDF5.ref1[deletezeros] if len(
                    oneHDF5.ref1) > 0 else oneHDF5.ref1
                oneHDF5.ref2 = oneHDF5.ref2[deletezeros] if len(
                    oneHDF5.ref2) > 0 else oneHDF5.ref2
                oneHDF5.dn1 = oneHDF5.dn1[deletezeros] if len(
                    oneHDF5.dn1) > 0 else oneHDF5.dn1
                oneHDF5.dn2 = oneHDF5.dn2[deletezeros] if len(
                    oneHDF5.dn2) > 0 else oneHDF5.dn2
                oneHDF5.time = oneHDF5.time[deletezeros] if len(
                    oneHDF5.time) > 0 else oneHDF5.time
                oneHDF5.lon1 = oneHDF5.lon1[deletezeros] if len(
                    oneHDF5.lon1) > 0 else oneHDF5.lon1
                oneHDF5.lon2 = oneHDF5.lon2[deletezeros] if len(
                    oneHDF5.lon2) > 0 else oneHDF5.lon2

            # find out day and night
            if ("day" in Day_Night or "night" in Day_Night) and len(oneHDF5.time) > 0:
                vect_is_day = np.vectorize(is_day_timestamp_and_lon)
                day_index = vect_is_day(oneHDF5.time, oneHDF5.lon1)
                night_index = np.logical_not(day_index)
            else:
                day_index = None
                night_index = None

            # Pair-wide attributes are handled in the pair loop; channel-specific
            # attributes in the channel loop.
            # get threshold, unit, names...
            xname, yname = each.split("-")
            xname_l = plt_cfg[each]["x_name"]
            xunit = plt_cfg[each]["x_unit"]
            xlimit = plt_cfg[each]["x_range"][idx]
            xmin, xmax = xlimit.split("-")
            xmin = float(xmin)
            xmax = float(xmax)
            yname_l = plt_cfg[each]["y_name"]
            yunit = plt_cfg[each]["y_unit"]
            ylimit = plt_cfg[each]["y_range"][idx]
            ymin, ymax = ylimit.split("-")
            ymin = float(ymin)
            ymax = float(ymax)

            weight = None
            if "rad" in xname:
                x = oneHDF5.rad1
            elif "tbb" in xname:
                x = oneHDF5.tbb1
            elif "ref" in xname:
                x = oneHDF5.ref1
            elif "dn" in xname:
                x = oneHDF5.dn1
            else:
                Log.error("Can't plot %s" % each)
                continue
            if "rad" in yname:
                y = oneHDF5.rad2
            elif "tbb" in yname:
                y = oneHDF5.tbb2
            elif "ref" in yname:
                y = oneHDF5.ref2
            else:
                Log.error("Can't plot %s" % each)
                continue

            if "rad" in xname and "rad" in yname:
                if len(oneHDF5.rad1_std) > 0:
                    weight = oneHDF5.rad1_std
                o_name = "RadCalCoeff"
            elif "tbb" in xname and "tbb" in yname:
                o_name = "TBBCalCoeff"
            elif "ref" in xname and "ref" in yname:
                if len(oneHDF5.ref1_std) > 0:
                    weight = oneHDF5.ref1_std
                o_name = "CorrcCoeff"
            elif "dn" in xname and "ref" in yname:
                o_name = "CalCoeff"

            # draw the diagonal line when x and y are the same quantity
            if xname == yname:
                diagonal = True
            else:
                diagonal = False

            if "all" in Day_Night and o_name not in dict_cabr:
                dict_cabr[o_name] = {}
                dict_bias[xname] = {}
            if "day" in Day_Night and o_name not in dict_cabr_d:
                dict_cabr_d[o_name] = {}
                dict_bias_d[xname] = {}
            if "night" in Day_Night and o_name not in dict_cabr_n:
                dict_cabr_n[o_name] = {}
                dict_bias_n[xname] = {}

            # Skip drawing when there are fewer than 100 matched sample points
            if x.size < 100:
                Log.error("Not enough matched points to draw: {}, {}".format(each, chan))
                if "all" in Day_Night:
                    dict_cabr[o_name][chan] = [0, np.NaN, np.NaN, np.NaN]
                    dict_bias[xname][chan] = [np.NaN, np.NaN]
                if "day" in Day_Night:
                    dict_cabr_d[o_name][chan] = [0, np.NaN, np.NaN, np.NaN]
                    dict_bias_d[xname][chan] = [np.NaN, np.NaN]
                if "night" in Day_Night:
                    dict_cabr_n[o_name][chan] = [0, np.NaN, np.NaN, np.NaN]
                    dict_bias_n[xname][chan] = [np.NaN, np.NaN]
                continue

            # regression starts
            if "all" in Day_Night:
                o_file = os.path.join(cur_path,
                                      "%s_%s_%s_ALL_%s" % (pair, o_name, chan, str_time))
                print("x_all, y_all", len(x), len(y))
                abr, bias = plot(x, y, weight, o_file,
                                 num_file, part1, part2, chan, str_time,
                                 xname, xname_l, xunit, xmin, xmax,
                                 yname, yname_l, yunit, ymin, ymax,
                                 diagonal, is_monthly)
                if abr:
                    dict_cabr[o_name][chan] = abr
                else:
                    dict_cabr[o_name][chan] = [0, np.NaN, np.NaN, np.NaN]
                if bias:
                    dict_bias[xname][chan] = bias
                else:
                    dict_bias[xname][chan] = [np.NaN, np.NaN]

            # ------- day ----------
            if "day" in Day_Night:
                if day_index is not None and np.where(day_index)[0].size > 10:
                    o_file = os.path.join(cur_path,
                                          "%s_%s_%s_Day_%s" % (pair, o_name, chan, str_time))
                    x_d = x[day_index]
                    y_d = y[day_index]
                    w_d = weight[day_index] if weight is not None else None
                    print("x_all, y_all", len(x), len(y))
                    print("x_day, y_day", len(x_d), len(y_d))
                    abr, bias = plot(x_d, y_d, w_d, o_file,
                                     num_file, part1, part2, chan, str_time,
                                     xname, xname_l, xunit, xmin, xmax,
                                     yname, yname_l, yunit, ymin, ymax,
                                     diagonal, is_monthly)
                    if abr:
                        dict_cabr_d[o_name][chan] = abr
                    else:
                        dict_cabr_d[o_name][chan] = [0, np.NaN, np.NaN, np.NaN]
                    if bias:
                        dict_bias_d[xname][chan] = bias
                    else:
                        dict_bias_d[xname][chan] = [np.NaN, np.NaN]
                else:
                    dict_cabr_d[o_name][chan] = [0, np.NaN, np.NaN, np.NaN]
                    dict_bias_d[xname][chan] = [np.NaN, np.NaN]

            # --------- night ------------
            if "night" in Day_Night:
                if night_index is not None and np.where(night_index)[0].size > 10:
                    o_file = os.path.join(cur_path,
                                          "%s_%s_%s_Night_%s" % (pair, o_name, chan, str_time))
                    x_n = x[night_index]
                    y_n = y[night_index]
                    w_n = weight[night_index] if weight is not None else None
                    print("x_all, y_all", len(x), len(y))
                    print("x_night, y_night", len(x_n), len(y_n))
                    abr, bias = plot(x_n, y_n, w_n, o_file,
                                     num_file, part1, part2, chan, str_time,
                                     xname, xname_l, xunit, xmin, xmax,
                                     yname, yname_l, yunit, ymin, ymax,
                                     diagonal, is_monthly)
                    if abr:
                        dict_cabr_n[o_name][chan] = abr
                    else:
                        dict_cabr_n[o_name][chan] = [0, np.NaN, np.NaN, np.NaN]
                    if bias:
                        dict_bias_n[xname][chan] = bias
                    else:
                        dict_bias_n[xname][chan] = [np.NaN, np.NaN]
                else:
                    dict_cabr_n[o_name][chan] = [0, np.NaN, np.NaN, np.NaN]
                    dict_bias_n[xname][chan] = [np.NaN, np.NaN]
            oneHDF5.clear()

        # write txt
        lock.acquire()
        channel = plt_cfg[each]["chan"]
        if "all" in Day_Night:
            for o_name in dict_cabr:
                write_bias(channel, part1, part2, xname, ymd, dict_bias, "ALL")
                write_cabr(channel, part1, part2, o_name, ymd, dict_cabr, "ALL")
        if "day" in Day_Night:
            for o_name in dict_cabr_d:
                write_bias(channel, part1, part2, xname, ymd, dict_bias_d, "Day")
                write_cabr(channel, part1, part2, o_name, ymd, dict_cabr_d, "Day")
        if "night" in Day_Night:
            for o_name in dict_cabr_n:
                write_bias(channel, part1, part2, xname, ymd, dict_bias_n, "Night")
                write_cabr(channel, part1, part2, o_name, ymd, dict_cabr_n, "Night")
        lock.release()
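
# Usage sketch (illustrative, not from the original source): run() takes a pair
# string in "sat1+sensor1_sat2+sensor2" form, a YYYYMMDD date, and a monthly flag.
# The pair name below is hypothetical; only the calling convention mirrors the
# function above.
def demo_regression_run():
    pair = "FY2G+VISSR_METOP-A+IASI"  # hypothetical collocation pair
    # daily regression pictures for 2018-01-15
    run(pair, "20180115", False)
    # monthly regression for January 2018 (ymd is rolled to the month's last day inside run)
    run(pair, "20180115", True)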
def run(pair, ymd):
    """
    pair: sat1+sensor1_sat2+sensor2
    ymd: str YYYYMMDD
    """
    # Extract the satellite and sensor information from the pair argument
    part1, part2 = pair.split("_")
    sat1, sensor1 = part1.split("+")
    sat2, sensor2 = part2.split("+")

    # Decide whether the monitored satellite is geostationary (GEO) or polar-orbiting (LEO)
    if "FY2" in part1 or "FY4" in part1:
        Type = "GEOLEO"
    elif "FY3" in part1:
        Type = "LEOLEO"
    else:
        LOG.error("Can't distinguish the satellite type")
        return

    # Load the plotting configuration file
    plt_cfg_file = os.path.join(MAIN_PATH, "cfg", "%s.plt" % pair)
    plt_cfg = loadYamlCfg(plt_cfg_file)
    if plt_cfg is None:
        LOG.error("Config file not found: {}".format(plt_cfg_file))
        return

    PERIOD = calendar.monthrange(int(ymd[:4]), int(ymd[4:6]))[1]  # days in the month
    ym = ymd[:6]
    ymd = ym + '%02d' % PERIOD  # last day of the month

    LOG.info(u"----- Start Drawing Monthly TBBias Analysis Pic, "
             u"PAIR: {}, YMD: {} -----".format(pair, ymd))

    for each in plt_cfg['monthly_staistics']:

        # Day_Night must be in 'all', 'day', 'night'
        Day_Night = ['all', 'day', 'night']  # default
        if 'time' in plt_cfg[each].keys():
            Day_Night = plt_cfg[each]['time']
            Day_Night = [i for i in Day_Night if i in ('all', 'day', 'night')]

        for idx, chan in enumerate(plt_cfg[each]['chan']):
            LOG.info(u"Start Drawing {} Channel {}".format(each, chan))
            oneHDF5 = ReadHDF5()

            # load matched HDF5
            num_file = PERIOD
            for daydelta in xrange(PERIOD):
                cur_ymd = pb_time.ymd_plus(ymd, -daydelta)
                nc_name = 'COLLOC+%sIR,%s_C_BABJ_%s.hdf5' % (Type, pair, cur_ymd)
                filefullpath = os.path.join(MATCH_DIR, pair, nc_name)
                if not os.path.isfile(filefullpath):
                    LOG.info(u"HDF5 not found: {}".format(filefullpath))
                    num_file -= 1
                    continue
                if not oneHDF5.LoadData(filefullpath, chan):
                    LOG.error('Error occurred when reading %s of %s' % (chan, filefullpath))
            if num_file == 0:
                LOG.error(u"No file found.")
                continue
            elif num_file != PERIOD:
                LOG.info(u"{} of {} file(s) found.".format(num_file, PERIOD))

            # output directory
            cur_path = os.path.join(MBA_DIR, pair, ymd[:6])

            # get threshold, unit, names...
            xname, yname = each.split('-')
            bias = xname
            xname_l = xname.upper()
            xunit = plt_cfg[each]['x_unit']
            xlimit = plt_cfg[each]['x_range'][idx]
            xmin, xmax = xlimit.split('-')
            xmin = float(xmin)
            xmax = float(xmax)

            # delete near-zero values in std
            # TODO: some very small std values may be outliers that get an extremely
            # large weight, so the filter was changed from std > 0 to std > 0.0001
            if len(oneHDF5.rad1_std) > 0:
                deletezeros = np.where(oneHDF5.rad1_std > 0.0001)
                oneHDF5.rad1_std = oneHDF5.rad1_std[deletezeros]
                oneHDF5.rad1 = oneHDF5.rad1[deletezeros] if len(
                    oneHDF5.rad1) > 0 else oneHDF5.rad1
                oneHDF5.rad2 = oneHDF5.rad2[deletezeros] if len(
                    oneHDF5.rad2) > 0 else oneHDF5.rad2
                oneHDF5.tbb1 = oneHDF5.tbb1[deletezeros] if len(
                    oneHDF5.tbb1) > 0 else oneHDF5.tbb1
                oneHDF5.tbb2 = oneHDF5.tbb2[deletezeros] if len(
                    oneHDF5.tbb2) > 0 else oneHDF5.tbb2
                oneHDF5.time = oneHDF5.time[deletezeros] if len(
                    oneHDF5.time) > 0 else oneHDF5.time
                oneHDF5.lon1 = oneHDF5.lon1[deletezeros] if len(
                    oneHDF5.lon1) > 0 else oneHDF5.lon1
                oneHDF5.lon2 = oneHDF5.lon2[deletezeros] if len(
                    oneHDF5.lon2) > 0 else oneHDF5.lon2
            if len(oneHDF5.ref1_std) > 0:
                deletezeros = np.where(oneHDF5.ref1_std > 0.0001)
                oneHDF5.ref1_std = oneHDF5.ref1_std[deletezeros]
                oneHDF5.ref1 = oneHDF5.ref1[deletezeros] if len(
                    oneHDF5.ref1) > 0 else oneHDF5.ref1
                oneHDF5.ref2 = oneHDF5.ref2[deletezeros] if len(
                    oneHDF5.ref2) > 0 else oneHDF5.ref2
                oneHDF5.dn1 = oneHDF5.dn1[deletezeros] if len(
                    oneHDF5.dn1) > 0 else oneHDF5.dn1
                oneHDF5.dn2 = oneHDF5.dn2[deletezeros] if len(
                    oneHDF5.dn2) > 0 else oneHDF5.dn2
                oneHDF5.time = oneHDF5.time[deletezeros] if len(
                    oneHDF5.time) > 0 else oneHDF5.time
                oneHDF5.lon1 = oneHDF5.lon1[deletezeros] if len(
                    oneHDF5.lon1) > 0 else oneHDF5.lon1
                oneHDF5.lon2 = oneHDF5.lon2[deletezeros] if len(
                    oneHDF5.lon2) > 0 else oneHDF5.lon2

            # find out day and night
            if ('day' in Day_Night or 'night' in Day_Night) and len(oneHDF5.time) > 0:
                vect_is_day = np.vectorize(is_day_timestamp_and_lon)
                day_index = vect_is_day(oneHDF5.time, oneHDF5.lon1)
                night_index = np.logical_not(day_index)
            else:
                day_index = None
                night_index = None

            # get x
            dset_name = xname + "1"
            if hasattr(oneHDF5, dset_name):
                x = getattr(oneHDF5, dset_name)
            else:
                LOG.error("Can't plot, no %s in HDF5 class" % dset_name)
                continue
            # get y
            dset_name = yname + "2"
            if hasattr(oneHDF5, dset_name):
                y = getattr(oneHDF5, dset_name)
            else:
                LOG.error("Can't plot, no %s in HDF5 class" % dset_name)
                continue

            if 'rad' == bias:
                o_name = 'RadBiasMonthStats'
            elif 'tbb' == bias:
                o_name = 'TBBiasMonthStats'
            elif 'ref' == bias:
                o_name = 'RefBiasMonthStats'
            else:
                o_name = 'DUMMY'

            if x.size < 10:
                LOG.error("Not enough matched points to draw.")
                continue

            # get the std (used as regression weight)
            weight = None
            if 'rad' in xname and 'rad' in yname:
                if len(oneHDF5.rad1_std) > 0:
                    weight = oneHDF5.rad1_std
            elif 'tbb' in xname and 'tbb' in yname:
                weight = None
            elif 'ref' in xname and 'ref' in yname:
                if len(oneHDF5.ref1_std) > 0:
                    weight = oneHDF5.ref1_std
            elif 'dn' in xname and 'ref' in yname:
                weight = None

            # rad-specified regression starts
            reference_list = []
            if 'reference' in plt_cfg[each]:
                reference_list = plt_cfg[each]['reference'][idx]

            if 'all' in Day_Night:
                o_file = os.path.join(cur_path,
                                      '%s_%s_%s_ALL_%s' % (pair, o_name, chan, ym))
                print("x_all, y_all", len(x), len(y))
                plot(x, y, weight, o_file,
                     part1, part2, chan, ym, 'ALL', reference_list,
                     xname, xname_l, xunit, xmin, xmax)

            # ------- day ----------
            if 'day' in Day_Night:
                if day_index is not None and np.where(day_index)[0].size > 10:
                    # rad-specified
                    o_file = os.path.join(cur_path,
                                          '%s_%s_%s_Day_%s' % (pair, o_name, chan, ym))
                    x_d = x[day_index]
                    y_d = y[day_index]
                    w_d = weight[day_index] if weight is not None else None
                    print("x_all, y_all", len(x), len(y))
                    print("x_day, y_day", len(x_d), len(y_d))
                    plot(x_d, y_d, w_d, o_file,
                         part1, part2, chan, ym, 'Day', reference_list,
                         xname, xname_l, xunit, xmin, xmax)

            # --------- night ------------
            if 'night' in Day_Night:
                if night_index is not None and np.where(night_index)[0].size > 10:
                    # rad-specified
                    o_file = os.path.join(cur_path,
                                          '%s_%s_%s_Night_%s' % (pair, o_name, chan, ym))
                    x_n = x[night_index]
                    y_n = y[night_index]
                    w_n = weight[night_index] if weight is not None else None
                    print("x_all, y_all", len(x), len(y))
                    print("x_night, y_night", len(x_n), len(y_n))
                    plot(x_n, y_n, w_n, o_file,
                         part1, part2, chan, ym, 'Night', reference_list,
                         xname, xname_l, xunit, xmin, xmax)
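
# Refactor sketch (not in the original source): the "delete near-zero values in std"
# block appears verbatim in both the daily/monthly regression run() and the monthly
# statistics run() above. The same filtering could be factored into a helper like the
# one below; the helper name and the attribute lists are assumptions based on the
# ReadHDF5 fields already used above, and np is assumed to be the module-level numpy.
def _drop_small_std(hdf5, std_name, threshold=0.0001, companions=()):
    """Keep only samples whose `std_name` values exceed `threshold`,
    and subset the companion arrays with the same index."""
    std = getattr(hdf5, std_name)
    if len(std) == 0:
        return
    keep = np.where(std > threshold)
    setattr(hdf5, std_name, std[keep])
    for name in companions:
        arr = getattr(hdf5, name)
        if len(arr) > 0:
            setattr(hdf5, name, arr[keep])

# Example calls (attribute names follow the fields already used above):
# _drop_small_std(oneHDF5, "rad1_std",
#                 companions=("rad1", "rad2", "tbb1", "tbb2", "time", "lon1", "lon2"))
# _drop_small_std(oneHDF5, "ref1_std",
#                 companions=("ref1", "ref2", "dn1", "dn2", "time", "lon1", "lon2"))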
def run(pair, date_s, date_e):
    """
    pair: sat1+sensor1_sat2+sensor2
    date_s: datetime of start date
            None means processing from the launch date to the last day with data
    date_e: datetime of end date
            None means processing from the launch date to the last day with data
    """
    # Extract the satellite and sensor information from the pair argument
    part1, part2 = pair.split('_')
    sat1, sensor1 = part1.split('+')
    sat2, sensor2 = part2.split('+')

    # Decide whether to process from the launch date
    if date_s is None or date_e is None:
        isLaunch = True
    elif date_s is not None and date_e is not None:
        isLaunch = False
    else:
        Log.error('Wrong date argument')
        return

    # Load the plotting configuration file
    plt_cfg_file = os.path.join(MAIN_PATH, "cfg", "%s.plt" % pair)
    plt_cfg = loadYamlCfg(plt_cfg_file)
    if plt_cfg is None:
        Log.error("Config file not found: {}".format(plt_cfg_file))
        return

    # Set the start and end dates
    if isLaunch:
        if sat1 in GLOBAL_CONFIG['LUANCH_DATE']:
            date_s = pb_time.ymd2date(str(GLOBAL_CONFIG['LUANCH_DATE'][sat1]))
            date_e = datetime.utcnow()
        else:
            Log.error('%s not in LUANCH_DATE of Cfg, use the first day in txt instead.' % sat1)
            return
    ymd_s, ymd_e = date_s.strftime('%Y%m%d'), date_e.strftime('%Y%m%d')

    Log.info(u"----- Start Drawing Regression-Pic, PAIR: {} -----".format(pair))

    for each in plt_cfg['time_series']:

        # must be in 'all', 'day', 'night'
        Day_Night = ['all', 'day', 'night']
        if 'time' in plt_cfg[each].keys():
            Day_Night = plt_cfg[each]['time']
            Day_Night = [t for t in Day_Night if t in ('all', 'day', 'night')]

        # Pair-wide attributes are handled in the pair loop; channel-specific
        # attributes in the channel loop.
        xname, yname = each.split('-')
        xname_l = plt_cfg[each]['x_name']
        xunit = plt_cfg[each]['x_unit']
        yname_l = plt_cfg[each]['y_name']
        yunit = plt_cfg[each]['y_unit']

        if 'tbb' in xname and 'tbb' in yname:
            o_name = 'TBBCalCoeff'
        elif 'ref' in xname and 'ref' in yname:
            o_name = 'CorrcCoeff'
        elif 'dn' in xname and 'ref' in yname:
            o_name = 'CalCoeff'
        else:
            continue

        for DayOrNight in Day_Night:
            if DayOrNight == 'all':
                DayOrNight = DayOrNight.upper()  # all -> ALL
            else:
                DayOrNight = DayOrNight[0].upper() + DayOrNight[1:]

            for i, chan in enumerate(plt_cfg[each]['chan']):
                x_range = plt_cfg[each]['x_range'][i]
                y_range = plt_cfg[each]['y_range'][i]

                # plot slope, intercept and count ------------------------
                print "plot abc : {} {} {}".format(each, DayOrNight, chan)

                abc_path = os.path.join(ABR_DIR, '%s_%s' % (part1, part2), "CABR")
                abc_daily_file = os.path.join(abc_path, '%s_%s_%s_%s_%s_Daily.txt' % (
                    part1, part2, o_name, chan, DayOrNight))
                abc_monthly_file = os.path.join(abc_path, '%s_%s_%s_%s_%s_Monthly.txt' % (
                    part1, part2, o_name, chan, DayOrNight))
                abc_day_data = get_cabr_data(abc_daily_file)
                abc_month_data = get_cabr_data(abc_monthly_file)

                date_D = abc_day_data["date"]
                a_D = abc_day_data["slope"]
                b_D = abc_day_data["intercept"]
                c_D = np.log10(abc_day_data["count"])
                date_M = abc_month_data["date"] + relativedelta(days=14)
                a_M = abc_month_data["slope"]
                b_M = abc_month_data["intercept"]
                c_M = np.log10(abc_month_data["count"])

                idx_D = np.where(np.logical_and(date_D >= date_s, date_D <= date_e))
                idx_M = np.where(np.logical_and(date_M >= pb_time.ymd2date(ymd_s[:6] + '01'),
                                                date_M <= date_e))

                title = 'Time Series of Slope Intercept & Counts %s %s\n(%s = Slope * %s + Intercept)' % \
                        (chan, DayOrNight, part1.replace('+', '_'), part2.replace('+', '_'))

                if isLaunch:
                    picPath = os.path.join(ABC_DIR, pair,
                                           '%s_%s_ABC_%s_%s_Launch.png' % (
                                               pair, o_name, chan, DayOrNight))
                else:
                    picPath = os.path.join(ABC_DIR, pair, ymd_e,
                                           '%s_%s_ABC_%s_%s_Year_%s.png' % (
                                               pair, o_name, chan, DayOrNight, ymd_e))

                # axis range for the coefficients
                slope_range = plt_cfg[each]['slope_range'][i]
                slope_min, slope_max = slope_range.split('-')
                slope_min = float(slope_min)
                slope_max = float(slope_max)

                plot_abc(date_D[idx_D], a_D[idx_D], b_D[idx_D], c_D[idx_D],
                         date_M[idx_M], a_M[idx_M], b_M[idx_M], c_M[idx_M],
                         picPath, title, date_s, date_e,
                         slope_min, slope_max, each)

                # plot MD ----------------------------------------------------
                if xname == "ref" or xname == "tbb":
                    print "plot MD : {} {} {}".format(each, DayOrNight, chan)

                    # read data from the BIAS files
                    bias_ref_path = os.path.join(ABR_DIR, '%s_%s' % (part1, part2), "BIAS")
                    file_name_monthly = os.path.join(
                        bias_ref_path, '%s_%s_%s_%s_%s_Monthly.txt' % (
                            part1, part2, xname.upper(), chan, DayOrNight))
                    file_name_daily = os.path.join(
                        bias_ref_path, '%s_%s_%s_%s_%s_Daily.txt' % (
                            part1, part2, xname.upper(), chan, DayOrNight))
                    bias_d = get_bias_data(file_name_daily)
                    bias_m = get_bias_data(file_name_monthly)

                    date_md_d = bias_d["date"]
                    data_md_d = bias_d["md"]
                    date_md_m = bias_m["date"] + relativedelta(days=14)
                    data_md_m = bias_m["md"]
                    std_md_m = bias_m["md_std"]
                    idx_d = np.where(np.logical_and(date_md_d >= date_s, date_md_d <= date_e))
                    idx_m = np.where(
                        np.logical_and(date_md_m >= pb_time.ymd2date(ymd_s[:6] + '01'),
                                       date_md_m <= date_e))

                    # plot for each configured reference value
                    reference_list = plt_cfg[each]['reference'][i]
                    for ref_temp in reference_list:
                        if isLaunch:
                            pic_path = os.path.join(
                                OMB_DIR, pair, '%s_%s_MD_%s_%s_Launch.png' % (
                                    pair, xname.upper(), chan, DayOrNight))
                        else:
                            # plot latest year
                            pic_path = os.path.join(
                                OMB_DIR, pair, ymd_e, '%s_%s_MD_%s_%s_Year_%s.png' % (
                                    pair, xname.upper(), chan, DayOrNight, ymd_e,))
                        plot_md(
                            date_md_d[idx_d], data_md_d[idx_d],
                            date_md_m[idx_m], data_md_m[idx_m],
                            std_md_m[idx_m],
                            pic_path,
                            date_s, date_e,
                            sat1, pair, chan, DayOrNight, ref_temp,
                            xname, xname_l, xunit, x_range,
                            yname, yname_l, yunit, y_range,
                        )

                # plot RMD ----------------------------------------------------
                if xname == "ref" and yname == "ref":
                    print "plot RMD : {} {} {}".format(each, DayOrNight, chan)

                    # read data from the BIAS files
                    bias_ref_path = os.path.join(ABR_DIR, '%s_%s' % (part1, part2), "BIAS")
                    file_name_monthly = os.path.join(
                        bias_ref_path, '%s_%s_%s_%s_%s_Monthly.txt' % (
                            part1, part2, xname.upper(), chan, DayOrNight))
                    file_name_daily = os.path.join(
                        bias_ref_path, '%s_%s_%s_%s_%s_Daily.txt' % (
                            part1, part2, xname.upper(), chan, DayOrNight))
                    bias_d = get_bias_data(file_name_daily)
                    bias_m = get_bias_data(file_name_monthly)

                    date_rmd_d = bias_d["date"]
                    data_rmd_d = bias_d["bias"]
                    date_rmd_m = bias_m["date"] + relativedelta(days=14)
                    data_rmd_m = bias_m["bias"]
                    std_rmd_m = bias_m["bias_std"]
                    idx_d = np.where(np.logical_and(date_rmd_d >= date_s, date_rmd_d <= date_e))
                    idx_m = np.where(
                        np.logical_and(date_rmd_m >= pb_time.ymd2date(ymd_s[:6] + '01'),
                                       date_rmd_m <= date_e))

                    # plot for each configured reference value
                    reference_list = plt_cfg[each]['reference'][i]
                    for ref_temp in reference_list:
                        # ref_temp_f = float(ref_temp)
                        if isLaunch:
                            pic_path = os.path.join(
                                OMB_DIR, pair, '%s_RMD_%s_%s_Launch_%d.png' % (
                                    pair, chan, DayOrNight, ref_temp * 100))
                        else:
                            # plot latest year
                            pic_path = os.path.join(
                                OMB_DIR, pair, ymd_e, '%s_RMD_%s_%s_Year_%s_%d.png' % (
                                    pair, chan, DayOrNight, ymd_e, ref_temp * 100))
                        plot_rmd(
                            date_rmd_d[idx_d], data_rmd_d[idx_d],
                            date_rmd_m[idx_m], data_rmd_m[idx_m],
                            std_rmd_m[idx_m],
                            pic_path,
                            date_s, date_e,
                            sat1, pair, chan, DayOrNight, ref_temp,
                            xname, xname_l, xunit,
                            yname, yname_l, yunit,
                        )

                # plot TBBias ------------------------
                if xname == 'tbb' and yname == 'tbb':
                    print "plot TBBias : {} {} {}".format(each, DayOrNight, chan)

                    # read data from the BIAS files
                    bias_tbb_path = os.path.join(ABR_DIR, '%s_%s' % (part1, part2), "BIAS")
                    file_name_monthly = os.path.join(
                        bias_tbb_path, '%s_%s_%s_%s_%s_Monthly.txt' % (
                            part1, part2, xname.upper(), chan, DayOrNight))
                    file_name_daily = os.path.join(
                        bias_tbb_path, '%s_%s_%s_%s_%s_Daily.txt' % (
                            part1, part2, xname.upper(), chan, DayOrNight))
                    tbbias_d = get_bias_data(file_name_daily)
                    tbbias_m = get_bias_data(file_name_monthly)

                    date_tbbias_d = tbbias_d["date"]
                    data_tbbias_d = tbbias_d["bias"]
                    date_tbbias_m = tbbias_m["date"]
                    date_tbbias_m = date_tbbias_m + relativedelta(days=14)
                    data_tbbias_m = tbbias_m["bias"]
                    std_tbbias_m = tbbias_m["bias_std"]
                    idx_d = np.where(np.logical_and(date_tbbias_d >= date_s,
                                                    date_tbbias_d <= date_e))
                    idx_m = np.where(
                        np.logical_and(date_tbbias_m >= pb_time.ymd2date(ymd_s[:6] + '01'),
                                       date_tbbias_m <= date_e))

                    # plot for each configured reference value
                    reference_list = plt_cfg[each]['reference'][i]
                    for ref_temp in reference_list:
                        if isLaunch:
                            pic_path = os.path.join(
                                OMB_DIR, pair, '%s_TBBias_%s_%s_Launch_%dK.png' % (
                                    pair, chan, DayOrNight, ref_temp))
                        else:
                            # plot latest year
                            pic_path = os.path.join(
                                OMB_DIR, pair, ymd_e, '%s_TBBias_%s_%s_Year_%s_%dK.png' % (
                                    pair, chan, DayOrNight, ymd_e, ref_temp))
                        plot_tbbias(
                            date_tbbias_d[idx_d], data_tbbias_d[idx_d],
                            date_tbbias_m[idx_m], data_tbbias_m[idx_m],
                            std_tbbias_m[idx_m],
                            pic_path,
                            date_s, date_e,
                            sat1, pair, chan, DayOrNight, ref_temp,
                            xname, xname_l, xunit,
                            yname, yname_l, yunit,
                        )

                    # plot interpolated TBBias img (obs minus background) -------------
                    print "plot OMB : {} {} {}".format(each, DayOrNight, chan)
                    title = 'Brightness Temperature Correction\n%s %s %s' % \
                            (pair, chan, DayOrNight)
                    if isLaunch:
                        picPath = os.path.join(OMB_DIR, pair,
                                               '%s_TBBOMB_%s_%s_Launch.png' % (
                                                   pair, chan, DayOrNight))
                    else:
                        picPath = os.path.join(OMB_DIR, pair, ymd_e,
                                               '%s_TBBOMB_%s_%s_Year_%s.png' % (
                                                   pair, chan, DayOrNight, ymd_e))
                    plot_omb(date_D[idx_D], a_D[idx_D], b_D[idx_D],
                             picPath, title, date_s, date_e)
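
# Illustration (not from the original source): the time-series plots above clip the
# daily and monthly records to the requested window with boolean masks, shifting the
# monthly records to mid-month (+14 days) and starting them at the first day of the
# start month. The toy arrays below are hypothetical; only the masking pattern
# mirrors the code above. np is assumed to be the module-level numpy.
def demo_time_window_masks():
    from datetime import datetime, timedelta
    from dateutil.relativedelta import relativedelta
    date_s = datetime(2018, 1, 10)
    date_e = datetime(2018, 3, 20)
    # fake daily dates, and fake monthly dates shifted to mid-month
    date_D = np.array([datetime(2018, 1, 1) + timedelta(days=n) for n in xrange(120)])
    date_M = np.array([datetime(2018, m, 1) + relativedelta(days=14) for m in (1, 2, 3, 4)])
    # daily records are clipped to [date_s, date_e]
    idx_D = np.where(np.logical_and(date_D >= date_s, date_D <= date_e))
    # monthly records start from the first day of the start month
    month_start = datetime(date_s.year, date_s.month, 1)
    idx_M = np.where(np.logical_and(date_M >= month_start, date_M <= date_e))
    return date_D[idx_D], date_M[idx_M]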
def run(pair1, pair2, date_s, date_e):
    """
    pair: sat1+sensor1_sat2+sensor2
    date_s: datetime of start date
            None means processing from the launch date to the last day with data
    date_e: datetime of end date
            None means processing from the launch date to the last day with data
    """
    Log.info(u'Start running the double-bias statistics plotting for %s %s -----------------'
             % (pair1, pair2))
    isLaunch = False
    if date_s is None or date_e is None:
        isLaunch = True

    satsen11, satsen12 = pair1.split("_")
    satsen21, satsen22 = pair2.split("_")

    if satsen11 != satsen21:
        Log.error("%s and %s not the same, can't do double bias" % (satsen11, satsen21))
        return

    # Load the configuration file for the sensor pair
    sat11, sen11 = satsen11.split('+')
    sat12, sen12 = satsen12.split('+')
    plt_cfg_file = os.path.join(MAIN_PATH, "cfg", '%s_%s_3d.yaml' % (sen11, sen12))
    plt_cfg = loadYamlCfg(plt_cfg_file)
    chans = plt_cfg["rad-rad"]["chan"]

    # change sensor name
    if "VISSR" in satsen11:  # VISSR -> SVISSR
        satsen11 = satsen11.replace("VISSR", "SVISSR")
        satsen21 = satsen21.replace("VISSR", "SVISSR")
    if "METOP-" in satsen12:  # METOP -> MetOp
        satsen12 = satsen12.replace("METOP-", "MetOp")
    if "METOP-" in satsen22:  # METOP -> MetOp
        satsen22 = satsen22.replace("METOP-", "MetOp")

    flst = [e for e in os.listdir(StdNC_DIR)
            if os.path.isfile(os.path.join(StdNC_DIR, e))]
    nc1_path = nc2_path = None
    for each in flst:
        if satsen11 in each and satsen12 in each:
            nc1_path = os.path.join(StdNC_DIR, each)
        if satsen21 in each and satsen22 in each:
            nc2_path = os.path.join(StdNC_DIR, each)

    nc1 = stdNC()
    if not nc1.LoadData(nc1_path):
        return
    nc2 = stdNC()
    if not nc2.LoadData(nc2_path):
        return

    time1 = nc1.time[:, 0]
    time2 = nc2.time[:, 0]
    tbbias1 = nc1.tbbias
    tbbias2 = nc2.tbbias
    reftmp = nc1.reftmp

    if date_s is None:
        # TODO:
        # timestamp_s = max(time1[0], time2[0])
        # date_s = datetime.fromtimestamp(timestamp_s, tz=pytz.utc)
        sat1, sen1 = satsen11.split("+")
        date_s = pb_time.ymd2date(GLOBAL_CONFIG["LUANCH_DATE"][sat1])
        date_s = pytz.utc.localize(date_s)
    timestamp_s = calendar.timegm(date_s.timetuple())
    if date_e is None:
        timestamp_e = min(time1[-1], time2[-1])
        date_e = datetime.fromtimestamp(timestamp_e, tz=pytz.utc)
    else:
        date_e = pytz.utc.localize(date_e)
        timestamp_e = calendar.timegm(date_e.timetuple())

    days1 = tbbias1.shape[0]
    days2 = tbbias2.shape[0]

    # keep only the days present in both time series and inside the window
    index1 = []
    index2 = []
    date_D = []
    for i in xrange(days1):
        if time1[i] < timestamp_s or time1[i] > timestamp_e:
            continue
        idxs2 = np.where(time2 == time1[i])[0]
        if len(idxs2) != 1:
            continue
        date_D.append(datetime.fromtimestamp(time1[i]))
        index1.append(i)
        index2.append(idxs2[0])
    if len(date_D) == 0:
        return

    for k, ch in enumerate(chans):
        ref_temp = reftmp[k]
        tb1 = tbbias1[index1, k]
        tb2 = tbbias2[index2, k]
        bias_D = tb1 - tb2
        idx = np.logical_or(tb1 < -998, tb2 < -998)
        bias_D[idx] = None
        date_M, bias_M = month_mean(date_D, bias_D)

        title = 'Time Series of Double Bias \n%s_%s Minus %s_%s %s %dK' % \
                (satsen11, satsen12, satsen11, satsen22, ch, ref_temp)

        if isLaunch:
            picPath = os.path.join(DBB_DIR, '%s_%s' % (pair1, satsen22),
                                   '%s_%s_DoubleBias_%s_Launch_%dK.png' % (
                                       pair1, satsen22, ch, ref_temp))
        else:
            # plot latest year
            ymd_s = date_s.strftime("%Y%m%d")
            ymd_e = date_e.strftime("%Y%m%d")
            picPath = os.path.join(DBB_DIR, '%s_%s' % (pair1, satsen22), ymd_s,
                                   '%s_%s_DoubleBias_%s_Year_%s_%dK.png' % (
                                       pair1, satsen22, ch, ymd_s, ref_temp))
        plot_tbbias(date_D, bias_D, date_M, bias_M, picPath, title, date_s, date_e)
    Log.info(u'Success')
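
# Usage sketch (illustrative, not from the original source): the two pairs must share
# the same monitored satellite+sensor (satsen11 == satsen21); passing None for both
# dates selects the launch-to-latest window. The pair names below are hypothetical.
def demo_double_bias():
    run("FY2G+VISSR_METOP-A+IASI", "FY2G+VISSR_AQUA+AIRS", None, None)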
def run(pair, ymd):
    """
    pair: sat1+sensor1_sat2+sensor2
    ymd: YYYYMMDD
    """
    # Extract the satellite and sensor information from the pair argument
    part1, part2 = pair.split("_")
    sat1, sensor1 = part1.split("+")
    sat2, sensor2 = part2.split("+")

    # Decide whether the monitored satellite is geostationary (GEO) or polar-orbiting (LEO)
    if "FY2" in part1 or "FY4" in part1:
        Type = "GEOLEO"
    elif "FY3" in part1:
        Type = "LEOLEO"
    else:
        Log.error("Can't distinguish the satellite type")
        return

    # Load the plotting configuration file
    plt_cfg_file = os.path.join(MainPath, "%s_%s_3d.yaml" % (sensor1, sensor2))
    plt_cfg = loadYamlCfg(plt_cfg_file)
    if plt_cfg is None:
        Log.error("Config file not found: {}".format(plt_cfg_file))
        return

    # Read settings from the configuration file
    PERIOD = plt_cfg['collocation_map']['days']      # number of days to roll back
    chans = plt_cfg['collocation_map']['chan']       # channels
    maptype = plt_cfg['collocation_map']['maptype']  # map types to draw
    if 'area' in maptype:  # regional view
        area = plt_cfg['collocation_map']['area']
    else:
        area = None
    if 'polar' in maptype:  # polar view
        polar = plt_cfg['collocation_map']['polar']
    else:
        polar = None

    # read the map-range configuration
    if not area and not polar:
        return
    else:
        map_range = (polar, area)

    Log.info(
        u"----- Start Drawing Matched Map-Pic, PAIR: {}, YMD: {}, PERIOD: {} -----"
        .format(pair, ymd, PERIOD))

    # read the matched HDF5 data
    oneHDF5 = ReadHDF5()
    num_file = PERIOD
    cur_ymd = pb_time.ymd_plus(ymd, 1)  # roll-back days, currently 1
    for daydelta in xrange(PERIOD):
        cur_ymd = pb_time.ymd_plus(ymd, -daydelta)
        filename = "COLLOC+%sIR,%s+%s_%s+%s_C_BABJ_%s.hdf5" % (
            Type, sat1, sensor1, sat2, sensor2, cur_ymd)
        filefullpath = os.path.join(MATCH_DIR, pair, filename)
        if not os.path.isfile(filefullpath):
            Log.info(u"File not found: {}".format(filefullpath))
            num_file -= 1
            continue
        if not oneHDF5.LoadData(filefullpath, chans):
            Log.error('Error occurred when reading %s of %s' % (chans, filefullpath))
    if num_file == 0:
        Log.error(u"No file found.")
        return
    elif num_file != PERIOD:
        Log.error(u"{} of {} file(s) found.".format(num_file, PERIOD))

    cur_path = os.path.join(DMS_DIR, pair, ymd)
    o_file = os.path.join(cur_path,
                          '%s_%s_MatchedPoints_ALL_%s' % (part1, part2, ymd))

    # find out day and night
    vect_is_day = np.vectorize(is_day_timestamp_and_lon)
    day_index = vect_is_day(oneHDF5.time, oneHDF5.lon1)
    night_index = np.logical_not(day_index)

    x = oneHDF5.lon1  # longitude
    y = oneHDF5.lat1  # latitude
    print 'date: {}, x_all: {} y_all: {} '.format(ymd, len(x), len(y))
    draw_butterfly(part1, part2, cur_ymd, ymd, x, y, o_file, map_range)

    # ------- day ----------
    if np.where(day_index)[0].size > 0:
        o_file = os.path.join(
            cur_path, '%s-%s_MatchedPoints_Day_%s' % (part1, part2, ymd))
        x_d = x[day_index]
        y_d = y[day_index]
        print 'date: {}, x_day: {} y_day: {} '.format(ymd, len(x_d), len(y_d))
        draw_butterfly(part1, part2, cur_ymd, ymd, x_d, y_d, o_file, map_range)

    # --------- night ------------
    if np.where(night_index)[0].size > 0:
        o_file = os.path.join(
            cur_path, '%s-%s_MatchedPoints_Night_%s' % (part1, part2, ymd))
        x_n = x[night_index]
        y_n = y[night_index]
        print 'date: {}, x_night: {} y_night: {} '.format(
            ymd, len(x_n), len(y_n))
        draw_butterfly(part1, part2, cur_ymd, ymd, x_n, y_n, o_file, map_range)

    Log.info(u"Success")
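
# Illustration (an assumption, not this project's implementation): the day/night split
# above vectorizes a per-sample test over (timestamp, longitude). A crude local-solar-time
# version of such a test is sketched below; the real is_day_timestamp_and_lon used by this
# module may apply a different rule.
def _rough_is_day(timestamp, lon):
    """Return True when local solar time (UTC hour + lon / 15) falls between 06:00 and 18:00."""
    from datetime import datetime
    utc = datetime.utcfromtimestamp(timestamp)
    local_hour = (utc.hour + utc.minute / 60.0 + lon / 15.0) % 24
    return 6.0 <= local_hour < 18.0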