def get_metadata (station_id,id_type=None):
    """Resolve a station id to its UCAN id and display name.

    If id_type is not supplied, it is inferred from the shape of
    station_id (njwx, icao, cu_log, miwx, newa).  Returns
    (ucanid, station_name); station_name falls back to the raw id
    when no UCAN record is found.  Re-raises on lookup errors.
    """
    ucanid = None
    station_name = station_id
    try:
        if not id_type:
            # infer the network type from the id's format
            if station_id[0:1] >= '1' and station_id[0:1] <= '9' and station_id[1:2] >= '0' and station_id[1:2] <= '9':
                id_type = 'njwx'
            elif len(station_id) == 4:
                id_type = 'icao'
            elif station_id[0:3] == "cu_" or station_id[0:3] == "um_" or station_id[0:3] == "uc_" or station_id[0:3] == "un_":
                id_type = 'cu_log'
            elif station_id[0:3] == "ew_":
                # miwx ids carry an "ew_" prefix that UCAN does not use
                station_id = station_id[3:]
                id_type = 'miwx'
            elif len(station_id) == 3 or len(station_id) == 6:
                id_type = 'newa'
            else:
                return newaCommon_io.errmsg('Error processing form; check station input')
        elif id_type == 'miwx' and len(station_id) == 6:
            # caller supplied type; still strip the 3-char prefix
            station_id = station_id[3:]
        query = ucan.get_query()
        r = query.getUcanFromIdAsSeq(station_id,id_type)
        if len(r) > 0:
            # most recent UCAN record wins
            ucanid = r[-1].ucan_id
            info = query.getInfoForUcanIdAsSeq(ucanid,())
            fields = ucanCallMethods.NameAny_to_dict(info[-1].fields)
            station_name = fields['name']
        query.release()
    except:
        print 'Error getting metadata for',station_id,id_type
        print_exception()
        # NOTE(review): if ucan.get_query() itself raised, 'query' is
        # unbound here and this handler raises NameError — confirm.
        if not query._non_existent():
            query.release()
        raise
    return ucanid, station_name
def get_sister_info(stn):
    """Return [(variable, sister_station_name), ...] for station stn.

    Looks up stn in the sister_info table, infers each sister id's
    network type from its format, and resolves its display name via
    newaCommon.get_metadata.  Errors yield a partial (possibly empty)
    list.
    """
    var_sister = []
    try:
        from sister_info import sister_info
        if sister_info.has_key(stn):
            sister = sister_info[stn]
            for var in sister.keys():
                # infer network type from the sister id's shape
                if ( sister[var][0:1] >= "1" and sister[var][0:1] <= "9" and sister[var][1:2] >= "0" and sister[var][1:2] <= "9" ):
                    station_type = "njwx"
                elif len(sister[var]) == 4:
                    sister[var] = sister[var].upper()
                    station_type = "icao"
                elif ( sister[var][0:3] == "cu_" or sister[var][0:3] == "um_" or sister[var][0:3] == "uc_" or sister[var][0:3] == "un_" ):
                    station_type = "cu_log"
                elif len(sister[var]) == 3 or len(sister[var]) == 6:
                    station_type = "newa"
                # NOTE(review): if no branch matches, station_type is
                # unbound and the bare except below swallows the
                # NameError, truncating the list — confirm intended.
                est_staid, sister_name = newaCommon.get_metadata(sister[var], station_type)
                var_sister.append((var, sister_name))
    except:
        print "Error finding sister info"
        print_exception()
    return var_sister
def get_fcst_data (stn, requested_var, start_date_dt, end_date_dt):
    """Fetch hourly forecast values for one variable at one station.

    Returns {(yr,mo,dy,hr): value}.  'prcp' is mapped to the forecast
    'qpf' variable; 'srad' is computed by solar_main_fcst2; everything
    else comes from the station's record in the NDFD forecast db.
    """
    hourly_fcst = {}
    try:
        if requested_var == 'prcp': requested_var = 'qpf'
        if requested_var == 'srad':
            # solar radiation is modeled, not stored in the db
            hourly_fcst = solar_main_fcst2(stn,(start_date_dt.year,start_date_dt.month,start_date_dt.day,start_date_dt.hour),\
                (end_date_dt.year,end_date_dt.month,end_date_dt.day,end_date_dt.hour))
        else:
            stn = stn.upper()
            # NOTE(review): if loads() raises, the db handle is never
            # closed (bare except below swallows it) — confirm.
            forecast_db = hashopen('/Users/keith/NDFD/hourly_forecasts.db','r')
            stn_dict = loads(forecast_db[stn])
            forecast_db.close()
            if stn_dict.has_key(requested_var):
                for dkey in stn_dict[requested_var].keys():
                    dkey_dt = DateTime.DateTime(*dkey)
                    if dkey_dt >= start_date_dt and dkey_dt <= end_date_dt:
                        for h in range(0,24):
                            if stn_dict[requested_var][dkey][h] != miss:
                                if requested_var != 'qpf':
                                    tkey = (dkey[0],dkey[1],dkey[2],h)
                                    hourly_fcst[tkey] = stn_dict[requested_var][dkey][h]
                                else: #split qpf over last 6 hours
                                    for phr in range(0,6):
                                        pdt = dkey_dt + DateTime.RelativeDate(hour=h) + DateTime.RelativeDate(hours=-phr)
                                        tkey = (pdt.year,pdt.month,pdt.day,pdt.hour)
                                        hourly_fcst[tkey] = stn_dict[requested_var][dkey][h]/6.
    except:
        print_exception()
    return hourly_fcst
def apple_biofix_process(request): try: # retrieve input if request.form: try: now = DateTime.now() if request.form['submit field'] == 'Submit': outfil = open( '/static/NEWA/apple_biofix_%s.txt' % now.year, 'w') for key in request.form.keys(): if key != 'submit field' and request.form[key] != '': outfil.write('%s, %s\n' % (key, request.form[key])) outfil.close return newaInput_io.apple_biofix_results( 'Biofix results saved.') else: return newaInput_io.apple_biofix_results( 'No changes saved.') except: print_exception() raise program_exit('Error processing form') else: return newaModel_io.errmsg('Error processing form; no form') except program_exit, msg: print msg return newaModel_io.errmsg('Error processing form')
def enter(self):
    """Dispatch the incoming command to its business handler.

    :return: 0 when no packet should be sent back to the front end,
             a protobuf response on success, or an error response.
    """
    try:
        # cmd 508 is the alipay asynchronous notification; it arrives
        # without a session, so skip session validation for it only.
        if self.cmd != 508:
            code, message = verify_session_key(self.numbers, self.session_key)
            if code != 10400:
                g_log.debug("verify session key failed, %s, %s", code, message)
                return package.error_response(self.cmd, self.seq, 60001, "invalid session key")

        dispatch = {501: self.merchant_credit_flow_retrieve,
                    502: self.merchant_allow_exchange_in,
                    503: self.merchant_recharge,
                    504: self.merchant_withdrawals,
                    505: self.balance_record_retrieve,
                    506: self.balance_retrieve,
                    507: self.recharge_trade_no_retrieve,
                    508: self.alipay_async_notify}
        outcome = dispatch.get(self.cmd, self.dummy_command)()

        if outcome == 0:
            # handler failed and asked that no reply be sent
            return 0
        if outcome == 1:
            # handler failed; report the error it recorded on self
            return package.error_response(self.cmd, self.seq, self.code, self.message)
        # success: handler built the reply itself
        return outcome
    except Exception as e:
        from print_exception import print_exception
        print_exception()
        g_log.error("%s", e)
        return 0
def enter(self):
    """Dispatch the incoming voucher command to its handler.

    :return: 0 when no packet should be sent back to the front end,
             a protobuf response on success, or an error response.
    """
    try:
        # every voucher command requires a valid session
        code, message = verify_session_key(self.numbers, self.session_key)
        if code != 10400:
            g_log.debug("verify session key failed, %s, %s", code, message)
            return package.error_response(self.cmd, self.seq, 80001, "invalid session key")

        dispatch = {801: self.consumer_retrieve_voucher,
                    802: self.merchant_retrieve_voucher,
                    803: self.confirm_voucher}
        outcome = dispatch.get(self.cmd, self.dummy_command)()

        if outcome == 0:
            # handler failed and asked that no reply be sent
            return 0
        if outcome == 1:
            # handler failed; report the error it recorded on self
            return package.error_response(self.cmd, self.seq, self.code, self.message)
        # success: handler built the reply itself
        return outcome
    except Exception as e:
        from print_exception import print_exception
        print_exception()
        g_log.error("%s", e)
        return 0
def run_stationInfo(stn):
    """Return a JSON document of metadata for station stn.

    Copies the standard metadata fields from stn_info and attaches a
    human-readable 'lasthour' stamp from the most recent download time.
    On error, 'metadata' is an empty dict.
    """
    from get_downloadtime import get_downloadtime
    station_dict = {}
    try:
        from newaCommon.stn_info import stn_info
        stn = stn.lower()
        sdict = {}
        if stn_info.has_key(stn):
            sdict['id'] = stn
            # copy only the fields this station actually has
            for item in ['lat','lon','elev','name','network','vars']:
                if stn_info[stn].has_key(item):
                    sdict[item] = stn_info[stn][item]
            dt = get_downloadtime(stn,sdict['network'])
            if dt == -999:
                # sentinel: download time unavailable
                sdict['lasthour'] = "Unknown"
            else:
                # 12-hour clock label
                if dt.hour < 12:
                    hr_str = str(dt.hour)+" AM"
                elif dt.hour == 12:
                    hr_str = "12 PM"
                else:
                    hr_str = str(dt.hour-12)+" PM"
                sdict['lasthour'] = "%d/%d/%d %s" % (dt.month,dt.day,dt.year,hr_str)
        station_dict['metadata'] = sdict
    except:
        print 'Error processing request for',stn
        print_exception()
        station_dict['metadata'] = {}
    json_return = json.dumps(station_dict)
    return json_return
def get_hourly2 (self, stn, start_date_dt, end_date_dt):
    """Fetch all hourly observations for a station over a date range.

    Infers the network type from the id's format, resolves the UCAN
    id/name, then pulls the data.  Returns (hourly_data, download_time,
    station_name, avail_vars); empty/blank values on error.
    """
    hourly_data = []
    download_time = ''
    station_name = ''
    avail_vars = []
    try:
        # infer network type from the station id's shape
        if stn[0:1] >= '1' and stn[0:1] <= '9' and stn[1:2] >= '0' and stn[1:2] <= '9':
            station_type = 'njwx'
        elif len(stn) == 4:
            station_type = 'icao'
        elif stn[0:3] == "cu_" or stn[0:3] == "um_" or stn[0:3] == "uc_" or stn[0:3] == "un_":
            station_type = 'cu_log'
        elif stn[0:3] == "ew_":
            stn = stn[3:]
            station_type = 'miwx'
        elif len(stn) == 3 or len(stn) == 6:
            station_type = 'newa'
        else:
            return newaCommon_io.errmsg('Error processing form; check station input')
        # get ucanid and station name from metadata
        ucanid,station_name = get_metadata (stn, station_type)
        # icao stations are keyed by call sign; others by UCAN id
        if station_type == 'icao':
            staid = stn.upper()
        else:
            staid = ucanid
        # obtain all hourly data for station
        hourly_data,daily_data,avail_vars = get_newa_data (staid,stn,start_date_dt,end_date_dt,station_type)
        if len(hourly_data) > 0:
            # save time of last hour downloaded
            download_time = hourly_data[-1][0]
    except:
        print_exception()
    return hourly_data, download_time, station_name, avail_vars
def run_stationInfo(stn): from get_downloadtime import get_downloadtime station_dict = {} try: from stn_info import stn_info stn = stn.lower() sdict = {} if stn_info.has_key(stn): sdict['id'] = stn for item in ['lat','lon','elev','name','network','vars']: if stn_info[stn].has_key(item): sdict[item] = stn_info[stn][item] dt = get_downloadtime(stn,sdict['network']) if dt == -999: sdict['lasthour'] = "Unknown" else: if dt.hour < 12: hr_str = str(dt.hour)+" AM" elif dt.hour == 12: hr_str = "12 PM" else: hr_str = str(dt.hour-12)+" PM" sdict['lasthour'] = "%d/%d/%d %s" % (dt.month,dt.day,dt.year,hr_str) station_dict['metadata'] = sdict except: print 'Error processing request for',stn print_exception() station_dict['metadata'] = {} json_return = json.dumps(station_dict) return json_return
def run_stationModels(stn):
    """Return JSON listing the pest-model URLs available for a station.

    A model qualifies only if the station reports every variable the
    model needs ('eslw' counts as available when 'lwet' or 'rhum' is).
    Some onion/tomato/potato URLs get extra default path parameters.
    """
    station_dict = {}
    model_list = []
    try:
        from stn_info import stn_info
        from pest_models import pest_models
        stn = stn.lower()
        if stn_info.has_key(stn):
            stn_vars = copy.deepcopy(stn_info[stn]['vars'])
            # estimated leaf wetness is derivable from lwet or rhum
            if 'lwet' in stn_info[stn]['vars'] or 'rhum' in stn_info[stn]['vars']:
                stn_vars.append('eslw')
            for mdl in pest_models:
                mtitle,murl,mvars = mdl
                for model_var in mvars:
                    if not model_var in stn_vars:
                        break
                else:
                    # for/else: only runs when every model var is present
                    whole_url = "http://newa.nrcc.cornell.edu/"+murl+"/"+stn
                    # some models require default trailing parameters
                    if murl == "newaDisease/onion_dis" or murl == "newaDisease/onion_onlog" or murl == "newaDisease/onion_smbalog" or murl == "newaDisease/onion_sbalog":
                        whole_url = whole_url+"/9999"
                    elif murl == "newaDisease/tomato_for":
                        whole_url = whole_url+"/9999/5"
                    elif murl == "newaDisease/potato_pdays" or murl == "newaDisease/potato_lb":
                        whole_url = whole_url+"/9999/5/1"
                    model_list.append([mtitle,whole_url])
    except:
        print 'Error processing request for',stn
        print_exception()
    # station_dict['models'] = model_list
    sorted_model_list = row_major_list(model_list,3)
    station_dict['models'] = sorted_model_list
    json_return = json.dumps(station_dict)
    return json_return
def on_response(self, response):
    """Translate a backend credit response into the client JSON reply.

    Dispatches self.response through the handler matching self.mode,
    then writes {"c": code, "r"/"m": payload} and finishes the request.
    code == 1 means success ("r" carries the result); any other code is
    an error ("m" carries the message).
    """
    try:
        super(Credit, self).on_response(response)
        if not self.response:
            g_log.error("illegal response")
            self.write(json.dumps({"c": 1040003, "m": "exception"}))
        else:
            # mode -> response-decoding handler
            features_response = {"consumption": self.create_consumption_response,
                                 "credit_list": self.consumer_fetch_all_credit_response,
                                 "credit_list_of_merchant": self.consumer_fetch_credit_of_merchant_response,
                                 "credit_list_detail": self.consumer_fetch_all_credit_detail_response,
                                 "credit_list_m": self.merchant_fetch_all_credit_response,
                                 "credit_list_m_of_consumer": self.merchant_fetch_credit_of_consumer_response,
                                 "apply_list_m": self.merchant_fetch_apply_credit_response,
                                 "credit_detail": self.consumer_fetch_credit_detail_response,
                                 "confirm": self.confirm_apply_credit_response,
                                 "refuse": self.refuse_apply_credit_response,
                                 "interchange": self.credit_interchange_response,
                                 "allow_out_credit": self.consumer_fetch_allow_out_credit_response}
            self.code, self.message = features_response.get(self.mode, self.dummy_command)(self.response)
            if self.code == 1:
                # success: "r" carries the result payload
                self.write(json.dumps({"c": self.code, "r": self.message}))
            else:
                # failure: "m" carries the error message
                self.write(json.dumps({"c": self.code, "m": self.message}))
    except Exception as e:
        from print_exception import print_exception
        print_exception()
        g_log.error("<%s> %s", e.__class__, e)
        self.write(json.dumps({"c": 1040004, "m": "exception"}))
    g_log.debug("[credit.%s.response]", self.mode)
    self.finish()
def process_update (request,path):
    """Handle a crucifer-update form submission.

    Reads pest/altref/tech_choice from the form (path must be None) and
    runs the crucifer update; any problem yields an error page object.
    """
    try:
        pest = None
        altref = None
        tech_choice = 'conventional'
        # retrieve input
        if path is None:
            if request and request.form:
                try:
                    if request.form.has_key('pest'): pest = request.form['pest']
                    if request.form.has_key('altref'): altref = request.form['altref']
                    if request.form.has_key('tech_choice'): tech_choice = request.form['tech_choice']
                except:
                    print_exception()
                    raise program_exit('Error processing request')
            else:
                return newaCommon_io.errmsg('Error processing form; check input')
        else:
            return newaCommon_io.errmsg('Error processing input')
        # send input to appropriate routine
        if pest and altref:
            return Crucifer().run_crucifer_update(pest,altref,tech_choice)
        else:
            return newaCommon_io.errmsg('Error processing input')
    except program_exit,msg:
        print msg
        return newaCommon_io.errmsg('Error processing input')
def get_records (thr_id,rn,infile):
    """Load ranked extreme records for one threaded station.

    Returns (records_dict, name, por): records_dict maps keys such as
    'maxt-hi' to 366 (value, year) pairs with the leap-day entry
    (day index 366) spliced in after Feb 28; name is "Name, ST"; por
    is the (start_yr, end_yr) period of record.  rn is the 1-based
    rank to extract.  Defaults of '' / 9999 survive any error.
    """
    records_dict = {}
    name = ''
    start_yr = 9999
    end_yr = 9999
    por = (9999,9999)
    try:
        db = hashopen(infile,'r')
        if db.has_key(thr_id):
            thr_recs = loads(db[thr_id])
            name = '%s, %s' % (thr_recs['name'],thr_recs['state'])
            start_yr = min(thr_recs['maxt']['start_yr'],thr_recs['mint']['start_yr'])
            end_yr = max(thr_recs['maxt']['end_yr'],thr_recs['mint']['end_yr'])
            por = (start_yr,end_yr)
            # calendar order: days 1-59 (Jan 1 - Feb 28), then the
            # leap-day slot stored at index 366, then days 60-365
            day_order = range(1,60) + [366] + range(60,366)
            for element, hilo in (('maxt','-hi'),('mint','-lo'),('maxt','-lo'),('mint','-hi'),('pcpn','-hi')):
                records = thr_recs[element][hilo]
                records_dict[element+hilo] = [(records[d][rn-1][0],records[d][rn-1][1]) for d in day_order]
        db.close()
    except:
        print_exception()
    return records_dict, name, por
def get_srfcst_data(stn, start_date, end_date):
    """Fetch hourly sky-cover and dewpoint forecasts for a station.

    start_date/end_date are DateTime constructor tuples.  Returns
    (latlon, hourly_fcst) where hourly_fcst maps each variable to a
    {(yr,mo,dy,hr): value} dict.  latlon is None when the lookup fails.
    """
    hourly_fcst = {}
    # BUG FIX: latlon was unbound when an exception fired before its
    # assignment, turning the final return into a NameError.
    latlon = None
    try:
        start_date_dt = DateTime.DateTime(*start_date)
        end_date_dt = DateTime.DateTime(*end_date)
        stn = stn.upper()
        forecast_db = hashopen('/ndfd/hourly_forecasts.db', 'r')
        try:
            stn_dict = loads(forecast_db[stn])
        finally:
            # close even when the station key is missing or unpickling
            # fails (the original leaked the handle in those cases)
            forecast_db.close()
        latlon = stn_dict['ll']
        for requested_var in ['tsky', 'dwpt']:
            if stn_dict.has_key(requested_var):
                if not hourly_fcst.has_key(requested_var):
                    hourly_fcst[requested_var] = {}
                for dkey in stn_dict[requested_var].keys():
                    dkey_dt = DateTime.DateTime(*dkey)
                    if dkey_dt >= start_date_dt and dkey_dt <= end_date_dt:
                        for h in range(0, 24):
                            # skip missing-value sentinels
                            if stn_dict[requested_var][dkey][h] != miss:
                                tkey = (dkey[0], dkey[1], dkey[2], h)
                                hourly_fcst[requested_var][tkey] = stn_dict[
                                    requested_var][dkey][h]
    except:
        print_exception()
    return latlon, hourly_fcst
def process_changes(request):
    """Compare current vs previous threaded-records versions for a station.

    Loads rank-1 records from the two database versions and collects
    every day whose value or year differs (tolerance 0.01), then
    renders the change table.  Precip values are formatted to 2
    decimals; temperatures are truncated to ints.
    """
    try:
        station = request.form['thr_id']
        crnt = '11.0'
        crntfile = '/Users/keith/Sites/data/threaded_records.db'
        prev = '10.1'
        prevfile = '/Users/keith/progs/Threading/Version_10.1/threaded_records.db'
        # process data and send results for output
        crnt_recs, name, crnt_por = get_records (station,1,crntfile)
        prev_recs, name, prev_por = get_records (station,1,prevfile)
        change_dict = {}
        for var in ['maxt-hi','mint-lo','maxt-lo','mint-hi','pcpn-hi']:
            change_list = []
            for dy in range(0,366):
                # 0.01 tolerance avoids flagging float round-off
                if abs(crnt_recs[var][dy][0]-prev_recs[var][dy][0]) >= 0.01 or abs(crnt_recs[var][dy][1]-prev_recs[var][dy][1]) >= 0.01:
                    # if crnt_recs[var][dy][0] != prev_recs[var][dy][0] or crnt_recs[var][dy][1] != prev_recs[var][dy][1]:
                    if var == 'pcpn-hi':
                        change_list.append((doy_to_date(dy), "%5.2f"%round(crnt_recs[var][dy][0],2), crnt_recs[var][dy][1], "%5.2f"%round(prev_recs[var][dy][0],2), prev_recs[var][dy][1]))
                    else:
                        change_list.append((doy_to_date(dy), int(crnt_recs[var][dy][0]), crnt_recs[var][dy][1], int(prev_recs[var][dy][0]), prev_recs[var][dy][1]))
            if len(change_list) > 0:
                change_dict[var] = change_list
        return ThreadExRecords_io.display_changes(crnt,prev,name,crnt_por,prev_por,change_dict)
    except:
        print_exception()
        return ThreadExRecords_io.bad_input(None)
def run_stationModels(stn):
    """Return JSON listing the model URLs available for a station.

    A model qualifies only if the station reports every variable it
    needs ('eslw' counts as available when 'lwet' or 'rhum' is).  URL
    shape varies per model family (some include the state code).
    """
    station_dict = {}
    model_list = []
    try:
        from newaCommon.stn_info import stn_info
        from pest_models import pest_models
        stn = stn.lower()
        if stn_info.has_key(stn):
            stn_vars = copy.deepcopy(stn_info[stn]['vars'])
            # estimated leaf wetness is derivable from lwet or rhum
            if 'lwet' in stn_info[stn]['vars'] or 'rhum' in stn_info[stn]['vars']:
                stn_vars.append('eslw')
            for mdl in pest_models:
                mtitle,murl,mvars = mdl
                for model_var in mvars:
                    if not model_var in stn_vars:
                        break
                else:
                    # for/else: only runs when every model var is present
                    if murl == "newaModel/apple_scab" or murl == "newaModel/fire_blight" or murl == "newaModel/sooty_blotch" or \
                       murl == "newaModel/berry_moth" or murl == "newaModel/grape_dis" or murl == "newaModel/dmcast" or \
                       murl == "newaDisease/onion_maggot":
                        # these models take the state code in the path
                        whole_url = "http://newa.nrcc.cornell.edu/"+murl+"/"+stn_info[stn]['state']+"/"+stn
                    elif "newaModel/apple_pest" in murl:
                        whole_url = "http://newa.nrcc.cornell.edu/"+murl.replace('/apple_pest','')+"/"+stn_info[stn]['state']+"/"+stn
                    elif murl != "newaDisease/onion_dis":
                        whole_url = "http://newa.nrcc.cornell.edu/"+murl+"/"+stn
                    else:
                        # onion_dis takes no station in the URL
                        whole_url = "http://newa.nrcc.cornell.edu/"+murl
                    model_list.append([mtitle,whole_url])
    except:
        print 'Error processing request for',stn
        print_exception()
    # station_dict['models'] = model_list
    sorted_model_list = row_major_list(model_list,3)
    station_dict['models'] = sorted_model_list
    json_return = json.dumps(station_dict)
    return json_return
def run_ddrange(stn, ddtype, accstr, accend):
    """Render an accumulated degree-day table for a station/date range.

    Defaults accend to now and accstr to Jan 1 of that year.  For the
    current year, six days of forecast are appended to the observed
    data before the degree-day accumulation.
    """
    try:
        base = newaCommon.Base()
        cabbage = newaDisease.Cabbage()
        smry_dict = {'ddtype': ddtype.replace("dd", "")}
        now = DateTime.now()
        if not accend:
            accend = now
        end_date_dt = accend
        if not accstr:
            accstr = DateTime.DateTime(end_date_dt.year, 1, 1, 0)
        start_date_dt = accstr
        if start_date_dt > end_date_dt:
            return newaCommon_io.errmsg('Start date must be before end data.')
        if end_date_dt.year != now.year:
            # historical request: extend range; no live forecast
            smry_dict['this_year'] = False
            end_date_dt = end_date_dt + DateTime.RelativeDate(days=+6)
        else:
            smry_dict['this_year'] = True
        hourly_data, daily_data, download_time, station_name, avail_vars = base.get_hddata2(
            stn, start_date_dt, end_date_dt)
        smry_dict['last_time'] = download_time
        # add forecast data
        if smry_dict['this_year']:
            start_fcst_dt = DateTime.DateTime(
                *download_time) + DateTime.RelativeDate(hours=+1)
            end_fcst_dt = end_date_dt + DateTime.RelativeDate(days=+6)
            hourly_data = newaDisease.add_hrly_fcst(stn, hourly_data, start_fcst_dt, end_fcst_dt)
            daily_data = newaDisease.hrly_to_dly(hourly_data)
        else:
            start_fcst_dt = end_date_dt + DateTime.RelativeDate(hours=+1)
            end_fcst_dt = end_date_dt
            end_date_dt = end_date_dt + DateTime.RelativeDate(days=-6)
        if len(daily_data) > 0:
            degday_dict = base.degday_calcs(daily_data, start_date_dt, end_fcst_dt, ddtype, "accum")
            if len(degday_dict) > 0:
                # get dates for gdd table
                smry_dict = cabbage.setup_dates(smry_dict, end_date_dt)
                # get dd for days of interest (including forecast)
                smry_dict = cabbage.add_ddays(smry_dict, degday_dict, start_date_dt, end_date_dt)
                return newaLister_io.ddrange_html(station_name, smry_dict, degday_dict)
            else:
                # NOTE(review): module-level function referencing
                # self.nodata — raises NameError when reached; confirm
                # whether this was lifted from a class method.
                return self.nodata(stn, station_name, start_date_dt, end_date_dt)
        else:
            return self.nodata(stn, station_name, start_date_dt, end_date_dt)
    except:
        print_exception()
        return
def ascospore_for_grf(dd_data, daily_data):
    """Compute apple-scab ascospore maturity series for graphing.

    dd_data rows are (date, tmax, tmin, ddval, prec) starting at
    greentip; daily_data supplies precipitation for the six preceding
    days.  Degree days accumulate only while the trailing 7-day precip
    total is nonzero (or unknown).  Returns {'dates', 'maturity',
    'error'} lists; empty dict on failure.
    """
    ascospore_dict = {}
    try:
        daily_prec = []
        # need precip for six days preceding greentip (first day in dd_data)
        first_dt = DateTime.DateTime(dd_data[0][0][0], dd_data[0][0][1],
                                     dd_data[0][0][2])
        minus6_dt = first_dt + DateTime.RelativeDate(days=-6)
        minus6_list = [minus6_dt.year, minus6_dt.month, minus6_dt.day]
        for i in range(len(daily_data)):
            dly_dt, tave_hr, tmax, tmin, prcp, lwet, rhum, wspd, srad, st4a, st4x, st4n, dflags = daily_data[
                i]
            if dly_dt == minus6_list:
                # seed the rolling window with the six pre-greentip days
                daily_prec.append(daily_data[i][4])
                daily_prec.append(daily_data[i + 1][4])
                daily_prec.append(daily_data[i + 2][4])
                daily_prec.append(daily_data[i + 3][4])
                daily_prec.append(daily_data[i + 4][4])
                daily_prec.append(daily_data[i + 5][4])
        accum_dd = 0.
        date_list = []
        matur_list = []
        error_list = []
        for dly_dt, tmax, tmin, ddval, prec in dd_data:
            fdate = "%d-%d-%d" % (dly_dt[0], dly_dt[1], dly_dt[2])
            # treat a missing daily precip as zero in the window
            if prec != miss:
                daily_prec.append(prec)
            else:
                daily_prec.append(0.)
            if len(daily_prec) > 7:
                # rolling 7-day precipitation total
                del daily_prec[0]
                prec7dy = sum(daily_prec)
            else:
                prec7dy = miss
            if prec7dy > 0.00 or prec7dy == miss:
                if ddval != miss:
                    accum_dd = accum_dd + ddval
            else:
                pass  #don't accumulate anything during dry period
            if ddval != miss:
                # logistic maturity curve on accumulated degree days
                comp = math.exp(
                    (math.pi / math.sqrt(3)) * ((-2.49 + (0.01 * accum_dd))))
                maturity = 100. * (comp) / (1. + comp)
                comp2 = math.exp(
                    (math.pi / math.sqrt(3)) * ((-1.676 + (0.01 * accum_dd))))
                error = 100. * (comp2 / (1. + comp2)) - maturity
            else:
                maturity = miss
                error = miss
            if maturity != miss:
                date_list.append(fdate)
                matur_list.append(maturity)
                error_list.append(error)
        ascospore_dict['dates'] = date_list
        ascospore_dict['maturity'] = matur_list
        ascospore_dict['error'] = error_list
    except:
        print_exception()
    return ascospore_dict
def apple_thin_json(data_dict, biofix_dt, bloom_dt, percentflowerspurs):
    """Build the apple-thinning results payload for JSON output.

    data_dict is keyed by day offset from greentip (biofix_dt) and
    holds maxt/mint/srad/thinIndex/dd4cAccum per day.  Produces one
    result dict per day with a weighted 7-day thinning average and a
    recommendation when the day falls within bloom..bloom+35 days.
    Returns {"data": [...], "notes": [...]}.
    """
    results_list = []
    notes_list = []
    try:
        results_list = []
        notes_list = []
        tkeys = data_dict.keys()
        tkeys.sort()
        if bloom_dt:
            # recommendations apply through 35 days after bloom
            recommendEnd = bloom_dt + DateTime.RelativeDate(days=+35)
        else:
            recommendEnd = None
        # prime the 7-day window: 3 leading placeholders + first 4 days
        if len(tkeys) >= 4:
            list7day = [miss, miss, miss, data_dict[0]['thinIndex'], data_dict[1]['thinIndex'], \
                        data_dict[2]['thinIndex'], data_dict[3]['thinIndex']]
        else:
            list7day = []
        for key in tkeys:
            t_dt = biofix_dt + DateTime.RelativeDate(days=+key, hour=0, minute=0, second=0.0)
            fdate = "%d-%02d-%02d" % (t_dt.year,t_dt.month,t_dt.day)
            # invalidate the index when any input is missing
            if data_dict[key]['maxt'] == miss or data_dict[key]['mint'] == miss or data_dict[key]['srad'] == miss:
                data_dict[key]['thinIndex'] = miss
            # slide the window forward by one day (lookahead of 4 days)
            if key+4 < len(tkeys) and data_dict[key+4]['maxt'] != miss and data_dict[key+4]['mint'] != miss and data_dict[key+4]['srad'] != miss:
                list7day.append(data_dict[key+4]['thinIndex'])
            else:
                list7day.append(miss)
            list7day.pop(0)
            if len(list7day) == 7 and not miss in list7day:
                #avg7day_STRAIGHT = round((sum(list7day)/7.0), 2)
                # changed straight average to 7-day weighted average - kle 2021-3-18
                wtsum = 0
                wtsum += list7day[0] * 0.3
                wtsum += list7day[1] * 0.5
                wtsum += list7day[2]
                wtsum += list7day[3]
                wtsum += list7day[4]
                wtsum += list7day[5] * 0.8
                wtsum += list7day[6] * 0.6
                # 5.2 is the sum of the weights above
                avg7day = round((wtsum / 5.2), 2)
            else:
                avg7day = "-"
            if bloom_dt and t_dt >= bloom_dt and t_dt <= recommendEnd:
                recommend = get_recommend(avg7day, percentflowerspurs, mround(data_dict[key]['dd4cAccum'],0))
            else:
                recommend = {"efficacy": "NA", "riskColor": 0, "recommend": "-"}
            # results_list.append([fdate, ctof(data_dict[key]['maxt']), ctof(data_dict[key]['mint']),\
            # mround(data_dict[key]['srad'],1), mround(data_dict[key]['thinIndex'],2),avg7day, mround(data_dict[key]['dd4cAccum'],1), recommend])
            day_results = {'date': fdate, 'maxt':ctof(data_dict[key]['maxt']), 'mint': ctof(data_dict[key]['mint']),\
                           'srad': mround(data_dict[key]['srad'],1), 'thinIndex': mround(data_dict[key]['thinIndex'],2), \
                           'avg7day': avg7day, 'dd4cAccum': mround(data_dict[key]['dd4cAccum'],1)}
            day_results.update(recommend)
            results_list.append(day_results)
        if bloom_dt and (bloom_dt - biofix_dt).days < 21:
            notes_list.append('Difference between Green tip and Bloom is less than 21 days. Results may be unreliable.')
    except:
        print_exception()
    return {"data":results_list, "notes":notes_list}
def run_crucifer_disease(self, stn, pest, accend, tech_choice, output):
    """Produce crucifer disease status/recommendations for a station.

    First tries to match accend against each crop stage's calendar
    date range; if none match, falls back to matching the season's
    accumulated dd4c degree days against each stage's dd range.
    Renders the crucifer results page; returns None after logging on
    unexpected errors.
    """
    try:
        smry_dict = {}
        smry_dict['pest'] = pest
        if not accend: accend = DateTime.now()
        smry_dict['output'] = output
        if output == 'standalone': smry_dict['stn'] = stn
        smry_dict['accend'] = accend
        # determine information needed for particular disease
        pest_status_management = import_info_dict(pest)
        if not pest_status_management:
            return newaCommon_io.errmsg('A model is not available for the disease you selected.')
        smry_dict['pest_name'] = pest_status_management['pest_name']
        smry_dict['crop_stages'] = pest_status_management['messages'].keys()
        # get station name - don't need this for Crucifers
        # ucanid,smry_dict['station_name'] = get_metadata (stn)
        # get status and recommendations
        smry_dict['stage'] = "Not defined"
        smry_dict['status'] = "Not defined"
        smry_dict['manage'] = "Not defined"
        # first pass: match accend against each stage's calendar window
        for k in smry_dict['crop_stages']:
            psmk = pest_status_management['messages'][k]
            if psmk.has_key('datelo'):
                datelo = DateTime.DateTime(accend.year,*psmk['datelo'])
                datehi = DateTime.DateTime(accend.year,*psmk['datehi'])
                if accend >= datelo and accend <= datehi:
                    smry_dict = self.filldict(psmk,k,tech_choice,smry_dict)
                    break
        else:
            # didn't fall within any date ranges; now get dd values and check dd ranges
            start_date_dt = DateTime.DateTime(accend.year,1,1)
            daily_data, station_name = self.get_daily (stn, start_date_dt, accend)
            smry_dict['station_name'] = station_name
            if len(daily_data) > 0:
                degday_data = self.degday_calcs (daily_data,start_date_dt,accend,'dd4c','accum')
                if len(degday_data) > 0 and degday_data[-1][4] != miss:
                    ddaccum = degday_data[-1][4]
                    # second pass: match accumulated dd against stage ranges
                    for k in smry_dict['crop_stages']:
                        psmk = pest_status_management['messages'][k]
                        if psmk.has_key('ddlo'):
                            if ddaccum >= psmk['ddlo'] and ddaccum <= psmk['ddhi']:
                                smry_dict = self.filldict(psmk,k,tech_choice,smry_dict)
                                break
                    else:
                        print "Error determining recommendations:",pest,stn,accend
                else:
                    return self.nodata(stn, station_name, start_date_dt, accend)
            else:
                return self.nodata(stn, station_name, start_date_dt, accend)
        return newaVegModel_io.crucifer_results(smry_dict)
    except:
        # NOTE(review): falls through returning None after logging —
        # confirm callers tolerate a None page object.
        print_exception()
def run_apple_et (stn,accend,greentip,output):
    """Run the apple evapotranspiration model for one station.

    Station type may be supplied as "id type" in stn, otherwise it is
    inferred from the id's format.  Models ET from 7 days before
    accend through 6 days after, using greentip as biofix.  Returns a
    JSON string when output == 'json', else a rendered results page.
    """
    et_dict = {}
    try:
        #date range
        start_date_dt = accend + DateTime.RelativeDate(days=-7) + DateTime.RelativeDate(hour=0,minute=0,second=0.0)
        end_date_dt = accend + DateTime.RelativeDate(days=+6) + DateTime.RelativeDate(hour=23,minute=0,second=0.0)
        id_parts = stn.split(" ")
        if len(id_parts) == 1:
            # infer network type from the station id's shape
            if stn[0:3] == '42.' or stn[0:3] == '43.':
                station_type = 'ucc'
            elif stn[0:1] >= '1' and stn[0:1] <= '9' and stn[1:2] >= '0' and stn[1:2] <= '9':
                station_type = 'njwx'
            elif len(stn) == 4 and stn[0:1].upper() == 'K':
                station_type = 'icao'
            elif len(stn) == 4:
                station_type = 'oardc'
            elif stn[0:3] == 'cu_' or stn[0:3] == 'um_':
                station_type = 'cu_log'
            elif stn[0:3] == "ew_":
                stn = stn[3:]
                station_type = 'miwx'
            elif stn[0:5] == "nysm_":
                stn = stn[5:]
                station_type = 'nysm'
            elif len(stn) == 7 and stn[2:3] == "_":
                station_type = 'nwon'
            elif len(stn) == 3 or len(stn) == 6:
                station_type = 'newa'
            else:
                raise StationProblem('Cannot determine station type for %s'%stn)
        else:
            # "id type" form: type given explicitly
            stn = id_parts[0]
            station_type = id_parts[1]
        try:
            biofix_dt = greentip + DateTime.RelativeDate(hour=0, minute=0, second=0.0)
        except TypeError:
            # no greentip date supplied
            return newaTools_io.apple_et_results(None)
        # get daily modeled et, solar rad and precipitation
        et_dict['data'] = apple_hourly (stn, start_date_dt, end_date_dt, biofix_dt, station_type)
    except:
        print_exception()
    if output == 'json':
        import json
        results_list = []
        etkeys = et_dict['data'].keys()
        etkeys.sort()
        for key in etkeys:
            fdate = "%d-%02d-%02d" % (key[0],key[1],key[2])
            results_list.append([fdate,round(et_dict['data'][key]['et'],2),round(et_dict['data'][key]['prcp'],2)])
        json_dict = json.dumps({"data":results_list})
        return json_dict
    else:
        return newaTools_io.apple_et_results(et_dict)
def run_apple_thin (stn,accend,greentip,bloom,percentflowerspurs,output):
    """Run the apple carbohydrate-thinning model for one station.

    Station type may be supplied as "id type" in stn, otherwise it is
    inferred from the id's format.  Models from greentip (biofix)
    through accend+6 days.  Returns JSON when output == 'json', else a
    rendered results page augmented with the input dates.
    """
    try:
        id_parts = stn.split(" ")
        if len(id_parts) == 1:
            # infer network type from the station id's shape
            if stn[0:3] == '42.' or stn[0:3] == '43.':
                station_type = 'ucc'
            elif stn[0:1] >= '1' and stn[0:1] <= '9' and stn[1:2] >= '0' and stn[1:2] <= '9':
                station_type = 'njwx'
            elif len(stn) == 4 and stn[0:1].upper() == 'K':
                station_type = 'icao'
            elif len(stn) == 4:
                station_type = 'oardc'
            elif stn[0:3] == 'cu_' or stn[0:3] == 'um_' or stn[0:3] == 'un_' or stn[0:3] == 'uc_':
                station_type = 'cu_log'
            elif stn[0:3] == "ew_":
                stn = stn[3:]
                station_type = 'miwx'
            elif stn[0:5] == "nysm_":
                stn = stn[5:]
                station_type = 'nysm'
            elif len(stn) == 7 and stn[2:3] == "_":
                station_type = 'nwon'
            elif len(stn) == 3 or len(stn) == 6:
                station_type = 'newa'
            else:
                raise StationProblem('Cannot determine station type for %s'%stn)
        else:
            # "id type" form: type given explicitly
            stn = id_parts[0]
            station_type = id_parts[1]
        try:
            biofix_dt = greentip + DateTime.RelativeDate(hour=0, minute=0, second=0.0)
        except TypeError:
            # greentip is required
            return newaTools_io.apple_thin_results(None)
        try:
            bloom_dt = bloom + DateTime.RelativeDate(hour=0, minute=0, second=0.0)
        except TypeError:
            # bloom is optional
            bloom_dt = None
        #date range
        accend = accend + DateTime.RelativeDate(hour=0,minute=0,second=0.0)
##        start_date_dt = accend + DateTime.RelativeDate(days=-7) + DateTime.RelativeDate(hour=0,minute=0,second=0.0)
        start_date_dt = biofix_dt
        end_date_dt = accend + DateTime.RelativeDate(days=+6) + DateTime.RelativeDate(hour=23,minute=0,second=0.0)
        # get model results
        data_dict = apple_thinning_model (stn, start_date_dt, end_date_dt, bloom_dt, station_type)
        json_dict = apple_thin_json(data_dict, biofix_dt, bloom_dt,percentflowerspurs)
    except:
        # NOTE(review): if an exception fires before json_dict is
        # assigned, the returns below raise NameError — confirm.
        print_exception()
    if output == 'json':
        return json.dumps(json_dict)
    else:
        thin_dict = {'selectedDate': accend, 'greentipDate': biofix_dt, 'bloomDate': bloom_dt}
        json_dict.update(thin_dict)
        return newaTools_io.apple_thin_results(json_dict)
def process_records (request):
    """Route a ThreadEx records form submission to the right renderer.

    Dispatches on the 'variable' field: coverage maps, version changes,
    thread listings, or daily extreme record tables.  For record
    tables, the variable encodes '-hi'/'-lo' + element (e.g.
    '-himaxt'), and the station's thread membership list is built from
    the threads_dict entry keyed by the first 3 chars of thr_id.
    """
    from threads_dict import threads_dict
    try:
        if request.form and request.form.has_key('variable') and request.form.has_key('thr_id'):
            variable = request.form['variable']
            if variable.find('Select') != -1:
                # menu placeholder was submitted unchanged
                return ThreadExRecords_io.bad_input('Select a variable from the menu.')
            elif variable in ['maxt','mint','pcpn']:
                return process_coverage(request)
            elif variable == 'changes':
                return process_changes(request)
            # e.g. '-himaxt' -> hilo='-hi', element='maxt'
            hilo = variable[0:3]
            element = variable[3:7]
            thr_id = request.form['thr_id']
            call = thr_id[0:3]
            if threads_dict.has_key(call):
                thr_list = []
                threads = threads_dict[call]
                for line in threads:
                    seq,coop,wban,icao,name,por,pot = line.split('|')
                    # pick the first id type that is populated ('-' = blank)
                    if coop.find('-') == -1:
                        station = coop.strip()
                        srchmthd = 'COOPID'
                    elif wban.find('-') == -1:
                        station = wban.strip()
                        srchmthd = 'WBAN'
                    elif icao.find('-') == -1:
                        station = icao.strip()
                        srchmthd = 'CallSign'
                    thr_list.append((seq,station,srchmthd,name,pot))
            else:
                return ThreadExRecords_io.bad_input('Select a valid station.')
            if variable == 'thread':
                return process_threads(request,thr_list)
            elif variable in ['-himaxt','-lomaxt','-himint','-lomint','-hipcpn']:
                records_dict = hashopen('/Users/keith/Sites/data/threaded_records.db','r')
                if records_dict.has_key(thr_id):
                    thr_recs = loads(records_dict[thr_id])
                    records = thr_recs[element][hilo]
                    name = '%s, %s' % (thr_recs['name'],thr_recs['state'])
                    start_yr, end_yr = thr_recs[element]['start_yr'],thr_recs[element]['end_yr']
                    records_dict.close()
                    return ThreadExRecords_io.display_records(thr_list, records, name, element, hilo, thr_id, start_yr, end_yr)
                else:
                    records_dict.close()
                    return ThreadExRecords_io.bad_input('Select a valid station.')
            else:
                return ThreadExRecords_io.bad_input(None)
        else:
            return ThreadExRecords_io.bad_input(None)
    except:
        print_exception()
        return ThreadExRecords_io.bad_input(None)
def get_precip_forecast(stn, start_date_dt, end_date_dt):
    """Fetch hourly qpf/pop12 forecasts for a station over a range.

    Returns a list of ((yr,mo,dy,hr), qpf, pop12) tuples, honoring the
    start/end hours of the boundary days.  Each 6-hour qpf total is
    spread evenly (1/6 each) over itself and the preceding 5 hours.
    Empty list when the station is absent or an error occurs.
    """
    hourly_fcst = []
    miss = -999
    try:
        stn = stn.upper()
        pdict = hashopen('/ndfd/hourly_forecasts.db', 'r')
        if pdict.has_key(stn):
            stndict = loads(pdict[stn])
            pdict.close()
            # remember boundary hours, then normalize range to midnights
            firstday_hour = start_date_dt.hour
            lastday_hour = end_date_dt.hour
            start_date_dt = start_date_dt + DateTime.RelativeDate(hour=0)
            end_date_dt = end_date_dt + DateTime.RelativeDate(hour=0)
            theDate_dt = start_date_dt
            while theDate_dt <= end_date_dt:
                theDate = (theDate_dt.year, theDate_dt.month, theDate_dt.day)
                if stndict['qpf'].has_key(theDate):
                    qpf = stndict['qpf'][theDate]
                else:
                    qpf = [miss] * 24
                if stndict['pop12'].has_key(theDate):
                    pop12 = stndict['pop12'][theDate]
                else:
                    pop12 = [miss] * 24
                # honor partial first/last days
                if theDate_dt == start_date_dt:
                    shour = firstday_hour
                else:
                    shour = 0
                if theDate_dt == end_date_dt:
                    ehour = lastday_hour
                else:
                    ehour = 23
                for hr in range(shour, ehour + 1):
                    theTime = (theDate_dt.year, theDate_dt.month, theDate_dt.day, hr)
                    hourly_fcst.append((theTime, qpf[hr], pop12[hr]))
                    # distribute precipitation over last 6 hours
                    if qpf[hr] != miss:
                        x = len(hourly_fcst) - 1
                        for i in range(x, x - 6, -1):
                            if i >= 0:
                                hourly_fcst[i] = hourly_fcst[i][0:1] + (
                                    qpf[hr] / 6., ) + hourly_fcst[i][2:]
                theDate_dt = theDate_dt + DateTime.RelativeDate(days=+1)
    except:
        print_exception()
    return hourly_fcst


#stn = 'cli'
#start_date_dt = DateTime.DateTime(2009,4,16,8)
#end_date_dt = DateTime.DateTime(2009,4,22,23)
#forecast_dict = get_precip_forecast(stn,start_date_dt,end_date_dt)
#for item in forecast_dict:
#    print item
def get_downy_mildew_weather(self):
    """Package weather series into the fixed-size arrays dmcast needs.

    Validates that all hourly series are the same length, converts EST
    timestamps to local day/hour pairs, marks hours where every flag
    checks out, then pads each series to 366*24 entries with -999.0.
    Returns the dmcast input tuple, or [0] on a length mismatch.
    """
    # all five hourly series must be parallel
    if ((len(self.dates) != len(self.tmp_vals)) or
            (len(self.tmp_vals) != len(self.rh_vals)) or
            (len(self.rh_vals) != len(self.prcp_vals)) or
            (len(self.prcp_vals) != len(self.lwet_vals))):
        print 'length problem', len(self.dates), len(self.tmp_vals), len(
            self.rh_vals), len(self.prcp_vals), len(self.lwet_vals)
        return [0]
    days = [
        0,
    ] * len(self.dates)
    hours = [
        0,
    ] * len(self.dates)
    yearOfHours = 24 * 366
    # ok[i] == 1 only when all four QC flags pass for hour i
    ok = [
        0,
    ] * yearOfHours
    for index in range(len(self.dates)):
        try:
            this_date = self.dates[index]
            (day, hour) = getLocalFromEST(this_date)
            days[index] = day
            hours[index] = hour
            tmpFlg = self.tmp_flgs[index]
            rhFlg = self.rh_flgs[index]
            prcpFlg = self.prcp_flgs[index]
            lwetFlg = self.lwet_flgs[index]
            if (tmpFlg) and (rhFlg) and (prcpFlg) and (lwetFlg):
                ok[index] = 1
        except:
            print_exception()
            break
    # --------------------------------------------
    # We need our lists to be 366*24, for the
    # downy mildew program (dmcast)
    # --------------------------------------------
    diffNum = len(ok) - len(self.dates)
    missing_list = [
        -999.0,
    ] * diffNum
    tmp = self.tmp_vals + missing_list
    rh = self.rh_vals + missing_list
    prcp = self.prcp_vals + missing_list
    lwet = self.lwet_vals + missing_list
    (missing_days, missing_hours) = self.get_mildew_missing()
    days = days + missing_days
    hours = hours + missing_hours
    return (self.statFlg, self.dates, hours, days, tmp, rh, prcp, lwet, ok)
def ascospore_for_grf(dd_data, daily_data):
    """Compute apple scab ascospore maturity (%) series for plotting.

    dd_data: daily (date, tmax, tmin, ddval, prec) records starting at
    greentip.  daily_data: full daily records, used only to seed the
    precipitation window with the six days preceding greentip.
    Returns {'dates': [...], 'maturity': [...], 'error': [...]}.
    """
    ascospore_dict = {}
    try:
        daily_prec = []
        # need precip for six days preceding greentip (first day in dd_data)
        first_dt = DateTime.DateTime(dd_data[0][0][0], dd_data[0][0][1], dd_data[0][0][2])
        minus6_dt = first_dt + DateTime.RelativeDate(days=-6)
        minus6_list = [minus6_dt.year, minus6_dt.month, minus6_dt.day]
        for i in range(len(daily_data)):
            dly_dt, tave_hr, tmax, tmin, prcp, lwet, rhum, wspd, srad, st4a, st4x, st4n, dflags = daily_data[i]
            if dly_dt == minus6_list:
                # NOTE(review): assumes at least six records follow the
                # matched day; an IndexError here would be swallowed by the
                # bare except below, returning an empty dict.
                daily_prec.append(daily_data[i][4])
                daily_prec.append(daily_data[i+1][4])
                daily_prec.append(daily_data[i+2][4])
                daily_prec.append(daily_data[i+3][4])
                daily_prec.append(daily_data[i+4][4])
                daily_prec.append(daily_data[i+5][4])
        accum_dd = 0.
        date_list = []
        matur_list = []
        error_list = []
        for dly_dt, tmax, tmin, ddval, prec in dd_data:
            fdate = "%d-%d-%d" % (dly_dt[0], dly_dt[1], dly_dt[2])
            # maintain a trailing 7-day precipitation window
            # (missing precip counts as zero)
            if prec != miss:
                daily_prec.append(prec)
            else:
                daily_prec.append(0.)
            if len(daily_prec) > 7:
                del daily_prec[0]
                prec7dy = sum(daily_prec)
            else:
                prec7dy = miss
            # degree days accumulate only when the past week was wet,
            # or when the 7-day total is not yet known
            if prec7dy > 0.00 or prec7dy == miss:
                if ddval != miss:
                    accum_dd = accum_dd + ddval
            else:
                pass  # don't accumulate anything during dry period
            if ddval != miss:
                # logistic maturity curve on accumulated degree days;
                # 'error' is the spread to a second (offset) curve
                comp = math.exp((math.pi/math.sqrt(3))*((-2.49+(0.01*accum_dd))))
                maturity = 100.*(comp)/(1.+comp)
                comp2 = math.exp((math.pi/math.sqrt(3))*((-1.676+(0.01*accum_dd))))
                error = 100. * (comp2/(1.+comp2))-maturity
            else:
                maturity = miss
                error = miss
            if maturity != miss:
                date_list.append(fdate)
                matur_list.append(maturity)
                error_list.append(error)
        ascospore_dict['dates'] = date_list
        ascospore_dict['maturity'] = matur_list
        ascospore_dict['error'] = error_list
    except:
        print_exception()
    return ascospore_dict
def run_ddrange(stn, ddtype, accstr, accend):
    """Render the degree-day range summary page for a station.

    stn: native station id; ddtype: degree-day type string (e.g. 'dd50be');
    accstr/accend: optional accumulation start/end mx.DateTime values
    (defaults: Jan 1 of the end year, and now).  Returns rendered HTML,
    an error page, or None after logging an unexpected exception.
    """
    try:
        base = newaCommon.Base()
        cabbage = newaDisease.Cabbage()
        smry_dict = {"ddtype": ddtype.replace("dd", "")}
        now = DateTime.now()
        if not accend:
            accend = now
        end_date_dt = accend
        if not accstr:
            accstr = DateTime.DateTime(end_date_dt.year, 1, 1, 0)
        start_date_dt = accstr
        if start_date_dt > end_date_dt:
            return newaCommon_io.errmsg("Start date must be before end data.")
        if end_date_dt.year != now.year:
            # historical request: no live forecast, but extend the window so
            # the summary table has its trailing days
            smry_dict["this_year"] = False
            end_date_dt = end_date_dt + DateTime.RelativeDate(days=+6)
        else:
            smry_dict["this_year"] = True
        hourly_data, daily_data, download_time, station_name, avail_vars = base.get_hddata2(
            stn, start_date_dt, end_date_dt
        )
        smry_dict["last_time"] = download_time
        # add forecast data
        if smry_dict["this_year"]:
            start_fcst_dt = DateTime.DateTime(*download_time) + DateTime.RelativeDate(hours=+1)
            end_fcst_dt = end_date_dt + DateTime.RelativeDate(days=+6)
            hourly_data = newaDisease.add_hrly_fcst(stn, hourly_data, start_fcst_dt, end_fcst_dt)
            daily_data = newaDisease.hrly_to_dly(hourly_data)
        else:
            start_fcst_dt = end_date_dt + DateTime.RelativeDate(hours=+1)
            end_fcst_dt = end_date_dt
            # restore the user's requested end date
            end_date_dt = end_date_dt + DateTime.RelativeDate(days=-6)
        if len(daily_data) > 0:
            degday_dict = base.degday_calcs(daily_data, start_date_dt, end_fcst_dt, ddtype, "accum")
            if len(degday_dict) > 0:
                # get dates for gdd table
                smry_dict = cabbage.setup_dates(smry_dict, end_date_dt)
                # get dd for days of interest (including forecast)
                smry_dict = cabbage.add_ddays(smry_dict, degday_dict, start_date_dt, end_date_dt)
                return newaLister_io.ddrange_html(station_name, smry_dict, degday_dict)
            else:
                # NOTE(review): this is a module-level function, so ``self``
                # is undefined here -- this branch raises NameError (caught
                # by the bare except below).  Probably meant base.nodata or a
                # module-level nodata(); confirm and fix.
                return self.nodata(stn, station_name, start_date_dt, end_date_dt)
        else:
            # NOTE(review): same undefined ``self`` issue as above.
            return self.nodata(stn, station_name, start_date_dt, end_date_dt)
    except:
        print_exception()
        return
def tp_for_grf(stn, daily_data, smry_dict, start_date_dt, end_date_dt):
    """Split observed daily max/min temperature and precip into plot arrays,
    then append up to six days of daily forecast values.

    Returns (obs_dict, smry_dict, forecast_data).  obs_dict holds parallel
    lists 'maxt'/'mint'/'prcp'/'obs_days' for observations and
    'fmaxt'/'fmint'/'fprcp'/'frobs_days' for the forecast; smry_dict is
    passed through unchanged.
    """
    obs_dict = {}
    forecast_data = None
    try:
        mint = []
        maxt = []
        prcpl = []
        obs_days = []
        first = 1  # NOTE(review): flag is cleared but never otherwise used; appears vestigial
        for dly_dt, tave_hr, tmax, tmin, prcp, lwet, rhum, wspd, srad, st4a, st4x, st4n, dflags in daily_data:
            this_day_dt = DateTime.DateTime(dly_dt[0], dly_dt[1], dly_dt[2])
            if this_day_dt < start_date_dt:
                continue
            # only keep days where both extremes were observed
            if tmax != miss and tmin != miss:
                if first:
                    first = 0
                mint.append(int(round(tmin, 0)))
                maxt.append(int(round(tmax, 0)))
                prcpl.append(prcp)
                obs_days.append("%d-%d-%d" % (dly_dt[0], dly_dt[1], dly_dt[2]))
        obs_dict['maxt'] = maxt
        obs_dict['mint'] = mint
        obs_dict['prcp'] = prcpl
        obs_dict['obs_days'] = obs_days
        obs_dict['fmaxt'] = []
        obs_dict['fmint'] = []
        obs_dict['fprcp'] = []
        obs_dict['frobs_days'] = []
        # get daily forecast data: forecast starts the day after the last
        # observed record and runs six days past the requested end
        start_fcst_dt = DateTime.DateTime(
            *daily_data[-1][0]) + DateTime.RelativeDate(days=+1)
        end_fcst_dt = end_date_dt + DateTime.RelativeDate(days=+6)
        if end_fcst_dt >= start_fcst_dt:
            fmint = []
            fmaxt = []
            fprcp = []
            fobs_days = []
            forecast_data = get_daily_forecast(stn, start_fcst_dt, end_fcst_dt)
            for dly_dt, tave_hr, tmax, tmin, prcp, lwet, rhum, wspd, srad, st4a, st4x, st4n, dflags in forecast_data:
                if tmax != miss and tmin != miss:
                    if first:
                        first = 0
                    fmint.append(int(round(tmin, 0)))
                    fmaxt.append(int(round(tmax, 0)))
                    fprcp.append(prcp)
                    fobs_days.append("%d-%d-%d" % (dly_dt[0], dly_dt[1], dly_dt[2]))
            obs_dict['fmaxt'] = fmaxt
            obs_dict['fmint'] = fmint
            obs_dict['fprcp'] = fprcp
            obs_dict['frobs_days'] = fobs_days
    except:
        print_exception()
    return obs_dict, smry_dict, forecast_data
def import_info_dict(pest):
    """Dynamically import newaVegModel/<pest>_info_dict.py and return its
    pest_status_management dict.

    Returns None when the pest is not in the known disease list or the
    import fails (logged).
    """
    if pest in newaVegModel_io.disease_dict:
        name = pest + '_info_dict'
    else:
        return None
    try:
        # BUG FIX: imp.find_module returns an open file object that the
        # caller must close after load_module; the original leaked it (and
        # shadowed the ``file`` builtin).
        fobj, pathname, description = imp.find_module(name, ['newaVegModel'])
        try:
            pmd = imp.load_module(name, fobj, pathname, description)
        finally:
            if fobj:
                fobj.close()
        return pmd.pest_status_management
    except:
        print_exception()
        return None
def trim_missing(values, vdates, miss):
    """Strip trailing missing observations from the end of both lists.

    Walks back from the tail, removing paired entries while the value
    equals *miss*; stops at the first real value.  Returns the (possibly
    shortened) lists; the trimming is done in place.
    """
    try:
        while values and values[-1] == miss:
            values.pop()
            vdates.pop()
    except:
        print_exception()
    return values, vdates
def deghr_for_grf(hourly_data, start_date, end_date):
    """Accumulate fire blight degree hours into rolling 4-day totals.

    hourly_data: sequence of (time_tuple, temp, prcp, lwet, rhum, wspd,
    wdir, srad, st4i, eflags) records.  Accumulation starts the day after
    *start_date* (the biofix) and runs through the end of *end_date*.
    Returns {'dates': [...], 'd4dh': [...]} with one entry per day whose
    trailing 4-day window contained no invalidated (>=2 missing hours) day.
    """
    deghr_dict = {}
    try:
        dly_sum = 0.
        dly_msg = 0.
        d4_dh = []
        dly_d4dh_list = []
        date_list = []
        start_date = start_date + DateTime.RelativeDate(
            days=+1)  # start deghr accumulation day after biofix
        start_date = start_date + DateTime.RelativeDate(
            hour=0, minute=0, second=0)
        end_date = end_date + DateTime.RelativeDate(
            hour=23, minute=59, second=59)
        for theTime, temp, prcp, lwet, rhum, wspd, wdir, srad, st4i, eflags in hourly_data:
            temp_eflag, prcp_eflag, lwet_eflag, rhum_eflag, wspd_eflag, wdir_eflag, srad_eflag, st4i_eflag = eflags
            this_date = DateTime.DateTime(*theTime)
            if this_date >= start_date and this_date <= end_date:
                if temp != miss:
                    # degree-hour contribution for this hour's temperature
                    ddval = newaModel.Apple().get_dhr(temp)
                    dly_sum = dly_sum + ddval
                else:
                    dly_msg = dly_msg + 1
                # save daily values
                if theTime[3] == 23:
                    # two or more missing hours invalidates the whole day
                    if dly_msg >= 2:
                        dly_sum = miss
                    if len(d4_dh) == 4:
                        del d4_dh[0]  # keep total for last 4 days
                    d4_dh.append(dly_sum)
                    # emit a point only when the window has no missing days
                    if d4_dh.count(miss) == 0 and len(d4_dh) > 0:
                        fdate = "%d-%d-%d" % (theTime[0], theTime[1], theTime[2])
                        date_list.append(fdate)
                        dly_d4dh_list.append(int(sum(d4_dh)))
                    dly_sum = 0.
                    dly_msg = 0
        # get last partial day
        # (same flush logic as above, applied once after the loop when the
        # final record did not land on hour 23)
        if theTime[3] != 23:
            if dly_msg >= 2:
                dly_sum = miss
            if len(d4_dh) == 4:
                del d4_dh[0]  # keep total for last 4 days
            d4_dh.append(dly_sum)
            if d4_dh.count(miss) == 0 and len(d4_dh) > 0:
                fdate = "%d-%d-%d" % (theTime[0], theTime[1], theTime[2])
                date_list.append(fdate)
                dly_d4dh_list.append(int(sum(d4_dh)))
        deghr_dict['dates'] = date_list
        deghr_dict['d4dh'] = dly_d4dh_list
    except:
        print 'Error calculating degree hours'
        print_exception()
    return deghr_dict
def trim_missing(values, vdates, vflags):
    """Strip trailing entries whose value equals the module-level ``miss``
    sentinel, keeping the values, dates and flags lists aligned.

    Trimming is in place; the three lists are also returned.
    """
    try:
        while values and values[-1] == miss:
            values.pop()
            vdates.pop()
            vflags.pop()
    except:
        print_exception()
    return values, vdates, vflags
def apple_biofix_input():
    """Read this year's saved apple biofix values (one 'station,value' line
    per station) and hand them to the input-page renderer.

    Returns the rendered input page, or an error page on failure.
    """
    try:
        now = DateTime.now()
        biofix_dict = {}
        # BUG FIX: the original evaluated ``outfil.close`` without calling
        # it, so the file handle was never closed; close in finally.
        outfil = open('/static/NEWA/apple_biofix_%s.txt' % now.year, 'r')
        try:
            for line in outfil.readlines():
                key, val = line.split(',')
                biofix_dict[key] = val
        finally:
            outfil.close()
        return newaInput_io.apple_biofix_input(biofix_dict)
    except:
        print_exception()
        return newaModel_io.errmsg('Error obtaining previous data')
def collect_hourly_input(native_id, start_date_dt, end_date_dt, vars, station_type="newa"):
    """Build the hourly-data dictionary for every requested variable,
    back-filling with estimated and forecast data when observations are
    unavailable.

    Returns the accumulated hourly_data dict ({} on error).
    """
    hourly_data = {}
    try:
        # Michigan (miwx) stations are stored with an 'ew_' prefix
        if station_type == 'miwx' and len(native_id) == 3:
            native_id = 'ew_%s' % native_id
        for reqvar in vars:
            # get forecast data for this variable first ...
            fcst = get_fcst_data(native_id, reqvar, start_date_dt, end_date_dt)
            # ... then merge observations, estimates and forecast together
            hourly_data = get_hourly_data(native_id, reqvar, start_date_dt,
                                          end_date_dt, hourly_data, fcst,
                                          station_type)
    except:
        print_exception()
    return hourly_data
def apple_biofix_input():
    """Read this year's saved apple biofix values (one 'station,value' line
    per station) and hand them to the input-page renderer.

    Duplicate of the /static variant above but reading the development
    path.  Returns the rendered input page, or an error page on failure.
    """
    try:
        now = DateTime.now()
        biofix_dict = {}
        # BUG FIX: the original evaluated ``outfil.close`` without calling
        # it, so the file handle was never closed; close in finally.
        outfil = open('/Users/keith/Sites/NEWA/apple_biofix_%s.txt' % now.year, 'r')
        try:
            for line in outfil.readlines():
                key, val = line.split(',')
                biofix_dict[key] = val
        finally:
            outfil.close()
        return newaInput_io.apple_biofix_input(biofix_dict)
    except:
        print_exception()
        return newaModel_io.errmsg('Error obtaining previous data')
def get_daily_forecast(stn, start_date_dt, end_date_dt):
    """Build daily forecast records (same 13-field shape as observed daily
    data) from the NDFD daily max/min db plus hourly qpf totals.

    Missing values are -999.  eflags marks the 8 hourly-derived fields
    ('' = present, 'M' = missing); only tave/tmax/tmin and precip are ever
    filled.  Returns a list of daily records, empty on error or when the
    station is not in the db.
    """
    daily_fcst = []
    miss = -999
    try:
        stn = stn.upper()
        ##NEW: next line
        forecast_dict = hashopen('/ndfd/daily_forecasts.db', 'r')
        if forecast_dict.has_key(stn):
            # get all daily precip for period
            hourly_fcst = get_precip_forecast(stn, start_date_dt, end_date_dt)
            dly_ppt = {}
            ppt_cnt = 0
            ppt_sum = 0.0
            for dt, qpf, pop in hourly_fcst:
                if qpf != miss:
                    ppt_sum = ppt_sum + qpf
                    ppt_cnt = ppt_cnt + 1
                # end of day: record the total if any hour had qpf
                if dt[3] == 23:
                    if ppt_cnt > 0:
                        dly_ppt[dt[0:3]] = ppt_sum
                    ppt_cnt = 0
                    ppt_sum = 0.0
            # get temps and combine with precip, if available
            stndict = loads(forecast_dict[stn])
            theDate_dt = start_date_dt
            while theDate_dt <= end_date_dt:
                int_date = (theDate_dt.year, theDate_dt.month, theDate_dt.day)
                if stndict.has_key(int_date):
                    tmax, tmin = stndict[int_date]
                    tave = (tmax + tmin) / 2.
                    eflags = ('', '', '', 'M', 'M', 'M', 'M', 'M')
                else:
                    tmax, tmin, tave = miss, miss, miss
                    eflags = ('M', 'M', 'M', 'M', 'M', 'M', 'M', 'M')
                if dly_ppt.has_key(int_date):
                    rain = dly_ppt[int_date]
                    # mark the precip slot as present
                    eflags = (eflags[0], eflags[1], eflags[2], "", 'M', 'M', 'M', 'M')
                else:
                    rain = miss
                daily_fcst.append(([int_date[0], int_date[1], int_date[2]], tave, tmax, tmin, rain,
                                   miss, miss, miss, miss, miss, miss, miss, eflags))
                theDate_dt = theDate_dt + DateTime.RelativeDate(days=+1)
            # NOTE(review): close() runs only when the station was found; an
            # unknown station leaves the db handle open until GC.  Confirm.
            forecast_dict.close()
    except:
        print_exception()
    return daily_fcst
#stn = 'cu_gfr'
#start_date_dt = DateTime.DateTime(2009,5,1)
#end_date_dt = DateTime.DateTime(2009,5,18)
#forecast_dict = get_daily_forecast(stn,start_date_dt,end_date_dt)
#for item in forecast_dict:
#    print item
def process_threads(request, threads):
    """Look up the display name for the requested threaded-station id and
    render the threaded-records page.

    Falls back to an empty name when the id is unknown, and to the
    bad-input page when the lookup itself fails.
    """
    name = ''
    try:
        thr_id = request.form['thr_id']
        coverage_dict = hashopen('/Users/keith/Sites/data/threaded_coverage.db', 'r')
        if coverage_dict.has_key(thr_id):
            thr_cov = loads(coverage_dict[thr_id])
            name = '%s, %s' % (thr_cov['name'], thr_cov['state'])
        coverage_dict.close()
    except:
        print_exception()
        return ThreadExRecords_io.bad_input(None)
    return ThreadExRecords_io.display_thread(threads, name)
def calc_dewpoint(temp, rh): dewpt = miss try: if temp != miss and rh != miss and rh > 0: tempc = (5. / 9.) * (temp - 32.) sat = 6.11 * 10.**(7.5 * tempc / (237.7 + tempc)) vp = (rh * sat) / 100. logvp = math.log(vp) dewptc = (-430.22 + 237.7 * logvp) / (-logvp + 19.08) dewpt = ((9. / 5.) * dewptc) + 32. except: print 'Bad data in dewpoint calculation:', temp, rh print_exception() return dewpt
def filldict(self, psmk, k, tech_choice, smry_dict):
    """Fill smry_dict with stage/status/management text for crop stage *k*.

    psmk: the per-stage message dict; tech_choice 'organic' selects the
    organic management text, anything else the conventional text.  A
    pesticide link is appended when present.  Returns smry_dict.
    """
    try:
        smry_dict['stage'] = k
        smry_dict['status'] = psmk['status']
        smry_dict['manage'] = psmk['management_oc']
        if tech_choice == 'organic':
            smry_dict['manage'] += psmk['management_o']
        else:
            smry_dict['manage'] += psmk['management_c']
        # idiom fix: ``in`` replaces the long-deprecated dict.has_key
        if 'pesticide_link' in psmk:
            smry_dict['manage'] += '<a href="%s" target="_blank">Pesticide information</a>' % (psmk['pesticide_link'])
    except:
        print_exception()
    return smry_dict
def run_fire_blight_plots(stn, end_date_dt, firstblossom, orchard_history, output):
    """Assemble data and render the fire blight risk plot page for a station.

    firstblossom (the biofix) may be passed in, read from the saved biofix
    file, or estimated from dd43be degree-day accumulation.  The season is
    clamped to Apr 1 - Jun 15 of the end year.  Returns the rendered plot
    page, or None after logging an unexpected exception.
    """
    try:
        smry_dict = {}
        smry_dict['biofix_name'] = 'First blossom open'
        daily_data = None
        hourly_data = None
        if not end_date_dt:
            end_date_dt = DateTime.now()
        start_date_dt = DateTime.DateTime(end_date_dt.year, 4, 1, 0)
        end_date_dt = min(end_date_dt, DateTime.DateTime(end_date_dt.year, 6, 15, 23))
        # firstblossom can either be passed into this program, read from a file, or estimated from degree day accumulation
        if not firstblossom:
            firstblossom = newaModel.Models().get_biofix(stn, 'as', end_date_dt.year)  # from file
        if not firstblossom:
            jan1_dt = DateTime.DateTime(end_date_dt.year, 1, 1)
            hourly_data, daily_data, download_time, station_name = newaCommon.Base().get_hddata(stn, jan1_dt, end_date_dt)
            biofix_dd = phen_events_dict['macph_firstblossom_43']['dd'][2]  # by degree day accumulation
            ret_bf_date, ddaccum, ddmiss = newaModel.Models().accum_degday(daily_data, jan1_dt, end_date_dt, 'dd43be', biofix_dd, stn, station_name)
            if ret_bf_date:
                firstblossom = ret_bf_date + DateTime.RelativeDate(hour=23)
        # NOTE(review): if firstblossom is still None here, the attribute
        # access below raises (caught by the bare except) -- confirm intent.
        smry_dict['biofix'] = "%s-%s-%s" % (firstblossom.year, firstblossom.month, firstblossom.day)
        if firstblossom < start_date_dt:
            start_date_dt = firstblossom
        if not orchard_history:
            orchard_history = 2
        smry_dict['orchard_history'] = orchard_history
        # obtain daily data
        if not daily_data:
            hourly_data, daily_data, download_time, station_name = newaCommon.Base().get_hddata(stn, start_date_dt, end_date_dt)
        smry_dict['station_name'] = station_name
        # format for plot routine
        obs_dict, smry_dict, dly_forecast_data = tp_for_grf(stn, daily_data, smry_dict, start_date_dt, end_date_dt)
        # add hourly forecast data
        start_fcst_dt = DateTime.DateTime(*download_time) + DateTime.RelativeDate(hours=+1)
        end_fcst_dt = end_date_dt + DateTime.RelativeDate(days=+5)
        hourly_data = newaModel.Models().add_hrly_fcst(stn, hourly_data, start_fcst_dt, end_fcst_dt)
        if firstblossom and len(hourly_data) > 0:
            # calculate degree hours using Tim Smith's table
            deghr_dict = deghr_for_grf(hourly_data, firstblossom, end_fcst_dt)
        else:
            deghr_dict = []
        # produce plot
        onLoadFunction = "produce_fireblight_graph(%s, %s, %s);" % (smry_dict, obs_dict, deghr_dict)
        return newaGraph_io.apple_disease_plot(onLoadFunction)
    except:
        print_exception()
def calc_dewpoint(self,temp,rh): dewpt = miss try: if temp != miss and rh != miss and rh > 0: tempc = (5./9.)*(temp-32.) sat = 6.11*10.**(7.5*tempc/(237.7+tempc)) vp = (rh*sat)/100. logvp = math.log(vp) dewptc = (-430.22+237.7*logvp)/(-logvp+19.08) dewpt = ((9./5.)*dewptc) + 32. except: print 'Bad data in dewpoint calculation:',temp,rh print_exception() return dewpt
def process_input(request, path):
    """Dispatch a newaUtil URL path to its summary routine.

    path[0] selects the request type; path[1:] carries options (for the
    state station lists: required variable id, then optional state code).
    Returns rendered output or an error page.
    """
    try:
        # retrieve input
        if path[0] in ['stationList', 'stateStationList', 'stateInactiveStationList', 'diseaseStations', 'getForecastUrl', 'stationInfo', 'stationModels']:
            try:
                smry_type = path[0]
                if len(path) > 1:
                    if path[0] == 'stateStationList' or path[0] == 'stateInactiveStationList':
                        # these two take a dict of options
                        list_options = {}
                        list_options['reqvar'] = path[1]
                        if len(path) > 2:
                            list_options['state'] = path[2].upper()
                        else:
                            list_options['state'] = ''
                    else:
                        list_options = path[1]
                        # crawlers sometimes append robots.txt to a path
                        if list_options == 'robots.txt':
                            return newaUtil_io.robots()
                else:
                    list_options = None
            except IndexError:
                raise program_exit('Error processing request')
            except:
                print_exception()
                raise program_exit('Error processing request')
        elif path[0] == 'robots.txt':
            return newaUtil_io.robots()
        else:
            # NOTE(review): program_exit is *returned* here but *raised*
            # above -- both funnel to an error response, but confirm the
            # inconsistency is intentional.
            return program_exit('Error processing input')
        # send input to appropriate routine
        if smry_type == 'stationList':
            return run_stationList(list_options)
        if smry_type == 'stateStationList':
            return run_stateStationList(list_options)
        if smry_type == 'stateInactiveStationList':
            return run_stateInactiveStationList(list_options)
        if smry_type == 'stationInfo':
            return run_stationInfo(list_options)
        if smry_type == 'stationModels':
            return run_stationModels(list_options)
        elif smry_type == 'diseaseStations':
            return run_diseaseStations(list_options)
        elif smry_type == 'getForecastUrl':
            return getForecastUrl(list_options)
        else:
            return program_exit('Error processing request')
    except program_exit, msg:
        print msg
        return newaCommon_io.errmsg('Error processing request')
def getHourlyVars (v, start_date, end_date, stn): values = [] vdates = [] if v: try: v.setDateRange (start_date,end_date) vdates = v.getDateArraySeq() values = v.getDataSeqAsFloat () except Data.TSVar.UnavailableDateRange: # print 'unavailable data range',start_date,end_date pass except: print "Error processing:",stn,v,start_date,end_date print_exception() return values, vdates
def get_fcst_hour(stn, requested_var, date_dt):
    """Return the forecast value for one variable at one hour.

    Only 'temp' and 'rhum' are available hourly; anything else, or a
    missing forecast, yields the module-level ``miss`` sentinel.
    """
    hourly_fcst = miss
    try:
        if requested_var in ('temp', 'rhum'):
            db = hashopen('/Users/keith/NDFD/hourly_forecasts.db', 'r')
            stn_dict = loads(db[stn.upper()])
            db.close()
            dkey = (date_dt.year, date_dt.month, date_dt.day)
            var_fcst = stn_dict.get(requested_var)
            if var_fcst is not None and dkey in var_fcst:
                hourly_fcst = var_fcst[dkey][date_dt.hour]
    except:
        print_exception()
    return hourly_fcst
def calc_degday(self, tmax, tmin, smry_type):
    """Daily degree-day value for one max/min temperature pair (deg F in).

    smry_type selects the formula: 'dd4c'/'dd0c' (Celsius average minus
    base 4/0), 'dd8650' (average of max capped at 86 and min floored at 50,
    minus 50), 'ddNNbe' (Baskerville-Emin single-sine method, Fahrenheit
    base NN), or plain 'ddNN' (average minus base).  Results are floored
    at zero; returns the module-level ``miss`` sentinel for missing input
    or an unparsable type.
    """
    # initialize so the final return can never hit an unbound local if an
    # exception fires before any branch assigns ddval
    ddval = miss
    try:
        if tmax != miss and tmin != miss:
            if smry_type == 'dd4c':
                tave = (tmax + tmin) / 2.
                tave_c = (5. / 9.) * (tave - 32.)
                ddval = tave_c - 4.
            elif smry_type == 'dd0c':
                tave = (tmax + tmin) / 2.
                tave_c = (5. / 9.) * (tave - 32.)
                ddval = tave_c - 0.
            elif smry_type == 'dd8650':
                # cap the max at 86F and floor the min at 50F before averaging
                if tmax > 86:
                    adjtmax = 86.
                else:
                    adjtmax = tmax
                if tmin < 50:
                    adjtmin = 50.
                else:
                    adjtmin = tmin
                tave = (adjtmax + adjtmin) / 2.
                ddval = tave - 50.
            elif smry_type == 'dd43be' or smry_type == 'dd50be' or smry_type == 'dd55be':
                base = float(smry_type[2:4])
                if tmin >= base:
                    # whole day above base: simple average
                    tave = (tmax + tmin) / 2.
                    ddval = tave - base
                elif tmax <= base:
                    # whole day below base: no accumulation
                    ddval = 0.
                else:
                    # Baskerville-Emin single-sine partial-day case.
                    # BUG FIX: the angle must be the ARCSINE of
                    # (base - tave)/tamt; the original called math.sin,
                    # which does not invert the sine curve and produced
                    # wrong partial-day degree days.
                    tave = (tmax + tmin) / 2.
                    tamt = (tmax - tmin) / 2.
                    t1 = math.asin((base - tave) / tamt)
                    ddval = ((tamt * math.cos(t1)) - ((base - tave) * ((3.14 / 2.) - t1))) / 3.14
            else:
                # generic 'ddNN' form; unparsable types yield miss
                try:
                    base = int(smry_type[2:4])
                    tave = (tmax + tmin) / 2.
                    ddval = tave - float(base)
                except:
                    ddval = miss
            # can't be below zero
            if ddval < 0:
                ddval = 0.
        else:
            ddval = miss
    except:
        print_exception()
    return ddval
def get_downloadtime(stn, station_type):
    """Find the most recent hour with a non-missing temperature for a
    station, scanning backwards up to 12 months (a 2-day first chunk, then
    30-day chunks).  Returns an mx.DateTime (DST-adjusted) or ``miss``.
    """
    download_time_dt = miss
    try:
        if station_type == 'icao':
            staid = stn.upper()
        else:
            # NOTE(review): get_metadata elsewhere in this file returns a
            # (ucanid, name) tuple, so staid would be a tuple here and the
            # ``if not staid`` test below can never trip for one -- confirm
            # whether the ucan id should be unpacked.
            staid = get_metadata(stn, station_type)
        if not staid:
            print 'Exiting get_downloadtime: Error retrieving metadata for', stn, station_type
            return download_time_dt
        end_date_dt = DateTime.now()
        # adjust for DST if necessary (don't worry about this for start_date)
        if end_date_dt.dst == 1:
            end_date_dt = end_date_dt + DateTime.RelativeDate(hours=-1)
        start_date_dt = end_date_dt + DateTime.RelativeDate(months=-12, hour=0, minute=0, second=0)
        # set for non-inclusive end date
        end_date_dt = end_date_dt + DateTime.RelativeDate(hours=+1)
        # setup TSVar
        temp0 = initHourlyVar(staid, station_type)
        # make first chunk just 2 days, 30 days in successive calls
        start_period_dt = end_date_dt + DateTime.RelativeDate(days=-2)
        end_period_dt = end_date_dt
        while end_period_dt >= start_date_dt:
            # convert dates to tuples for tsvar calls
            start_period = start_period_dt.tuple()[:4]
            end_period = end_period_dt.tuple()[:4]
            # get necessary hourly data for the period
            temp, temp_dates = getHourlyVars(temp0, start_period, end_period, stn)
            # process data
            if len(temp) > 0:
                # scan newest-to-oldest for the first real observation
                for i in range(len(temp)-1, -1, -1):
                    if temp[i] != miss:
                        temp0.release()
                        theDate = DateTime.DateTime(temp_dates[i][0], temp_dates[i][1], temp_dates[i][2], temp_dates[i][3])
                        download_time_dt = theDate + DateTime.RelativeDate(hours=+theDate.dst)
                        return download_time_dt
            # reset for previous 30-day chunk
            start_period_dt = start_period_dt + DateTime.RelativeDate(days=-30)
            end_period_dt = start_period_dt + DateTime.RelativeDate(days=+30)
    except:
        print_exception()
    # release TSVar
    # NOTE(review): temp0 is unbound if the failure happened before
    # initHourlyVar ran, so this line itself can raise NameError -- confirm.
    if temp0:
        temp0.release()
    return download_time_dt
#Tests
#for id,type in [('KALB','icao'),('alb','newa'),('cu_gfr','cu_log'),('pav','newa'),('xxx','newa'),('zzz','bogus')]:
#    result = get_downloadtime(id,type)
#    print 'Result for ',id,type,result
def fill_with_missing(start_period_dt, end_period_dt, miss):
    """Generate hourly placeholder records from start (inclusive) to end
    (exclusive).

    Every value is *miss*, every flag is 0, and each date is a
    [year, month, day, hour] list.  Returns (values, vdates, vflags).
    """
    values = []
    vdates = []
    vflags = []
    try:
        # (the original also kept an unused ``count`` accumulator -- removed)
        fill_time = start_period_dt
        while fill_time < end_period_dt:
            values.append(miss)
            vdates.append([fill_time.year, fill_time.month, fill_time.day, fill_time.hour])
            vflags.append(0)
            fill_time = fill_time + DateTime.RelativeDate(hours=+1)
    except:
        print_exception()
    return values, vdates, vflags
def filldict(self, psmk, k, tech_choice, smry_dict):
    """Fill smry_dict with stage/status/management text for crop stage *k*.

    psmk: the per-stage message dict; tech_choice 'organic' selects the
    organic management text, anything else the conventional text.  A
    pesticide link is appended when present.  Returns smry_dict.
    """
    try:
        smry_dict['stage'] = k
        smry_dict['status'] = psmk['status']
        smry_dict['manage'] = psmk['management_oc']
        if tech_choice == 'organic':
            smry_dict['manage'] += psmk['management_o']
        else:
            smry_dict['manage'] += psmk['management_c']
        # idiom fix: ``in`` replaces the long-deprecated dict.has_key
        if 'pesticide_link' in psmk:
            smry_dict[
                'manage'] += '<a href="%s" target="_blank">Pesticide information</a>' % (
                    psmk['pesticide_link'])
    except:
        print_exception()
    return smry_dict
def format_time(self, hr):
    """Convert an hour 0-23 into 12-hour clock form.

    Returns (hour, 'AM'|'PM'); hour 0 maps to 12 AM, 12 stays 12 PM.
    """
    ampm = ''
    try:
        if hr == 0:
            hr, ampm = 12, 'AM'
        elif hr < 12:
            ampm = 'AM'
        elif hr == 12:
            ampm = 'PM'
        else:
            hr, ampm = hr - 12, 'PM'
    except:
        print_exception()
    return hr, ampm
def get_fcst_data(self, stn, requested_var, requested_time):
    """Look up one hourly forecast value for a station.

    requested_time: (year, month, day, hour) sequence; 'prcp' requests map
    to the stored 'qpf' variable.  Returns -999 when no forecast exists.
    """
    hourly_fcst = -999
    try:
        db = hashopen('/Users/keith/NDFD/hourly_forecasts.db', 'r')
        stn_dict = loads(db[stn.upper()])
        db.close()
        if requested_var == 'prcp':
            requested_var = 'qpf'
        dkey = tuple(requested_time[0:3])
        var_fcst = stn_dict.get(requested_var)
        if var_fcst is not None and dkey in var_fcst:
            hourly_fcst = var_fcst[dkey][requested_time[3]]
    except:
        print_exception()
    return hourly_fcst
def get_metadata(station_id, id_type):
    """Resolve a native station id to its ucan id and display name.

    Returns (ucanid, station_name); station_name falls back to the raw id
    when no metadata record is found.  Failures are logged and re-raised.
    """
    ucanid = None
    station_name = station_id
    query = ucan.get_query()
    try:
        r = query.getUcanFromIdAsSeq(station_id, id_type)
        if len(r) > 0:
            ucanid = r[-1].ucan_id
            info = query.getInfoForUcanIdAsSeq(ucanid, ())
            fields = ucanCallMethods.NameAny_to_dict(info[-1].fields)
            station_name = fields['name']
    except:
        print_exception()
        raise
    finally:
        # CLEANUP FIX: release exactly once on every path; the original
        # released in both the success path and the except handler, which
        # could double-release if release() itself failed mid-try.
        query.release()
    return ucanid, station_name
def run_crucifer_help(self, pest, tech_choice):
    """Render the help page for a crucifer pest, choosing organic or
    conventional key characteristics.

    Returns an error page when no info dict exists for the pest.
    """
    try:
        psm = import_info_dict(pest)
        if not psm:
            return newaCommon_io.errmsg(
                'Help is not available for the disease you selected.')
        key_char = psm['keychar_oc']
        if tech_choice == 'organic':
            key_char += psm['keychar_o']
        else:
            key_char += psm['keychar_c']
        # key characteristics lead, followed by the configured links
        help_links = [(key_char, "")] + list(psm['help_links'])
        return newaVegModel_io.helppage(help_links)
    except:
        print_exception()
def getHourlyVars(stn, var, v, start_date, end_date, miss):
    """Fetch hourly data from TSVar *v*, trimming trailing missing values.

    Returns (values, vdates); ([], []) when *v* is None, the range has no
    data, or the fetch fails (logged).
    """
    values = []
    vdates = []
    if not v:
        return values, vdates
    try:
        v.setDateRange(start_date, end_date)
        vdates = v.getDateArraySeq()
        values = v.getDataSeqAsFloat()
        values, vdates = trim_missing(values, vdates, miss)
    except Data.TSVar.UnavailableDateRange:
        # no observations in this window -- not an error
        pass
    except:
        print_exception()
    return values, vdates
def find_biofix(self, hourly_data, jan1_dt, end_date_dt, smry_type, biofix_dd):
    """Scan hourly temperatures for the date on which accumulated degree
    days first reach *biofix_dd*.

    hourly_data maps (y,m,d,h) keys to dicts whose 'temp' entry holds the
    hourly value at index 0.  Daily max/min come from the hourly values; a
    day with any missing hour contributes a missing degree-day.  Returns
    (biofix_date, ddmiss): biofix_date is None if the threshold is never
    reached, ddmiss counts days whose degree-days could not be computed.
    """
    biofix_date = None
    ddmiss = None
    try:
        ddaccum = 0.
        ddmiss = 0
        dly_max = -999
        dly_min = 999
        dly_miss = 0
        ks = hourly_data.keys()
        ks.sort()
        for key_date in ks:
            theDate = DateTime.DateTime(*key_date)
            hourly_temp = hourly_data[key_date]['temp'][0]
            if hourly_temp != miss:
                # PERF FIX: plain assignment replaces copy.deepcopy -- the
                # temps are immutable numbers, so deepcopy was pure overhead
                # with identical semantics.
                if hourly_temp > dly_max:
                    dly_max = hourly_temp
                if hourly_temp < dly_min:
                    dly_min = hourly_temp
            else:
                dly_miss = dly_miss + 1
            # end of day update
            if theDate.hour == 23:
                if dly_miss == 0:
                    dly_dd = BaseTools().calc_degday(dly_max, dly_min, smry_type)
                else:
                    dly_dd = miss
                # check to see if biofix gdd accum has been reached
                if dly_dd != miss:
                    ddaccum = ddaccum + dly_dd
                else:
                    ddmiss = ddmiss + 1
                if round(ddaccum, 0) >= biofix_dd:
                    biofix_date = theDate + DateTime.RelativeDate(hours=0)
                    break
                dly_max = -999
                dly_min = 999
                dly_miss = 0
    except:
        print_exception()
    return biofix_date, ddmiss
def apple_thin_json(thin_dict, biofix_dt, bloom_dt):
    """Serialize apple carbohydrate-thinning model output as a JSON string.

    thin_dict['data'] maps day offsets (ints from greentip) to per-day
    model fields.  Each output row is [date, maxt(F), mint(F), srad,
    dlyCarbonBal, totalDemand, thinIndex, 4-day avg index, recommendation];
    recommendations apply only from bloom to bloom+35 days.
    """
    results_list = []
    notes_list = []
    try:
        import json
        results_list = []
        notes_list = []
        tkeys = thin_dict['data'].keys()
        tkeys.sort()
        recommendEnd = bloom_dt + DateTime.RelativeDate(days=+35)
        # rolling window of thinIndex centered to average days key..key+3;
        # seeded with the first three days (leading miss is popped first)
        if len(tkeys) >= 3:
            list4day = [miss, thin_dict['data'][0]['thinIndex'], thin_dict['data'][1]['thinIndex'], thin_dict['data'][2]['thinIndex']]
        else:
            list4day = []
        for key in tkeys:
            t_dt = thin_dict['greentipDate'] + DateTime.RelativeDate(days=+key, hour=0, minute=0, second=0.0)
            fdate = "%d-%02d-%02d" % (t_dt.year, t_dt.month, t_dt.day)
            # a day missing any input invalidates its derived fields
            if thin_dict['data'][key]['maxt'] == miss or thin_dict['data'][key]['mint'] == miss or thin_dict['data'][key]['srad'] == miss:
                thin_dict['data'][key]['dlyCarbonBal'] = miss
                thin_dict['data'][key]['totalDemand'] = miss
                thin_dict['data'][key]['thinIndex'] = miss
            # advance the window with the day three ahead, if valid
            if key+3 < len(tkeys) and thin_dict['data'][key+3]['maxt'] != miss and thin_dict['data'][key+3]['mint'] != miss and thin_dict['data'][key+3]['srad'] != miss:
                list4day.append(thin_dict['data'][key+3]['thinIndex'])
            else:
                list4day.append(miss)
            list4day.pop(0)
            if len(list4day) == 4 and not miss in list4day:
                avg4day = round((sum(list4day)/4.0), 2)
            else:
                avg4day = "-"
            if t_dt >= bloom_dt and t_dt <= recommendEnd:
                recommend = get_recommend(avg4day)
            else:
                recommend = "-"
            results_list.append([fdate, ctof(thin_dict['data'][key]['maxt']), ctof(thin_dict['data'][key]['mint']),
                                 mround(thin_dict['data'][key]['srad'], 1), mround(thin_dict['data'][key]['dlyCarbonBal'], 2),
                                 mround(thin_dict['data'][key]['totalDemand'], 2), mround(thin_dict['data'][key]['thinIndex'], 2),
                                 avg4day, recommend])
        if (bloom_dt - biofix_dt).days < 21:
            notes_list.append('Difference between Green tip and Bloom is less than 21 days. Results may be unreliable.')
    except:
        print_exception()
    # NOTE(review): if the exception fired before ``import json`` completed,
    # this line raises NameError -- confirm json should be imported at module
    # level instead.
    json_dict = json.dumps({"data": results_list, "notes": notes_list})
    return json_dict
def get_stations_with_var(state, varMajors=None, start=None, end=None):
    """Query UCAN metadata for stations in *state* reporting any of the
    given major variable ids within [start, end].

    Returns {ucan_id: info_dict}; {} on no results or error.  Defaults:
    varMajors = [1, 2, 4], start/end effectively unbounded.
    """
    import Meta
    from omniORB import CORBA
    import ucanCallMethods
    # CORBA typecode shortcuts.  NOTE(review): ``any`` shadows the builtin,
    # and several aliases (tc_short, tc_long, tc_nativeId, tc_floatSeq,
    # NativeId) are unused below -- kept as in the original.
    any = CORBA.Any
    tc = CORBA.TypeCode
    tc_short = CORBA.TC_short
    tc_long = CORBA.TC_long
    tc_string = CORBA.TC_string
    tc_nativeId = CORBA.TypeCode(Meta.MetaQuery.NativeId)
    tc_shortSeq = tc(Meta.ShortSeq)
    tc_floatSeq = tc(Meta.FloatSeq)
    NativeId = Meta.MetaQuery.NativeId
    NameAny = Meta.MetaQuery.NameAnyPair
    # set up ucan
    ucan = ucanCallMethods.general_ucan()
    dictionary = {}
    try:
        postal = state.upper()
        if varMajors == None:
            varMajors = [1, 2, 4]
        if start == None:
            start = (0001, 1, 1)
        if end == None:
            end = (9999, 12, 31)
        query = ucan.get_query()
        # build the metadata qualifier sequence
        qualifier = [NameAny('postal', any(tc_string, postal))]
        qualifier.append(NameAny('var_major_id', any(tc_shortSeq, varMajors)))
        qualifier.append(NameAny('begin_date', any(tc_shortSeq, start)))
        qualifier.append(NameAny('end_date', any(tc_shortSeq, end)))
        results = query.getStnInfoAsSeq(qualifier, ())
        query.release()
        if len(results) == 0:
            return {}
        else:
            dictionary = {}
            for item in results:
                r = NameAny_to_dict(item)
                dictionary[r['ucan_id']] = r
    except:
        print_exception()
    return dictionary
def tp_for_grf2(daily_data, start_date_dt, start_fcst_dt, useqpf=True):
    """Split daily temperature/precip records into observed-vs-forecast
    plot arrays, keyed on whether each day falls before start_fcst_dt.

    Returns obs_dict with 'maxt'/'mint'/'prcp'/'obs_days' (observed) and
    'fmaxt'/'fmint'/'fprcp'/'frobs_days' (forecast) parallel lists; when
    useqpf is False the observed precip column replaces qpf.
    """
    obs_dict = {}
    forecast_data = None
    # compare against midnight of the forecast-start day
    start_fcst_dt = start_fcst_dt + DateTime.RelativeDate(
        hour=0, minute=0, second=0)
    try:
        omax, omin, oprcp, odays = [], [], [], []
        fmax_l, fmin_l, fprcp_l, fdays = [], [], [], []
        for dly_dt, tave_hr, tmax, tmin, prcp, lwet, rhum, wspd, srad, qpf, st4x, st4n, dflags in daily_data:
            day_dt = DateTime.DateTime(dly_dt[0], dly_dt[1], dly_dt[2])
            if day_dt < start_date_dt:
                continue
            if not useqpf:
                qpf = prcp
            if tmax == miss or tmin == miss:
                continue
            fdate = "%d-%d-%d" % (dly_dt[0], dly_dt[1], dly_dt[2])
            if day_dt < start_fcst_dt:
                omin.append(int(round(tmin, 0)))
                omax.append(int(round(tmax, 0)))
                oprcp.append(qpf)
                odays.append(fdate)
            else:
                fmin_l.append(int(round(tmin, 0)))
                fmax_l.append(int(round(tmax, 0)))
                fprcp_l.append(qpf)
                fdays.append(fdate)
        obs_dict['maxt'] = omax
        obs_dict['mint'] = omin
        obs_dict['prcp'] = oprcp
        obs_dict['obs_days'] = odays
        obs_dict['fmaxt'] = fmax_l
        obs_dict['fmint'] = fmin_l
        obs_dict['fprcp'] = fprcp_l
        obs_dict['frobs_days'] = fdays
    except:
        print_exception()
    return obs_dict
def loop_through_time(self):
    """Walk from self.sTime to self.eTime in one-month chunks, fetching
    temperature, precipitation, RH and leaf wetness for each chunk, then
    release all TSVar handles.

    Stops fetching early on a weatherError or any other exception (the
    latter is logged); releases still run in every case.
    """
    obsTime = DateTime.DateTime(self.sTime.year, self.sTime.month,
                                self.sTime.day, self.sTime.hour)
    nexTime = obsTime + DateTime.RelativeDate(months=+1)
    while obsTime < self.eTime:
        # clamp the chunk end to the overall end time
        if nexTime > self.eTime:
            end = (self.eTime.year, self.eTime.month, self.eTime.day,
                   self.eTime.hour)
        else:
            end = (nexTime.year, nexTime.month, nexTime.day, nexTime.hour)
        start = (obsTime.year, obsTime.month, obsTime.day, obsTime.hour)
        try:
            self.get_temperature(start, end)
            self.get_precipitation(start, end)
            self.get_rh(start, end)
            self.get_lwet(start, end)
        except weatherError:
            # expected data-availability failure: stop quietly
            break
        except:
            print_exception()
            break
        obsTime = DateTime.DateTime(nexTime.year, nexTime.month,
                                    nexTime.day, nexTime.hour)
        nexTime = obsTime + DateTime.RelativeDate(months=+1)
    # release station-level and per-variable TSVar handles
    self.stn.release_tmp()
    if self.tmpVar:
        self.tmpVar.release_tmp()
    self.stn.release_rh()
    if self.rhVar:
        self.rhVar.release_rh()
    self.stn.release_prcp()
    if self.prcpVar:
        self.prcpVar.release_prcp()
    self.stn.release_lwet()
    if self.lwetVar:
        self.lwetVar.release_lwet()
def getHourlySolar(stn, start_date, end_date, miss, stpr0, wthr0, dwpt0, visi0, ccnd0, chgt0, ceil0, tsky0):
    """Run the solar radiation estimator and align its output to the
    requested hourly range.

    Hours before the first / after the last estimate are padded with
    *miss*; only estimates in [start_date, end_date) are kept.  Returns
    (values, vdates) with dates as (y, m, d, h) tuples.
    """
    import string, Data
    from mx import DateTime
    from print_exception import print_exception
    from solar_main_routine import SOLAR_MAIN
    values = []
    vdates = []
    try:
        srdates, srvalues = SOLAR_MAIN(stn, start_date, end_date, stpr0, wthr0,
                                       dwpt0, visi0, ccnd0, chgt0, ceil0, tsky0)
        range_start = DateTime.DateTime(*start_date)
        range_end = DateTime.DateTime(*end_date)
        solar_start = DateTime.DateTime(*srdates[0])
        solar_end = DateTime.DateTime(*srdates[-1])
        one_hour = DateTime.RelativeDate(hours=+1)
        # pad hours before the first solar estimate
        t = range_start
        while t < solar_start:
            vdates.append(t.tuple()[:4])
            values.append(miss)
            t = t + one_hour
        # keep only estimates inside the requested range
        for k in range(len(srdates)):
            sr_dt = DateTime.DateTime(*srdates[k])
            if sr_dt >= range_start and sr_dt < range_end:
                vdates.append(srdates[k])
                # estimates arrive as strings; convert to float
                values.append(string.atof(srvalues[k]))
        # pad hours after the last solar estimate
        t = solar_end + one_hour
        while t < range_end:
            vdates.append(t.tuple()[:4])
            values.append(miss)
            t = t + one_hour
    except:
        print_exception()
    return (values, vdates)
def run_crucifer_update(self, pest, altref, tech_choice):
    """Build and render the status/management summary table for a crucifer
    disease at crop stage *altref*.

    Unknown pests get an error page; unknown stages get 'Not defined'
    placeholders in the table.
    """
    try:
        smry_dict = {}
        # determine information needed for calculations for particular disease
        psm = import_info_dict(pest)
        if not psm:
            return newaCommon_io.errmsg(
                'A model is not available for the disease you selected.')
        messages = psm['messages']
        if altref in messages:
            smry_dict['crop_stages'] = messages.keys()
            smry_dict = self.filldict(messages[altref], altref, tech_choice, smry_dict)
        else:
            smry_dict['stage'] = "Not defined"
            smry_dict['status'] = "Not defined"
            smry_dict['manage'] = "Not defined"
        return newaVegModel_io.crucifer_sm_table(smry_dict)
    except:
        print_exception()