    full_year_times = \
        [(start_date + rdatetime.timedelta(hours=n)) for n in range(8760)]
    toadd, desctemp = qload("temps_" + k + ".pkl", loc=data_loc + '2011/')
    ### fix it
    #times, tempe = zip(*toadd)
    # temp = {}
    # for date, number in toadd:
    #     if date not in temp:  # we see this key for the first time
    #         temp[date] = (date, number)
    # result = temp.values()
    # Drop duplicate (time, temp) readings and sort them chronologically.
    result = sorted(set(toadd))
    full_temps, temps_oriflag = fill_in(result, full_year_times)
    temp_times, temp_vals = zip(*full_temps)
    temps = (np.array(temp_vals), np.array(temps_oriflag))
    # NOTE: `temps` is built but never attached to `data` before the return
    # below, which is presumably what the "### fix it" marker refers to.
    return data


if __name__ == "__main__":
    facilities = {'1636483694': 'Danville',
                  '1988756172': 'Galesburg',
                  '2550170006': 'Vienna',
                  '5379783532': 'Pinckneyville'}
    data, desc = qload("state_b_records_2011_with_temps.pkl", loc=data_loc)
    for k in facilities:
        data = toadd_to_building_pkls(data, k)
    qdump((data, desc), "state_b_records_2011_with_temps.pkl", loc=data_loc)
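# Flow note: the __main__ block above assumes query_temps (defined below) has
# already been run for each facility, so that "temps_<facility>.pkl" exists
# under data_loc + '2011/'. fill_in is defined elsewhere in this codebase; the
# call above assumes it maps the sparse (timestamp, temperature) pairs onto the
# full 8760-hour grid and returns the filled series together with a per-hour
# flag marking which values were actually queried rather than filled in.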
def query_temps(facility, city, country, state, year, llaves, usedkey, conta):
    """Download a year of hourly weather for one facility from wunderground.

    llaves ("keys") is the list of available API keys, usedkey the key
    currently in use and conta ("contador") the running request count used to
    rotate keys. Returns [(local time, temp F) pairs, llaves, usedkey, conta].
    """
    # One empty list per observation field returned by the history API.
    info = {u'heatindexm': [], u'windchillm': [], u'wdire': [], u'wdird': [],
            u'windchilli': [], u'hail': [], u'heatindexi': [], u'precipi': [],
            u'thunder': [], u'pressurei': [], u'snow': [], u'pressurem': [],
            u'fog': [], u'icon': [], u'precipm': [], u'conds': [], u'tornado': [],
            u'hum': [], u'tempi': [], u'tempm': [], u'dewptm': [], u'rain': [],
            u'dewpti': [], u'visi': [], u'vism': [], u'wgusti': [], u'metar': [],
            u'wgustm': [], u'wspdi': [], u'wspdm': []}
    locd = []  # local observation times
    utcd = []  # UTC observation times

    # Make a vector of all hourly times in the year
    start_date = datetime.strptime("1/1/2011 00:00:00",
                                   "%m/%d/%Y %H:%M:%S").replace(tzinfo=tz_used)
    full_year_times = \
        [(start_date + rdatetime.timedelta(hours=n)) for n in range(8760)]

    #facility = '0579171006'
    #city = "Mount_Sterling"
    #country = "US"
    #state = "IL"
    #year = 2011

    i = 0
    while i < len(full_year_times):
        if i % 24 == 0:
            # One request per day: zero-pad month and day for the URL.
            if full_year_times[i].month < 10:
                mm = "0" + str(full_year_times[i].month)
            else:
                mm = str(full_year_times[i].month)
            if full_year_times[i].day < 10:
                dd = "0" + str(full_year_times[i].day)
            else:
                dd = str(full_year_times[i].day)
            # Rotate to the next API key after 500 requests.
            if conta > 500:
                conta = 1
                usedkey = llaves[llaves.index(usedkey) + 1]
            url = key + usedkey + "history_" + str(year) + mm + dd + \
                "/q/" + state + "/" + city + ".json"
            try:
                f = urllib2.urlopen(url)
                json_string = f.read()
                parsed_json = json.loads(json_string)
                f.close()
                _add_day(parsed_json, info, locd, utcd)
                i += 1
                conta += 1
                print i
                time.sleep(2)
            except Exception:
                # Back off for a minute and retry the same day.
                time.sleep(60)
                continue
        else:
            i += 1

    temp = [float(k) for k in info[u'tempi']]
    temptemp = zip(locd, temp)
    #full_temps, temps_oriflag = fill_in(zip(locd,temp),full_year_times)
    #temp_times, temp_vals = zip(*full_temps)
    #temps = (np.array(temp_vals), np.array(temps_oriflag))
    qdump(([info, locd, utcd],
           "All historic info from wunderground for facility " + facility +
           "_" + city + "_" + state + "_" + str(year)),
          facility + '.pkl', loc="")
    qdump((temptemp,
           "Queried temperatures in Fahrenheit from wunderground for facility " +
           facility + "_" + city + "_" + state + "_" + str(year)),
          "temps_" + facility + ".pkl", loc="")
    return [temptemp, llaves, usedkey, conta]
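# The history endpoint assembled in query_temps (with `key` holding the API
# base URL and `usedkey` one of the keys in `llaves`, both defined elsewhere)
# resolves to something like
#   http://api.wunderground.com/api/<API_KEY>/history_20110214/q/IL/Galesburg.json
# Each response carries one day of observations; _add_day (defined elsewhere)
# is assumed to append every hourly reading to `info` and the matching
# local/UTC timestamps to `locd`/`utcd`, so roughly 365 requests are made per
# facility-year, with the key rotated after every 500 requests.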
# '5379783532' :'Pinckneyville'}
country = "US"
state = "IL"
year = 2012
usedkey = llaves[0]
conta = 1
toadd = []
for k in facilities:
    facility = k
    city = facilities[k]
    country = "US"
    state = "IL"
    year = 2011
    temps, llaves, usedkey, conta = query_temps(facility, city, country, state,
                                                year, llaves, usedkey, conta)
    toadd.append(temps)

# "Temporal con las 5 prisiones" = temporary dump with the five prisons.
qdump((toadd, "Temporal con las 5 prisiones"), "toadd.pkl", loc="")

#data, desc = qload("state_b_records_2011_with_temps.pkl",loc = "")
#print desc
#for k in range(len(toadd)-1):
#    data[k+1]['temps'] = toadd[k]
#    print "Changed ------", k+1
#qdump((data,desc),"state_b_records_2011_with_temps.pkl",loc="")

#print "Len full_year_times-----", len(full_year_times)
#print "len of times in json----", len(locd)
#print "len info ---------------", len(info)
#print "len tempi in info-------", len(info[u'tempi'])
#print "temps ----------------------------"
#print info[u'tempi']
"%m/%d/%Y %H:%M:%S").replace(tzinfo=tz_used) full_year_times = \ [(start_date + rdatetime.timedelta(hours = n)) for n in range(8760)] toadd, desctemp = qload("temps_" + k + ".pkl", loc=data_loc + '2011/') ### fix it #times, tempe = zip(*toadd) # temp = {} # for date, number in toadd: # if date not in temp: # we see this key for the first time # temp[date] = (date, number) # result = temp.values() result = list(set(toadd)).sorted() full_temps, temps_oriflag = fill_in(result, full_year_times) temp_times, temp_vals = zip(*full_temps) temps = (np.array(temp_vals), np.array(temps_oriflag)) return data if __name__ == "__main__": facilities = { '1636483694': 'Danville', '1988756172': 'Galesburg', '2550170006': 'Vienna', '5379783532': 'Pinckneyville' } data, desc = qload("state_b_records_2011_with_temps.pkl", loc=data_loc) for k in facilities: data = toadd_to_building_pkls(data, k) qdump((data, desc), "state_b_records_2011_with_temps.pkl", loc=data_loc)
def query_temps(facility, city, country, state, year, llaves, usedkey, conta): info = {u'heatindexm': [], u'windchillm': [], u'wdire': [], u'wdird': [], \ u'windchilli': [], u'hail': [], u'heatindexi': [], u'precipi': [], \ u'thunder': [], u'pressurei': [], u'snow': [], u'pressurem': [], \ u'fog': [], u'icon': [], u'precipm': [], u'conds': [], u'tornado': [], \ u'hum': [], u'tempi': [], u'tempm': [], u'dewptm': [], u'rain': [], \ u'dewpti': [], u'visi': [], u'vism': [], u'wgusti': [], u'metar': [], \ u'wgustm': [], u'wspdi': [], u'wspdm': []} locd = [] utcd = [] # Make a vector of all times start_date = datetime.strptime("1/1/2011 00:00:00", "%m/%d/%Y %H:%M:%S").replace(tzinfo=tz_used) #full_year_times = \ # [(start_date + rdatetime.timedelta(hours = n)) for n in range(8760)] full_year_times = \ [(start_date + rdatetime.timedelta(hours = n)) for n in range(8760)] #facility = '0579171006' #city = "Mount_Sterling" #country = "US" #state = "IL" #year = 2011 i = 0 while i < len(full_year_times): if i % 24 == 0: if full_year_times[i].month < 10: mm = "0" + str(full_year_times[i].month) else: mm = str(full_year_times[i].month) if full_year_times[i].day < 10: dd = "0" + str(full_year_times[i].day) else: dd = str(full_year_times[i].day) if conta > 500: conta = 1 usedkey = llaves[llaves.index(usedkey) + 1] url = key + usedkey + "history_" + str( year) + mm + dd + "/q/" + state + "/" + city + ".json" try: f = urllib2.urlopen(url) json_string = f.read() parsed_json = json.loads(json_string) f.close() _add_day(parsed_json, info, locd, utcd) i += 1 conta += 1 print i time.sleep(2) except: time.sleep(60) continue else: i += 1 temp = [float(k) for k in info[u'tempi']] temptemp = zip(locd, temp) #full_temps, temps_oriflag = fill_in(zip(locd,temp),full_year_times) #temp_times, temp_vals = zip(*full_temps) #temps = (np.array(temp_vals), np.array(temps_oriflag)) qdump(([info,locd,utcd],\ "All historic info from wunderground for facility "+facility+"_"+city+"_"+state+"_"+str(year)),facility+'.pkl',loc="") qdump( (temptemp, "Queried temperatures in farenheit from wunderground for facility " + facility + "_" + city + "_" + state + "_" + str(year)), "temps_" + facility + ".pkl", loc="") return [temptemp, llaves, usedkey, conta]
state = "IL" year = 2012 usedkey = llaves[0] conta = 1 toadd = [] for k in facilities: facility = k city = facilities[k] country = "US" state = "IL" year = 2011 temps, llaves, usedkey, conta = query_temps(facility, city, country, state, year, llaves, usedkey, conta) toadd.append(temps) qdump((toadd, "Temporal con las 5 prisiones"), "toadd.pkl", loc="") #data, desc = qload("state_b_records_2011_with_temps.pkl",loc = "") #print desc #for k in range(len(toadd)-1): # data[k+1]['temps'] = toadd[k] # print "Changed ------", k+1 #qdump((data,desc),"state_b_records_2011_with_temps.pkl",loc="") #print "Len full_year_times-----", len(full_year_times) #print "len of times in json----", len(locd) #print "len info ---------------", len(info) #print "len tempi in info-------", len(info[u'tempi']) #print "temps ----------------------------" #print info[u'tempi'] #print "times ----------------------------" #print full_year_times