def getAggregateForecast(allForecasts, allWeights, cities_location, sample_date):
    """Build the aggregate forecast for every city on one sample date.

    For each "City, State" key listed in the cities CSV at
    cities_location, computes the weighted aggregate forecast via
    getAggregateForecastByCityDay and returns the results keyed by city.
    """
    aggregates = {}
    for cityState in getForecasts.getCities(cities_location):
        aggregates[cityState] = getAggregateForecastByCityDay(
            allForecasts, allWeights, cityState, sample_date)
    return aggregates
def getAllForecasts(data_directory, cities_location):
    """Parse every downloaded forecast file into a per-service, per-city dict.

    Walks the files grouped by service under data_directory, matches each
    file name to a city via whichCity, and parses it with useService.

    Returns a dict of the form
    {service: {"City, State": parsed forecast, ...}, ...} covering the
    four known services.
    """
    file_dictionary = getFiles(data_directory)
    # sorted() works on both Python 2 and 3 (list.sort() on .keys() is
    # Python-2 only) and gives whichCity a deterministic search order.
    cities = sorted(getForecasts.getCities(cities_location).keys())
    # NOTE: the original also built and sorted a `services` list from
    # file_dictionary.keys(), but never used it — dead code, removed.
    allResults = {"AccuWeather": {}, "DarkSky": {}, "NWS": {}, "WUnderground": {}}
    for service in file_dictionary:
        for file_name in file_dictionary[service]:
            cityState = whichCity(cities, file_name)
            allResults[service][cityState] = useService(service, data_directory, file_name)
    return allResults
def getPosteriors():
    """Run the likelihood -> prior -> posterior pipeline for one sample.

    Pipeline:
      1. Likelihoods: extractInfo.runExtractInfo over the sampled forecast
         data, written to a per-sample CSV.
      2. Priors: getHistory.runGetHistory pulls recent DarkSky history for
         each city.
      3. Posteriors: computePosteriorArguments combines likelihood and
         prior per city; results are printed and written to CSV.

    Returns a dict mapping "City, State" -> posterior arguments.
    """
    date_time_string = "2017-07-22_11"  # Directory name: date/hour the forecast data was sampled.
    # Plain decimal 7 (the original `07` literal is a syntax error on Python 3).
    date_of_forecast = datetime.date(2017, 7, 22)  # Date the forecasts were sampled.
    date_to_predict = datetime.date(2017, 7, 23)   # Date being predicted.
    past_days = 5  # Number of prior days of history used for the prior.

    # --- Likelihood ---
    # Raw strings: these Windows paths contain backslashes that must not be
    # treated as escape sequences (non-raw "C:\Users..." is a syntax error
    # on Python 3 because of the \U escape). Values are byte-identical.
    weight_directory = r"G:\School\VA Tech Courses\ZhangLab\DataIncubator\ForecastAdvisor\LastMonth"
    cities_location = r'G:\School\VA Tech Courses\ZhangLab\DataIncubator\LatLongCities.csv'
    data_directory = os.path.join(r"C:\Users\jsporter\Downloads\Data", date_time_string)
    output_directory = os.path.join(r"C:\Users\jsporter\Downloads\Temp", date_time_string)
    if not os.path.isdir(os.path.dirname(output_directory)):
        os.makedirs(os.path.dirname(output_directory))
    likelihood_location = os.path.join(output_directory, "Likelihoods_" + date_time_string + ".csv")
    all_forecasts_location = os.path.join(output_directory, "All_Forecasts_" + date_time_string)
    likelihoods = extractInfo.runExtractInfo(weight_directory, data_directory, cities_location,
                                             likelihood_location, all_forecasts_location,
                                             date_of_forecast)

    # --- Prior ---
    api_key_location = r'G:\School\VA Tech Courses\ZhangLab\DataIncubator\Weather_API_Keys.csv'
    prior_location = os.path.join(output_directory, "Priors_" + date_time_string + ".csv")
    history_directory = os.path.join(output_directory, "DarkSky_PriorData_" + date_time_string)
    priors = getHistory.runGetHistory(api_key_location, cities_location, prior_location,
                                      history_directory, date_to_predict, past_days=past_days)

    # --- Posterior ---
    posterior_file = os.path.join(output_directory, "Posteriors_" + date_time_string + '.csv')
    cityDict = getForecasts.getCities(cities_location)
    posteriors = {}
    # sorted() instead of list.sort() on .keys(): portable across Python 2/3.
    for cityState in sorted(cityDict.keys()):
        posteriors[cityState] = computePosteriorArguments(
            likelihoods[cityState][date_to_predict], priors[cityState])
    pprint(posteriors)
    writePosteriorArguments(posteriors, posterior_file, date_to_predict)
    return posteriors
def getPosteriors():
    """Compute per-city posteriors using the default module entry points.

    Uses extractInfo.main() for likelihoods and getHistory.parseArgs() for
    priors (both read their own hard-coded configuration), combines them
    per city for 2017-07-21, prints and writes the result.

    NOTE(review): a function with this same name appears earlier in this
    source; if both live in one module, the later definition shadows the
    earlier — confirm which one is intended to win.

    Returns a dict mapping "City, State" -> posterior arguments.
    """
    # Raw strings: non-raw '\U...' in the Windows paths is a syntax error on
    # Python 3; raw literals keep the exact same characters.
    cityDict = getForecasts.getCities(r'G:\School\VA Tech Courses\ZhangLab\DataIncubator\LatLongCities.csv')
    outfile = r'C:\Users\jsporter\Downloads\Temp\Posteriors_2017_07_21.csv'
    # Plain decimal 7 (the original `07` literal is a syntax error on Python 3).
    date = datetime.date(2017, 7, 21)
    likelihoods = extractInfo.main()
    priors = getHistory.parseArgs()
    posteriors = {}
    # sorted() instead of list.sort() on .keys(): portable across Python 2/3.
    for cityState in sorted(cityDict.keys()):
        posteriors[cityState] = computePosteriorArguments(
            likelihoods[cityState][date], priors[cityState])
    pprint(posteriors)
    writePosteriorArguments(posteriors, outfile, date)
    return posteriors
def parseArgs():
    """Fetch (or reload) DarkSky history per city and derive prior arguments.

    When booleanDarkSkyHistory is True, downloads past_days of DarkSky
    history for each city and pickles the collected histories; otherwise
    reloads the previously pickled histories. Unless thisDay is set,
    computes prior arguments per city and appends them to the priors CSV.

    Returns a dict mapping "City, State" -> prior arguments.
    """
    # Raw strings so backslashes in the Windows paths are not escapes.
    DarkSkyKey = getForecasts.getAPIKeys(r'G:\Weather\Weather_API_Keys.csv')["DarkSky"]
    cityDict = getForecasts.getCities(r'G:\Cities\LatLongCities.csv')
    outfile = r'G:\Temp\Priors_2017_07_21.csv'
    # BUGFIX: original read 'G:Temp\...' (drive-relative path, no backslash
    # after the drive letter) — almost certainly a typo, since outfile above
    # lives in 'G:\Temp'.
    priorDataLocation = r'G:\Temp\DarkSky_PriorData_2017_07_21'
    allHistoriesFile = os.path.join(priorDataLocation, "allHistories.pickle")
    booleanDarkSkyHistory = False  # True: hit the DarkSky API; False: reload pickled histories.
    thisDay = False
    # Plain decimal 7 (the original `07` literal is a syntax error on Python 3).
    date = datetime.date(2017, 7, 21)
    past_days = 20

    priorWriter = setupPriorWriter(outfile)
    allPriorArguments = {}
    allHistories = {}
    if not booleanDarkSkyHistory:
        # with-statement guarantees the pickle file handle is closed
        # (the original passed a bare open() to pickle.load and leaked it).
        with open(allHistoriesFile, 'rb') as handle:
            allHistories = pickle.load(handle)
    # sorted() instead of list.sort() on .keys(): portable across Python 2/3.
    for cityState in sorted(cityDict.keys()):
        if booleanDarkSkyHistory:
            latlon = cityDict[cityState]
            history = getHistory(DarkSkyKey, latlon[0], latlon[1], date, past_days,
                                 priorDataLocation, thisDay=thisDay)
            allHistories[cityState] = history
        else:
            history = allHistories[cityState]
        if not thisDay:
            priorArguments = getPriorArguments(history)
            allPriorArguments[cityState] = priorArguments
            writePriorArguments(priorArguments, cityState, date, priorWriter)
    if booleanDarkSkyHistory:
        with open(allHistoriesFile, 'wb') as handle:
            pickle.dump(allHistories, handle)
    pprint(allHistories)
    pprint(allPriorArguments)
    return allPriorArguments
def getAllWeights(cities_location, weight_directory):
    """Load the forecast-service weights for every city in the cities file.

    Returns a dict mapping each "City, State" key from cities_location to
    the weights loaded for it from weight_directory by getWeights.
    """
    allWeights = {}
    for cityState in getForecasts.getCities(cities_location).keys():
        allWeights[cityState] = getWeights(cityState, weight_directory)
    return allWeights