def gather_city_data(city_csv, db_name, output_name):
    '''
    This function creates a csv that saves the total acreage, the number of
    resorts, and the total drive time for all resorts within a 3.25 hour
    drive of each city in city_csv.
    '''
    conn = lite.connect(db_name)
    c = conn.cursor()
    conn.create_function('time_between', 4, compute_time_between)

    cities = pd.read_csv(city_csv)
    rows = []
    for i, city in cities.iterrows():
        lat, lon = get_lat_lon(city['city'] + " " + city['state'])
        params = (lon, lat, lon, lat)
        query = ("SELECT SUM(area), SUM(time_between(lon, lat, ?, ?)), COUNT(*) "
                 "FROM main WHERE time_between(lon, lat, ?, ?) < 3.25")
        area, time, count = c.execute(query, params).fetchone()
        rows.append((city['city'], city['state'], area, time, count, lat, lon))

    conn.commit()
    conn.close()

    labels = ['city', 'state', 'area', 'time', 'number', 'lat', 'lon']
    csv_writer(labels, rows, output_name)
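# A minimal usage sketch for gather_city_data. The file names below are
# hypothetical; the city csv is assumed to have 'city' and 'state' columns,
# and the sqlite database is assumed to contain a 'main' table with 'lat',
# 'lon', and 'area' columns, matching the query above.
#
#   gather_city_data('cities.csv', 'resorts.db', 'city_summary.csv')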
def get_current(db_name, output_file):
    '''
    This function gathers the current weather conditions for all of the
    resorts in our database and writes a csv containing all of the data.
    '''
    zip_url = "http://api.openweathermap.org/data/2.5/weather?zip={},us"

    resort_loc = query_resorts(db_name)
    weather_data = []
    for ID, z_code in resort_loc:
        sleep(1.1)  # Slow down requests to prevent API lockout
        req_addr = zip_url.format(z_code) + "&units=imperial" + API_KEY

        # Retry once if the connection to the weather API drops
        try:
            r = requests.get(req_addr)
        except requests.exceptions.ConnectionError:
            sleep(5)
            r = requests.get(req_addr)

        weather = json.loads(r.text)

        wthr = weather['weather'][0]['main']
        dscr = weather['weather'][0]['description']
        temp = weather['main']['temp']
        pres = weather['main']['pressure']
        humd = weather['main']['humidity']
        spd = weather['wind']['speed']

        # Precipitation is reported as a dict like {'3h': 0.25};
        # default to 0 when it is absent
        rain = weather.get('rain', {}).get('3h', 0)
        snow = weather.get('snow', {}).get('3h', 0)

        data = [ID, wthr, dscr, temp, pres, humd, spd, rain, snow]
        weather_data.append(data)

    labels = ['ID', 'wthr', 'dscr', 'temp', 'pres', 'humd', 'spd',
              'rain', 'snow']
    csv_writer(labels, weather_data, output_file)
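# A minimal usage sketch for get_current. The output file name is
# hypothetical; query_resorts is assumed to return (ID, zip_code) pairs and
# API_KEY is assumed to be a module-level query-string fragment such as
# '&appid=...'.
#
#   get_current('resorts.db', 'current_weather.csv')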
def get_forecast(db_name, output_file):
    '''
    This function gathers the seven day forecast for all of the resorts in
    our database and writes a csv containing all of the data.
    '''
    zip_url = "http://api.openweathermap.org/data/2.5/forecast/daily?zip={},us"
    NUM_DAYS = 7

    # Build column headers for every day and field of interest
    labels = ['ID']
    for day_num in range(NUM_DAYS):
        fields = ("wthr_{d} dscr_{d} avg_day_{d} avg_night_{d} "
                  "t_min_{d} t_max_{d} pres_{d} humd_{d} w_spd_{d} "
                  "rain_{d} snow_{d}").format(d=day_num + 1)
        labels += fields.split()

    resort_locs = query_resorts(db_name)
    weather_data = []
    for ID, z_code in resort_locs:
        sleep(1.1)  # Slow down requests to prevent API lockout
        req_addr = zip_url.format(z_code) + "&units=imperial" + API_KEY

        # Retry once if the connection to the weather API drops
        try:
            r = requests.get(req_addr)
        except requests.exceptions.ConnectionError:
            sleep(5)
            r = requests.get(req_addr)

        weather = json.loads(r.text)

        fcast_data = [ID]
        # Cap at NUM_DAYS so each row matches the header columns
        for day_num in range(min(NUM_DAYS, len(weather['list']))):
            link = weather['list'][day_num]
            wthr = link['weather'][0]['main']
            dscr = link['weather'][0]['description']
            t_day = link['temp']['day']
            t_night = link['temp']['night']
            t_min = link['temp']['min']
            t_max = link['temp']['max']
            pres = link['pressure']
            humd = link['humidity']
            w_spd = link['speed']
            # Daily accumulations are omitted when there is no precipitation
            rain = link.get('rain', 0)
            snow = link.get('snow', 0)

            data = [wthr, dscr, t_day, t_night, t_min, t_max, pres, humd,
                    w_spd, rain, snow]
            fcast_data += data

        weather_data.append(fcast_data)

    csv_writer(labels, weather_data, output_file)
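# A minimal usage sketch for get_forecast, mirroring get_current above; the
# output file name is hypothetical. One row is written per resort, with
# eleven forecast columns per day for up to seven days.
#
#   get_forecast('resorts.db', 'forecast_weather.csv')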