def get_utility_rates(rates_url, address, lat, lon):
    """
    Return electricity rates in $/kWh for residential, commercial, and
    industrial buildings at a location.

    Inputs:
        rates_url: base API url (string).
        address: street address (string), or None to query by coordinates.
        lat, lon: latitude/longitude strings; used only when address is None.

    Returns: dict mapping "commercial", "industrial", and "residential"
        to the rate text scraped from the API response.
    """
    # Prefer the address form of the query; fall back to coordinates.
    if address is not None:
        query = rates_url + "&address=" + address
    else:
        query = rates_url + "&lat=" + lat + "&lon=" + lon
    request = utility.get_request(query)
    text = utility.read_request(request)
    soup = bs4.BeautifulSoup(text, "html5lib")
    # The response wraps each sector's rate in a tag named after the sector.
    d = {}
    for sector in ("commercial", "industrial", "residential"):
        d[sector] = soup.find(sector).text
    return d
def get_utility_rates(rates_url, address, lat, lon):
    """
    Given an address or latitude and longitude, return a dictionary with
    the electricity rate in $/kWh for residential, commercial, and
    industrial buildings.

    Inputs:
        rates_url: base API url (string).
        address: street address (string), or None to query by coordinates.
        lat, lon: latitude/longitude strings; used only when address is None.

    Returns: dict with keys "commercial", "industrial", "residential".
    """
    if address is not None:
        query = rates_url + "&address=" + address
    else:
        query = rates_url + "&lat=" + lat + "&lon=" + lon
    request = utility.get_request(query)
    text = utility.read_request(request)
    soup = bs4.BeautifulSoup(text, "html5lib")
    # Each sector's rate lives in a response tag named after the sector.
    return {sector: soup.find(sector).text
            for sector in ("commercial", "industrial", "residential")}
def visit_pv_pages(solar_url, address, system_size, lat, lon):
    """
    Query the solar/PV API and return annual and monthly AC output.

    Inputs:
        solar_url: base API url (string).
        address: address of building of interest (string), or None.
        system_size: PV capacity (kW). Number between 0.05 and 500000.
        lat, lon: latitude/longitude strings; used only when address is None.

    Returns: dict with key "ac_annual" plus integer month-index keys
        mapping to the monthly AC output text.
    """
    if address is not None:
        query = solar_url + system_size + "&address=" + address
    else:
        query = solar_url + system_size + "&lat=" + lat + "&lon=" + lon
    request = utility.get_request(query)
    text = utility.read_request(request)
    soup = bs4.BeautifulSoup(text, "html5lib")
    ac_monthly = soup.find_all("ac-monthly")
    d = {"ac_annual": soup.find("ac-annual").text}
    # NOTE(review): indexing starts at 1, so ac_monthly[0] is never stored.
    # Preserved from the original -- confirm whether the first <ac-monthly>
    # entry is a header/placeholder in the API response.
    for index in range(1, len(ac_monthly)):
        d[index] = ac_monthly[index].text
    return d
def visit_page(url):
    """
    Crawl the standby power page and create standby.csv with the data of
    interest.

    Each table row is keyed by the text of its <th> tag; the first <td>
    is a condition label, the next three numeric <td> values are converted
    from watts to kWh/month, and the fourth is kept verbatim.
    """
    request = utility.get_request(url)
    text = utility.read_request(request)
    soup = bs4.BeautifulSoup(text, "html5lib")
    d = {}
    for row in soup.find_all("tr"):
        # If a row carries several <th> tags, the last one's text wins
        # (matches the original behavior).
        for th in row.find_all("th"):
            electronic = th.text
        values = []
        for n, td in enumerate(row.find_all("td")):
            if n == 0:
                condition = td.text
            elif n < 4:
                # Assuming one sleeps 8 hours a night, this is the amount
                # that would be saved by an individual per month.
                values.append(round(float(td.text) * 8 * 30, 3))
            elif n == 4:
                values.append(td.text)
        try:
            d.setdefault(electronic, [])
            if values:
                d[electronic].append({condition: values})
        except UnboundLocalError:
            # No <th> seen yet, so there is no device name to file under.
            print("Not valid")
    # Create a csv file with the electricity improvements.
    # newline="" per the csv module docs (avoids blank lines on Windows).
    with open("standby.csv", "w", newline="") as standby:
        writer = csv.writer(standby)  # hoisted: was rebuilt on every row
        for electronic, entries in d.items():
            for entry in entries:
                for condition, vals in entry.items():
                    writer.writerow((electronic, condition,
                                     vals[0], vals[1], vals[2], vals[3]))
def visit_page(url):
    """
    Crawl the standby power page and write standby.csv with the data of
    interest.

    Row layout: the <th> text names the device ("electronic"), the first
    <td> is a condition label, <td> 1-3 are watt readings converted to
    kWh/month, and <td> 4 is stored verbatim.
    """
    request = utility.get_request(url)
    text = utility.read_request(request)
    soup = bs4.BeautifulSoup(text, "html5lib")
    d = {}
    for row in soup.find_all("tr"):
        # Several <th> tags in one row: keep the last one's text, as the
        # original code did.
        for th in row.find_all("th"):
            electronic = th.text
        readings = []
        for n, cell in enumerate(row.find_all("td")):
            if n == 0:
                condition = cell.text
            elif n < 4:
                # Assuming one sleeps 8 hours a night, this is the amount
                # that would be saved by an individual per month.
                readings.append(round(float(cell.text) * 8 * 30, 3))
            elif n == 4:
                readings.append(cell.text)
        try:
            d.setdefault(electronic, [])
            if readings:
                d[electronic].append({condition: readings})
        except UnboundLocalError:
            # A row appeared before any <th>: no device name available.
            print("Not valid")
    # Create a csv file with the electricity improvements.
    # newline="" as recommended by the csv module documentation.
    with open("standby.csv", "w", newline="") as standby:
        writer = csv.writer(standby)  # one writer for the whole file
        for device, entries in d.items():
            for entry in entries:
                for label, vals in entry.items():
                    writer.writerow((device, label,
                                     vals[0], vals[1], vals[2], vals[3]))
def addr_to_coords(address_url, address):
    """
    Converts an address to the latitude and longitude locations.

    Returns: [latitude, longitude] as strings scraped from the geocoding
        response.
    """
    response = utility.get_request(address_url + address)
    page = utility.read_request(response)
    soup = bs4.BeautifulSoup(page, "html5lib")
    return [soup.find("lat").text, soup.find("lng").text]
def visit_alt_fuel_pages(url_list):
    """
    Given a list of urls with different zip codes in Chicago, return a
    dictionary with data on alternative fuel stations in the format:
    {n: [station name, address, city, latitude, longitude,
         alternative fuel type,
         status (E for Open, P for planned, T for temporarily unavailable)]}

    Alternative fuel type options:
        BD   Biodiesel (B20 and above)
        CNG  Compressed Natural Gas
        E85  Ethanol (E85)
        ELEC Electric
        HY   Hydrogen
        LNG  Liquefied Natural Gas
        LPG  Liquefied Petroleum Gas (Propane)
    """
    d = {}
    n = 0
    for url in url_list:
        request = utility.get_request(url)
        text = utility.read_request(request)
        soup = bs4.BeautifulSoup(text, "html5lib")
        city = soup.find_all('city')
        ft = soup.find_all('fuel-type-code')
        lat = soup.find_all('latitude')
        lon = soup.find_all('longitude')
        sn = soup.find_all('station-name')
        sa = soup.find_all('street-address')
        sc = soup.find_all('status-code')
        # Build each record directly. The previous "|"-join-then-split
        # round-trip would corrupt any field that itself contains "|".
        for index in range(len(soup.find_all('fuel-station'))):
            d[n] = [str(sn[index].text), str(sa[index].text),
                    str(city[index].text), str(lat[index].text),
                    str(lon[index].text), str(ft[index].text),
                    str(sc[index].text)]
            n += 1
    return d
def visit_alt_fuel_pages(url_list):
    """
    Given a list of urls with different zip codes in Chicago, return a
    dictionary with data on alternative fuel stations in the format:
    {n: [station name, address, city, latitude, longitude,
         alternative fuel type,
         status (E for Open, P for planned, T for temporarily unavailable)]}

    Alternative fuel type options:
        BD   Biodiesel (B20 and above)
        CNG  Compressed Natural Gas
        E85  Ethanol (E85)
        ELEC Electric
        HY   Hydrogen
        LNG  Liquefied Natural Gas
        LPG  Liquefied Petroleum Gas (Propane)
    """
    stations = {}
    n = 0
    for url in url_list:
        request = utility.get_request(url)
        text = utility.read_request(request)
        soup = bs4.BeautifulSoup(text, "html5lib")
        fields = [soup.find_all(tag)
                  for tag in ('station-name', 'street-address', 'city',
                              'latitude', 'longitude', 'fuel-type-code',
                              'status-code')]
        # Assemble each station's record as a list directly; joining on
        # "|" and re-splitting (as before) breaks on fields containing "|".
        for index in range(len(soup.find_all('fuel-station'))):
            stations[n] = [str(column[index].text) for column in fields]
            n += 1
    return stations
def visit_pages(url, coords):
    """
    Given a url and coordinates, return the corresponding census block
    (a string of digits) the coordinates fall under.

    Inputs:
        url: base geocoder url (string).
        coords: [latitude, longitude] as strings, or None.

    Returns: the census block string, or None when coords is None.
    """
    # Guard added for consistency with the other visit_pages variants:
    # a failed address lookup passes coords=None through here.
    if coords is None:
        return None
    url = url + "latitude=" + coords[0] + "&longitude=" + coords[1]
    request = utility.get_request(url)
    text = utility.read_request(request)
    soup = bs4.BeautifulSoup(text, "html5lib")
    census_block = soup.find('block')
    # Blank out every non-digit in the tag's repr, then the first run of
    # digits left is the block code.
    stripped = re.sub('[^0-9]', ' ', str(census_block))
    return stripped.split()[0]
def visit_pages(url, coords):
    """
    Given a url and coordinates, return the corresponding census block
    the coordinates fall under. This will then be used to compare the
    census block data to the photovoltaic data.

    Inputs:
        url: base geocoder url (string).
        coords: [latitude, longitude] as strings, or None.

    Returns: the census block string, or None when coords is None.
    """
    if coords is None:  # PEP 8: identity test for None, not ==
        return None
    url = url + "latitude=" + coords[0] + "&longitude=" + coords[1]
    request = utility.get_request(url)
    text = utility.read_request(request)
    soup = bs4.BeautifulSoup(text, "html5lib")
    census_block = soup.find('block')
    # Replace non-digits with spaces; the first digit run is the block code.
    stripped = re.sub('[^0-9]', ' ', str(census_block))
    return stripped.split()[0]
def visit_pages(url, coords):
    """
    Given a url and coordinates, return the corresponding census block
    the coordinates fall under. This will then be used to compare the
    census block data to the photovoltaic data.

    Inputs:
        url: base geocoder url (string).
        coords: [latitude, longitude] as strings, or None.

    Returns: the census block string, or None when coords is None.
    """
    if coords is None:  # PEP 8: compare to None with is, not ==
        return None
    url = url + "latitude=" + coords[0] + "&longitude=" + coords[1]
    request = utility.get_request(url)
    text = utility.read_request(request)
    soup = bs4.BeautifulSoup(text, "html5lib")
    census_block = soup.find('block')
    # Strip everything but digits; the first remaining run of digits is
    # the census block code.
    code_list = re.sub('[^0-9]', ' ', str(census_block)).split()
    return code_list[0]