def main():
    """Entry point: fetch emails from the provider selected on the command line.

    Connects to the chosen mail backend, selects its folder, collects either
    all emails or a single message by UID, then disconnects.
    """
    args = Parser().initilaize_arg_parser()
    config = Config().collect_config()
    # Dispatch table: provider name -> (connection class, folder to select,
    # name of the method used when a specific UID was requested).
    providers = {
        'gmail': (Gmail, 'INBOX', 'collect_emails'),
        'outlook': (Outlook, 'Sent Directly To Me', 'collect_emails_by_uid'),
    }
    try:
        if args.email in providers:
            mail_cls, folder, uid_method = providers[args.email]
            connection = mail_cls()
            connection.connect()
            connection.login()
            connection.select_folder(folder)
            if args.uid is None:
                connection.collect_emails()
            else:
                # Gmail and Outlook expose differently named UID fetchers;
                # resolve the right one from the dispatch table.
                getattr(connection, uid_method)(args.uid)
            connection.disconnect()
    except KeyboardInterrupt:
        print('')
        print('Exiting monty mail.')
    except Exception as err:
        print(err)
def main():
    """Entry point: run the statistics routine named on the command line.

    Supports 'iqr' (interquartile range) and 'sd' (standard deviation);
    anything else prints a usage hint.
    """
    args = Parser().initilaize_arg_parser()
    config = Config().collect_config()
    # Map each recognized function name to the callable that performs it.
    dispatch = {
        'iqr': lambda: IQR().calc_iqr(),
        'sd': lambda: SD().calc_sd(),
    }
    action = dispatch.get(args.function)
    if action is not None:
        action()
    else:
        print(
            "Please provide a valid function. -h to see list of available options."
        )
def scrap_station(weather_station_url):
    """Scrape a weather station's daily history tables into a CSV file.

    Iterates the URLs produced by ``date_url_generator``, extracts the
    history table from each page, converts the rows to the configured unit
    system, and writes them to ``<station>.csv`` (semicolon-delimited),
    where the station name is the last path segment of the URL.

    Args:
        weather_station_url: Station page URL; its last '/'-segment is used
            as the output file name stem.

    Raises:
        Exception: if ``UNIT_SYSTEM`` is neither "metric" nor "imperial",
            or if the history table cannot be located on a page.
    """
    url_gen = date_url_generator(weather_station_url)
    session = requests.Session()
    station_name = weather_station_url.split('/')[-1]
    file_name = f'{station_name}.csv'
    with open(file_name, 'w', newline='') as csvfile:
        # Fixed column order shared by both unit systems.
        # (Removed a dead `fieldnames = []` assignment that was immediately
        # overwritten in the original.)
        fieldnames = ['Date', 'Time', 'Temperature', 'Dew_Point', 'Humidity',
                      'Wind', 'Speed', 'Gust', 'Pressure', 'Precip_Rate',
                      'Precip_Accum', 'UV', 'Solar']
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames, delimiter=';')
        # The header row encodes the units, so it depends on UNIT_SYSTEM.
        if UNIT_SYSTEM == "metric":
            # 12:04 AM 24.4 C 18.3 C 69 % SW 0.0 km/h 0.0 km/h 1,013.88 hPa 0.00 mm 0.00 mm 0 0 w/m²
            writer.writerow({'Date': 'Date', 'Time': 'Time',
                             'Temperature': 'Temperature_C',
                             'Dew_Point': 'Dew_Point_C',
                             'Humidity': 'Humidity_%', 'Wind': 'Wind',
                             'Speed': 'Speed_kmh', 'Gust': 'Gust_kmh',
                             'Pressure': 'Pressure_hPa',
                             'Precip_Rate': 'Precip_Rate_mm',
                             'Precip_Accum': 'Precip_Accum_mm',
                             'UV': 'UV', 'Solar': 'Solar_w/m2'})
        elif UNIT_SYSTEM == "imperial":
            # 12:04 AM 75.9 F 65.0 F 69 % SW 0.0 mph 0.0 mph 29.94 in 0.00 in 0.00 in 0 0 w/m²
            writer.writerow({'Date': 'Date', 'Time': 'Time',
                             'Temperature': 'Temperature_F',
                             'Dew_Point': 'Dew_Point_F',
                             'Humidity': 'Humidity_%', 'Wind': 'Wind',
                             'Speed': 'Speed_mph', 'Gust': 'Gust_mph',
                             'Pressure': 'Pressure_in',
                             'Precip_Rate': 'Precip_Rate_in',
                             'Precip_Accum': 'Precip_Accum_in',
                             'UV': 'UV', 'Solar': 'Solar_w/m2'})
        else:
            raise Exception("please set 'unit_system' to either \"metric\" or \"imperial\"! \n")
        for date_string, url in url_gen:
            print(f'getting 🌞 🌨 ⛈ from {url}')
            html_string = session.get(url)
            doc = lh.fromstring(html_string.content)
            history_table = doc.xpath('//*[@id="inner-content"]/section[1]/div[1]/div/div/div/div/lib-history/div[2]/lib-history-table/div/div/div/table/tbody')
            if not history_table:
                raise Exception('Table not found, please update xpath!')
            # parse html table rows
            data_rows = Parser.parse_html_table(date_string, history_table)
            # convert to the configured unit system
            converter = ConvertToSystem(UNIT_SYSTEM)
            data_to_write = converter.clean_and_convert(data_rows)
            print(f'Saving {len(data_to_write)} rows')
            writer.writerows(data_to_write)
def __init__(self):
    """Initialize the base Parser and open the redis log file for writing."""
    Parser.__init__(self)
    # NOTE(review): mode "w" truncates any existing log on every construction;
    # the handle is kept open for the lifetime of the instance.
    log_path = "./log/redis.log"
    self.filename = log_path
    self.logFile = open(log_path, "w")
def __init__(self):
    """Initialize via the Parser base class; this subclass adds no state of its own."""
    Parser.__init__(self)
def scrap_station(weather_station_url):
    """Scrape a station's history pages between START_DATE and END_DATE.

    Rows are appended (mode 'a+') to ``<station>.csv``, where the station
    name is the last path segment of the URL. If FIND_FIRST_DATE is set,
    START_DATE is first advanced to the earliest date that has data.

    Args:
        weather_station_url: Station page URL; its last '/'-segment is used
            as the output file name stem.

    Raises:
        Exception: if ``UNIT_SYSTEM`` is neither "metric" nor "imperial".
    """
    session = requests.Session()
    timeout = 5  # seconds per HTTP request
    # Only START_DATE is reassigned here, so it is the only name that needs a
    # `global` declaration. (The original also declared END_DATE, UNIT_SYSTEM
    # and FIND_FIRST_DATE global, but those are read-only in this function,
    # making the declarations no-ops.)
    global START_DATE

    if FIND_FIRST_DATE:
        # Fast-forward START_DATE to the first date with any data.
        first_date_with_data = Utils.find_first_data_entry(
            weather_station_url=weather_station_url, start_date=START_DATE)
        # Utils.find_first_data_entry signals "not found" with -1; keep the
        # configured START_DATE in that case.
        if (first_date_with_data != -1):
            START_DATE = first_date_with_data

    url_gen = Utils.date_url_generator(weather_station_url, START_DATE,
                                       END_DATE)
    station_name = weather_station_url.split('/')[-1]
    file_name = f'{station_name}.csv'
    with open(file_name, 'a+', newline='') as csvfile:
        fieldnames = [
            'Date', 'Time', 'Temperature', 'Dew_Point', 'Humidity', 'Wind',
            'Speed', 'Gust', 'Pressure', 'Precip_Rate', 'Precip_Accum', 'UV',
            'Solar'
        ]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        # The header row encodes the units, so it depends on UNIT_SYSTEM.
        if UNIT_SYSTEM == "metric":
            # 12:04 AM 24.4 C 18.3 C 69 % SW 0.0 km/h 0.0 km/h 1,013.88 hPa 0.00 mm 0.00 mm 0 0 w/m²
            writer.writerow({
                'Date': 'Date',
                'Time': 'Time',
                'Temperature': 'Temperature_C',
                'Dew_Point': 'Dew_Point_C',
                'Humidity': 'Humidity_%',
                'Wind': 'Wind',
                'Speed': 'Speed_kmh',
                'Gust': 'Gust_kmh',
                'Pressure': 'Pressure_hPa',
                'Precip_Rate': 'Precip_Rate_mm',
                'Precip_Accum': 'Precip_Accum_mm',
                'UV': 'UV',
                'Solar': 'Solar_w/m2'
            })
        elif UNIT_SYSTEM == "imperial":
            # 12:04 AM 75.9 F 65.0 F 69 % SW 0.0 mph 0.0 mph 29.94 in 0.00 in 0.00 in 0 0 w/m²
            writer.writerow({
                'Date': 'Date',
                'Time': 'Time',
                'Temperature': 'Temperature_F',
                'Dew_Point': 'Dew_Point_F',
                'Humidity': 'Humidity_%',
                'Wind': 'Wind',
                'Speed': 'Speed_mph',
                'Gust': 'Gust_mph',
                'Pressure': 'Pressure_in',
                'Precip_Rate': 'Precip_Rate_in',
                'Precip_Accum': 'Precip_Accum_in',
                'UV': 'UV',
                'Solar': 'Solar_w/m2'
            })
        else:
            raise Exception(
                "please set 'unit_system' to either \"metric\" or \"imperial\"! \n"
            )
        for date_string, url in url_gen:
            try:
                print(f'Scraping data from {url}')
                history_table = False
                # Retry with a fresh session until the table shows up.
                # NOTE(review): this retry loop is unbounded — it spins
                # forever if the page layout changes; consider capping the
                # number of attempts.
                while not history_table:
                    html_string = session.get(url, timeout=timeout)
                    doc = lh.fromstring(html_string.content)
                    history_table = doc.xpath(
                        '//*[@id="main-page-content"]/div/div/div/lib-history/div[2]/lib-history-table/div/div/div/table/tbody'
                    )
                    if not history_table:
                        print("refreshing session")
                        session = requests.Session()
                # parse html table rows
                data_rows = Parser.parse_html_table(date_string, history_table)
                # convert to the configured unit system
                converter = ConvertToSystem(UNIT_SYSTEM)
                data_to_write = converter.clean_and_convert(data_rows)
                print(f'Saving {len(data_to_write)} rows')
                writer.writerows(data_to_write)
            except Exception as e:
                # Best-effort scrape: report the failure for this date and
                # continue with the next one.
                print(e)