def __init__(self, data_queue):
    """Set up the worker: loggers, SMB connection settings, and folders.

    Args:
        data_queue: queue of work items shared with the producing side.
    """
    self.data_queue = data_queue
    self.logger = logger_setup(__name__)
    self.control_logger = logger_setup('ControlLog.log')
    # Read the SMB connection settings.
    # BUG FIX: the original did `config.sections = []`, which replaced
    # ConfigParser.sections() (a method) with a plain list -- removed.
    config = configparser.ConfigParser()
    if not config.read('smb.ini'):
        # read() returns the list of files successfully parsed; an empty
        # list means smb.ini was missing/unreadable and the ['SMB'] lookups
        # below will raise KeyError.
        self.logger.error('smb.ini could not be read; SMB settings missing')
    self.SMBW = SMB_Worker(config['SMB']['USER'],
                           config['SMB']['PASS'],
                           config['SMB']['CLIENT_MACHINE'],
                           config['SMB']['SERVER'])
    # Fixed working folders -- TODO(review): consider moving these paths
    # into smb.ini as well.
    self.OSS = OS_Suit('C:/OUTPUT_FOLDER', 'C:/MDRtoGKN', 'C:/TEMP_FOLDER')
    self.file_counter = 0
def __init__(self, config):
    """Pull MDR endpoint/credentials from *config* and start Firefox.

    Args:
        config: parsed configuration with 'URI' and 'MDR' sections.
    """
    self.logger = logger_setup(__name__)
    # Target endpoint and login credentials for the MDR web UI.
    mdr = config['MDR']
    self.uri = config['URI']['URI']
    self.user = mdr['USER']
    self.password = mdr['PASSWORD']
    # Hard-coded browser location; driver stays open for the object's life.
    firefox = FirefoxBinary(r'c:\Program Files\Mozilla Firefox\firefox.exe')
    self.driver = webdriver.Firefox(firefox_binary=firefox)
def __init__(self, output_folder, input_folder, temp_folder):
    """Remember the three working folders and create the managed ones.

    Args:
        output_folder: folder results are written to (created if absent).
        input_folder: folder work is consumed from (assumed to exist).
        temp_folder: scratch folder (created if absent).
    """
    self.logger = logger_setup(__name__)
    self.output_folder = output_folder
    self.temp_folder = temp_folder
    self.input_folder = input_folder
    # BUG FIX: the original wrapped both makedirs calls in a single try, so
    # any failure on the first (including it merely already existing) skipped
    # the second, and a pre-existing folder was logged as an error. Create
    # each independently and treat "already exists" as success.
    for folder in (self.output_folder, self.temp_folder):
        try:
            os.makedirs(folder, exist_ok=True)
        except OSError as exc:
            self.logger.error(
                'initialize global folders error : {}'.format(exc))
def __init__(self, smb_user, user_pass, client_machine_name, server_name):
    """Open an SMB session to *server_name* on port 445 (direct TCP, NTLMv1).

    Args:
        smb_user: SMB account name.
        user_pass: SMB account password.
        client_machine_name: NetBIOS name reported for this client.
        server_name: host to connect to (also used as the NetBIOS server name).
    """
    self.conn = SMBConnection(smb_user, user_pass, client_machine_name,
                              server_name, domain='zkp28',
                              use_ntlm_v2=False, is_direct_tcp=True)
    self.logger = logger_setup(__name__)
    try:
        self.conn.connect(server_name, 445)
        self.logger.info('Server {} port {} connected'.format(
            server_name,
            445,
        ))
    except Exception as exc:
        # BUG FIX: the original printed the class `Exception` itself
        # ('{}'.format(Exception)) instead of the caught instance.
        print('connException: {}\n'.format(exc))
        self.logger.error(
            'An error occured . connException - {}'.format(exc))
def __init__(self, csv_filename):
    """Create this module's logger and remember the target CSV path."""
    self.logger = logger_setup(__name__)
    self.csv_filename = csv_filename
"""main.py -- scrape the EB COVID-19 case report page and merge the cases.

Module-level names defined here (logger, soup, covid_data, ...) are used by
the rest of the script, which continues beyond this chunk (db and plot are
imported for that later part).
"""
import scrape
import analyze as az
from plot import plot
import db as db
from debug import print_cases
from log import logger_setup
from all_covid_data import all_covid_data

logger = logger_setup("main.py")
logger.info("*************** begin script ***************")

# create soup object from html
url = 'https://eblanding.com/covid-19-case-report-summary/'
soup = scrape.get_soup(url, print_flag=False)
logger.info('create soup object from html')

# parse all <pre> html tags
covid_data_pre = scrape.parse_html('pre', soup)
logger.info("parse all <pre> html tags")

# parse all <p> html tags -- some reports (October 19th & 23rd) were
# published as <p> instead of <pre>, so both tag types must be parsed.
covid_data_p = scrape.parse_html('p', soup)
logger.info("parse all <p> html tags")

# merge <pre> and <p> lists into one chronological case list
covid_data = az.merge_day_list(covid_data_p, covid_data_pre)
logger.info("merge <pre> and <p> lists")

# debug aid, normally disabled:
# print_cases(covid_data,3)
"""Scraping helpers: download the EB COVID report page and parse its tags."""
import requests
from bs4 import BeautifulSoup as bs
from covid_case import covid_case
from log import logger_setup

logger = logger_setup("get_web_data.py")


def get_soup(url, print_flag=False):
    """Fetch *url* and return the page parsed as a BeautifulSoup object.

    Args:
        url: address of the page to download.
        print_flag: when True, pretty-print the page's <article> element to
            stdout as a debugging aid.

    Returns:
        A BeautifulSoup object built with the 'html.parser' backend.
    """
    page = requests.get(url)
    logger.info(f"HTTP status is: {page.status_code}")
    soup = bs(page.content, 'html.parser')
    if print_flag:
        article = soup.find('article')
        print(article.prettify())
    logger.info("soup object successfully returned")
    return soup


def parse_html(tag, soup):
    """
    Returns a list of covid_case classes, one for each covid case at EB.
    """
    # NOTE(review): this function's body continues beyond this chunk of the
    # file; only the setup is visible here.
    covid_data = []
    # The case reports live under <article><div>...; collect every *tag*
    # element within that container.
    tag_list = soup.find('article').find('div').find_all(tag)
"""Plotting for the EB COVID data: figure layout and axis helpers."""
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib import gridspec
import math
import datetime as dt
from log import logger_setup

logger = logger_setup("plot_data.py")


def set_y_tick_range(y_data, interval):
    """
    Define a range (list) of points for the y-axis ticks.

    Rounds max(y_data) up to the next multiple of *interval* so the top
    tick sits at or above the largest data point, then returns ticks from
    0 to one interval past that limit.
    """
    upper_y_limit = math.ceil(max(y_data) / interval) * interval
    y_ticks = range(0, upper_y_limit + interval, interval)
    return y_ticks


def plot(dates, cases_per_day, N_day_avg, cases_per_day_avg, running_totals,
         top_N_facilities, count_per_top_N_facility, top_N_depts,
         count_per_top_N_depts, dates_corr, running_totals_corr, sir_data,
         sir_params, dates_back, running_tot_back):
    # NOTE(review): this function's body continues beyond this chunk of the
    # file; only the figure setup is visible here.
    fig = plt.figure(figsize=(12, 9))
    # 3-row x 2-column grid, left column twice as wide as the right.
    spec = gridspec.GridSpec(ncols=2, nrows=3, width_ratios=[2, 1])
    today = dt.datetime.today()
    # Title reflects the latest cumulative total (last running_totals entry).
    fig_title = "Number of COVID Cases at Electric Boat as of "\
        + today.strftime("%B %d, %Y") + ": " + str(running_totals[-1])