### Establish our MySQL Connection (for logging, etc.)
engine, connection, metadata, mysql_table_name, mysql_log_name = scraperfunctions.create_mysql_engine(config)

########### Download actions
if download_desktop == 1:
    try:
        ### Initiate our virtual display
        print("Initiating virtual display")
        display = Display(visible=0, size=(1920, 1080))
        display.start()

        ### Let's start our browser
        browser = scraperfunctions.create_browser()

        ### Let's load the page
        scraperfunctions.load_homepage(browser, pubshort, puburl)

        ### See if the MV list requires extra actions
        if puburl_mv_extraactions is not None:
            ### Actions for acquiring MV List
            pass

        ### Let's first store the source code
        html_code = browser.page_source
        write_out_file = scraperfunctions.write_out_file(
            "%s" % homepages_dir,
            "%s_%s.html" % (pubshort, curr_time.strftime("%Y%m%d%H%M")),
            html_code)

        ### Save a screenshot
        scraperfunctions.take_screenshot(browser, screenshots_dir, pubshort,
                                         curr_time.strftime("%Y%m%d%H%M"))
        print("Screenshot taken")

        ### See if the MV list is in a separate URL
        if puburl_mv is not None:
            scraperfunctions.load_homepage(browser, pubshort, "http://www.denverpost.com/popular#pop-day")
            html_code = browser.page_source
            write_out_file = scraperfunctions.write_out_file(
                "%s" % homepages_dir,
                "%s_poplist_%s.html" % (pubshort, curr_time.strftime("%Y%m%d%H%M")),
                html_code)
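### -----------------------------------------------------------------------
### Context sketch (assumption, not part of the scraper code above): Display
### comes from pyvirtualdisplay, and scraperfunctions.create_browser() is
### presumably a Selenium webdriver wrapper, which is why browser.page_source
### works. The try block above is not closed in this excerpt; a download
### block like this is typically paired with cleanup along the lines below.
### The example.com URL and webdriver.Firefox() are stand-ins, not the
### project's actual configuration.
from pyvirtualdisplay import Display
from selenium import webdriver

display = Display(visible=0, size=(1920, 1080))
display.start()
browser = webdriver.Firefox()           # stand-in for scraperfunctions.create_browser()
try:
    browser.get("http://example.com")   # stand-in for scraperfunctions.load_homepage(...)
    html_code = browser.page_source     # same source-grab call the excerpt uses
except Exception as exc:
    print("Download failed: %s" % exc)  # the real script may log failures to MySQL instead
finally:
    browser.quit()                      # release the webdriver session
    display.stop()                      # tear down the virtual display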