def __init__(self, driver=None, suppress_driver=False):
    """Initialize the bot, attaching a Selenium driver unless suppressed.

    Args:
        driver: an existing webdriver to reuse; when None a new one is built
            via ``CommonFuncs.get_driver()``.
        suppress_driver: when True, skip driver setup entirely — note that
            ``self.driver`` is then never assigned.
    """
    if not suppress_driver:  # don't build a driver if not requested
        # Reuse the caller's driver when given; otherwise build a fresh one.
        # (Original had a redundant if/else plus a dead trailing `pass`.)
        self.driver = driver if driver is not None else CommonFuncs.get_driver()
def run(self):
    """Attempt a login with the account's site bot and record the outcome.

    Resolves the bot class named by ``self.jobsiteaccount.site_bot_name``,
    opens a short-lived webdriver, tries to log in with the account, and
    sets the error flag to True on failure / False on success.
    """
    self.isRunning()
    self.set_error(False)  # reset error flag before the attempt

    # NOTE(review): eval() on a string pulled from stored account data is an
    # injection risk — prefer an explicit whitelist dict of bot classes.
    bot_class_string = self.jobsiteaccount.site_bot_name  # create instance of bot
    Bot_Class = eval(bot_class_string)

    with CommonFuncs.get_driver(headless=WEB_DRIVERS_HEADLESS,
                                visible=WEB_DRIVERS_VISIBLE) as driver:
        bot_instance = Bot_Class(driver=driver)
        # Error flag mirrors the login result directly (the original reset
        # the flag a second, redundant time on the success branch).
        self.set_error(not bot_instance.login(self.jobsiteaccount))

    self.isFinished()
def run(self):
    """Main apply loop: log in, pull unprocessed job links, and apply to each.

    Runs until the bot is flagged as stopped or no new links remain.  When the
    unprocessed-link queue drains, kicks off the site's webcrawler spider and
    waits briefly for fresh results before looping again.
    """
    self.isRunning()
    self.set_error(False)  # reset error flag

    # NOTE(review): eval() on a stored class-name string is an injection
    # risk — prefer an explicit registry mapping bot names to classes.
    Bot_Class = eval(self.site_bot_name)
    site_name = self.site_bot_name.split('_Bot')[0]
    spider_name = '_' + site_name.lower() + '_' + 'webcrawler.py'
    cached_username = ''
    cached_password = ''
    logged_in = False

    # APPLY LOOP
    bot = CommonFuncs.get_bot(self.site_bot_name)
    new_links = ['']
    with CommonFuncs.get_driver(visible=WEB_DRIVERS_VISIBLE,
                                headless=WEB_DRIVERS_HEADLESS) as driver:
        bot_inst = Bot_Class(driver)
        while bot.is_running and len(new_links) > 0:
            if cached_username != bot.username or cached_password != bot.password:
                # Username or password changed (or first pass): attempt new login.
                cached_username = bot.username
                cached_password = bot.password
                logged_in = bot_inst.login(bot)
            if logged_in:
                # If logged in and the bot is running, pull the next link that
                # is not already recorded in the Job table.
                with CommonFuncs.get_db() as db:
                    try:
                        new_to_db = False
                        while not new_to_db:
                            unprocessed_job = db.query(UnprocessedJob).filter(
                                UnprocessedJob.bot_type == self.site_bot_name).all()
                            new_link = unprocessed_job[0].job
                            db.delete(unprocessed_job[0])
                            db.commit()
                            db_results = db.query(Job).filter(
                                Job.link_to_job == new_link).all()
                            if not db_results:  # link not yet applied to
                                new_to_db = True
                    except Exception:
                        # Queue empty (IndexError) or DB hiccup — best-effort:
                        # no link this pass.  (Was a bare `except:`, which also
                        # swallowed SystemExit/KeyboardInterrupt.)
                        new_link = None
                if new_link is not None:
                    CommonFuncs.log(self, 'attempting to apply to: ' + new_link)
                    new_job = bot_inst.apply(new_link)  # goto page and apply
                    # Only persist real Job instances; apply() may return False.
                    # (isinstance alone suffices — False is never a Job.)
                    if isinstance(new_job, Job):
                        with CommonFuncs.get_db() as db:  # save job object to db
                            try:
                                db.add(new_job)
                                db.commit()
                            except Exception as e:
                                print(e)
                else:
                    CommonFuncs.log('applier taking a timeout as it waits for more job links')
                    Jobbybot.run_bot_job_link_webcrawler(
                        spider_name=spider_name
                    )  # start the webcrawler for this bot
                    sleep_count = 5
                    for i in range(sleep_count):
                        # Wait for more results, bailing out early if the bot
                        # was stopped in the meantime.
                        if CommonFuncs.is_bot_running(self.site_bot_name):
                            sleep(1)
                        else:
                            break
            # Refresh bot state each pass so is_running/credential changes are
            # seen.  NOTE(review): nesting reconstructed from flattened source —
            # placed at loop level since the while condition depends on it.
            bot = CommonFuncs.get_bot(self.site_bot_name)
            sleep(0.1)
    self.isFinished()