def __init__(self):
    """Initialize DB access, the streaming helper, and table configuration."""
    self.driver = Driver()
    # Batch size comes straight from the database driver.
    self.limit = self.driver.limit
    self.streamer = Streamer()
    # Filled in later by the authentication step.
    self.account = ''
    self.stream_table = 'stream'
    self.accounts_table = 'accounts'
def __init__(self, bl_number):
    """Open COSCO cargo tracking and search for *bl_number*.

    Populates ``self.main_info`` and ``self.secondary_info`` with the
    scraped table text split into lines.
    """
    self.driver = Driver()
    # Load a throwaway 404 page first so cookies can be attached to the
    # correct domain before the real tracking page is requested.
    dummy_url = '/error404'
    self.bl_number = bl_number  # BL format : 6194326880
    self.driver.get_page(COSCOURL + dummy_url)
    self.driver.add_cookies()
    self.driver.get_page(COSCOURL + '/cargoTracking')
    # Locates search box and inputs BL number
    search_element = self.driver.find_xpath_element(
        '//*[@id="wrap"]/input')
    search_element.send_keys(bl_number)
    # Locates search button and clicks
    search_xpath = '/html/body/div[1]/div[4]/div[1]/div/div[1]/div/div[2]/form/div/div[2]/button'
    search_button = self.driver.find_xpath_element(search_xpath)
    search_button.click()
    # Locates table containing majority of information; spaces are
    # stripped so each line splits cleanly.
    main_info_table = self.driver.find_class_element('ivu-c-detailPart')
    main_info_text = main_info_table.text
    raw_info = main_info_text.replace(' ', '')
    self.main_info = raw_info.split('\n')
    # Locates table containing secondary information
    secondary_xpath = '/html/body/div[1]/div[4]/div[1]/div/div[2]/div/div/div[2]/div[1]/div[2]/div'
    secondary_table = self.driver.find_xpath_element(secondary_xpath)
    secondary_info_text = secondary_table.text
    self.secondary_info = secondary_info_text.split('\n')
def pull(variable):
    """Return every row of table *variable* as a JSON response."""
    driver = Driver()
    driver.connect(mode='heroku')
    # row_to_json converts each row to a JSON object server-side.
    query = 'select row_to_json({0}) from {0}'.format(variable)
    rows = driver.pull(query)
    flattened = [cell for row in rows for cell in row]
    return jsonify({'content': flattened})
def update(self):
    """Refresh the Reddit session and reload subreddit names from the DB."""
    self.reddit = self.auth()
    driver = Driver()
    rows = driver.pull('select * from {0}'.format(self.subreddits_table))
    # Flatten the row tuples, then keep only the string entries.
    flat = (cell for row in rows for cell in row)
    self.subreddits_names = [name for name in flat if isinstance(name, str)]
def overwrite(variable):
    """Recreate table *variable* and repopulate it from the request JSON."""
    driver = Driver()
    driver.connect(mode='heroku')
    payload = request.get_json(force=True)
    payload['table'] = variable
    # Render both SQL statements from Jinja templates, then run them in
    # order: drop/recreate first, bulk insert second.
    restart_query = render_template('restart.sql.jinja2', **payload)
    insert_query = render_template('jsondump.sql.jinja2', **payload)
    driver.push(restart_query)
    driver.push(insert_query)
class TestShop:
    """UI test: logs into the shop admin backend through the Work page object."""

    def setup_class(self):
        # One shared webdriver and login page-object for the whole class.
        self.driver = Driver().get_driver()
        self.login = Work()

    def setup(self):
        # NOTE(review): fixed sleep — presumably waiting for the app to
        # settle before navigating; confirm whether an explicit wait fits.
        time.sleep(2)
        self.driver.get("http://localhost/Admin/Admin/login")

    # @staticmethod
    # def teardown_class():
    #     Driver().quit_web()

    @pytest.mark.parametrize(("username", "password", "verify"),
                             [("admin", "123456", "8888")])
    def test_login(self, username, password, verify):
        # Log the credentials being tried, then drive the login flow.
        logging.info("start->proxy--->{}{}{}".format(username, password, verify))
        self.login.admin_login(username, password, verify)
def __init__(self, bl_number):
    # Instantiates Driver class and opens url in headless browser
    self.driver = Driver()
    self.driver.get_page(MSCURL)
    # Pre-set this cookie so the newsletter signup popup stays hidden.
    self.driver.add_cookies({
        'name': 'newsletter-signup-cookie',
        'value': 'temp-hidden'
    })
    self.bl_number = bl_number
    # Locates search box and inputs BL number
    element = self.driver.find_id_element(
        "ctl00_ctl00_plcMain_plcMain_TrackSearch_txtBolSearch_TextField")
    # 'MEDUQ' is the carrier prefix expected in front of the numeric BL.
    element.send_keys('MEDUQ' + self.bl_number)  # BL Format: 1312550
    # Locates search button and clicks
    search_button_xpath = '//*[@id="ctl00_ctl00_plcMain_plcMain_TrackSearch_pnlTrackSearchForm"]/div/div[2]'
    search = self.driver.find_xpath_element(search_button_xpath)
    search.click()
class Streamer(object):
    """Streams unread Reddit inbox items and replies with archive links.

    NOTE(review): this block arrived heavily garbled (stray parenthesis,
    a method with no body, an __iter__ that never yielded). Every
    reconstructed or guessed spot is flagged inline — review before use.
    """

    def __init__(self, *args, **kwargs):
        self.reddit = ''       # praw.Reddit session, set by connect()
        self.account = ''      # credential dict, set by connect()
        self.driver = Driver()
        self.bot = ArchiveBot()
        self.error = "There was an error processing your request."

    def auth(self):
        """Return a praw.Reddit session built from self.account."""
        return praw.Reddit(client_id=self.account['CLIENT_ID'],
                           client_secret=self.account['CLIENT_SECRET'],
                           password=self.account['PASSWORD'],
                           user_agent=self.account['USER_AGENT'],
                           username=self.account['USERNAME'])

    def connect(self, mode='local'):
        """Load the first account row from the DB and authenticate.

        Bug fixed: *mode* was accepted but 'local' was hard-coded in the
        driver call; the argument is now honored (default unchanged).
        """
        self.driver.connect(mode=mode)
        account = self.driver.pull('SELECT * FROM accounts')[0]
        self.account = {
            'id': account[0],
            'CLIENT_ID': account[1],
            'CLIENT_SECRET': account[2],
            'PASSWORD': account[3],
            'USER_AGENT': account[4],
            'USERNAME': account[5],
        }
        self.reddit = self.auth()

    def compile(self, **kwargs):
        """Return the unread inbox items, newest first."""
        self.results = []
        self.results.extend(self.reddit.inbox.unread(**kwargs))
        self.results.sort(key=lambda post: post.created_utc, reverse=True)
        return self.results

    def get_body(self, post):
        """Return the text of *post*'s parent, or self.error.

        Submissions yield "title selftext" (self posts) or "title url";
        comments/messages yield their body.
        """
        try:
            post_type = post.parent().__class__.__name__.lower()
        except Exception:
            return self.error
        if post_type == 'submission':
            url = post.parent().url
            title = post.parent().title
            if 'reddit.com' in url:
                try:
                    selftext = post.parent().selftext
                    # Bug fixed: the original computed this join but never
                    # returned it, silently returning None.
                    return " ".join([title, selftext])
                except Exception:
                    return self.error
            return " ".join([title, url])
        if post_type in ('comment', 'message'):
            return post.parent().body
        return self.error

    def get_url(self, body, length):
        """Produce the reply text for *body*.

        NOTE(review): the original method body was missing from the
        source — restore the real implementation before deploying.
        """
        raise NotImplementedError(
            'get_url body was missing in the garbled source')

    def __iter__(self):
        """Process each streamed mention and yield it after replying."""
        username = self.account['USERNAME']
        stream = praw.models.util.stream_generator(
            lambda **kwargs: self.compile(**kwargs))
        for post in stream:
            self.reddit.inbox.mark_read([post])
            body = ''
            chain_authors = []
            if post.parent().author == username:
                # Walk up the reply chain until parent() stops resolving.
                # Bug fixed: the original called post.parent() forever
                # without advancing, looping on the same node.
                chain = []
                node = post
                while True:
                    try:
                        node = node.parent()
                    except Exception:
                        break
                    chain.append(node)
                chain_authors = [x.author for x in chain]  # stray ')' removed
                # Bug fixed: the original unpacked the raw list in the
                # for-target; enumerate() supplies the index.
                for idx, author in enumerate(chain_authors[::-1]):
                    if author == username:
                        body = self.get_body(chain[idx - 1])
                        break
            else:
                body = self.get_body(post.parent())
            # Bug fixed: the original compared chain nodes (not authors)
            # against the username string, so the count was always zero.
            chain_length = len([a for a in chain_authors if a == username])
            post.reply(self.get_url(body, chain_length))
            # NOTE(review): yield restored so __iter__ is a generator; the
            # original never yielded yet was iterated by the main guard.
            yield post


if __name__ == '__main__':
    stream = Streamer()
    stream.connect()
    for post in stream:
        print(post)
def __init__(self):
    """Bind the shared MIS webdriver instance to this page object."""
    self.driver = Driver.get_driver_mis()
def __init__(self):
    """Bind the shared app webdriver instance to this page object."""
    self.driver = Driver.get_driver_app()
def tearDownClass(cls):
    """unittest hook: quit the shared app webdriver after the class finishes."""
    Driver.quit_driver_app()
def setup_class(self):
    """pytest hook: grab the shared MP webdriver and the login proxy page."""
    self.driver = Driver.get_driver_mp()
    self.proxy_login = LoginProxy()
def test_begin(self):
    # NOTE(review): presumably clears an MP-session flag before the suite
    # runs (counterpart test_mp_end sets it True) — confirm semantics.
    Driver.change_mp_key(False)
def setup_class(self):
    """pytest hook: grab the shared MIS webdriver and its login proxy page."""
    self.driver = Driver.get_driver_mis()
    self.login = MisProxyPage()
def __init__(self):
    """Bind the shared MIS webdriver and the audit page object."""
    self.driver = Driver.get_driver_mis()
    self.mis_audit = MisAuditPage()
def setup_class(self):
    """pytest hook: create a webdriver and the Work login page object."""
    self.driver = Driver().get_driver()
    self.login = Work()
def __init__(self, *args, **kwargs):
    """Initialize empty session state plus the DB driver and archive bot."""
    # Credentials and the praw session are filled in later by connect().
    self.reddit = ''
    self.account = ''
    self.error = "There was an error processing your request."
    self.driver = Driver()
    self.bot = ArchiveBot()
class Manager(object):
    """Rotates Reddit accounts and records streamed posts in Postgres."""

    def __init__(self):
        self.account = ''
        self.driver = Driver()
        self.streamer = Streamer()
        self.stream_table = 'stream'
        self.accounts_table = 'accounts'
        # Batch size after which the stream table is re-checked.
        self.limit = self.driver.limit

    def auth(self, idx=1):
        """Load account row *idx* into self.account, wrapping past the end.

        NOTE(review): if no account row is readable at all this still
        recurses without bound, exactly like the original.
        """
        query = 'SELECT * FROM accounts WHERE id = {0};'.format(idx)
        count_query = 'SELECT count(*) FROM {0};'.format(self.accounts_table)
        count = self.driver.pull(count_query)[0][0]
        if idx > count:
            # Ran past the last account id: wrap around to the first.
            self.auth(1)
            return
        try:
            account = self.driver.pull(query)[0]
            self.account = {
                'id': account[0],
                'CLIENT_ID': account[1],
                'CLIENT_SECRET': account[2],
                'PASSWORD': account[3],
                'USER_AGENT': account[4],
                'USERNAME': account[5]
            }
        except Exception:
            # Bare except narrowed; a missing/short row tries the next id.
            self.auth(idx + 1)
            return

    def build(self):
        """Hand the active account to the streamer and (re)start it."""
        self.streamer.account = self.account
        self.streamer()

    def get_type(self, post):
        """Return 'comment' or 'submission' from the praw object's type name."""
        types = ['comment', 'submission']
        post_type = str(type(post)).lower()
        for t in types:
            if t in post_type:
                return t

    def run(self):
        """Consume the streamer, inserting unseen posts; rotate accounts on error.

        Bug fixed: the original retried by calling self.run() recursively
        from the except clause, growing the stack on every failure; the
        retry is now a loop with identical observable behavior.
        """
        select_query = 'select * from {0} where reddit_id = %s'
        insert_query = 'insert into {0} (reddit_id,class) values (%s,%s)'
        while True:
            self.driver.check(self.stream_table)
            idx = 0
            try:
                for post in self.streamer:
                    idx += 1
                    post_type = self.get_type(post)
                    # Skip posts whose reddit_id is already recorded.
                    copies = self.driver.pull_var(
                        select_query.format(self.stream_table), (post.id, ))
                    copies = [x for t in copies for x in t]
                    if not any(copies):
                        self.driver.push_var(
                            insert_query.format(self.stream_table),
                            (post.id, post_type))
                    if idx >= self.limit:
                        # Periodic table maintenance every self.limit posts.
                        self.driver.check(self.stream_table)
                        idx = 1
            except Exception:
                # Stream died (rate limit, auth, network): rotate to the
                # next account and restart the loop.
                self.auth()
                self.build()
                continue
            break  # stream ended without error (normally unreachable)

    def __call__(self):
        """Authenticate, start the streamer, then run the main loop."""
        self.auth()
        self.build()
        self.run()
def push():
    """Insert the posted JSON payload into the DB via the jsondump template."""
    driver = Driver()
    driver.connect(mode='heroku')
    payload = request.get_json(force=True)
    # The payload's keys parameterize the rendered SQL statement.
    statement = render_template('jsondump.sql.jinja2', **payload)
    driver.push(statement)
def test_mp_end(self):
    # NOTE(review): presumably restores the MP-session flag (test_begin
    # sets it False), then shuts down the MP webdriver — confirm semantics.
    Driver.change_mp_key(True)
    Driver.quit_driver_mp()
def setup_class(self):
    """pytest hook: grab the shared MIS webdriver plus home/audit proxy pages."""
    self.driver = Driver.get_driver_mis()
    self.home_proxy = MisProxyHome()
    self.audit_proxy = MisProxyPage()
        # NOTE(review): fragment — tail of a run() method (its header is not
        # part of this chunk), followed by __call__ and the script entry.
        try:
            for post in self.streamer:
                idx += 1
                post_type = self.get_type(post)
                # Skip posts whose reddit_id is already recorded.
                copies = self.driver.pull_var(
                    select_query.format(self.stream_table), (post.id, ))
                copies = [x for t in copies for x in t]
                if not any(copies):
                    self.driver.push_var(
                        insert_query.format(self.stream_table),
                        (post.id, post_type))
                if idx >= self.limit:
                    # Periodic table maintenance every self.limit posts.
                    self.driver.check(self.stream_table)
                    idx = 1
        except:
            # NOTE(review): bare except + recursive restart — every failure
            # deepens the stack; rotate account then re-run.
            self.auth()
            self.build()
            self.run()

    def __call__(self):
        """Authenticate, start the streamer, then run the main loop."""
        self.auth()
        self.build()
        self.run()


if __name__ == '__main__':
    # Script entry: initialize the DB then run the manager indefinitely.
    driver = Driver()
    driver.initialize()
    man = Manager()
    man()
class MSC:
    """Scrapes MSC cargo tracking for a bill-of-lading number."""

    def __init__(self, bl_number):
        # Instantiates Driver class and opens url in headless browser
        self.driver = Driver()
        self.driver.get_page(MSCURL)
        # Pre-set this cookie so the newsletter signup popup stays hidden.
        self.driver.add_cookies({
            'name': 'newsletter-signup-cookie',
            'value': 'temp-hidden'
        })
        self.bl_number = bl_number
        # Locates search box and inputs BL number
        element = self.driver.find_id_element(
            "ctl00_ctl00_plcMain_plcMain_TrackSearch_txtBolSearch_TextField")
        # 'MEDUQ' is the carrier prefix expected in front of the numeric BL.
        element.send_keys('MEDUQ' + self.bl_number)  # BL Format: 1312550
        # Locates search button and clicks
        search_button_xpath = '//*[@id="ctl00_ctl00_plcMain_plcMain_TrackSearch_pnlTrackSearchForm"]/div/div[2]'
        search = self.driver.find_xpath_element(search_button_xpath)
        search.click()

    def get_loading_port(self):
        """Return the port of loading (text before the first comma)."""
        pol_xpath = '//*[@id="ctl00_ctl00_plcMain_plcMain_rptBOL_ctl00_pnlBOLContent"]/table/tbody[1]/tr/td[3]'
        load_port_element = self.driver.find_xpath_element(pol_xpath)
        loading_port = load_port_element.text.split(',')
        return loading_port[0]

    def get_departure_date(self):
        """Return the ETD as month+day digits.

        NOTE(review): assumes the cell text is DD/MM/... so the result is
        MMDD — confirm against the live page format.
        """
        etd_xpath = '//*[@id="ctl00_ctl00_plcMain_plcMain_rptBOL_ctl00_pnlBOLContent"]/table/tbody[1]/tr/td[1]/span'
        etd_element = self.driver.find_xpath_element(etd_xpath)
        departure_date = etd_element.text.split('/')
        return departure_date[1] + departure_date[0]

    def get_discharge_port(self):
        """Return the discharge port as "city,region" (first two comma fields)."""
        pod_xpath = '//*[@id="ctl00_ctl00_plcMain_plcMain_rptBOL_ctl00_rptContainers_ctl01_pnlContainer"]' \
                    '/table[2]/tbody/tr[1]/td[1]/span'
        pod_element = self.driver.find_xpath_element(pod_xpath)
        discharge_port = pod_element.text.split(',')[:2]
        return ','.join(discharge_port)

    def get_arrival_date(self):
        """Return the arrival date as month+day digits from the ETA rows.

        Bugs fixed:
        * ``if 'Discharged' or 'Arrival' in element.text`` was always true
          (the literal 'Discharged' is truthy), so the first row always
          matched regardless of content.
        * ``global arrival_date, arrival_info`` leaked scraping state into
          module globals; plain locals are used instead.
        """
        eta_xpath = '//*[@id="ctl00_ctl00_plcMain_plcMain_rptBOL_ctl00_rptContainers_ctl01_pnlContainer"]/table[2]/tbody/tr'
        eta_elements = self.driver.find_xpath_elements(eta_xpath)
        arrival_info = []
        for element in eta_elements:
            if 'Discharged' in element.text or 'Arrival' in element.text:
                arrival_info = element.text.split(' ')
                break
        arrival_date = None
        # Keep the LAST date-like token, matching the original loop.
        for item in arrival_info:
            if '/' in item:
                arrival_date = item.split('/')
        # NOTE(review): raises TypeError if no date token was found; the
        # original raised NameError in the same situation.
        return arrival_date[1] + arrival_date[0]

    def get_containers(self):
        """Return [(container_number, size), ...] for every container row."""
        # Locates containers (e.g. ABCD1234567) elements on page
        container_list = []
        container_elements = self.driver.find_xpath_elements(
            '//*[@class="containerToggle"]')
        time.sleep(1)
        i = 1
        for element in container_elements:
            element.click()
            raw_container = element.text.split(' ')
            container_number = raw_container[1]
            # Bug fixed: 'ctl0{}' broke for the 10th container onward
            # (produced 'ctl010'); '{:02d}' zero-pads the ASP.NET index.
            container_size_element = self.driver.find_xpath_element(
                '//*[@id="ctl00_ctl00_plcMain_plcMain_rptBOL_ctl00_'
                'rptContainers_ctl{:02d}_pnlContainer"]/table[1]/tbody[1]'
                '/tr/td[1]/span'.format(i))
            container_size = container_size_element.text[:2]
            container_list.append((container_number, container_size))
            i += 1
        # Dismiss the cookie banner so the last toggle click lands.
        cookie_popup = self.driver.find_xpath_element(
            '//*[@id="cookiePolicyModal"]/div/div/a')
        cookie_popup.click()
        container_elements[0].click()
        return container_list

    def get_all_info(self):
        """Return the full scrape as a nested dict keyed by the BL number."""
        containers = self.get_containers()
        load_port = self.get_loading_port()
        etd = self.get_departure_date()
        discharge_port = self.get_discharge_port()
        eta = self.get_arrival_date()
        return {
            'MBL': self.bl_number,
            'Info': {
                'POL': load_port,
                'ETD': etd,
                'POD': discharge_port,
                'ETA': eta,
                'Containers': containers
            }
        }
def teardown_class(self):
    """pytest hook: quit the shared MIS webdriver after the class finishes."""
    Driver.quit_driver_mis()
# Script entry: self-check, load landing pages, and build one Driver per
# land with its tracking link.
import time, random
from datetime import datetime
from config import Config
from utils import Utils, Driver
from checks.all_imports import *

Utils.self_check()
Utils.log_restart()
lands = Utils.get_lands()
# First run only: open a visible, image-enabled browser once so cookies
# can be captured and saved for later headless runs.
if not Utils.cookies_exist():
    driver = Driver(images=True, headless=False)
    driver.cookie_saver()
    driver.stop()
for land in lands:
    runner = Driver()
    runner.land = land[0]
    runner.track_url = land[1]
    # '0' marks "no tracker": fall back to the pixel-only link.
    if runner.track_url == '0':
        runner.link = '%s?fbpixel=%s' % (runner.land, runner.FBPIXEL)
    else:
        runner.link = '%s?track_url=%s&fbpixel=%s' % (
            runner.land, runner.track_url, runner.FBPIXEL)
    # The test domain requires HTTP basic auth embedded in the URL.
    if 'leadrocktest.com' in runner.link:
        runner.link = runner.link.replace(
            '://', '://%s:%s@' % (runner.SERVER_LOGIN, runner.SERVER_PASS))
def find_channel(self, channel_name):
    """Locate the channel element whose locator is filled with *channel_name*."""
    # self.channel holds a (by, locator-template) pair.
    by, locator_template = self.channel[0], self.channel[1]
    locator = locator_template.format(channel_name)
    return Driver.get_driver_app().find_element(by, locator)
def __init__(self):
    """Bind the shared MP webdriver and the publish page object."""
    self.driver = Driver.get_driver_mp()
    self.pub_page = PubPage()
def __init__(self):
    """Bind the app index page object and the shared app webdriver."""
    self.index_elem = AppIndexPage()
    self.driver = Driver.get_driver_app()
def click_login_btn(self):
    """Enable the disabled login button via JS, then click it."""
    # The button is rendered disabled; strip the attribute so the click lands.
    js_str = "document.getElementById('inp1').removeAttribute('disabled')"
    Driver.get_driver_mis().execute_script(js_str)
    self.mis_login.find_login_btn().click()
def setUpClass(cls):
    """unittest hook: grab the shared app webdriver and the index proxy."""
    cls.driver = Driver.get_driver_app()
    cls.index_proxy = AppIndexProxy()