def main(): username = None password = None with open('account.json', 'r') as account_file: raw_text = account_file.read().replace('\r\n', '') username = json.loads(raw_text)['username'] password = json.loads(raw_text)['password'] raw_text = None if username and password: robot = GRobot(display=True, develop=False, log_level=logging.DEBUG, loading_timeout=10, operate_timeout=10, viewport_size=(400, 500)) robot.open('https://www.landofbitcoin.com/login') # robot.open('http://www.baidu.com') robot.wait_for_page_loaded() # print robot.content().encode('utf-8') robot.type('name=username', username) robot.type('name=password', password) robot.click("xpath=/html/body/div[2]/div/div[1]/div/div[2]/form/input[2]", expect_loading=True) robot.wait_for_page_loaded() robot.webview.showMinimized() while True: print robot.content().encode('utf-8') if robot.content().encode('utf-8').find( '<p class="text-center" id="faucet-lucky">Lucky number: <span id="lucky">') == -1: robot.webview.showNormal() robot.click('xpath=//*[@id="faucet"]/div/a', expect_loading=True) robot.wait_for_text('<span id="lucky">', -1) robot.webview.showMinimized() else: gevent.sleep(5)
def main():
    """Log in to Twitter through a local SOCKS5 proxy, post one tweet,
    retrying until Twitter confirms the post, then block forever.

    Side effects: opens a visible browser window and performs network I/O.
    Never returns (ends in robot.wait_forever()).
    """
    robot = GRobot(display=True, log_level=logging.DEBUG, develop=False)

    # Chinese people love proxy.
    robot.set_proxy('socks5://127.0.0.1:7070')

    robot.open('https://twitter.com')

    # Login
    robot.key_clicks('id=signin-email', USERNAME)
    robot.key_clicks('id=signin-password', PASSWORD)
    robot.click("xpath=//td/button[contains(text(),'Sign in')]",
                expect_loading=True)

    # Post a twitter
    robot.key_clicks(
        "id=tweet-box-mini-home-profile",
        "GRobot is too powerful.https://github.com/DYFeng/GRobot")

    # Wait for post success
    while 1:
        robot.click(
            "xpath=//div[@class='module mini-profile']//button[text()='Tweet']"
        )
        try:
            robot.wait_for_text('Your Tweet was posted')
            break
        except Exception:
            # A bare `except:` would also swallow KeyboardInterrupt and
            # SystemExit; catch Exception and only recover from the known
            # failure mode by refreshing the page.
            if 'refresh the page' in robot.content():
                robot.reload()

    # Wait forever.
    robot.wait_forever()
def main():
    """Sign in to Twitter via a local SOCKS5 proxy and publish a single
    tweet, clicking the Tweet button until the confirmation text appears,
    then keep the browser session alive indefinitely.

    Side effects: opens a visible browser window and performs network I/O.
    Never returns (ends in robot.wait_forever()).
    """
    robot = GRobot(display=True, log_level=logging.DEBUG, develop=False)

    # Chinese people love proxy.
    robot.set_proxy('socks5://127.0.0.1:7070')

    robot.open('https://twitter.com')

    # Login
    robot.key_clicks('id=signin-email', USERNAME)
    robot.key_clicks('id=signin-password', PASSWORD)
    robot.click("xpath=//td/button[contains(text(),'Sign in')]",
                expect_loading=True)

    # Post a twitter
    robot.key_clicks("id=tweet-box-mini-home-profile",
                     "GRobot is too powerful.https://github.com/DYFeng/GRobot")

    # Wait for post success
    while 1:
        robot.click("xpath=//div[@class='module mini-profile']//button[text()='Tweet']")
        try:
            robot.wait_for_text('Your Tweet was posted')
            break
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C (KeyboardInterrupt)
            # and SystemExit still propagate; only the known "refresh the
            # page" error state is recovered from.
            if 'refresh the page' in robot.content():
                robot.reload()

    # Wait forever.
    robot.wait_forever()
def main(): # Show the browser window.Open the webkit inspector. robot = GRobot(display=True, develop=False, log_level=logging.DEBUG, loading_timeout=10, operate_timeout=10) # In China,people can only using proxy to access google. robot.set_proxy('socks5://127.0.0.1:7070') #Open google robot.open('http://www.google.com/') #Type out project and search. robot.type('name=q', 'GRobot github') robot.click('name=btnK', expect_loading=True) for i in xrange(1, 10): # Waiting for the ajax page loading. robot.wait_for_xpath("//tr/td[@class='cur' and text()='%s']" % i) if u'https://github.com/DYFeng/GRobot' in robot.content(): print 'The porject in page', i break # Click the Next link.We don't use expect_loading.Because it's ajax loading,not page loading. robot.click("xpath=//span[text()='Next']") else: print "Can not found.Make a promotion for it." # Wait forever. robot.wait_forever()
def main(): # Show the browser window.Open the webkit inspector. robot = GRobot(display=True, develop=False, log_level=logging.DEBUG, loading_timeout=10, operate_timeout=10) # In China,people can only using proxy to access google. robot.set_proxy("socks5://127.0.0.1:7070") # Open google robot.open("http://www.google.com/") # Type out project and search. robot.type("name=q", "GRobot github") robot.click("name=btnK", expect_loading=True) for i in xrange(1, 10): # Waiting for the ajax page loading. robot.wait_for_xpath("//tr/td[@class='cur' and text()='%s']" % i) if u"https://github.com/DYFeng/GRobot" in robot.content(): print "The porject in page", i break # Click the Next link.We don't use expect_loading.Because it's ajax loading,not page loading. robot.click("xpath=//span[text()='Next']") else: print "Can not found.Make a promotion for it." # Wait forever. robot.wait_forever()