def get_espn_ids(url=None):
    """Scrape ESPN team ids from the page at *url*.

    Walks a fixed grid of sections/columns/rows of team links and returns
    a dict mapping team name -> team id (second-to-last href path segment).
    """
    browser = Browser('chrome', headless=True)
    browser.visit(url)
    count = 1
    kvs = {}
    for section in range(1, 7):
        for column in range(1, 3):
            # The last section has no second column on the page.
            # Fix: use '==' for value comparison; 'is' tests identity and is
            # only coincidentally true for small ints in CPython.
            if column == 2 and section == 6:
                continue
            for row in range(1, 13):
                xpath = '//*[@id="fittPageContainer"]/div[3]/div[1]/div/div[1]/div[2]/div[{}]/div[{}]/section/section[{}]/div/section/div/a'.format(
                    column, section, row)
                try:
                    result = browser.find_by_xpath(xpath)['href']
                    name = browser.find_by_xpath(xpath).text
                    team_id = result.split('/')[-2]
                    kvs[name] = team_id
                    # Fix: was a Python-2 print statement.
                    print(count, team_id, name)
                    count = count + 1
                except exceptions.ElementDoesNotExist:
                    # Grid cell absent for this section/column/row; skip.
                    continue
    return kvs
def reguster():
    """Register a new employer account on hh.ru using generated fake data.

    Relies on the module-level `inn` generator for identity fields.
    """
    browser = Browser()
    inn.generate()
    browser.visit('http://hh.ru/auth/employer')
    # Populate the registration form from the generated identity.
    browser.fill('companyName', inn.generate_work_place(1982)['company'])
    browser.fill('firstName', inn.FirstName)
    browser.fill('lastName', inn.LastName)
    browser.fill('email', inn.MailAddress + '@arterp.ru')
    browser.fill('phoneCode', inn.phone_code)
    browser.fill('phoneNumber', inn.phone)
    time.sleep(3)
    # Submit the form, then confirm on the follow-up page.
    submit_btn = browser.find_by_xpath(
        '//*[starts-with(@class,"HH-Form-SubmitButton")]')
    submit_btn.first.click()
    time.sleep(5)
    confirm_btn = browser.find_by_xpath(
        '//*[starts-with(@class,"g-button m-button_mid")]')
    confirm_btn.first.click()
    browser.quit()
def run(general, session, did):
    """
    Open the browser, insert access code (if given), insert name,
    join session and start recording.
    """
    url = session['url']
    browser = Browser()
    browser.driver.maximize_window()
    browser.visit(url)
    if "accessCode" in session:
        # Room is protected: submit the access code first.
        browser.fill('room[access_code]', session['accessCode'])
        browser.find_by_xpath(
            '/html/body/div[2]/div[2]/div/div[2]/div[2]/form/div/input[2]'
        ).click()
    # BBB Greenlight names the join field after the /gl/... path segment.
    idx = url.find('/gl/')
    fname = url[idx:] + "[join_name]"
    browser.fill(fname, general['name'])
    # Join the BBB session.
    browser.find_by_id('room-join').click()
    now = datetime.now()
    ext = session['prefix'] + '-' + now.strftime("%d%m%Y-%H%M")
    out = record(general, session['duration'], did, ext)
    browser.quit()
    return out
def qd():
    """Log in to PConline and perform the daily sign-in task."""
    browser = Browser('chrome')
    browser.visit(juchaozhi)
    time.sleep(1)
    browser.find_by_name('username').fill(username)
    browser.find_by_name('password').fill(password)
    browser.find_by_name('button').click()
    time.sleep(1)
    # Guard clause: bail out when the username never shows up post-login.
    # Note: the browser is intentionally left open on failure, matching
    # the original control flow.
    if not browser.is_text_present(username, wait_time=1):
        print ('login fail!!!')
        logging.info('login fail!!!')
        return
    print ('login success!')
    logging.info('login success!')
    browser.visit('http://best.pconline.com.cn/qiandao.html')
    time.sleep(1)
    # Already signed in today? The .signStatus element is visible then.
    if browser.find_by_css('.signStatus').visible:
        print ('you had signed!')
        logging.info('you had signed!')
        browser.quit()
    else:
        browser.find_by_xpath('//*[@id="JSignBox"]/a').click()
        time.sleep(1)
        print ('mission success!')
        logging.info('mission success!')
        browser.quit()
def member_new(emails, link, password):
    """Register one store account per entry in *emails*.

    Each entry is stripped and suffixed with "@sgsaas.net"; the fixed
    profile data below is submitted through the registration form at *link*.
    """
    browser = Browser('chrome')
    for n in emails:
        email = n.rstrip() + "@sgsaas.net"
        browser.visit(link)
        browser.fill('age', "33")
        browser.fill('firstname', "Sam")
        browser.fill('lastname', "wang")
        browser.find_by_name('email').last.fill(email)
        browser.fill('telephone', 3984020)
        browser.fill('address_1', "Bukit Batok Avenue")
        browser.fill('city', "Singapore")
        browser.fill('postcode', 658080)
        # Country 188 = Singapore, zone 4231 = its region option.
        browser.find_by_xpath('//select[@id="input-country"]//option[@value="188"]').click()
        browser.find_by_xpath('//select[@id="input-zone"]//option[@value="4231"]').click()
        browser.find_by_name('password').last.fill(password)
        browser.fill('confirm', password)
        browser.find_by_name('agree').click()
        browser.find_by_css('.btn-primary').click()
        sleep(1)
        # Fix: was a Python-2 print statement (SyntaxError on Python 3).
        print(email + " has registered!")
    browser.quit()
    return
def job_func(username, password):
    """Log in to the SUES web VPN and submit the daily health report.

    Returns the text of the confirmation popup shown after submission.
    """
    browser = Browser("chrome")
    browser.visit('https://web-vpn.sues.edu.cn/')
    browser.find_by_id('username').fill(username)
    browser.find_by_id('password').fill(password)
    browser.find_by_id('passbutton').click()
    print(datetime.datetime.now())
    print("登陆成功")
    time.sleep(10)
    # Open the health-report page (it opens in a second window).
    browser.find_by_text('健康信息填报').click()
    time.sleep(10)
    browser.windows.current = browser.windows[1]
    time.sleep(10)
    # Pick a plausible random body temperature, one decimal place.
    tep = random.uniform(36.1, 36.7)
    tep = format(tep, '.1f')
    temperature_box = browser.find_by_xpath(
        '//*[@id="form"]/div[18]/div[1]/div/div[2]/div/div/input')
    temperature_box.fill(tep)
    time.sleep(5)
    # Submit and capture the confirmation popup text.
    browser.find_by_id('post').click()
    time.sleep(5)
    result = browser.find_by_xpath("//*[@id=\"layui-layer1\"]/div[2]").text
    browser.quit()
    return result
def scrape_featured_img():
    """Scrape the URL of JPL's currently featured space image.

    Clicks the "full image" control, parses the fancybox element, and
    returns the absolute image URL.
    """
    executable_path = {'executable_path': '/usr/local/bin/chromedriver'}
    browser = Browser('chrome', **executable_path, headless=True)
    url = 'https://www.jpl.nasa.gov/spaceimages'
    browser.visit(url)
    browser.find_by_xpath('//*[@id="full_image"]').click()
    # Give the fancybox overlay time to load.
    time.sleep(5)
    html = browser.html
    soup = BeautifulSoup(html, 'html.parser')
    img_path = soup.find('img', class_='fancybox-image')
    featured_image_url = 'https://www.jpl.nasa.gov' + img_path.get('src')
    # Fix: removed a no-op bare expression and dead commented-out code;
    # close the browser before returning (it was leaked).
    browser.quit()
    return featured_image_url
# NOTE(review): the credential prompts in this function were scrubbed at some
# point ('******' placeholders swallowed closing parens and call syntax, e.g.
# `input('Username: '******'Password: '******'username', username)`), so the
# line below is NOT syntactically valid as-is. Left byte-identical pending
# restoration of the original input()/Browser()/fill() calls. Intended flow
# (inferred, to be confirmed): prompt for credentials, log in to the ebizautos
# admin, then for each account id open its overview page and its CP tab.
def main(aids): livelogin = '******' sboxlogin = '******' livesearch = 'https://admin.ebizautos.com/index.cfm?section=accounts&page=manage&settings=overview&aid=' sboxsearch = 'https://admin.sandbox.ebizautos.com/index.cfm?section=accounts&page=manage&settings=overview&aid=' livecp = 'https://cp.ebizautos.com/index.cfm?ebxid=314' sboxcp = 'https://cp.sandbox.bizautos.com/index.cfm?ebxid=314' login = False while login == False: username = input('Username: '******'Password: '******'username', username) browser.fill('Password', password) try: browser.find_by_xpath( '/html/body/div[1]/div/div[1]/form/table/tbody/tr[6]/td/div[1]/a' ).click() time.sleep(2) for aid in aids: browser.visit(livesearch + str(aid)) browser.find_by_css( 'html body#MainAdminBody div#BodyContainerWide div#MainBody.ClearFix div#LeftNav div.Content div div.Nav3 a' ).click() browser.driver.find_element_by_tag_name('body').send_keys( Keys.CONTROL, '1') login = True except: print('Login error\a') browser.quit()
def main(argv):
    """Change the admin password via the web UI, driven by a JSON config.

    Usage: -f CONFIG_FILE, where the config provides 'url' and 'password'.
    Navigates the account tree to the ChangePass node and fills both
    password fields.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], "f:")
    except getopt.GetoptError as err:
        sys.exit(2)
    global test_config
    config_file_name = None
    for o, a in opts:
        if o == "-f":
            config_file_name = a
        else:
            assert False, "unhandled option"
    if config_file_name is None:
        usage(argv)
        sys.exit(1)
    # Fix: close the config file deterministically (it was left open).
    with open(config_file_name, "r") as config_file:
        test_config = json.load(config_file)
    browser = Browser()
    browser.visit(test_config['url'])
    # Expand the account tree down to the ChangePass node.
    e = browser.find_by_xpath("//div[@id='treeNode_account']/div/span")
    e.click()
    e = browser.find_by_xpath("//div[@id='treeNode_account.AdminAccount']/div/span")
    e.click()
    e = browser.find_by_xpath(
        "//div[@id='treeNode_account.AdminAccount.ChangePass']/div/span[3]/span[2]")
    time.sleep(1)
    e.click()
    browser.find_by_id('id_new_password').fill(test_config['password'])
    browser.find_by_id('id_new_password2').fill(test_config['password'])
    browser.find_by_id('btn_PasswordChangeForm_Ok_label').click()
def check(url, interval, number=False):
    """Poll *url* every *interval* seconds and report page-size changes.

    Compares the byte length of the page text between polls; when *number*
    is truthy, Text() is notified with the new size on each change.
    Runs until interrupted with Ctrl+C.
    """
    try:
        browser = Browser('chrome')
        browser.driver.set_window_size(1000, 800)
        browser.visit(url)
        Original = len(browser.find_by_xpath('/html').text.encode('utf-8'))
        browser.quit()
        while True:
            print('To exit program ctrl+c')
            browser = Browser('chrome')
            browser.driver.set_window_size(1000, 800)
            browser.visit(url)
            New = len(browser.find_by_xpath('/html').text.encode('utf-8'))
            # Fix: was `New != False`, true for any non-empty page, so a
            # "change" was reported on every poll. Compare with the previous
            # size instead (consistent with the sibling check() below).
            if New != Original:
                print("PAGE CHANGED from %d bytes to %d bytes!" % (Original, New))
                Original = New
                # Fix: was `number != None`, always true for the default
                # False; only notify when a number was actually supplied.
                if number:
                    Text(New)
            else:
                print("No change detected")
            time.sleep(float(interval))
            browser.quit()
    except KeyboardInterrupt:
        print('Thank you for running this program exiting now.')
        sys.exit()
def check(url, interval):
    """Poll *url* every *interval* seconds, texting the result each poll.

    Compares the byte length of the page text between polls and sends a
    Twilio SMS reporting either the change or "No change detected".
    Runs until interrupted with Ctrl+C.
    """
    try:
        browser = Browser('chrome')
        browser.driver.set_window_size(1000, 800)
        browser.visit(url)
        Original = len(browser.find_by_xpath('/html').text.encode('utf-8'))
        browser.quit()
        while True:
            print('To exit program ctrl+c')
            browser = Browser('chrome')
            browser.driver.set_window_size(1000, 800)
            browser.visit(url)
            New = len(browser.find_by_xpath('/html').text.encode('utf-8'))
            if New != Original:
                changeMessage = ("PAGE CHANGED from %d bytes to %d bytes!"
                                 % (Original, New))
                # Fix: was a Python-2 print statement.
                print(changeMessage)
                Original = New
                message = client.messages.create(to="+15037043189",
                                                 from_="+19718034237",
                                                 body=changeMessage)
            else:
                print("No change detected")
                message = client.messages.create(to="+15037043189",
                                                 from_="+19718034237",
                                                 body="No change detected")
            time.sleep(float(interval))
            browser.quit()
    except KeyboardInterrupt:
        print('Thank you for running this program exiting now.')
        sys.exit()
def update_driver():
    """Download the newest chromedriver and copy it into the Python dirs.

    Scrapes the chromedriver storage index with a temporary splinter
    browser, downloads the newest win32 build, unzips it in the Downloads
    folder, and copies chromedriver.exe over any older copy found in
    Info.PYTHON_DIRS.
    """
    # Temporary browser (default Firefox); this part doesn't strictly
    # need splinter, but it works.
    tb = Browser()
    base_url = 'https://chromedriver.storage.googleapis.com/index.html'
    tb.visit(base_url)
    # All version-folder links on the index page.
    links = tb.find_by_xpath('//tr/td/a')
    # The newest version folder is the one just before the 'icons' entry.
    current_link = ''
    for i in range(len(links)):
        try:
            if links[i].html == 'icons':
                current_link = links[i - 1].outer_html
                break
        except Exception:
            # Fix: was a bare except; keep the best-effort scan but don't
            # swallow KeyboardInterrupt/SystemExit.
            pass
    # Pull the href out of the anchor's outer HTML.
    current_link = current_link.split('"')[1]
    tb.visit(base_url + current_link)
    # Click the win32 driver link to trigger the download.
    tb.find_by_xpath('//a[contains(text(), "win32")]')[0].click()
    # Done with the browser.
    tb.quit()
    # Local imports used only by the file handling below (kept local as in
    # the original).
    import os, glob, zipfile, shutil
    # Remember the current directory so we can restore it at the end.
    od = os.getcwd()
    os.chdir(Info.DOWNLOADS_DIR)
    # Find the newest chromedriver*.zip by creation time.
    zips = glob.glob('chromedriver*.zip')
    latest = zips[0]
    last_date = os.path.getctime(latest)
    for i in range(1, len(zips)):
        if os.path.getctime(zips[i]) > last_date:
            last_date = os.path.getctime(zips[i])
            latest = zips[i]
    # Unzip into a folder named after the archive (minus extension).
    with zipfile.ZipFile(latest) as z:
        z.extractall(latest.split('.')[0])
    os.chdir(latest.split('.')[0])
    # Refresh every stale chromedriver.exe under the configured dirs.
    for d in Info.PYTHON_DIRS:
        if os.path.getctime(d + '//chromedriver.exe') < last_date:
            try:
                shutil.copy('chromedriver.exe', d + '//chromedriver.exe')
            except Exception:
                # Fix: was a bare except (e.g. permission denied).
                print(
                    'Copy failed due to admin, hopefully copying elsewhere works better...'
                )
    os.chdir(od)


# -------------------- END UPDATE METHODS --------------------
class Submitter:
    """Automates grading submissions on the course web system.

    Logs in, navigates to the homework page, and for every entry in
    *submit_list* fills in grade/comment (plus an optional PDF upload)
    for the matching student id.
    """

    def __init__(self, url, username, password, course_id, homework_id,
                 submit_list):
        # submit_list: iterable of (stu_id, grade, comment, ex_file) tuples.
        self._callback = None
        self._browser = Browser()
        self._url = url
        self._username = username
        self._password = password
        self._course_id = course_id
        self._homework_id = homework_id
        self._submit_list = submit_list

    def _login(self):
        # Fill the login form and submit.
        self._browser.visit(self._url)
        self._browser.fill("i_user", self._username)
        self._browser.fill("i_pass", self._password)
        self._browser.find_by_id("loginButtonId").click()

    def _nvi2course(self):
        # Open the course page; it opens a new window, so close the old one.
        self._browser.find_link_by_partial_text(self._course_id).first.click()
        self._browser.windows.current.close()

    def _nvi2homework(self):
        # Navigate: course assignments list -> the specific homework.
        self._browser.find_link_by_partial_text("课程作业").first.click()
        self._browser.find_link_by_partial_text(
            self._homework_id).first.click()

    def _submit(self, stu_id, grade, comment, ex_file):
        """Submit grade/comment (and optional PDF) for one student row."""
        # Row whose 3rd cell equals the student id; last cell holds the link.
        xpath_str = '//tbody/tr[td[3]=' + stu_id + ']/td[last()]/a'
        self._browser.find_by_xpath(xpath_str).last.click()
        self._browser.fill('cj', grade)
        self._browser.fill('pynr', comment)
        if os.path.splitext(ex_file)[1] == '.pdf':
            # File inputs need the raw selenium element for send_keys.
            self._browser.driver.find_element_by_name('fileupload').send_keys(
                ex_file)
        submit_btn_css = 'div[class="sub-back sub-back-3 absolute"] > input[class="btn"]'
        self._browser.find_by_css(submit_btn_css).first.click()
        # Busy-wait until the confirmation dialog (its close button) appears.
        while not self._browser.is_text_present('关闭', wait_time=1):
            pass
        self._browser.find_by_text('关闭').click()
        # Back out twice: dialog page -> homework list.
        self._browser.back()
        self._browser.back()

    def add_single_task_callback(self, callback):
        # callback([stu_id, grade, comment, ex_file]) fires after each submit.
        self._callback = callback

    def start(self):
        """Run the full flow. NOTE(review): assumes a callback was registered
        via add_single_task_callback; raises TypeError otherwise."""
        self._login()
        self._nvi2course()
        self._nvi2homework()
        for stu_id, grade, comment, ex_file in self._submit_list:
            self._submit(stu_id, grade, comment, ex_file)
            self._callback([stu_id, grade, comment, ex_file])
        self._browser.quit()

    @staticmethod
    def clean():
        # Remove the geckodriver log left in the working directory.
        work_dir = os.getcwd()
        os.remove(work_dir + "/geckodriver.log")
class UserTest(StaticLiveServerTestCase):
    """Browser-driven signup/signin flow tests against the live server."""

    def setUp(self):
        check_permissions()
        self.username = "******"
        create_user(self.username)
        self.browser = Browser()
        self.browser.visit(self.live_server_url)

    def test_signup(self):
        """A new user can sign up and lands on the document list."""
        signup_url = settings.SIGNUP_URL
        self.browser.click_link_by_partial_href(signup_url)
        username = "******"
        password = "******"
        email = "*****@*****.**"
        signup(self.browser, username, password, email)
        user_exists = exists_user(username)
        self.assertTrue(user_exists)
        user = get_user(username)
        # Fix: assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(user.username, username)
        # The stored password is hashed, so it is not compared directly.
        self.assertEqual(user.email, email)
        document_list_url = self.live_server_url + reverse("documents.views.list_documents")
        self.assertEqual(self.browser.url, document_list_url)
        # The navbar profile link shows "@<username>".
        profile_xpath = "/html/body/div/div[1]/div/ul[2]/li[4]/a"
        profile_link = self.browser.find_by_xpath(profile_xpath)
        self.assertEqual(profile_link.value, "@{}".format(username))
        self.browser.quit()

    def test_signin(self):
        """An existing user can log in and lands on the document list."""
        login_url = settings.LOGIN_URL
        self.browser.click_link_by_partial_href(login_url)
        username = self.username
        password = self.username
        login(self.browser, username, password)
        document_list_url = self.live_server_url + reverse("documents.views.list_documents")
        self.assertEqual(self.browser.url, document_list_url)
        profile_xpath = "/html/body/div/div[1]/div/ul[2]/li[4]/a"
        profile_link = self.browser.find_by_xpath(profile_xpath)
        self.assertEqual(profile_link.value, "@{}".format(username))
        self.browser.quit()
def get_buyback_price(isbn):
    """Look up the MBS buyback price for *isbn*.

    Returns the price text, or None when the title has no buyback offer
    (i.e. the price element is absent from the results page).
    """
    browser = Browser('phantomjs')
    url = "http://onlinebuyback.mbsbooks.com/index.php?jde=5920"
    browser.visit(url)
    # Fill the search box and submit the form. (The original bound these
    # to unused locals; the return values are not meaningful.)
    browser.find_by_xpath('//form[@id="frmSearch"]/textarea[@name="FVISBN"]').first.fill(isbn)
    browser.find_by_xpath('//form[@id="frmSearch"]/div[@class="btn"]/input').click()
    try:
        buyback_price = browser.find_by_xpath('//span[@class="priceback"]/b').value
    except Exception:
        # Fix: was a bare except; a missing price element means no offer.
        buyback_price = None
    return buyback_price
def get_ip(page, page_number):
    """Print proxy "ip:port" pairs scraped from xicidaili.com.

    Iterates list pages 1..page-1 and table rows 2..page_number-1 of each.
    """
    browser = Browser(driver_name='chrome')
    try:
        for s in range(1, page):
            try:
                url = 'https://www.xicidaili.com/nn/%s' % s
                browser.visit(url)
                for i in range(2, page_number):
                    ip = browser.find_by_xpath(
                        '/html/body/div[1]/div[2]/table/tbody/tr[%s]/td[2]' % i)
                    port = browser.find_by_xpath(
                        '/html/body/div[1]/div[2]/table/tbody/tr[%s]/td[3]' % i)
                    print(ip.value + ":" + port.value)
            except Exception:
                # Fix: was a bare except; still best-effort per page.
                pass
    finally:
        # Fix: the browser was never closed (driver process leak).
        browser.quit()
def meet_date(url):
    """Return the date string of the meet at *url*, or '' when not found."""
    executable_path = {
        'executable_path': '/Users/andrewlindstrom/Documents/chromedriver'
    }
    browser = Browser('chrome', headless=True, **executable_path)
    # XPath to the <li> holding the meet date (as displayed text).
    date_path = '/html/body/div/div/div/div/div/div/div/ul/li'
    browser.visit(url)
    date_browser = browser.find_by_xpath(date_path)
    try:
        if len(date_browser) != 0:
            # The second text line of the element is the date itself.
            return date_browser[0].text.split('\n')[1]
        return ''
    finally:
        # Fix: the original leaked the browser on the not-found branch.
        browser.quit()
class parser(object):
    """Thin wrapper around a PhantomJS splinter browser for page scraping."""

    def __init__(self):
        self.browser = Browser('phantomjs')

    def get_text_by_xpath(self, xpath):
        """Return the text of the first element matching *xpath*."""
        return self.browser.find_by_xpath(xpath).first.text

    def get_html_by_xpath(self, xpath):
        """Return the inner HTML of the first element matching *xpath*."""
        return self.browser.find_by_xpath(xpath).first.html

    def download_page(self, url):
        """Visit *url* and return the rendered page HTML."""
        self.browser.visit(url)
        return self.browser.html

    def __del__(self):
        # Fix: `self.browser.quit` was referenced but never called, so the
        # PhantomJS process was never actually shut down.
        self.browser.quit()
class Webkit(object):
    """Extract absolute links from a page rendered by PhantomJS."""

    def __init__(self):
        # Map of tag -> attribute that may carry a link.
        self.tag_attr = {
            '*': 'href',
            'frame': 'src',
            'iframe': 'src',
            'object': 'src'
        }

    def get_links(self, url):
        """Return a list of absolute links found on the page at *url*."""
        links = []
        self.browser = Browser('phantomjs')
        self.browser.visit(url)
        # Fix: dict.viewitems() is Python-2-only; items() works on 2.7 and 3.
        for tag, attr in self.tag_attr.items():
            llinks = self.browser.find_by_xpath('//%s[@%s]' % (tag, attr))
            if not llinks:
                continue
            for link in llinks:
                # Idiom: subscript instead of calling __getitem__ directly.
                link = link[attr]
                if not link:
                    continue
                if link == 'about:blank' or link.startswith('javascript:'):
                    continue
                if not link.startswith('http:'):
                    link = urlparse.urljoin(url, link)
                links.append(link)
        return links

    def close(self):
        self.browser.quit()
def firstSearches(search):
    """Google *search* and dump result titles/links to a timestamped CSV."""
    hour = time.strftime("%H:%M:%S")
    date = time.strftime("%d/%m/%y")
    date = date.replace('/', '-')
    browser = Browser('chrome')
    browser.driver.set_window_size(640, 480)
    # Fix idiom: the query is just the words joined with '+'
    # (was a manual index loop with a last-element special case).
    query = '+'.join(search.split())
    url = 'https://www.google.es/search?q=' + query
    browser.visit(url)
    search_results_xpath = '//h3[@class="r"]/a'
    search_results = browser.find_by_xpath(search_results_xpath)
    scraped_data = []
    for search_result in search_results:
        title = search_result.text.encode('utf8')
        link = search_result["href"]
        scraped_data.append((title, link))
    df = pd.DataFrame(data=scraped_data, columns=["Title", "Link"])
    df.to_csv('/home/enrique/Documentos/Busqueda automatica/links_' + date +
              '_' + hour + '.csv')
def extract_from_page(self, url, filename_main) -> bool:
    '''
    Find the article element in the page at *url*, convert its HTML to
    text, and write it to '<filename_main><page#>.txt'. Returns False
    when the article element is missing, True on success.
    '''
    mybrowser = Browser('chrome')
    mybrowser.visit(url)  # + '?printable=1')
    article = mybrowser.find_by_xpath(
        '//*[@id="app"]/div/div/div[2]/div[3]/div[1]')
    # Fix: splinter returns an ElementList, which never compares equal
    # to []; emptiness must be tested via truthiness.
    if not article:
        mybrowser.quit()
        return False
    print('Acquire page and prepare to write to file ' + str(self.pages))
    print('\tthrough url: ' + url)
    # Configure the HTML -> text converter.
    h2t = HTML2Text()
    h2t.ignore_links = True
    h2t.bypass_tables = False
    with open(filename_main + str(self.pages) + '.txt', 'w',
              encoding='utf-8') as f:
        f.write(h2t.handle(article.value))
    self.pages += 1
    mybrowser.quit()
    print('Done crawling from one page...')
    time.sleep(2)
    return True
class WebKit(object):
    '''WebKit engine: extract links from a page rendered by PhantomJS.'''

    def __init__(self):
        # Map of tag -> attribute that may carry a link.
        self.tag_attr_dict = {'*': 'href',
                              'embed': 'src',
                              'frame': 'src',
                              'object': 'data'}

    def extract_links(self, url):
        '''Yield absolute links extracted from the page at *url*.'''
        self.browser = Browser("phantomjs")
        try:
            self.browser.visit(url)
        except Exception as e:
            return
        for tag, attr in self.tag_attr_dict.items():
            link_list = self.browser.find_by_xpath('//%s[@%s]' % (tag, attr))
            if not link_list:
                continue
            for link in link_list:
                link = link[attr]
                if not link:
                    continue
                link = link.strip()
                # Fix: str has no 'startwith' method -- the original typos
                # raised AttributeError on every non-empty link.
                if link == 'about:blank' or link.startswith('javascript:'):
                    continue
                if not link.startswith('http'):
                    link = parse.urljoin(url, link)
                yield link
def main():
    """Interactively fetch one month of daily AQI data for a city and save
    it as '<city>_<month>_aqi.csv' (GB2312-encoded)."""
    # https://splinter.readthedocs.io/en/latest/drivers/chrome.html
    city_name = input("请输入城市名字:")
    month = input("请输入年月(格式为2014-01):")
    browser = Browser('chrome')
    browser.visit('https://www.aqistudy.cn/historydata/daydata.php?city='
                  + city_name + '&month=' + month)
    time.sleep(3)
    rows = browser.find_by_xpath('//table//tbody//tr')
    # Echo the table header row as scraped from the page.
    titles = rows[0]
    title_str = ''
    for title in titles.find_by_xpath('th'):
        title_str += title.html + '\t'
    print(title_str)
    header = ['日期', 'AQI', '质量等级', 'PM2.5', 'PM10', 'SO2', 'CO', 'NO2', 'O3_8h']
    with open('{}_{}_aqi.csv'.format(city_name, month), 'w',
              encoding='GB2312', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(header)
        # Data rows start after the header row.
        for i in range(1, len(rows)):
            columns = rows[i].find_by_xpath('td')
            date = columns[0].html
            aqi = columns[1].html
            # The quality grade is wrapped in a <span>.
            zhiliang = columns[2].find_by_tag('span')[0].html
            pm25 = columns[3].html
            pm10 = columns[4].html
            so2 = columns[5].html
            co = columns[6].html
            no2 = columns[7].html
            o3 = columns[8].html
            row = [date, aqi, zhiliang, pm25, pm10, so2, co, no2, o3]
            print(row)
            writer.writerow(row)
def write_urls(season='2017-2018', file_out='urls'):
    """Write 'meet-name ID=<id>' lines for every meet of *season*, one page
    of collegeswimming.com results at a time.

    Returns a summary string containing the elapsed time in seconds.
    """
    executable_path = {
        'executable_path': '/Users/andrewlindstrom/Documents/chromedriver'
    }
    browser = Browser('chrome', headless=True, **executable_path)
    start = time.time()
    # Output name is "<base> <season>.txt".
    file_out = file_out + ' ' + season
    if file_out[-4:] != '.txt':
        file_out = file_out + '.txt'
    link_path = '/html/body/div/div/div/section/div/div/div/table/tbody/tr/td/a'
    page_url = 'https://www.collegeswimming.com/results/?season=%s&page=%s'
    last_page = max_pages(season)
    with open(file_out, 'w') as out:
        for page in range(1, last_page + 1):
            browser.visit(page_url % (season, str(page)))
            for anchor in browser.find_by_xpath(link_path):
                out.write(anchor.text + ' ID=')
                # The meet id is the last segment of the href path.
                meet_id = anchor['href'].split('/')[-1]
                out.write(meet_id + '\n')
            print('Page #' + str(page) + ' written to ' + file_out)
    end = time.time()
    browser.quit()
    return str('Finished in ' + str(int(end - start)) + 's')
class Webkit(object):
    """Extract absolute links from a page rendered by PhantomJS.

    (Near-duplicate of the other Webkit class in this file; kept in sync.)
    """

    def __init__(self):
        # Map of tag -> attribute that may carry a link.
        self.tag_attr = {
            '*': 'href',
            'frame': 'src',
            'iframe': 'src',
            'object': 'src'
        }

    def get_links(self, url):
        """Return a list of absolute links found on the page at *url*."""
        links = []
        self.browser = Browser('phantomjs')
        self.browser.visit(url)
        # Fix: dict.viewitems() is Python-2-only; items() works on 2.7 and 3.
        for tag, attr in self.tag_attr.items():
            llinks = self.browser.find_by_xpath('//%s[@%s]' % (tag, attr))
            if not llinks:
                continue
            for link in llinks:
                # Idiom: subscript instead of calling __getitem__ directly.
                link = link[attr]
                if not link:
                    continue
                if link == 'about:blank' or link.startswith('javascript:'):
                    continue
                if not link.startswith('http:'):
                    link = urlparse.urljoin(url, link)
                links.append(link)
        return links

    def close(self):
        self.browser.quit()
class WsSimulation():
    """Drives the mush.fun web game in Chrome via cookie-based login."""

    def __init__(self, username, password, area, playername):
        # Credentials plus target server area and player role name.
        self.username = username
        self.password = password
        self.area = area
        self.playername = playername

    def login(self):
        """Log in by injecting u/p/s cookies, then select the player role.

        Returns True on success; False when the login marker never appears
        (after ~6 three-second waits) or role selection raises.
        """
        c = GetLoginInfo(self.username, self.password)
        c.getServer()
        u, p = c.getCookie()
        s = str(self.area)
        playername = self.playername
        self.browser = Browser('chrome')
        # Visit the site first, then add the session cookies and reload
        # so the page picks them up.
        url = "http://mush.fun"
        self.browser.visit(url)
        self.browser.cookies.add({'u': u})
        self.browser.cookies.add({'p': p})
        self.browser.cookies.add({'s': s})
        self.browser.reload()
        errNum = 0
        # Poll for the login marker text, giving up after 6 attempts.
        while not self.browser.is_text_present('登陆'):
            time.sleep(3)
            errNum = errNum + 1
            if errNum > 5:
                return False
        try:
            # Click the entry matching our player name in the role list,
            # then confirm via the SelectRole button.
            playerlist = self.browser.find_by_css('.role-list')
            for item in playerlist:
                if playername in item.text:
                    item.click()
            btnlist = self.browser.find_by_xpath('//li[@command="SelectRole"]')
            btnlist.click()
            # Kick off the in-game startup command.
            self.browser.evaluate_script("WG.SendCmd('tm 云武神启动')")
        except Exception as e:
            return False
        return True

    def close(self):
        # Close the first (main) browser window.
        self.browser.windows[0].close()

    def exec_js(self, code, codetype='raid'):
        """Send *code* to the game: as a 'ws' command or a raid action."""
        try:
            if codetype == 'ws':
                self.browser.evaluate_script("WG.SendCmd('{}');".format(code))
            else:
                self.browser.evaluate_script(
                    "ToRaid.perform('{}');".format(code))
        except Exception as e:
            print(e)

    def get_image(self):
        """Save a full-page screenshot named after the player; return its path."""
        screenshot_path = self.browser.screenshot(
            os.getcwd() + '/absolute_path/{}.png'.format(self.playername),
            full=True)
        return screenshot_path
class JS_Diagram_Converter(object):
    """Automates a JS sequence-diagram web page to export diagrams as SVG."""

    def __init__(self):
        # The browser is created lazily on first conversion.
        self.browser = None

    def _init(self):
        # options_for_browser and url are module-level configuration.
        self.browser = Browser(**options_for_browser)
        self.browser.visit(url)

    def convert_diagram(self, source):
        """Type *source* into the page's textarea and download it as SVG."""
        if self.browser is None:
            self._init()
        s = self.browser.find_by_xpath('//select')[0]
        s.select_by_text("Simple")
        a = self.browser.find_by_text("Download as SVG")[0]
        # Writing to the textarea is a bit clumsy: focus and select-all
        # first so fill() replaces any existing content.
        ta = self.browser.find_by_xpath('//textarea')[0]
        # Fix: dropped pointless f-prefixes (no placeholders in the strings).
        snippet1 = """document.querySelector("textarea").focus()"""
        snippet2 = """document.querySelector("textarea").select()"""
        self.browser.execute_script(snippet1)
        self.browser.execute_script(snippet2)
        # Insert the actual source (slow: splinter simulates typing).
        ta.fill(source)
        s.select_by_text("Simple")
        time.sleep(2)
        a.click()

    def quit(self):
        # Fix idiom: `not x is None` -> `x is not None`.
        if self.browser is not None:
            self.browser.quit()
            self.browser = None
def run(self):
    """Run the b0t: page through results, processing elements until the
    next-page button disappears, then close the window."""
    browser = Browser(self.browser)
    browser.visit(self.url)
    try:
        # Keep processing while a "next" button is still on the page.
        while browser.find_by_xpath(self.next_button_xpath):
            self.process_elements(browser)
            browser.find_by_xpath(self.next_button_xpath).last.click()
    except ElementDoesNotExist:
        # No more pages -- normal termination.
        pass
    try:
        window = browser.windows[0]
        window.close()
    except Exception:
        # Fix: was a bare except; closing remains best-effort.
        pass
def meet_name(url):
    """Return the plain-text name of the meet at *url*, or '' on failure."""
    executable_path = {
        'executable_path': '/Users/andrewlindstrom/Documents/chromedriver'
    }
    browser = Browser('chrome', headless=True, **executable_path)
    # XPath to the element that holds the meet name.
    name_path = '/html/body/div/div/div/div/div/div/div/div/div'
    browser.visit(url)
    matches = browser.find_by_xpath(name_path)
    if len(matches) == 0:
        # Nothing found -- possibly a bad URL; return the empty string.
        browser.quit()
        return ''
    meet = matches[0].text
    browser.quit()
    # Only the first text line is the name itself; drop trailing lines.
    return str(meet.split('\n')[0])
def _login(user_index):
    """Log the user at *user_index* of users_arr into bitcointalk.org.

    Returns the logged-in Browser instance.
    """
    # Fix: were Python-2 print statements. (NOTE(review): the message says
    # 45 sec but the sleep is 47 -- kept as-is; confirm which is intended.)
    print('Wait 45 sec for logon')
    time.sleep(47)
    browser = Browser()
    login = users_arr[user_index][0]
    passw = users_arr[user_index][1]
    print('Login for user %s' % login)
    browser.visit('https://bitcointalk.org/index.php?action=login')
    browser.find_by_name('user').last.fill(login)
    browser.find_by_name('passwrd').last.fill(passw)
    # Session length in minutes.
    browser.find_by_name('cookielength').last.fill('600')
    browser.find_by_xpath('//input[@value="Login"]').last.click()
    return browser
def test_add_name(self):
    """Fill the VIP demo form, add the entry, then exercise Load/Save."""
    driver = Browser('firefox')
    driver.visit('http://www.ranorex.com/web-testing-examples/vip/')
    # First and last name. (Original used odd adjacent-string quoting;
    # the resulting xpath strings are identical.)
    driver.find_by_xpath('//*[@id="FirstName"]').fill('Prasanth')
    driver.find_by_xpath('//*[@id="LastName"]').fill('Patil')
    # Category and gender, then add the entry.
    driver.select_id('Category', 'Music')
    driver.choose('Gender', 'male')
    driver.find_by_xpath('//*[@id="Add"]').click()
    # Exercise the Load and Save buttons as well.
    driver.find_by_xpath('//*[@id="Load"]').click()
    driver.find_by_xpath('//*[@id="Save"]').click()
def render_index():
    """On POST, search Amazon for the submitted term and render the
    result titles; returns None for other methods (as before)."""
    if request.method == 'POST':
        query = request.form['nm']
        browser = Browser('chrome')
        browser.visit("https://amazon.in")
        search_bar = browser.find_by_id("twotabsearchtextbox")
        search_btn = browser.find_by_xpath(
            '//*[@id="nav-search"]/form/div[2]/div/input')
        search_bar.fill(query)
        search_btn.click()
        # Each result title lives in an <h2> with this class list.
        element_path = '//h2[@class="a-size-medium s-inline s-access-title a-text-normal"]'
        title = [element.text for element in browser.find_by_xpath(element_path)]
        return render_template('result.html', res=title)
def open_web(acc, pwd):
    """Log in to the NCU Human System with *acc*/*pwd*, retrying
    (recursively, via enter()) until the credentials are accepted.

    Returns the logged-in Browser instance.
    """
    chrome_options = Options()
    chrome_options.add_argument('disable-infobars')
    b = Browser("chrome", options=chrome_options)
    b.visit("https://cis.ncu.edu.tw/HumanSys/login")
    b.fill("j_username", acc)
    b.fill("j_password", pwd)
    b.find_by_xpath('//button[@type="submit"]').click()
    #check the acc pwd are match or not
    #flag is 0 when its the acc,pwd are not checked
    # Landing on the login-error or enable-account page means the
    # credentials were rejected: close, re-prompt via enter(), and retry
    # recursively; the loop condition then re-checks the NEW browser's url.
    while b.url == 'https://portal.ncu.edu.tw/login?login_error=t' or b.url == 'https://tarot.cc.ncu.edu.tw/UnixAccount/enableaccount.php':
        b.quit()
        os.system(clear)
        print('[Error] 帳號或密碼輸入錯誤!\n \n>> 請重新輸入\n')
        acc, pwd = enter()
        b = open_web(acc, pwd)
    return b
def does_element_exist(browser: Browser, tag: str, substring: str) -> bool:
    """
    Returns True if an element with the substring exists in the DOM,
    and is visible
    """
    for candidate in browser.find_by_xpath(f"//{tag}"):
        # Both conditions must hold: HTML contains the substring AND the
        # element is actually rendered.
        if substring in candidate.html and candidate.visible:
            return True
    return False
def filloutleads(contacturl, appurl):
    '''Fills out lead forms specified in geturls()'''
    # TODO: see if we can get addendumDict inside appDict.
    formDict = dictBuilder(contacturl)
    appDict = dictBuilder(appurl)
    addendumDict = {
        'ctlTypedSignature': 'test',
        'ctlDateSigned': today.strftime('%m/%d/%Y')
    }
    browser = Browser()
    # Contact form: fill and send.
    browser.visit(contacturl)
    browser.fill_form(formDict)
    browser.find_by_text('Send').click()
    # Credit application: best-effort fill (some fields may not match).
    browser.visit(appurl)
    try:
        browser.fill_form(appDict)
    except Exception:
        # Fix: was a bare except; the fill is deliberately best-effort.
        pass
    # Required <select> menus aren't covered by fill_form; pick defaults.
    page = requests.get(appurl)
    soup = BeautifulSoup(page.text, 'lxml')
    for menu in soup.findAll('select'):
        if menu.has_attr('required'):
            if 'CoApplicant' in str(menu.get('id')):
                pass
            elif 'State' in str(menu.get('id')):
                browser.select(menu.get('name'), 'AL')
            elif 'Years' in str(menu.get('id')):
                browser.select(menu.get('name'), '1')
            elif 'Month' in str(menu.get('id')):
                browser.select(menu.get('name'), '1')
    browser.fill_form(addendumDict)
    # Signature checkbox, then submit the application.
    browser.find_by_xpath(
        '''/html/body/section/form/div/fieldset[7]/div[1]/div[4]/div/div/div/label/span'''
    ).click()
    browser.find_by_text('Submit Your Application').click()
    browser.quit()
def buy(fullcode, code, shipan):
    """Log in to xrcj.com and open a stock position for *code* at the
    current price, using strategy *shipan*. Prints progress to stdout."""
    # Fix: were Python-2 print statements (SyntaxError on Python 3).
    print(dangqianjiage(code))
    browser = Browser('firefox')
    browser.visit(cf.get("URL", "url1"))
    browser.click_link_by_partial_text('登录')
    browser.find_by_xpath("//input[@class='w-input telephone']").fill(
        cf.get("Trade", "name"))
    browser.find_by_xpath("//input[@class='w-input password']").fill(
        cf.get("Trade", "password"))
    browser.find_by_xpath("//div[@class='btn-red loginBtn']").click()
    url = 'https://www.xrcj.com/api/trading-sec/create-stock'
    values = {
        'subjectId': fullcode,
        'createMode': '1',
        'strategyId': shipan + '0101-00-1000',
        'strategyType': shipan,
        'marginRate': '10',
        'openAsset': '1',
        'openPrice': dangqianjiage(code),
        'openEntrustCmd': '1',
        'stopSwitch': '0',
        'marginStopLossRate': '',
        'marginStopProfitRate': '',
        'sourceDealingNo': ''
    }
    print(browser.cookies.all()['SESSION'])
    # Reuse the logged-in SESSION cookie for the API call.
    response = post(url, values, browser.cookies.all()['SESSION'])
    if (response.code == 200):
        print('买入' + code + '成功。')
    time.sleep(3)
    browser.quit()
def netflix(sentence, args):
    """Drive netflix.com in Chrome to play a show named in *sentence*.

    Reads the 'netflix' config section (username, profile_name) and the
    password from the system keyring. Returns a status string.
    """
    netflix_config = config.load_config("netflix")
    if netflix_config:
        username = netflix_config['username']
        password = keyring.get_password("netflix", username)
        profile_name = netflix_config["profile_name"]
        if username and password:
            #TODO: change this to zope.testbrowser once it's working in the frontend
            chrome_path = config.load_config("chrome_path")
            # NOTE(review): executable_path is built but never passed to
            # Browser() -- presumably intended; confirm.
            executable_path = {'executable_path': chrome_path}
            browser = Browser("Chrome")
            browser.visit("https:///netflix.com/Login")
            # Log in with the stored credentials.
            email_field = browser.find_by_name("email")
            password_field = browser.find_by_name("password")
            sign_in_xpath = '//*[@id="appMountPoint"]/div/div[2]/div/form[1]/button'
            sign_in_button = browser.find_by_xpath(sign_in_xpath)
            email_field.fill(username)
            password_field.fill(password)
            sign_in_button.click()
            if browser.is_text_present(profile_name):
                # Pick the configured viewing profile.
                profile_button = browser.find_by_text(profile_name)
                profile_button.click()
                #Use ent code to find out if there's a named work of art that was detected
                #search_tab_xpath = '//*[@id="hdPinTarget"]/div/div[1]/div/button'
                #search_tab_xpath = '//*[@id="hdPinTarget"]/div/div[1]/div/button/span[1]'
                search_tab_xpath = '//*[@id="hdPinTarget"]/div/div[1]/div/button'
                search_tab = browser.find_by_xpath(search_tab_xpath)
                search_tab.click()
                if "netflix" in sentence:
                    # Extract the show title from the spoken sentence.
                    if "netflix play " in sentence:
                        show = sentence.split("netflix play ")[1]
                    else:
                        show = sentence.split("netflix ")[1]
                    #search_field = browser.find_by_text("Titles, people, genres")[0]
                    search_field_xpath = '//*[@id="hdPinTarget"]/div/div[1]/div/div/input'
                    search_field = browser.find_by_xpath(search_field_xpath)
                    search_field.fill(show)
                    # Click the first result card, then its play overlay.
                    show_card_xpath = '//*[@id="title-card-undefined-0"]'
                    show_card = browser.find_by_xpath(show_card_xpath)
                    show_card.click()
                    play_icon_xpath = '//*[@id="title-card-undefined-0"]/div[2]/a/div/div'
                    play_icon = browser.find_by_xpath(play_icon_xpath)
                    play_icon.click()
                    # NOTE(review): this xpath hard-codes a title id
                    # (70279852) and likely only works for that one show.
                    play_button_xpath = '//*[@id="70279852"]/div[2]/a/div/div'
                    play_button = browser.find_by_xpath(play_button_xpath)
                    play_button.click()
                    #chromecast_button_xpath = '//*[@id="netflix-player"]/div[4]/section[2]/div[7]/div[2]/button'
                    #chromecast_button = browser.find_by_xpath(chromecast_button_xpath)
                    #chromecast_button.click()
                    return "Done"
            else:
                return "Profile {0} could not be found on the netflix page".format(str(profile_name))
        else:
            return "Netflix username and password could not be retrieved from config and keyring"
    else:
        return "Netflix config not found"
def splinter():
    """Log in to ehire.51job.com, search a CV by user id, download it, and
    print the candidate's name, phone number and e-mail address."""
    browser = Browser()
    url = "http://ehire.51job.com/MainLogin.aspx"
    browser.visit(url)
    time.sleep(1)
    browser.find_by_id('txtMemberNameCN').fill(u'安能聚业')
    browser.find_by_id('txtUserNameCN').fill(u'上海安能聚创供应链')
    browser.find_by_id('txtPasswordCN').fill('aneqc888')
    browser.find_by_id('Login_btnLoginCN').click()
    time.sleep(1)
    browser.find_by_tag('a').click()
    browser.find_by_id('hlResumeSearch').click()
    # id 85798642 未公开
    # 309554553 未下载
    browser.find_by_id('txtUserID').fill('6098724')
    time.sleep(1)
    browser.find_by_id('btnSearchID_leftbtnSearchID').click()
    cvTarget = browser.find_by_xpath('//tr/td/p/span/a[@target="_blank"]')
    if len(cvTarget) == 0:
        # Fix: was a Python-2 print statement.
        print("can not find the cv from this id.")
        return
    cvTarget.click()
    # The CV opens in a new window; switch the driver to it.
    allwindows = browser.windows
    driver = browser.driver
    driver.switch_to_window(allwindows[-1].name)
    UndownloadLink = browser.find_by_id('UndownloadLink')
    if len(UndownloadLink) == 0:
        print("can not find the cv from this id.")
    else:
        UndownloadLink.click()
        time.sleep(1)
        browser.find_by_id('btnCommonOK').click()
    selector = etree.HTML(browser.html)
    lines = selector.xpath('//title')
    if len(lines) != 0:
        print("name:", strip(lines[0].text))
    contents = browser.html.encode("utf-8")
    # NOTE(review): contents is bytes while the patterns are str -- this
    # matches Python-2 semantics; on Python 3 the patterns would need to
    # be bytes as well. Confirm target interpreter.
    print(re.findall(re.compile(
        '''<td height="20">电 话:</td><td height="20" colspan="3">(.*?)<span'''),
        contents)[0])
    # Fix: original read `printre.findall(...)` (missing space between
    # `print` and `re` -> NameError at runtime).
    print(re.findall(re.compile(
        '''E-mail:</td><td height="20" colspan="3"><a href="mailto:(.*?)" class="blue">'''),
        contents)[0])
    # Close every window, last-opened first.
    winNum = len(allwindows)
    for i in range(winNum):
        allwindows[winNum - 1 - i].close()
class AddTask(TestCase):
    # BDD-style test case: drives docs/features/add_task.feature through a
    # splinter 'django' test browser (in-process, no real browser needed).
    # NOTE: the raw-string docstrings on the step_* methods are the regex
    # patterns the feature runner matches steps against — do not edit them.
    def setUp(self):
        self._browser = Browser('django')

    def test_add_task(self):
        # Execute the Gherkin feature; steps dispatch to step_* below.
        filename = os.path.join(os.path.dirname(__file__), '../../docs/features/add_task.feature')
        run(filename, self, verbose=True)

    def step_user_exists(self, username):
        r'user "([^"]+)" exists'
        # Create a staff user whose password equals the username.
        user = UserFactory.build(username=username)
        user.is_staff = True
        user.set_password(username)
        user.save()

    def step_I_visit_page_as_logged_user(self, page, username):
        r'I visit "([^"]+)" as logged user "([^"]+)"'
        # Log in via the admin form, then navigate to the requested page.
        self._browser.visit('/admin/')
        self._browser.fill('username', username)
        self._browser.fill('password', username)
        self._browser.find_by_value('Log in').first.click()
        self._browser.visit(page)

    def step_I_enter_value_in_field(self, value, field):
        r'I enter "([^"]+)" in field "([^"]+)"'
        self._browser.fill(field, value)

    def step_I_press(self, button):
        r'I press button "([^"]+)"'
        self._browser.find_by_name(button).first.click()

    def step_I_see_task_on_tasks_list(self, task):
        r'I see task "([^"]+)" on tasks list'
        # A truthy (non-empty) element list means the task is rendered.
        task_on_list = self._browser.find_by_xpath('//ul[@id="todo"]/li[contains(., "%s")]' % task)
        self.assertTrue(task_on_list)
def main(argv): try: opts, args = getopt.getopt(sys.argv[1:], "f:") except getopt.GetoptError as err: sys.exit(2) global test_config config_file_name = None for o, a in opts: if o == "-f": config_file_name = a else: assert False, "unhandled option" if config_file_name is None: usage(argv) sys.exit(1) config_file = open(config_file_name, "r") test_config = json.load(config_file) browser = Browser() browser.visit(test_config['url']) # log in browser.find_by_id('id_username').fill(test_config['username']) browser.find_by_id('id_password').fill(test_config['password']) browser.find_by_id('dijit_form_Button_0_label').click() l = browser.find_by_xpath("//div[@id='treeNode_reboot']/div") time.sleep(2) l.first.click() # The button labelled Cancel in the code is actually the button which does the Reboot! l = browser.find_by_id("btn_Reboot_Cancel") time.sleep(2) l.first.click() time.sleep(5) browser.quit()
class Google: def __init__(self,headless=True): if headless: self.browser=Browser('phantomjs', user_agent='Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36', desired_capabilities={'loadImages': False}) else: self.browser=Browser() self.browser.driver.set_page_load_timeout(45) self.url = 'http://www.google.co.in/search?{}'.format # (urlencode(dict(tbm='isch',q=keyword))) def __enter__(self): return self def search(self,keyword): try: self.browser.visit(self.url(urlencode(dict(tbm='isch',q=keyword)))) except TimeoutException: return None # self.browser.fill('q', keyword) # button = self.browser.find_by_name('btnG') # button.click() p=self.browser.find_by_xpath('//div[@data-ri="0"]/a')[0] # p=self.browser.find_by_css('div.rg_el:nth-child(1) > a:nth-child(1)') # print p try: imgurl = parse_qs(urlparse(p['href']).query)['imgurl'][0] # print imgurl return imgurl except Exception as e: print e,'error here' return None def teardown(self): self.browser.quit() def __exit__(self, exc_type, exc_value, traceback): self.teardown()
import time from splinter import Browser #------------------------------------------------------------------------------------------------------------- # Open Google Chrome and browse to URL (You can also change "chrome" to "firefox" if you want to use Firefox) browser = Browser('chrome') url = 'http://www.reddit.com/search?q=python' browser.visit(url) #------------------------------------------------------------------------------------------------------------- # Example Output #1 - Print Reddit questions print '\n\nPrinting questions...\n\n' time.sleep(2) questions = browser.find_by_xpath("//*[contains(@class, 'title may-blank ')]") for question in questions: print question.text # Example Output #2 - Print authors print '\n\nPrinting authors...\n\n' time.sleep(2) authors = browser.find_by_xpath("//*[contains(@class, 'author may-blank')]") for author in authors: print author.text #END
class WOS(object):
    """ A little module for exporting Web of Science search results into a txt file """

    def __init__(self, **kwargs):
        """
        Construct a new WOS object given a query, an export file (without ".isi")
        a username and a password for authentication
        eg : WOS(query="TS=(epigenetic*", outfile="epigenetic", user="******", passw="mypassw")
        """
        #defining params
        self.query = kwargs["query"]
        self.outfile = kwargs["outfile"]+".isi"
        try:
            self.user=kwargs["user"]
            self.passw = kwargs["passw"]
        except:
            # Fall back to module-level credentials tuple on any missing key.
            self.user, self.passw = private
        try:
            self.browser_app = kwargs["browser"]
        except:
            self.browser_app = "splinter"
        #using MLV Auth Server
        self.auth_url = "https://apps-webofknowledge-com.fennec.u-pem.fr/WOS_AdvancedSearch_input.do?&product=WOS&search_mode=AdvancedSearch"
        #Firefox Browser
        if self.browser_app == "splinter":
            self.browser = Browser("firefox")
        else:
            self.browser = spynner.Browser()
            self.browser.set_html_parser(PyQuery)
        #self.browser = Browser('zope.testbrowser', ignore_robots=True)
        #Session params
        self.session = None
        self.cookies = {}
        if self.query is None:
            sys.exit("No query provided")
        if "=" not in self.query:
            #or "(" not in self.query
            # Minimal sanity check: every WOS advanced query contains '='.
            logging.warning("Syntax is not WOS compliant. \nCheck Query Syntax")
            sys.exit("Query Syntax Error")
        if self.outfile is None:
            # Normalize the query into a filesystem-safe default file name.
            self.outfile = str(re.sub(re.compile("[^0-9a-zA-Z]+"),"_", self.query))+".isi"
        if self.user is None and self.passw is None:
            self.user, self.passw = private
        logging.info("WOS search parameters:\n\t- query: %s\n\t- outfile: %s\n\t- user: %s\n\t- password: %s" %(self.query, self.outfile, self.user, self.passw))
        # Constructing the object immediately runs the whole pipeline.
        self.run()

    def auth(self):
        """ authentification throught auth_url to get the session id SID """
        #Loading url
        if self.browser_app == "splinter":
            self.browser.visit(self.auth_url)
            self.browser.fill('username', self.user)
            self.browser.fill('password', self.passw)
            self.browser.find_by_name("submit").click()
            self.cookies = self.browser.cookies.all()
        else:
            # NOTE(review): this spynner branch references self.url,
            # self.username and self.password, none of which are set in
            # __init__ (which defines auth_url/user/passw) — it would raise
            # AttributeError if taken; confirm before relying on it.
            self.browser = self.browser.load(self.url)
            self.browser.wk_fill('input[id="username"]',self.username)
            self.browser.wk_fill('input[id="password"]',self.password)
            self.browser.click('input[name="submit"]')
            #~ if self.debug is True:
            #~     print "Proceding to authentication..."
            if "SessionError" in self.session.url :
                self.session.click('a[target="_top"]')
                self.session.wait(random.uniform(1, 3))
        p_url = urlparse(self.browser.url)
        if p_url.netloc == "apps-webofknowledge-com.fennec.u-pem.fr":
            #print p_url.scheme+"//"+p_url.netloc+"/WOS_GeneralSearch_input.do?"+p_url.query
            # Extract product / search_mode / SID from the redirect URL query.
            match = re.match(re.compile("product\=(?P<product>.*?)\&search_mode\=(?P<search_mode>.*?)\&SID=(?P<ssid>.*?)\&preferencesSaved\="), str(p_url.query))
            if match is not None:
                self.product = match.group("product")
                self.ssid = match.group("ssid")
                # Force the Advanced search page even if redirected to General.
                self.search_mode = re.sub("General", "Advanced", match.group("search_mode"))
                #self.search_mode = match.group("search_mode")
                self.search_url = "%s://%s/%s_%s_input.do?product=%s&search_mode=%s&SID=%s" %(p_url.scheme, p_url.netloc, self.product,self.search_mode,self.product,self.search_mode,self.ssid)
                if self.browser_app == "splinter":
                    self.browser.visit(self.search_url)
                    print self.browser.url
                else:
                    self.browser.load(self.search_url)
                    print self.browser.url
                return self
            else:
                return sys.exit("Session Id could not be found")
        else:
            logging.info("No redirection to service")
            return sys.exit("Invalid credentials")

    def launch_search(self):
        """ Filling the query form found into advanced search page """
        logging.info("Launching search")
        if self.browser_app == "splinter":
            self.browser.fill("value(input1)", self.query)
            # Absolute XPath to the "Search" submit button on the form.
            self.browser.find_by_xpath("/html/body/div[1]/form/div[1]/table/tbody/tr/td[1]/div[2]/div[1]/table/tbody/tr/td[1]/span[1]/input").click()
            bs = BeautifulSoup(self.browser.html)
        else:
            self.session.wk_fill('textarea[id="value(input1)"]', self.query)
            self.session.click('input[title="Search"]')
            self.session.wait(random.randint(2,5))
            bs = BeautifulSoup(self.browser.html.encode("utf-8"))
        # The search-history block holds the result count for each query run.
        query_history = bs.find_all("div", {"class":"historyResults"})
        self.nb_search = len(query_history)
        try:
            # Result counts are rendered with thousands separators ("1,234").
            self.nb_results = int(re.sub(",", "", query_history[0].text))
        except IndexError:
            self.nb_results = int(re.sub(",", "", query_history.text))
        print self.nb_results
        logging.warning("Your search \"%s\" gave %i results"%(self.query, self.nb_results))
        logging.info("Your SSID is : %s" %self.ssid)
        if self.browser_app == "splinter":
            self.browser.click_link_by_partial_href('/summary.do?')
        else:
            self.session.click('a[title="Click to view the results"]',wait_load=True)
        print urlparse(self.browser.url).query
        # Pull doc and qid out of the results-page URL.
        match = re.search(re.compile("product=WOS&doc\=(?P<doc>.*?)\&qid\=(?P<qid>.*?)&SID"), urlparse(self.browser.url).query)
        if match is not None:
            print match.group()
            self.doc, self.qid = match.group("doc"), match.group('qid')
            print self.doc, self.qid
            return self
        else:
            # NOTE(review): parse_params is not defined in this class as
            # shown — presumably defined elsewhere; verify.
            self.doc, self.qid = self.parse_params()
            return self

    def load_results(self, markFrom, markTo, i):
        """ Load_results(markFrom, markTo) 500 by 500 given the nb of results """
        logging.info("loading results")
        #print "exporting"
        #p_url0= "http://apps.webofknowledge.com/AutoSave_UA_output.do?action=saveForm&SID=%s&product=UA&search_mode=output" %self.ssid
        #r0 = requests.post(p_url0, headers= headers, cookies=self.cookies)
        # print p_url0
        #print r0
        #p_url1= "http://apps.webofknowledge.com/AutoSave_UA_output.do?action=saveForm&SID=%s&product=UA&search_mode=results" %self.ssid
        # print p_url1
        #r1 = requests.post(p_url1, headers= headers, cookies=self.cookies)
        #print r1
        r_url = "https://apps-webofknowledge-com.fennec.u-pem.fr/summary.do?product=WOS&doc=1&qid="+self.qid+"&SID="+self.ssid+"&search_mode=AdvancedSearch"
        post_url = "https://apps-webofknowledge-com.fennec.u-pem.fr/OutboundService.do?action=go&&"
        #r2 = requests.post()
        # Mimic a real Firefox request so the export endpoint accepts us.
        header={
            'Host': 'apps-webofknowledge-com.fennec.u-pem.fr',
            'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:29.0) Gecko/20100101 Firefox/29.0',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Language': 'fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3',
            'Accept-Encoding': 'gzip, deflate',
            'DNT': 1,
            'Referer': 'https://apps-webofknowledge-com.fennec.u-pem.fr/summary.do?product=WOS&doc=1&qid=%s&SID=%s&search_mode=AdvancedSearch'%(self.qid, self.ssid),
            'Connection': 'keep-alive'
        }
        # markTo = 500
        # markFrom = 1
        # Form fields for the "save to file" outbound service.
        # NOTE(review): 'product' appears twice and 'markFrom' is hard-coded
        # to 1 while 'mark_from' uses the parameter — kept as-is; confirm
        # against the real endpoint before changing.
        data = {
            'SID': self.ssid,
            'colName':'WOS',
            'count_new_items_marked':0,
            'displayCitedRefs':'true',
            'displayTimesCited':'true',
            'fields_selection':'USAGEIND AUTHORSIDENTIFIERS ACCESSION_NUM FUNDING SUBJECT_CATEGORY JCR_CATEGORY LANG IDS PAGEC SABBR CITREFC ISSN PUBINFO KEYWORDS CITTIMES ADDRS CONFERENCE_SPONSORS DOCTYPE CITREF ABSTRACT CONFERENCE_INFO SOURCE TITLE AUTHORS',
            'filters':'USAGEIND AUTHORSIDENTIFIERS ACCESSION_NUM FUNDING SUBJECT_CATEGORY JCR_CATEGORY LANG IDS PAGEC SABBR CITREFC ISSN PUBINFO KEYWORDS CITTIMES ADDRS CONFERENCE_SPONSORS DOCTYPE CITREF ABSTRACT CONFERENCE_INFO SOURCE TITLE AUTHORS',
            'format':'saveToFile',
            'locale':'en_US',
            'markFrom':1,
            'markTo':markTo,
            'mark_from':markFrom,
            'product':'WOS',
            'mark_to':markTo,
            'mode':'OpenOutputService',
            'product':'WOS',
            'qid':self.qid,
            'startYear':'2015',
            'endYear':'2014',
            #rurl:'http%3A%2F%2Fapps.webofknowledge.com%2Fsummary.do%3FSID%3DT1WYtnvIngPkHzI4ShI%26product%3DWOS%26doc%3D1%26qid%3D1%26search_mode%3DAd
            'rurl':urllib.quote_plus(r_url),
            'save_options':'othersoftware',
            'search_mode':'AdvancedSearch',
            'selectedIds':'',
            'sortBy':'PY.D;LD.D;SO.A;VL.D;PG.A;AU.A',
            'value(record_select_type)':'range',
            'viewType':'summary',
            'view_name':'WOS-summary',
        }
        r = requests.get(post_url, params=data,headers=header, cookies=self.cookies)
        #redirects to
        #url = "http://ets.webofknowledge.com/ETS/ets.do?"
        data_directory = self.outfile.split('.isi')[0]
        # Create exported_data/<name>/ if needed; broad excepts swallow
        # "already exists" errors (EAFP on os.mkdir).
        try:
            os.mkdir("exported_data")
            print "creating directory exported_data"
        except:
            print "exported_data already exists"
            pass
        try:
            os.mkdir("exported_data/"+data_directory)
            print "creating directory "+data_directory
        except:
            print data_directory +" already exists"
            pass
        # Follow the redirect target and stream the chunk to its own file.
        final_r = requests.get(r.url, cookies=self.cookies, stream=True)
        with open( "exported_data/"+data_directory+'/'+data_directory+'_'+str(i) +'.isi' , 'w') as f:
            final_r.text
            f.write(final_r.text.encode('utf-8'))
        return self.outfile

    def export(self):
        """Writing results into outfile (defaut is normalized query)"""
        start_time = time.time()
        #open(self.outfile, 'w').close()
        # Slice the result set into [1-500], [501-1000], ... ranges.
        l = list(range(0, self.nb_results, 500))
        l.append(self.nb_results)
        logging.info("Exporting %s 500 by 500..." %self.nb_results)
        for i,n in enumerate(l):
            if l[i]+1 < self.nb_results:
                self.load_results(l[i]+1, l[i+1],str(l[i]+1)+'-'+str(l[i+1]))
        total = time.time() - start_time, "seconds"
        # raw_file = open(self.outfile, 'r')
        # raw_file_data = raw_file.read().decode("utf-8-sig").encode("utf-8")
        # nb_occurence = len(raw_file_data.split("\n\n"))-1
        # NOTE(review): format string below has three placeholders but only
        # two arguments — this line raises TypeError if reached; verify.
        logging.info("Query \"%s\" had %d results: %d has been exported" %(self.query, self.nb_results))
        logging.info("Sucessfully stored in directory : %s\n" %(self.outfile))
        #logging.info("Execution total time:"+str(" ".join(total)))
        return

    def run(self):
        """ Generic method that encapsulates the WOS extract process """
        # Pipeline: authenticate, run the search, export, close the browser.
        self.auth()
        self.launch_search()
        self.export()
        self.browser.close()
        return
class SplinterBrowserDriver(BaseBrowserDriver):
    """
    This is a BrowserDriver for splinter
    (http://splinter.cobrateam.info)
    that implements the BaseBrowserDriver API.

    To use it, you must have splinter installed on your env.

    For itself it's a browser driver that supports multiple browsing
    technologies such as selenium, phantomjs, zope, etc.
    """
    # Identifier used by the driver registry/config to select this driver.
    driver_name = 'splinter'

    def __init__(self):
        super(SplinterBrowserDriver, self).__init__()
        if not splinter_available:
            raise ImportError(
                "In order to use splinter Base Driver you have to install it. "
                "Check the instructions at http://splinter.cobrateam.info")
        self._browser = Browser(config.default_browser)

    def _handle_empty_element_action(self, element):
        # Guard shared by @element_action-decorated methods: fail loudly
        # instead of acting on an empty element list.
        if not element:
            raise ActionNotPerformableException(
                "The action couldn't be perfomed because the element couldn't "
                "be found; Try checking if your element"
                "selector is correct and if the page is loaded properly.")

    @property
    def page_url(self):
        return self._browser.url

    @property
    def page_source(self):
        return self._browser.html

    @property
    def page_title(self):
        return self._browser.title

    def open_url(self, url):
        # Uses the underlying selenium driver directly (not browser.visit).
        self._browser.driver.get(url)

    def quit(self):
        return self._browser.quit()

    def is_element_visible(self, element):
        return element.visible

    def get_element_text(self, element):
        return element.text

    # Element lookups: thin delegations to splinter's find_by_* API.
    def get_element_by_xpath(self, selector):
        return self._browser.find_by_xpath(selector)

    def get_element_by_css(self, selector):
        return self._browser.find_by_css(selector)

    def get_element_by_id(self, selector):
        return self._browser.find_by_id(selector)

    def get_element_by_tag(self, selector):
        return self._browser.find_by_tag(selector)

    # Element interactions: wrapped by @element_action, which presumably
    # routes through _handle_empty_element_action — defined in the base
    # module, verify there.
    @element_action
    def type(self, element, text, slowly=False):
        return element.type(text, slowly)

    @element_action
    def fill(self, element, text):
        return element.fill(text)

    @element_action
    def clear(self, element):
        # Clearing is implemented as filling with the empty string.
        self.fill(element, '')

    @element_action
    def click(self, element):
        return element.click()

    @element_action
    def check(self, element):
        return element.check()

    @element_action
    def uncheck(self, element):
        return element.uncheck()

    @element_action
    def mouse_over(self, element):
        return element.mouse_over()

    @element_action
    def mouse_out(self, element):
        return element.mouse_out()

    def reload(self):
        return self._browser.reload()

    def go_back(self):
        return self._browser.back()

    def go_forward(self):
        return self._browser.forward()

    def execute_script(self, script):
        # NOTE: evaluate_script returns the expression's value, which is
        # what wait_pageload below relies on.
        return self._browser.evaluate_script(script)

    def get_iframe(self, iframe_id):
        return self._browser.get_iframe(iframe_id)

    def get_alert(self):
        return self._browser.get_alert()

    def attach_file(self, input_name, file_path):
        return self._browser.attach_file(input_name, file_path)

    def wait_pageload(self, timeout=30):
        # Poll document.readyState every 50ms until 'complete' or timeout.
        wait_interval = 0.05
        elapsed = 0
        while self.execute_script('document.readyState') != 'complete':
            self.wait(wait_interval)
            elapsed += wait_interval
            if elapsed > timeout:
                raise PageNotLoadedException

    def click_and_wait(self, element, timeout=30):
        self.click(element)
        self.wait_pageload(timeout)
# browser.fill('clientSearchString', 'jason\'s client') # browser.find_by_name('search').first.click() # browser.find_by_value('GO').first.click() waitNavigation = ctypes.windll.user32.MessageBoxW(0, "Now navigate the scenario/obligor/facility page and then click OK.", "", 0) if waitNavigation == 2: break # noddd = browser.find_by_tag('form').first.find_by_tag('td').first.find_by_tag('table').first noddd = browser.find_by_tag('form').first noddStr = noddd.html.replace('\n','\t').replace('\'','\\\'') scenarioName = browser.find_by_xpath('//body/table/tbody/tr/td/table/tbody/tr/td/table/tbody/tr/td[@class="rowHeader"]')[2].text if noddStr.find('Scenario Status') != -1: file = open('scenario_' + scenarioName + '.xml', 'w+') file.write(noddStr) file.close() elif noddStr.find('Correlation Information') != -1: file = open('obligor_' + scenarioName + '.xml', 'w+') file.write(noddStr) file.close() else:
class DataNodeTestCase(LiveServerTestCase):
    """
    A set of tests to test all interaction related to the creation and
    deletion of nodes and relationships. Also, we test export the data in
    two formats: gexf and csv.
    """

    def setUp(self):
        # Fresh browser + signed-in user 'bob' for every test.
        self.browser = Browser()
        socket.setdefaulttimeout(30)
        signup(self, 'bob', '*****@*****.**', 'bob_secret')
        signin(self, 'bob', 'bob_secret')

    def tearDown(self):
        logout(self)
        self.browser.quit()

    @classmethod
    def tearDownClass(cls):
        sleep(10)  # It needs some time for close the LiverServerTestCase
        super(DataNodeTestCase, cls).tearDownClass()

    def test_data_node_addition(self):
        # Create a graph/schema/type/node, then delete the node and check
        # the node counter drops to zero.
        create_graph(self)
        create_schema(self)
        create_type(self)
        create_data(self)
        # Check the node name
        self.browser.find_by_xpath("//td[@class='dataList']/a[@class='edit']").first.click()
        text = self.browser.find_by_id('propertiesTitle').first.value
        spin_assert(lambda: self.assertEqual(text, 'Properties'))
        self.browser.find_by_xpath("//span[@class='buttonLinkOption buttonLinkRight']/a").first.click()
        self.browser.choose('confirm', '1')
        self.browser.find_by_value('Continue').first.click()
        text = self.browser.find_by_xpath("//div[@class='indent']/div").first.value
        Graph.objects.get(name="Bob's graph").destroy()
        spin_assert(lambda: self.assertEqual(text, 'Nodes: 0'))

    def test_data_node_addition_rel_add_del(self):
        # Create two nodes, link them through an allowed relationship,
        # delete the relationship and verify the counter reads zero.
        create_graph(self)
        create_schema(self)
        create_type(self)
        create_node(self, "Bob")
        create_node(self, "Alice")
        # We create a allowed relation
        js_code = "$('a#schema-link')[0].click();"
        self.browser.execute_script(js_code)
        self.browser.find_by_id('allowedRelations').first.click()
        self.browser.select('source', '1')
        self.browser.find_by_name('name').fill("Bob's rel")
        self.browser.select('target', '1')
        self.browser.find_by_id('id_description').fill("This the allowed relationship for Bob's graph")
        self.browser.find_by_value('Save Type').first.click()
        spin_assert(lambda: self.assertEqual(
            self.browser.title, "SylvaDB - Bob's graph"))
        # We create the link between the nodes
        self.browser.find_by_id('dataMenu').first.click()
        self.browser.find_by_xpath("//td[@class='dataActions']/a[@class='dataOption list']").first.click()
        self.browser.find_by_xpath("//td[@class='dataList']/a[@class='edit']").first.click()
        self.browser.find_by_xpath("//li[@class='token-input-input-token']/input").first.fill('Alice')
        self.browser.is_element_present_by_id("id_user_wait", 5)
        self.browser.find_by_xpath("//div[@class='token-input-dropdown']//li[@class='token-input-dropdown-item2 token-input-selected-dropdown-item']/b").first.click()
        self.browser.find_by_value("Save Bob's type").first.click()
        # Delete the relationship
        self.browser.find_by_xpath("//td[@class='dataList']/a[@class='edit']").first.click()
        self.browser.find_by_xpath("//span[@class='all-relationships incoming-relationships i_bobs_rel1-relationships']//a[@class='delete-row initial-form floating']").first.click()
        self.browser.find_by_value("Save Bob's type").first.click()
        self.browser.find_link_by_href('/graphs/bobs-graph/').first.click()
        text = self.browser.find_by_xpath("//div[@class='flags-block']/span[@class='graph-relationships']").first.value
        spin_assert(lambda: self.assertEqual(text, "0 relationships"))
        Graph.objects.get(name="Bob's graph").destroy()

    def test_node_type_deletion_keeping_nodes(self):
        # Delete a node type but choose to KEEP its nodes; nodes survive,
        # schema becomes empty.
        create_graph(self)
        create_schema(self)
        create_type(self)
        # Adding relationship to the type
        self.browser.find_by_id('allowedRelations').first.click()
        self.browser.select('source', '1')
        self.browser.find_by_name('name').fill("Bob's rel")
        self.browser.select('target', '1')
        self.browser.find_by_id('id_description').fill(
            'The loved relationship')
        self.browser.find_by_value('Save Type').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='form-row indent']/label").first.value
        spin_assert(lambda: self.assertNotEqual(text.find("Bob's rel"), -1))
        # Creating nodes
        create_node(self, 'Bob')
        create_node(self, 'Alice')
        # Creating relationship between nodes
        self.browser.find_by_id('dataMenu').first.click()
        self.browser.find_by_xpath("//td[@class='dataActions']/a[@class='dataOption list']").first.click()
        self.browser.find_by_xpath("//td[@class='dataList']/a[@class='edit']").first.click()
        self.browser.find_by_xpath("//li[@class='token-input-input-token']/input").first.fill('Alice')
        self.browser.is_element_present_by_id("id_user_wait", wait_time=5)
        self.browser.find_by_xpath("//div[@class='token-input-dropdown']//li[@class='token-input-dropdown-item2 token-input-selected-dropdown-item']/b").first.click()
        self.browser.find_by_value("Save Bob's type").first.click()
        self.browser.find_link_by_href('/graphs/bobs-graph/').first.click()
        text = self.browser.find_by_xpath("//div[@class='flags-block']/span[@class='graph-relationships']").first.value
        spin_assert(lambda: self.assertEqual(text, "1 relationships"))
        # Deleting type
        js_code = "$('a#schema-link')[0].click();"
        self.browser.execute_script(js_code)
        self.browser.find_by_xpath("//fieldset[@class='module aligned wide model']/h2/a").first.click()
        self.browser.find_by_xpath("//span[@class='buttonLinkOption buttonLinkRight']/a[@class='delete']").first.click()
        text = self.browser.find_by_xpath(
            "//p/label[@for='id_option_0']").first.value
        spin_assert(lambda: self.assertNotEqual(text.find(
            "We found some elements of this type"), -1))
        # Keeping nodes
        self.browser.choose('option', 'no')
        self.browser.find_by_value('Continue').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='body-inside']/p").first.value
        spin_assert(lambda: self.assertEqual(
            text, 'There are no types defined yet.'))
        # Checking
        self.browser.find_link_by_href('/graphs/bobs-graph/').first.click()
        text = self.browser.find_by_xpath("//div[@class='flags-block']/span[@class='graph-nodes']").first.value
        spin_assert(lambda: self.assertEqual(text, "2 nodes"))
        text = self.browser.find_by_xpath("//div[@class='flags-block']/span[@class='graph-relationships']").first.value
        spin_assert(lambda: self.assertEqual(text, "1 relationships"))
        text = self.browser.find_by_xpath(
            "//div[@class='graph-empty-message']").first.value
        spin_assert(lambda: self.assertNotEqual(
            text.find("Your Schema is empty."), -1))
        Graph.objects.get(name="Bob's graph").destroy()

    def test_node_type_deletion_deleting_nodes(self):
        # Delete a node type and its nodes; both counters drop to zero.
        create_graph(self)
        create_schema(self)
        create_type(self)
        # Adding relationship to the type
        self.browser.find_by_id('allowedRelations').first.click()
        self.browser.select('source', '1')
        self.browser.find_by_name('name').fill("Bob's rel")
        self.browser.select('target', '1')
        self.browser.find_by_id('id_description').fill(
            'The loved relationship')
        self.browser.find_by_value('Save Type').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='form-row indent']/label").first.value
        spin_assert(lambda: self.assertNotEqual(text.find("Bob's rel"), -1))
        # Creating nodes
        create_node(self, 'Bob')
        create_node(self, 'Alice')
        # Creating relationship between nodes
        self.browser.find_by_id('dataMenu').first.click()
        self.browser.find_by_xpath("//td[@class='dataActions']/a[@class='dataOption list']").first.click()
        self.browser.find_by_xpath("//td[@class='dataList']/a[@class='edit']").first.click()
        self.browser.find_by_xpath("//li[@class='token-input-input-token']/input").first.fill('Alice')
        self.browser.is_element_present_by_id("id_user_wait", wait_time=5)
        self.browser.find_by_xpath("//div[@class='token-input-dropdown']//li[@class='token-input-dropdown-item2 token-input-selected-dropdown-item']/b").first.click()
        self.browser.find_by_value("Save Bob's type").first.click()
        self.browser.find_link_by_href('/graphs/bobs-graph/').first.click()
        text = self.browser.find_by_xpath("//div[@class='flags-block']/span[@class='graph-relationships']").first.value
        spin_assert(lambda: self.assertEqual(text, "1 relationships"))
        # Deleting type
        js_code = "$('a#schema-link')[0].click();"
        self.browser.execute_script(js_code)
        self.browser.find_by_xpath("//fieldset[@class='module aligned wide model']/h2/a").first.click()
        self.browser.find_by_xpath("//span[@class='buttonLinkOption buttonLinkRight']/a[@class='delete']").first.click()
        text = self.browser.find_by_xpath(
            "//p/label[@for='id_option_0']").first.value
        spin_assert(lambda: self.assertNotEqual(text.find(
            "We found some elements of this type"), -1))
        # Deleting nodes
        self.browser.choose('option', 'de')
        self.browser.find_by_value('Continue').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='body-inside']/p").first.value
        spin_assert(lambda: self.assertEqual(
            text, 'There are no types defined yet.'))
        # Checking
        self.browser.find_link_by_href('/graphs/bobs-graph/').first.click()
        text = self.browser.find_by_xpath("//div[@class='flags-block']/span[@class='graph-nodes']").first.value
        spin_assert(lambda: self.assertEqual(text, "0 nodes"))
        text = self.browser.find_by_xpath("//div[@class='flags-block']/span[@class='graph-relationships']").first.value
        spin_assert(lambda: self.assertEqual(text, "0 relationships"))
        Graph.objects.get(name="Bob's graph").destroy()

    def test_data_node_clone(self):
        # Clone a node via "save as new" and check both original and clone
        # appear in the content table.
        create_graph(self)
        create_schema(self)
        create_type(self)
        create_data(self)
        original_name = self.browser.find_by_xpath("//table[@id='content_table']/tbody/tr/td")[1].value
        # Clone the node
        self.browser.find_by_xpath("//table[@id='content_table']/tbody/tr/td/a[@class='edit']").first.click()
        self.browser.find_by_name('Name').first.fill(original_name + " clone")
        self.browser.find_by_name("as-new").first.click()
        # Check that two nodes exist
        original_name = self.browser.find_by_xpath("//table[@id='content_table']/tbody/tr/td")[1].value
        clone_name = self.browser.find_by_xpath("//table[@id='content_table']/tbody/tr/td")[4].value
        spin_assert(lambda: self.assertEqual(original_name, "Bob's node"))
        spin_assert(lambda: self.assertEqual(clone_name, "Bob's node clone"))
        Graph.objects.get(name="Bob's graph").destroy()

    def test_sigma_visualization_in_node_view(self):
        # Verify the sigma.js instance in the node view renders both nodes.
        create_graph(self)
        create_schema(self)
        create_type(self)
        # Adding relationship to the type
        self.browser.find_by_id('allowedRelations').first.click()
        self.browser.select('source', '1')
        self.browser.find_by_name('name').fill("Bob's rel")
        self.browser.select('target', '1')
        self.browser.find_by_id('id_description').fill(
            'The loved relationship')
        self.browser.find_by_value('Save Type').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='form-row indent']/label").first.value
        spin_assert(lambda: self.assertNotEqual(text.find("Bob's rel"), -1))
        # Creating nodes
        create_node(self, 'Bob')
        create_node(self, 'Alice')
        # Creating relationship between nodes
        self.browser.find_by_id('dataMenu').first.click()
        self.browser.find_by_xpath("//td[@class='dataActions']/a[@class='dataOption list']").first.click()
        self.browser.find_by_xpath("//td[@class='dataList']/a[@class='edit']").first.click()
        self.browser.find_by_xpath("//li[@class='token-input-input-token']/input").first.fill('Alice')
        self.browser.is_element_present_by_id("id_user_wait", wait_time=5)
        self.browser.find_by_xpath("//div[@class='token-input-dropdown']//li[@class='token-input-dropdown-item2 token-input-selected-dropdown-item']/b").first.click()
        self.browser.find_by_value("Save Bob's type").first.click()
        # Checking
        self.browser.find_by_xpath("//table[@id='content_table']/tbody/tr/td/a[@title='View node']/p[text()='Alice']").first.click()
        self.browser.is_element_present_by_id('wait_for_js', 3)
        # Read the node count straight out of the sigma.js instance.
        js_code = '''
            var instance = sigma.instances(0);
            sylva.test_node_count = instance.graph.nodes().length;
            '''
        self.browser.execute_script(js_code)
        text = self.browser.evaluate_script('sylva.test_node_count')
        spin_assert(lambda: self.assertEqual(text, 2))
        Graph.objects.get(name="Bob's graph").destroy()

    def test_graph_export_gexf(self):
        # Export the graph as GEXF via an authenticated HTTP request
        # (session cookies copied from the browser) and round-trip the file.
        create_graph(self)
        create_schema(self)
        create_type(self)
        create_data(self)
        self.browser.find_by_id('toolsMenu').first.click()
        cookies = {self.browser.cookies.all()[0]["name"]:
                   self.browser.cookies.all()[0]["value"],
                   self.browser.cookies.all()[1]["name"]:
                   self.browser.cookies.all()[1]["value"]}
        result = requests.get(self.live_server_url + '/tools/bobs-graph/export/gexf/', cookies=cookies)
        spin_assert(lambda: self.assertEqual(
            result.headers['content-type'], 'application/xml'))
        spin_assert(lambda: self.assertEqual(
            self.browser.status_code.is_success(), True))
        fw = open('sylva/sylva/tests/files/bobs-graph.gexf', 'w')
        fw.write(result.content)
        fw.close()
        f = open('sylva/sylva/tests/files/bobs-graph.gexf')
        xmlFile = ""
        for line in f:
            xmlFile += line
        f.close()
        spin_assert(lambda: self.assertEqual(xmlFile, result.content))
        Graph.objects.get(name="Bob's graph").destroy()

    def test_graph_export_csv(self):
        # Export the graph as a zip of CSVs and verify each member
        # round-trips through disk unchanged.
        create_graph(self)
        create_schema(self)
        create_type(self)
        create_data(self)
        self.browser.find_by_id('toolsMenu').first.click()
        cookies = {self.browser.cookies.all()[0]["name"]:
                   self.browser.cookies.all()[0]["value"],
                   self.browser.cookies.all()[1]["name"]:
                   self.browser.cookies.all()[1]["value"]}
        result = requests.get(self.live_server_url + '/tools/bobs-graph/export/csv/', cookies=cookies)
        spin_assert(lambda: self.assertEqual(
            result.headers['content-type'], 'application/zip'))
        spin_assert(lambda: self.assertEqual(
            self.browser.status_code.is_success(), True))
        test_file = StringIO(result.content)
        csv_zip = ZipFile(test_file)
        for name in csv_zip.namelist():
            fw = open('sylva/sylva/tests/files/' + name, 'w')
            fw.write(csv_zip.read(name))
            fw.close()
        for name in csv_zip.namelist():
            f = open('sylva/sylva/tests/files/' + name)
            csvFile = ""
            for line in f:
                csvFile += line
            f.close()
            spin_assert(lambda: self.assertEqual(csv_zip.read(name), csvFile))
        Graph.objects.get(name="Bob's graph").destroy()
import random import pdb from splinter import Browser reload(sys) sys.setdefaultencoding('utf-8') url = 'http://saishi.zgzcw.com/html/310xingxiang.html/' output = 'data/zgzcw_bet_info.dat' if __name__ == '__main__': br = Browser('firefox') br.visit(url) time.sleep(3) tableElement = br.find_by_xpath('//*[@id="zcxxt"]').first tableHtml = '<table>' + tableElement.html + '</table>' table = BeautifulSoup(tableHtml, 'html5lib') tbody = table.find('tbody') trs = tbody.find_all('tr') print 'table lines: %d' % len(trs) fo = open(output, 'w') line = 0 try: for tr in trs: line += 1 tds = tr.find_all('td') betNum = unicode(tds[0].text) if betNum < '2008001': continue
# NOTE(review): Django LiveServerTestCase suite exercising the collaborator
# permission system with two users: "bob" creates a graph (plus schema/type/
# data where needed), grants "alice" CREATE_COLLAB and the permission under
# test; "alice" then signs in and the test asserts either a "403" heading
# (permission absent) or the expected page content (permission granted).
# Original formatting was lost — the whole class is collapsed onto a few
# physical lines; code left byte-identical below.
# Helpers (signup, signin, logout, create_graph, create_schema, create_type,
# create_data, add_permission) and the permission constants (CREATE_COLLAB,
# GRAPH_CHANGE, SCHEMA_VIEW, SCHEMA_CHANGE, DATA_VIEW, DATA_CHANGE, DATA_ADD,
# DATA_DELETE) are defined elsewhere in this file/module — not visible here.
# Each test ends with Graph.objects.get(...).destroy() as manual cleanup.
class CollaboratorTestCase(LiveServerTestCase): """ These tests check the permissions system creating two users: one creates a graph and gives permissions to the other, the other tests the permissions given. On these tests, is the developer who must keep the logic of the permissions. If he/she wants to give any permission to an user he/she must first creates the usar and then creates the collaboration adding the basic perrmission: 'chk_graph_view_graph'. The name of the tests self-explain the behaviour of them. """ def setUp(self): self.browser = Browser() def tearDown(self): logout(self) self.browser.quit() def test_graph_view_without_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) self.browser.find_by_xpath("//div[@class='dashboard-graphs']/div/div/span[@class='graph-title']/a").first.click() logout(self) signin(self, 'alice', 'alice_secret') self.browser.visit(self.live_server_url + '/graphs/bobs-graph/') text = self.browser.find_by_xpath( "//div[@class='heading']/h1").first.value self.assertNotEqual(text.find("403"), -1) Graph.objects.get(name="Bob's graph").destroy() def test_graph_view_with_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) self.browser.find_by_xpath("//div[@class='dashboard-graphs']/div/div/span[@class='graph-title']/a").first.click() add_permission(self, 'alice', CREATE_COLLAB) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() text = self.browser.find_by_xpath("//div[@class='graph-item']/span[@class='graph-title']/a").first.value self.assertEqual(text, "Bob's graph") Graph.objects.get(name="Bob's graph").destroy() 
def test_graph_change_without_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) self.browser.find_by_xpath("//div[@class='dashboard-graphs']/div/div/span[@class='graph-title']/a").first.click() add_permission(self, 'alice', CREATE_COLLAB) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath("//div[@class='graph-item']/span[@class='graph-title']/a").first.click() text = self.browser.find_by_xpath( "//div[@class='heading']/h1").first.value self.assertNotEqual(text.find("403"), -1) Graph.objects.get(name="Bob's graph").destroy() def test_graph_change_with_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) self.browser.find_by_xpath("//div[@class='dashboard-graphs']/div/div/span[@class='graph-title']/a").first.click() add_permission(self, 'alice', CREATE_COLLAB) add_permission(self, 'alice', GRAPH_CHANGE) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath("//div[@class='graph-item']/span[@class='graph-title']/a").first.click() self.browser.find_by_xpath( "//input[@id='id_name']").first.fill("Alice's graph") self.browser.find_by_xpath( "//form/input[@type='submit']").first.click() text = self.browser.find_by_xpath("//div[@class='graph-item']/span[@class='graph-title']/a").first.value self.assertEqual(text, "Alice's graph") Graph.objects.get(name="Alice's graph").destroy() def test_schema_view_without_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) 
signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) add_permission(self, 'alice', CREATE_COLLAB) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath( "//nav[@class='menu']/ul/li[3]/a").first.click() text = self.browser.find_by_xpath( "//div[@class='heading']/h1").first.value self.assertNotEqual(text.find("403"), -1) Graph.objects.get(name="Bob's graph").destroy() def test_schema_view_with_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) add_permission(self, 'alice', CREATE_COLLAB) add_permission(self, 'alice', SCHEMA_VIEW) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath( "//nav[@class='menu']/ul/li[3]/a").first.click() text = self.browser.find_by_xpath( "//fieldset[@class='module aligned wide model']/h2/a").first.value self.assertEqual(text, "Bob's type") Graph.objects.get(name="Bob's graph").destroy() def test_schema_change_without_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) add_permission(self, 'alice', CREATE_COLLAB) add_permission(self, 'alice', SCHEMA_VIEW) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath( "//nav[@class='menu']/ul/li[3]/a").first.click() self.browser.find_by_xpath("//fieldset[@class='module aligned wide model']/h2/a").first.click() 
text = self.browser.find_by_xpath( "//div[@class='heading']/h1").first.value self.assertNotEqual(text.find("403"), -1) Graph.objects.get(name="Bob's graph").destroy() def test_schema_change_with_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) add_permission(self, 'alice', CREATE_COLLAB) add_permission(self, 'alice', SCHEMA_VIEW) add_permission(self, 'alice', SCHEMA_CHANGE) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath( "//nav[@class='menu']/ul/li[3]/a").first.click() self.browser.find_by_xpath("//fieldset[@class='module aligned wide model']/h2/a").first.click() self.browser.find_by_xpath( "//input[@id='id_name']").first.fill("Alice's type") self.browser.find_by_xpath("//span[@class='buttonLinkOption buttonLinkLeft']/input[@type='submit']").first.click() text = self.browser.find_by_xpath( "//fieldset[@class='module aligned wide model']/h2/a").first.value self.assertEqual(text, "Alice's type") Graph.objects.get(name="Bob's graph").destroy() def test_data_view_without_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) create_data(self) add_permission(self, 'alice', CREATE_COLLAB) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath("//a[@id='dataMenu']").first.click() self.browser.find_by_xpath("//div[@id='dataBrowse']/table/tbody/tr/td/a[@class='dataOption list']").first.click() text = self.browser.find_by_xpath( 
"//div[@class='heading']/h1").first.value self.assertNotEqual(text.find("403"), -1) Graph.objects.get(name="Bob's graph").destroy() def test_data_view_with_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) create_data(self) add_permission(self, 'alice', CREATE_COLLAB) add_permission(self, 'alice', DATA_VIEW) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath("//a[@id='dataMenu']").first.click() self.browser.find_by_xpath("//div[@id='dataBrowse']/table/tbody/tr/td/a[@class='dataOption list']").first.click() text = self.browser.find_by_xpath("//table[@id='content_table']/tbody/tr/td")[1].value self.assertEqual(text, "Bob's node") Graph.objects.get(name="Bob's graph").destroy() def test_data_change_without_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) create_data(self) add_permission(self, 'alice', CREATE_COLLAB) add_permission(self, 'alice', DATA_VIEW) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath("//a[@id='dataMenu']").first.click() self.browser.find_by_xpath("//div[@id='dataBrowse']/table/tbody/tr/td/a[@class='dataOption list']").first.click() self.browser.find_by_xpath("//td/a[@title='Edit node']").first.click() text = self.browser.find_by_xpath( "//div[@class='heading']/h1").first.value self.assertNotEqual(text.find("403"), -1) Graph.objects.get(name="Bob's graph").destroy() def test_data_change_with_permissions(self): signup(self, 
'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) create_data(self) add_permission(self, 'alice', CREATE_COLLAB) add_permission(self, 'alice', DATA_VIEW) add_permission(self, 'alice', DATA_CHANGE) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath("//a[@id='dataMenu']").first.click() self.browser.find_by_xpath("//div[@id='dataBrowse']/table/tbody/tr/td/a[@class='dataOption list']").first.click() self.browser.find_by_xpath("//td/a[@title='Edit node']").first.click() self.browser.find_by_xpath( "//input[@id='id_Name']").first.fill("Alice's node") self.browser.find_by_xpath("//input[@type='submit']").first.click() text = self.browser.find_by_xpath("//table[@id='content_table']/tbody/tr/td")[1].value self.assertEqual(text, "Alice's node") Graph.objects.get(name="Bob's graph").destroy() def test_data_add_without_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) add_permission(self, 'alice', CREATE_COLLAB) add_permission(self, 'alice', DATA_VIEW) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath("//a[@id='dataMenu']").first.click() self.browser.find_by_xpath("//div[@id='dataBrowse']/table/tbody/tr/td/a[@class='dataOption new']").first.click() text = self.browser.find_by_xpath( "//div[@class='heading']/h1").first.value self.assertNotEqual(text.find("403"), -1) Graph.objects.get(name="Bob's graph").destroy() def test_data_add_with_permissions(self): signup(self, 'alice', 
'*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) add_permission(self, 'alice', CREATE_COLLAB) add_permission(self, 'alice', DATA_VIEW) add_permission(self, 'alice', DATA_ADD) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath("//a[@id='dataMenu']").first.click() self.browser.find_by_xpath("//div[@id='dataBrowse']/table/tbody/tr/td/a[@class='dataOption new']").first.click() self.browser.find_by_xpath( "//input[@id='id_Name']").first.fill("Alice's node") self.browser.find_by_xpath("//input[@type='submit']").first.click() text = self.browser.find_by_xpath("//table[@id='content_table']/tbody/tr/td")[1].value self.assertEqual(text, "Alice's node") Graph.objects.get(name="Bob's graph").destroy() def test_data_delete_without_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) create_data(self) add_permission(self, 'alice', CREATE_COLLAB) add_permission(self, 'alice', DATA_VIEW) add_permission(self, 'alice', DATA_CHANGE) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath("//a[@id='dataMenu']").first.click() self.browser.find_by_xpath("//div[@id='dataBrowse']/table/tbody/tr/td/a[@class='dataOption list']").first.click() self.browser.find_by_xpath("//td/a[@title='Edit node']").first.click() self.browser.find_by_xpath("//span[@class='buttonLinkOption buttonLinkRight']/a[text()='Remove']").first.click() text = self.browser.find_by_xpath( "//div[@class='heading']/h1").first.value 
self.assertNotEqual(text.find("403"), -1) Graph.objects.get(name="Bob's graph").destroy() def test_data_delete_with_permissions(self): signup(self, 'alice', '*****@*****.**', 'alice_secret') signin(self, 'alice', 'alice_secret') logout(self) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') create_graph(self) create_schema(self) create_type(self) create_data(self) add_permission(self, 'alice', CREATE_COLLAB) add_permission(self, 'alice', DATA_VIEW) add_permission(self, 'alice', DATA_CHANGE) add_permission(self, 'alice', DATA_DELETE) logout(self) signin(self, 'alice', 'alice_secret') self.browser.find_link_by_href('/graphs/bobs-graph/').first.click() self.browser.find_by_xpath("//a[@id='dataMenu']").first.click() self.browser.find_by_xpath("//div[@id='dataBrowse']/table/tbody/tr/td/a[@class='dataOption list']").first.click() self.browser.find_by_xpath("//td/a[@title='Edit node']").first.click() self.browser.find_by_xpath("//span[@class='buttonLinkOption buttonLinkRight']/a[text()='Remove']").first.click() self.browser.choose('confirm', '1') self.browser.find_by_xpath("//input[@type='submit']").first.click() text = self.browser.find_by_xpath( "//div[@id='content2']/div[@class='indent']").first.value self.assertNotEqual(text.find('Nodes: 0'), -1) Graph.objects.get(name="Bob's graph").destroy()
# NOTE(review): Python 2 YouTube channel scraper (print statement); collapsed
# onto one physical line and TRUNCATED at the end (the per-video try block is
# cut after `username = video.username`) — left byte-identical below.
# Visible behavior: creates downloads\<file_name>\ and reports\ directories,
# opens a tab-separated report file, drives splinter/Chrome to the channel's
# /videos page, clicks the "load more" button until it disappears, collects
# all video hrefs, then (after quitting the browser) walks each url with pafy
# and processes videos whose length is <= 120 seconds.
# Uses "\\" path separators — assumes Windows; TODO confirm.
# Depends on names not imported here (path, makedirs, io, sleep, pafy) —
# presumably imported in a part of the file outside this view.
file_name = "Lakme Channel" # change here folder_name = "downloads\\" + file_name + "\\" if not path.exists(folder_name): makedirs(folder_name) if not path.exists("reports"): makedirs("reports") fh = io.open("reports\\" + file_name + ".xls", "w", encoding='utf8') fh.write(u"URL\tTitle\tAuthor\tUser Name\tCategory\tLikes\tDislikes\tDuration\tPublished\tRating\tView Count\tFile Name\n") channel_url = "https://www.youtube.com/user/ILoveLakme/videos" # change here chrome = Browser("chrome") print "visit 'chrome://settings/content' to disable images" sleep(20) chrome.visit(channel_url) sleep(5) while chrome.is_element_present_by_xpath("//button[@class='yt-uix-button yt-uix-button-size-default yt-uix-button-default load-more-button yt-uix-load-more browse-items-load-more-button']"): chrome.find_by_xpath("//button[@class='yt-uix-button yt-uix-button-size-default yt-uix-button-default load-more-button yt-uix-load-more browse-items-load-more-button']").click() sleep(2) sleep(5) links = [a["href"] for a in chrome.find_by_xpath("//a[@class='yt-uix-sessionlink yt-uix-tile-link spf-link yt-ui-ellipsis yt-ui-ellipsis-2']")] chrome.quit() #flag = False for url in links: try: video = pafy.new(url) if video.length <= 120: title = video.title # .encode('ascii', 'ignore') print title #if title == "rexona men in philippines": flag = True #if flag: author = video.author # .encode('ascii', 'ignore') username = video.username
# NOTE(review): 2048 bot driver loop; collapsed onto one physical line and
# TRUNCATED (the inner `while True` has no visible exit) — left byte-identical.
# Visible behavior: for each trial, opens the 2048 page in a splinter Browser,
# clicks the screen centre so the page body gets keyboard focus, then loops:
# reads the tile-container div, extracts tile values with re.findall(pat, ...),
# rebuilds a 2-D grid via makeGrid, tracks the largest tile, asks
# strat.move(grid) for a direction name, and invokes the matching move
# function via locals()[direc]() — which only works because at module scope
# locals() is globals(); the direction functions must be defined elsewhere.
# Depends on external names not visible here: trials, m (mouse controller with
# screen_size/click), pat, makeGrid, strat, re — TODO confirm their origin.
for t in range(trials): browser = Browser() browser.visit("http://gabrielecirulli.github.io/2048/") # need to have this so that the body is in focus x_dim, y_dim = m.screen_size() m.click(x_dim/2, y_dim/2, 1) largest = 2 # MAIN LOOP grid = () while True: # this checks up on the current grid state div = browser.find_by_xpath("//div[contains(@class, 'tile-container')]")[0] grid = re.findall(pat, div.html) grid = makeGrid(grid) # max,max because it's two dimensional largest = max([item for sublist in grid for item in sublist]) # this selects a random move direc = strat.move(grid) # Actually make the move locals()[direc]() scoretext = browser.find_by_xpath("//div[@class='score-container']")[0] score = int(scoretext.text.split()[0])
# NOTE(review): Django LiveServerTestCase suite for signup/signin/logout,
# profile editing, password change and email change against the SylvaDB UI.
# Original formatting was lost — the class is collapsed onto a few physical
# lines (one assertion string is even split across two of them); code is left
# byte-identical below.
# Pattern: each test signs "bob" up/in via the signup/signin helpers (defined
# elsewhere in this file/module), drives the form under test with splinter,
# and asserts either the resulting page title or the first
# "ul.errorlist li" validation message.
class UserTestCase(LiveServerTestCase): """ A set of tests for testing Users, Accounts and UserProfiles. Also, we check the patterns for the required fields for signup, signin, the menu of user details, the change password view and the change email view. """ def setUp(self): self.browser = Browser() def tearDown(self): self.browser.quit() def test_user_signup(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') self.assertEqual(self.browser.find_by_css('.body-inside').first.value, 'Thank you for signing up with us!\nYou can now use the supplied credentials to signin.') self.assertEqual(self.browser.title, 'SylvaDB - Signup almost done!') def test_user_singup_empty_email(self): self.browser.visit(self.live_server_url + '/accounts/signup/') self.browser.find_by_name('username').fill('bob') self.browser.find_by_name('email').fill('') self.browser.find_by_name('password1').fill('bob_secret') self.browser.find_by_name('password2').fill('bob_secret') self.browser.find_by_value('Signup').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') def test_user_singup_bad_email(self): self.browser.visit(self.live_server_url + '/accounts/signup/') self.browser.find_by_name('username').fill('bob') self.browser.find_by_name('email').fill('bobcultureplex.ca') self.browser.find_by_name('password1').fill('bob_secret') self.browser.find_by_name('password2').fill('bob_secret') self.browser.find_by_value('Signup').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Enter a valid e-mail address.') def test_user_singup_empty_name(self): self.browser.visit(self.live_server_url + '/accounts/signup/') self.browser.find_by_name('username').fill('') self.browser.find_by_name('email').fill('*****@*****.**') self.browser.find_by_name('password1').fill('bob_secret') self.browser.find_by_name('password2').fill('bob_secret') 
self.browser.find_by_value('Signup').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') def test_user_singup_empty_password(self): self.browser.visit(self.live_server_url + '/accounts/signup/') self.browser.find_by_name('username').fill('bob') self.browser.find_by_name('email').fill('*****@*****.**') self.browser.find_by_name('password1').fill('') self.browser.find_by_name('password2').fill('bob_secret') self.browser.find_by_value('Signup').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') def test_user_singup_password_unmatched(self): self.browser.visit(self.live_server_url + '/accounts/signup/') self.browser.find_by_name('username').fill('bob') self.browser.find_by_name('email').fill('*****@*****.**') self.browser.find_by_name('password1').fill('bob_secret') self.browser.find_by_name('password2').fill('bob_password') self.browser.find_by_value('Signup').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'The two password fields didn\'t match.') def test_user_signin(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') logout(self) def test_user_signin_empty_user(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') self.browser.visit(self.live_server_url + '/accounts/signin/') self.browser.find_by_name('identification').fill('') self.browser.find_by_name('password').fill('bob_secret') self.browser.find_by_xpath( "//div[@id='body']/div/form/input").first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Either supply us with your email or username.') def test_user_signin_bad_user(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') 
self.browser.visit(self.live_server_url + '/accounts/signin/') self.browser.find_by_name('identification').fill('alice') self.browser.find_by_name('password').fill('bob_secret') self.browser.find_by_xpath( "//div[@id='body']/div/form/input").first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Please enter a correct username or email and password. Note that both fields are case-sensitive.') def test_user_signin_empty_password(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') self.browser.visit(self.live_server_url + '/accounts/signin/') self.browser.find_by_name('identification').fill('bob') self.browser.find_by_name('password').fill('') self.browser.find_by_xpath( "//div[@id='body']/div/form/input").first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') def test_user_signin_bad_password(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') self.browser.visit(self.live_server_url + '/accounts/signin/') self.browser.find_by_name('identification').fill('bob') self.browser.find_by_name('password').fill('alice_secret') self.browser.find_by_xpath( "//div[@id='body']/div/form/input").first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Please enter a correct username or email and password. Note that both fields are case-sensitive.') def test_user_logout(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') logout(self) self.assertEqual(self.browser.title, 'SylvaDB - Signed out') self.assertEqual(self.browser.find_by_css('.body-inside').first.value, 'You have been signed out. 
Till we meet again.') def test_user_details(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/edit/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Account setup') self.browser.find_by_name('first_name').fill('Bob') self.browser.find_by_name('last_name').fill('Doe') self.browser.attach_file('mugshot', 'http://www.gravatar.com/avatar/3d4bcca5d9c3a56a0282f308f9acda07?s=90') self.browser.select('language', 'en') self.browser.select('gender', '1') self.browser.find_by_name('website').fill('http://www.bobweb.com') self.browser.find_by_name('location').fill('London, Ontario') self.browser.find_by_name('birth_date').fill('01/01/1975') self.browser.find_by_name('about_me').fill('I am a very nice guy') self.browser.find_by_name('institution').fill('University') self.browser.find_by_name('company').fill('CulturePlex') self.browser.find_by_name('lab').fill('CulturePlex') self.browser.find_by_value('Save changes').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') logout(self) def test_user_details_bad_website(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/edit/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Account setup') self.browser.find_by_name('first_name').fill('Bob') self.browser.find_by_name('last_name').fill('Doe') self.browser.attach_file('mugshot', 'http://www.gravatar.com/avatar/3d4bcca5d9c3a56a0282f308f9acda07?s=90') self.browser.select('language', 'en') 
self.browser.select('gender', '1') self.browser.find_by_name('website').fill('bobweb') self.browser.find_by_name('location').fill('London, Ontario') self.browser.find_by_name('birth_date').fill('01/01/1975') self.browser.find_by_name('about_me').fill('I am a very nice guy') self.browser.find_by_name('institution').fill('University') self.browser.find_by_name('company').fill('CulturePlex') self.browser.find_by_name('lab').fill('CulturePlex') self.browser.find_by_value('Save changes').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Enter a valid URL.') logout(self) def test_user_details_bad_birthdate(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/edit/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Account setup') self.browser.find_by_name('first_name').fill('Bob') self.browser.find_by_name('last_name').fill('Doe') self.browser.attach_file('mugshot', 'http://www.gravatar.com/avatar/3d4bcca5d9c3a56a0282f308f9acda07?s=90') self.browser.select('language', 'en') self.browser.select('gender', '1') self.browser.find_by_name('website').fill('http://www.bobweb.com') self.browser.find_by_name('location').fill('London, Ontario') self.browser.find_by_name('birth_date').fill('birthdate') self.browser.find_by_name('about_me').fill('I am a very nice guy') self.browser.find_by_name('institution').fill('University') self.browser.find_by_name('company').fill('CulturePlex') self.browser.find_by_name('lab').fill('CulturePlex') self.browser.find_by_value('Save changes').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Enter a valid date.') logout(self) def 
test_user_details_future_birthdate(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/edit/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Account setup') self.browser.find_by_name('first_name').fill('Bob') self.browser.find_by_name('last_name').fill('Doe') self.browser.attach_file('mugshot', 'http://www.gravatar.com/avatar/3d4bcca5d9c3a56a0282f308f9acda07?s=90') self.browser.select('language', 'en') self.browser.select('gender', '1') self.browser.find_by_name('website').fill('http://www.bobweb.com') self.browser.find_by_name('location').fill('London, Ontario') self.browser.find_by_name('birth_date').fill('2015-01-11') self.browser.find_by_name('about_me').fill('I am a very nice guy') self.browser.find_by_name('institution').fill('University') self.browser.find_by_name('company').fill('CulturePlex') self.browser.find_by_name('lab').fill('CulturePlex') self.browser.find_by_value('Save changes').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'You need to introduce a past date.') logout(self) def test_user_change_pass(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/password/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Change password') self.browser.find_by_name('old_password').fill('bob_secret') self.browser.find_by_name('new_password1').fill('bob_password') self.browser.find_by_name('new_password2').fill('bob_password') 
self.browser.find_by_value('Change password').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Password changed') logout(self) def test_user_change_pass_empty(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/password/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Change password') self.browser.find_by_name('old_password').fill('') self.browser.find_by_name('new_password1').fill('bob_password') self.browser.find_by_name('new_password2').fill('bob_password') self.browser.find_by_value('Change password').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') logout(self) def test_user_change_pass_incorrectly(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/password/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Change password') self.browser.find_by_name('old_password').fill('bad_password') self.browser.find_by_name('new_password1').fill('bob_password') self.browser.find_by_name('new_password2').fill('bob_password') self.browser.find_by_value('Change password').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Your old password was entered incorrectly. 
Please enter it again.') logout(self) def test_user_change_new_pass_empty(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/password/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Change password') self.browser.find_by_name('old_password').fill('bob_secret') self.browser.find_by_name('new_password1').fill('') self.browser.find_by_name('new_password2').fill('bob_password') self.browser.find_by_value('Change password').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') logout(self) def test_user_change_new_pass_unmatched(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/password/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Change password') self.browser.find_by_name('old_password').fill('bob_secret') self.browser.find_by_name('new_password1').fill('bob_password') self.browser.find_by_name('new_password2').fill('alice_password') self.browser.find_by_value('Change password').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'The two password fields didn\'t match.') logout(self) def test_user_change_mail(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() 
self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/email/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Welcome to The Sylva Project') self.browser.find_by_name('email').fill('*****@*****.**') self.browser.find_by_value('Change email').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Email verification') def test_user_change_mail_empty(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/email/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Welcome to The Sylva Project') self.browser.find_by_name('email').fill('') self.browser.find_by_value('Change email').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') def test_user_change_bad(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/email/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Welcome to The Sylva Project') self.browser.find_by_name('email').fill('bobnewcultureplex.ca') self.browser.find_by_value('Change email').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Enter a valid e-mail address.')
# Scrape semester-4 GPA results for a range of NITT roll numbers and dump the
# {roll: gpa} mapping as JSON to a local file.
#
# NOTE(review): the original used C-style `//` comments, which are syntax
# errors in Python; they are converted to `#` comments here. The bare
# `except:` is narrowed to `except Exception` so Ctrl-C still works, and the
# output file is managed with a context manager.
from splinter import Browser  # splinter drives the browser

import json

data = {}  # roll number -> GPA string scraped from the page

browser = Browser('firefox')
browser.visit('http://www.nitt.edu/prm/ShowResult.htm')  # results page

# Roll numbers 110112001 .. 110112100 inclusive (the original started at
# 110112000 "starting roll number -1" and pre-incremented inside its loop).
for roll in range(110112001, 110112101):
    # The page is divided into frames, so work inside the 'main' iframe.
    with browser.get_iframe('main') as iframe:
        try:
            iframe.fill('TextBox1', roll)
            iframe.find_by_name('Button1').click()
            # Select the exam-session option with value "96" from the Dt1
            # dropdown -- presumably the semester-4 session; confirm on page.
            browser.find_by_xpath(
                '//select[@id="Dt1"]/option[@value="96"]')._element.click()
            # Looked up but unused in the original; retained for splinter's
            # implicit element wait.
            name = iframe.find_by_id('LblName')
            gpa = iframe.find_by_id('LblGPA')
            data[roll] = gpa.value
            print(roll, "done")
        except Exception:
            # Roll numbers with no published result raise; skip them rather
            # than aborting the whole scrape.
            print(roll, "skipped")

# Persist the scraped data (json stringifies the integer keys); the context
# manager guarantees the file is closed.
with open('gpasem4', 'w') as f:
    json.dump(data, f)
def make(b, c):
    """Drive the okzaijia admin site: log in, then either create a fresh
    repair order with randomised details (when ``b == 1``) or look up the
    order numbered ``c``, append a repair record, and execute its task.

    NOTE(review): relies on module-level ``Browser``, ``random`` and ``time``;
    credentials and crew id are hard-coded, as in the original.
    """
    page = Browser('chrome')
    login_url = 'http://admin2.okzaijia.com.cn/Account/login'
    page.visit(login_url)
    # Sign in and open the working area.
    page.find_by_id('UserName').fill('Tina')
    page.find_by_id('Password').fill('13916099416')
    page.find_by_id('LoginOn').click()
    page.find_by_xpath('/html/body/div[1]/div[1]/div/div[2]/div/div/ul/li/a').click()
    if b == 1:
        # Branch 1: create a brand-new order with random content/contact data.
        page.find_link_by_text(u'新增订单').click()
        page.windows.current = page.windows[1]
        content_box = page.find_by_name('RepairContent')
        content_box.fill(random.randint(10000, 19999))
        random_name = ''.join([chr(random.randint(97, 122)) for _ in range(4)])
        page.find_by_id('UserName').fill(random_name)
        page.find_by_id('UserMobile').fill(random.randint(15138460867, 19000000000))
        page.select('Source', random.randint(1, 10))
        page.select('AreaId', random.randint(801, 819))
        page.find_by_id('UserAddress').fill(random.randint(3000, 9999))
        page.find_by_xpath('//*[@id="submit"]').click()
        time.sleep(2)
    else:
        # Branch 2: find the existing order by number and add a repair record.
        page.find_by_name('orderno').fill(c)
        page.find_by_xpath('//*[@id="searchForm"]/div[7]/button').click()
        page.find_by_text(u'维修记录').click()
        page.find_by_xpath("/html/body/div[1]/div[1]/div/div[2]/div[1]/a").click()
        page.windows.current = page.windows[1]
        note_text = ''.join([chr(random.randint(97, 122)) for _ in range(5)])
        page.find_by_name('RepairContent').fill(note_text)
        page.find_by_name('Remark').fill(random.randint(20000, 29999))
        page.find_by_id('submit').click()
        time.sleep(3)
        # Jump to "my tasks" and execute the newest pending task.
        page.visit('http://admin2.okzaijia.com.cn/Task/MyTask?TaskType=4&Status=1')
        page.windows.current = page.windows[1]
        page.find_by_xpath('//*[@id="searchForm"]/div[3]/button').click()
        page.find_by_xpath('//*[@id="pages"]/div/a[7]').click()
        page.find_by_text(u'执行任务').last.click()
        time.sleep(2)
        page.windows.current = page.windows[2]
        page.find_by_value('37').click()  # choose work crew #37 to accept the order
        page.find_by_id('submit').click()
cert = {"_id":count, "tcm_id":userid,"type_reg":type_reg, "reg_date":reg_date, "reg_type":reg_type, "prac_cert_start":prac_cert_start, "prac_cert_end":prac_cert_end} tcm_certs.insert_one(cert) #insert working place elif(sections[i].text.find(title_place)!=-1): principle_place = sections[i].text place_name = browser.find_by_xpath('//table/tbody/tr[1]/td[2]')[i+1].text place_address = browser.find_by_xpath('//table/tbody/tr[2]/td[2]')[i+1].text place_tele = browser.find_by_xpath('//table/tbody/tr[3]/td[2]')[i].text count = tcm_work.count() + 1 work = {"_id":count, "tcm_id":userid, "principle_place":principle_place, "place_name":place_name,"place_address":place_address,"place_tele":place_tele} tcm_work.insert_one(work) for i in range(250,303): browser.execute_script("gotoPageDEFAULT(%d)" % int(i+1)) browser.find_by_xpath('//*[@id="searchResultHead"]/table[1]/tbody/tr[1]/td/table/tbody/tr[2]/td/a').click() getProfDetail(int(i*10+1)) browser.execute_script("backToSearchResults()") browser.execute_script("gotoPageDEFAULT(%d)" % int(i+1)) browser.find_by_xpath('//*[@id="searchResultHead"]/table[1]/tbody/tr[2]/td/table/tbody/tr[2]/td/a').click() getProfDetail(int(i*10+2)) browser.execute_script("backToSearchResults()") browser.execute_script("gotoPageDEFAULT(%d)" % int(i+1)) browser.find_by_xpath('//*[@id="searchResultHead"]/table[1]/tbody/tr[3]/td/table/tbody/tr[2]/td/a').click() getProfDetail(int(i*10+3)) browser.execute_script("backToSearchResults()") browser.execute_script("gotoPageDEFAULT(%d)" % int(i+1)) browser.find_by_xpath('//*[@id="searchResultHead"]/table[1]/tbody/tr[4]/td/table/tbody/tr[2]/td/a').click() getProfDetail(int(i*10+4)) browser.execute_script("backToSearchResults()") browser.execute_script("gotoPageDEFAULT(%d)" % int(i+1))
if not path.exists(report_folder): makedirs(report_folder) fh = io.open("reports\\" + fieldValues[0].lower() + ".xls", "w", encoding='utf8') fh.write(u"URL\tTitle\tAuthor\tUser Name\tCategory\tLikes\tDislikes\tDuration\tPublished\tRating\tView Count\tFile Name\n") chrome = Browser("chrome") #print "visit 'chrome://settings/content' to disable images" sleep(5) links = [] page_no = 1 while page_no <= int(pages): # change page no here search_url = "https://www.youtube.com/results?search_query=" + "+".join(fieldValues[0].split()) + "&filters=video&page=" + str(page_no) chrome.visit(search_url) sleep(2) for a in chrome.find_by_xpath("//a[@class='yt-uix-sessionlink yt-uix-tile-link yt-ui-ellipsis yt-ui-ellipsis-2 spf-link ']"): links.append(a["href"]) page_no += 1 chrome.quit() # flag = False for url in links: try: video = pafy.new(url) if video.length <= 120: title = video.title print title # if title == "rexona men in philippines": flag = True # if flag: author = video.author username = video.username
class QueryTestCase(LiveServerTestCase):
    """
    A set of tests for testing queries.

    Each test signs in as 'bob', drives the query-builder UI with splinter,
    and verifies page content via spin_assert (retrying assertions).
    """

    def setUp(self):
        # Fresh browser per test; generous socket timeout for slow UI loads.
        self.browser = Browser()
        socket.setdefaulttimeout(30)
        signup(self, 'bob', '*****@*****.**', 'bob_secret')
        signin(self, 'bob', 'bob_secret')

    def tearDown(self):
        logout(self)
        self.browser.quit()

    @classmethod
    def tearDownClass(cls):
        sleep(10)  # give the LiveServerTestCase some time to shut down
        super(QueryTestCase, cls).tearDownClass()

    def test_query_list_view(self):
        """The queries menu shows a 'New Query' button."""
        create_graph(self)
        create_schema(self)
        create_type(self)
        self.browser.find_by_id('queriesMenu').first.click()
        button_text = self.browser.find_by_id('create-query').first.value
        spin_assert(lambda: self.assertEqual(button_text, "New Query"))

    def test_query_builder_view(self):
        """Opening the builder shows the diagram and the created node type."""
        create_graph(self)
        create_schema(self)
        create_type(self)
        self.browser.find_by_id('queriesMenu').first.click()
        new_query_button = self.browser.find_by_id('create-query').first
        button_text = new_query_button.value
        spin_assert(lambda: self.assertEqual(button_text, "New Query"))
        new_query_button.click()
        diagram_title = self.browser.find_by_id('diagramTitle').first.value
        spin_assert(lambda: self.assertEqual(diagram_title, "Diagram"))
        node_type_text = self.browser.find_by_xpath(
            "//table[@id='node-types']/tbody/tr/td/a").first.value
        # This node type name is the name that we use in the create_type method
        spin_assert(lambda: self.assertEqual(node_type_text, "Bob's type"))

    def test_query_builder_add_box(self):
        """Clicking a node type adds its box (select widget) to the diagram."""
        create_graph(self)
        create_schema(self)
        create_type(self)
        self.browser.find_by_id('queriesMenu').first.click()
        new_query_button = self.browser.find_by_id('create-query').first
        button_text = new_query_button.value
        spin_assert(lambda: self.assertEqual(button_text, "New Query"))
        new_query_button.click()
        diagram_title = self.browser.find_by_id('diagramTitle').first.value
        spin_assert(lambda: self.assertEqual(diagram_title, "Diagram"))
        node_type = self.browser.find_by_xpath(
            "//table[@id='node-types']/tbody/tr/td/a").first
        node_type_text = node_type.value
        # This node type name is the name that we use in the create_type method
        spin_assert(lambda: self.assertEqual(node_type_text, "Bob's type"))
        # We check if the node type is the same
        node_type.click()
        title_text = self.browser.find_by_xpath(
            "//select[@class='select-nodetype-bobs-type']").first.value
        spin_assert(lambda: self.assertEqual(title_text, u"Bob's type 1"))