class UserUtils(object):

    def __init__(self):
        self.config = config.read_config()
        self.account = self.config['account']
        self.idp_server = self.config['nodes']['idp_node']

        # Abort test if esgf-web-fe is not reachable
        r = requests.get("https://{0}/esgf-web-fe".format(self.idp_server),
                         verify=False, timeout=1)
        assert r.status_code == 200

        self.browser = Browser('firefox')

        # Mapping user data to fit to web-fe user creation form
        self.elements = {'firstName': self.account['firstname'],
                         'lastName': self.account['lastname'],
                         'email': self.account['email'],
                         'userName': self.account['username'],
                         'password1': self.account['password'],
                         'password2': self.account['password']}

    def check_user_exists(self):
        URL = "https://{0}/esgf-web-fe/login".format(self.idp_server)
        OpenID = "https://{0}/esgf-idp/openid/{1}".format(self.idp_server,
                                                          self.account['username'])

        # Try to log in
        self.browser.visit(URL)
        self.browser.find_by_id('openid_identifier').fill(OpenID)
        self.browser.find_by_value('Login').click()

        # User does not exist if unable to resolve OpenID
        if self.browser.is_text_present("Error: unable to resolve OpenID identifier"):
            self.user_exists = False
        else:
            self.user_exists = True

    def create_user(self):
        URL = "https://{0}/esgf-web-fe/createAccount".format(self.idp_server)
        self.browser.visit(URL)

        # Filling the form
        for element_name in self.elements:
            self.browser.find_by_name(element_name).fill(self.elements[element_name])
        self.browser.find_by_value('Submit').click()

        # Parsing response
        self.response = []
        if self.browser.is_text_present("SUCCESS"):
            self.response.append("SUCCESS")
        else:
            self.response.append("FAILURE")
            selection = self.browser.find_by_tag('span')
            for sel in selection:
                if sel.has_class('myerror'):
                    self.response.append(sel.value)

    def exit_browser(self):
        self.browser.quit()
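A minimal sketch of how this helper might be driven end to end; the `config` module and account layout are whatever the surrounding test suite provides, so treat this as an assumed usage pattern rather than part of the original suite.

# Hypothetical driver for UserUtils (assumes the same config/account setup as above).
utils = UserUtils()
utils.check_user_exists()
if not utils.user_exists:
    utils.create_user()
    print(utils.response)   # e.g. ['SUCCESS'] or ['FAILURE', <error span texts>]
utils.exit_browser()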
def test_0_http_browser_download(self): path = self.get_endpoint_path("HTTPServer") url = "http://{0}/thredds/fileServer/{1}".format(self.data_node, path) OpenID = "https://{0}/esgf-idp/openid/{1}".format(self.idp_node, self.username) pf = {"browser.helperApps.neverAsk.saveToDisk": "application/x-netcdf, application/netcdf"} browser = Browser("firefox", profile_preferences=pf) browser.visit(url) if browser.status_code.is_success() is True: browser.quit() return browser.find_by_css("input.custom-combobox-input").fill(OpenID) browser.find_by_value("GO").click() browser.find_by_id("password").fill(self.password) browser.find_by_value("SUBMIT").click() # To Do only if user is not enrolled in a group if browser.is_text_present("Group Registration Request"): # Chosing First Registration Group browser.find_by_id("button_1").click() # Accepting License Agreement browser.execute_script("myForm.submit();") # Clicking on 'Download data button' browser.find_by_id("goButton").click() browser.quit()
def test_0_http_browser_download(self):
    path = self.get_endpoint_path('HTTPServer')
    url = "http://{0}/thredds/fileServer/{1}".format(self.data_node, path)
    OpenID = "https://{0}/esgf-idp/openid/{1}".format(self.idp_node, self.username)

    pf = {'browser.helperApps.neverAsk.saveToDisk': 'application/x-netcdf, application/netcdf'}
    browser = Browser('firefox', profile_preferences=pf)
    browser.visit(url)

    if browser.status_code.is_success() is True:
        browser.quit()
        return

    browser.find_by_id('openid_identifier').fill(OpenID)
    browser.find_by_value('GO').click()
    browser.find_by_id('password').fill(self.password)
    browser.find_by_value('SUBMIT').click()

    # To do only if user is not enrolled in a group
    if browser.is_text_present('Group Registration Request'):
        # Choosing first registration group
        browser.find_by_id('button_1').click()
        # Accepting license agreement
        browser.execute_script('myForm.submit();')
        # Clicking on 'Download data' button
        browser.find_by_id('goButton').click()

    browser.quit()
class UserUtils(object):

    def __init__(self):
        self.config = config.read_config()
        self.account = self.config['account']
        self.idp_server = self.config['nodes']['idp_node']

        # Abort test if esgf-web-fe is not reachable
        r = requests.get("https://{0}/user/add".format(self.idp_server),
                         verify=False, timeout=1)
        assert r.status_code == 200

        self.browser = Browser('firefox')

        # Mapping user data to fit to web-fe user creation form
        self.elements = {'first_name': self.account['firstname'],
                         'last_name': self.account['lastname'],
                         'email': self.account['email'],
                         'username': self.account['username'],
                         'password': self.account['password'],
                         'confirm_password': self.account['password'],
                         'institution': self.account['institution'],
                         'city': self.account['city'],
                         'country': self.account['country']}

    def check_user_exists(self):
        URL = "https://{0}/login".format(self.idp_server)
        OpenID = "https://{0}/esgf-idp/openid/{1}".format(self.idp_server,
                                                          self.account['username'])

        # Try to log in
        self.browser.visit(URL)
        self.browser.find_by_id('openid_identifier').fill(OpenID)
        self.browser.find_by_value('Login').click()

        # User does not exist if unable to resolve OpenID
        if self.browser.is_text_present("OpenID Discovery Error: unrecognized by the Identity Provider."):
            self.user_exists = False
        else:
            self.user_exists = True

    def create_user(self):
        URL = "https://{0}/user/add".format(self.idp_server)
        self.browser.visit(URL)

        # Filling the form
        for element_name in self.elements:
            self.browser.find_by_name(element_name).fill(self.elements[element_name])
        self.browser.find_by_value('Submit').click()

        # Parsing response
        self.response = []
        if self.browser.is_text_present("Thank you for creating an account. You can now login."):
            self.response.append("SUCCESS")
        else:
            self.response.append("FAILURE")

    def exit_browser(self):
        self.browser.quit()
def test_admin_login(self):
    with pyvirtualdisplay.Display():
        browser = Browser()
        browser.visit("http://ui:8080/admin/")
        self.assertTrue(browser.find_by_text("Django administration"))
        browser.fill("username", "sfmadmin")
        browser.fill("password", "password")
        browser.find_by_value("Log in").click()
        self.assertTrue("Welcome" in browser.html)
class SupremeBot:

    def __init__(self, **info):
        self.home = 'http://www.supremenewyork.com/'
        self.shop_url = 'shop/all/'
        self.checkout_url = 'checkout/'
        self.info = info

    def home_page(self):
        self.browser = Browser('chrome', **executable_path)

    def find_product(self):
        r = requests.get("{}{}{}".format(self.home, self.shop_url,
                                         self.info['category'])).text
        soup = bs4.BeautifulSoup(r, "lxml")
        all_links = []
        needed_links = []
        for item in soup.find_all('a', href=True):
            all_links.append((item['href'], item.text))
        for link in all_links:
            if link[1] == self.info['product'] or link[1] == self.info['color']:
                needed_links.append(link[0])
        self.final_link = list(
            set([x for x in needed_links if needed_links.count(x) == 2]))[0]
        return self.final_link

    def visit_site(self):
        self.browser.visit('{}{}'.format(self.home, self.final_link))
        self.browser.find_option_by_text(self.info['size']).click()
        self.browser.find_by_value('add to cart').click()

    def check_out(self):
        self.browser.visit('{}{}'.format(self.home, self.checkout_url))
        self.browser.find_by_id('order_billing_name').fill(self.info['name'])
        self.browser.find_by_id('order_email').fill(self.info['email'])
        self.browser.find_by_id('order_tel').fill(self.info['phone'])
        self.browser.find_by_id('bo').fill(self.info['addy'])
        self.browser.find_by_id('oba3').fill(self.info['apt'])
        self.browser.find_by_id('order_billing_zip').fill(self.info['zipfield'])
        self.browser.find_by_id('order_billing_city').fill(self.info['city'])
        self.browser.find_by_id('order_billing_name').fill(self.info['name'])
        self.browser.find_option_by_text(self.info['country']).click()
        self.browser.find_by_id('rnsnckrn').fill(self.info['number'])
        card_month = self.browser.find_by_id('credit_card_month').first
        card_month.select(self.info['month'])
        card_year = self.browser.find_by_id('credit_card_year').first
        card_year.select(self.info['year'])
        self.browser.find_by_id('orcer').fill(self.info['ccv'])
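A hedged example of how this bot might be instantiated; every keyword key shown is one the class itself reads (category, product, color, size, and the checkout fields), but the concrete values are placeholders.

# Hypothetical usage of SupremeBot; all values below are placeholders.
bot = SupremeBot(
    category='shirts', product='Example Shirt', color='Black', size='Medium',
    name='Jane Doe', email='jane@example.com', phone='5551234567',
    addy='123 Example St', apt='', zipfield='10001', city='New York',
    country='USA', number='4111111111111111', month='08', year='2026', ccv='123',
)
bot.home_page()      # opens the Chrome browser
bot.find_product()   # resolves the product link from the shop page
bot.visit_site()     # selects the size and adds to cart
bot.check_out()      # fills the checkout form (this snippet does not submit it)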
def testLoginWithWrongCredentialsFails():
    browser = Browser()
    browser.visit('http://127.0.0.1:8000')
    browser.fill_form({'username': '******'})
    browser.fill_form({'password': '******'})
    browser.find_by_value('Anmelden').click()
    if browser.is_text_present('Your login credentials are incorrect'):
        print "Test passed"
    else:
        print "Test failed"
    browser.quit()
class LiveServerSplinterAuthTest(LiveServerTestCase):

    @classmethod
    def setUpClass(cls):
        super(LiveServerSplinterAuthTest, cls).setUpClass()

    @classmethod
    def tearDownClass(cls):
        super(LiveServerSplinterAuthTest, cls).tearDownClass()

    def setUp(self):
        self.user1 = UserFactory()
        self.user1.set_password('secret')
        self.user1.save()
        self.client1 = ClientFactory()
        self.category1 = CategoryFactory(name='Chairs')
        self.category2 = CategoryFactory(name='Tables')
        self.product1 = ProductFactory(category=self.category1)
        self.product2 = ProductFactory(category=self.category1)
        self.product3 = ProductFactory(category=self.category2)
        self.browser = Browser()
        self.login_helper(self.user1.username, 'secret')

    def tearDown(self):
        self.browser.quit()

    def login_helper(self, username, password):
        self.browser.visit('{}{}'.format(self.live_server_url, '/accounts/login/'))
        self.browser.fill('username', username)
        self.browser.fill('password', password)
        self.browser.find_by_value('Log in').first.click()

    def test_redirected_to_menu_after_login(self):
        self.assertTrue(self.browser.is_text_present('Select An Option'))

    def test_new_quote_button(self):
        self.browser.visit('{}{}'.format(self.live_server_url, '/menu'))
        new_quote_visible = self.browser.find_by_id('new_quote').visible
        self.assertFalse(new_quote_visible)
        self.browser.find_by_id('btn_new_quote').click()
        self.assertTrue(self.browser.is_text_present('New Quote'))
        sleep(1)
        new_quote_visible = self.browser.find_by_id('new_quote').visible
        self.assertTrue(new_quote_visible)
def search_advs(searches):
    browser = Browser()
    advs_dict = {}
    for s in searches:
        browser.visit('http://mobile.bg')
        try:
            browser.find_by_id('ZaplataFooterClose').first.click()
        except Exception as e:
            pass
        browser.find_option_by_text(s['kategoria']).first.click()
        browser.find_option_by_text(s['marka']).first.click()
        if s['model'] != '':
            browser.find_option_by_text(s['model']).first.click()
        # go to "Подробно търсене" (detailed search)
        browser.find_by_text('Подробно търсене').first.click()
        time.sleep(sleep_time)
        # close the "zaplata" banner if it shows up
        try:
            browser.find_by_id('ZaplataFooterClose').first.click()
            print(1)
        except Exception as e:
            pass
            #print ('could not close banner:', e)
        browser.find_option_by_text(sortirane).first.click()
        if s['dvigatel'] != '':
            browser.find_option_by_text(s['dvigatel']).first.click()
        if s['godina'][0] != 0:
            browser.find_option_by_text('от ' + str(s['godina'][0]) + ' г.').first.click()
        if s['godina'][1] != 0:
            browser.find_option_by_text('до ' + str(s['godina'][1]) + ' г.').first.click()
        if s['region'] != '':
            browser.find_option_by_text(s['region']).first.click()
        if s['grad'] != '':
            browser.find_option_by_text(s['grad']).first.click()
        for o in s['opcii']:
            browser.find_by_text(o).first.click()
        # TODO: add an option for private listings / all listings
        browser.find_by_value('Т Ъ Р С И').first.click()
        advs = get_advs_hrefs(browser)
        #for h in advs:
        #    print(h)
        print('total', len(advs))
        advs_dict[s['search_name']] = advs
    browser.quit()
    return advs_dict
def start():
    x = input('[+] Post URL: ')
    browser = Browser('chrome')

    # Visit URL
    url = "https://igsub.me"
    url2 = "https://igsub.me/postViews"
    browser.visit(url)
    browser.visit(url2)
    #buttonServer = browser.find_by_value('Get Page (THIS SERVER)').click()

    link = browser.find_by_name('formPostLink').click()
    browser.fill('formPostLink', x)

    # Find and click the 'search' button
    browser.find_by_id('firstFormElementSendButton').click()
    time.sleep(4)
    browser.find_by_name('formPostActionLimit').click()
    count = browser.find_by_value('3000').click()
    send = browser.find_by_id('secondFormElementSendButton').click()

    # Interact with elements
    #button.click()
    while browser.is_text_present('PROCESSING...'):
        print("PROCESSING...")
    else:
        print("Done")

    start()
def passwd(self): b = Browser() b.driver.set_window_size(900, 900) b.visit("https://www.facebook.com") b.fill("email", self.user) b.fill("pass", self.login) btn = b.find_by_value("Log In") btn.click() b.visit("https://www.facebook.com/settings") btn = b.find_by_id("u_0_7") btn.click() b.fill("password_old", self.login) b.fill("password_new", self.panic) b.fill("password_confirm", self.panic) btn = b.find_by_value("Save Changes") btn.click() b.quit()
def get_token(username, password):
    from splinter import Browser
    import time
    browser = Browser()
    try:
        browser.visit("https://timetableplanner.app.uq.edu.au/")
        count = 0
        while browser.is_element_not_present_by_text(
                "Sign in and get started!") and count < 10:
            time.sleep(1)
            count += 1
        if browser.is_element_present_by_text("Sign in and get started!"):
            browser.find_by_text("Sign in and get started!").click()
        else:
            return None
        count = 0
        while browser.is_element_not_present_by_id("username") and count < 10:
            time.sleep(1)
            count += 1
        if browser.is_element_present_by_id(
                "username") and browser.is_element_present_by_id("password"):
            browser.fill('username', username)
            browser.fill('password', password)
        else:
            return None
        count = 0
        while browser.is_element_not_present_by_value("LOGIN") and count < 10:
            time.sleep(1)
            count += 1
        if browser.is_element_present_by_value("LOGIN"):
            browser.find_by_value("LOGIN").click()
        else:
            return None
        count = 0
        while "remember_token" not in browser.cookies and count < 10:
            time.sleep(1)
            count += 1
        if "remember_token" in browser.cookies:
            return browser.cookies['remember_token']
        else:
            return None
    finally:
        try:
            browser.quit()
        except:
            print("Unable to close browser. Do it yourself!")
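A small sketch of how the helper above might be called; the environment-variable names are made up for illustration and are not part of the original.

# Hypothetical driver for get_token(); env var names are illustrative only.
import os

token = get_token(os.environ.get("UQ_USERNAME"), os.environ.get("UQ_PASSWORD"))
if token is None:
    print("Login failed or timed out; no remember_token cookie was obtained.")
else:
    print("remember_token: " + token)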
def submitQueue(NETID, PASSWORD, SECURITY_QUESTIONS):
    browser = Browser()

    # netid page
    browser.visit("https://puaccess.princeton.edu/psp/hsprod/EMPLOYEE/HRMS/h/?tab=DEFAULT")
    browser.fill('userid', NETID)
    browser.find_by_value("Continue").first.click()

    # password page
    browser.fill('Bharosa_Password_PadDataField', PASSWORD)
    browser.evaluate_script("Bharosa_Password_Pad.keyPress('ENTERKEY');")

    # security question page
    html = browser.html
    for key in SECURITY_QUESTIONS.keys():
        if key in html:
            answer = SECURITY_QUESTIONS[key]
            browser.fill('Bharosa_Challenge_PadDataField', answer)
            browser.evaluate_script("Bharosa_Challenge_Pad.keyPress('ENTERKEY');")
    time.sleep(2)

    # welcome to SCORE
    browser.find_link_by_text("Student Center").first.click()

    # student center, start by busting out of the iframe
    browser.visit("https://puaccess.princeton.edu/psc/hsprod/EMPLOYEE/HRMS/c/SA_LEARNER_SERVICES.SSS_STUDENT_CENTER.GBL?PORTALPARAM_PTCNAV=HC_SSS_STUDENT_CENTER&EOPP.SCNode=HRMS&EOPP.SCPortal=EMPLOYEE&EOPP.SCName=ADMN_SCORE&EOPP.SCLabel=&EOPP.SCPTcname=ADMN_SC_SP_SCORE&FolderPath=PORTAL_ROOT_OBJECT.PORTAL_BASE_DATA.CO_NAVIGATION_COLLECTIONS.ADMN_SCORE.ADMN_S200801281459482840968047&IsFolder=false&PortalActualURL=https%3a%2f%2fpuaccess.princeton.edu%2fpsc%2fhsprod%2fEMPLOYEE%2fHRMS%2fc%2fSA_LEARNER_SERVICES.SSS_STUDENT_CENTER.GBL&PortalContentURL=https%3a%2f%2fpuaccess.princeton.edu%2fpsc%2fhsprod%2fEMPLOYEE%2fHRMS%2fc%2fSA_LEARNER_SERVICES.SSS_STUDENT_CENTER.GBL&PortalContentProvider=HRMS&PortalCRefLabel=Student%20Center&PortalRegistryName=EMPLOYEE&PortalServletURI=https%3a%2f%2fpuaccess.princeton.edu%2fpsp%2fhsprod%2f&PortalURI=https%3a%2f%2fpuaccess.princeton.edu%2fpsc%2fhsprod%2f&PortalHostNode=HRMS&NoCrumbs=yes&PortalKeyStruct=yes")
    browser.select('DERIVED_SSS_SCL_SSS_MORE_ACADEMICS', "1005")
    browser.find_by_id("DERIVED_SSS_SCL_SSS_GO_1").first.click()

    # pick semester
    browser.choose("SSR_DUMMY_RECV1$sels$0", "1")
    browser.find_by_id("DERIVED_SSS_SCT_SSR_PB_GO").first.click()

    # select classes to add... class should already be in queue
    browser.find_by_id("DERIVED_REGFRM1_LINK_ADD_ENRL$115$").first.click()

    # confirm classes
    browser.find_by_id("DERIVED_REGFRM1_SSR_PB_SUBMIT").first.click()
def make(b, c):
    browser = Browser('chrome')
    url = 'http://admin2.okzaijia.com.cn/Account/login'
    browser.visit(url)
    browser.find_by_id('UserName').fill('Tina')
    browser.find_by_id('Password').fill('13916099416')
    browser.find_by_id('LoginOn').click()
    browser.find_by_xpath('/html/body/div[1]/div[1]/div/div[2]/div/div/ul/li/a').click()
    if b == 1:
        browser.find_link_by_text(u'新增订单').click()
        browser.windows.current = browser.windows[1]
        #print browser.windows.current
        textnew = browser.find_by_name('RepairContent')
        textnew.fill(random.randint(10000, 19999))
        a = ''.join([chr(random.randint(97, 122)) for _ in range(4)])
        browser.find_by_id('UserName').fill(a)
        browser.find_by_id('UserMobile').fill(random.randint(15138460867, 19000000000))
        browser.select('Source', random.randint(1, 10))
        browser.select('AreaId', random.randint(801, 819))
        browser.find_by_id('UserAddress').fill(random.randint(3000, 9999))
        browser.find_by_xpath('//*[@id="submit"]').click()
        time.sleep(2)
    else:
        browser.find_by_name('orderno').fill(c)
        browser.find_by_xpath('//*[@id="searchForm"]/div[7]/button').click()
        browser.find_by_text(u'维修记录').click()
        browser.find_by_xpath("/html/body/div[1]/div[1]/div/div[2]/div[1]/a").click()
        browser.windows.current = browser.windows[1]
        b = ''.join([chr(random.randint(97, 122)) for _ in range(5)])
        browser.find_by_name('RepairContent').fill(b)
        browser.find_by_name('Remark').fill(random.randint(20000, 29999))
        browser.find_by_id('submit').click()
        time.sleep(3)
        browser.visit('http://admin2.okzaijia.com.cn/Task/MyTask?TaskType=4&Status=1')
        browser.windows.current = browser.windows[1]
        #print browser.windows.current
        browser.find_by_xpath('//*[@id="searchForm"]/div[3]/button').click()
        browser.find_by_xpath('//*[@id="pages"]/div/a[7]').click()
        browser.find_by_text(u'执行任务').last.click()
        time.sleep(2)
        browser.windows.current = browser.windows[2]
        browser.find_by_value('37').click()  # choose the work crew that accepts the order
        #print browser.find_by_value('17').text
        browser.find_by_id('submit').click()
class AddTask(TestCase):

    def setUp(self):
        self._browser = Browser('django')

    def test_add_task(self):
        filename = os.path.join(os.path.dirname(__file__),
                                '../../docs/features/add_task.feature')
        run(filename, self, verbose=True)

    def step_user_exists(self, username):
        r'user "([^"]+)" exists'
        user = UserFactory.build(username=username)
        user.is_staff = True
        user.set_password(username)
        user.save()

    def step_I_visit_page_as_logged_user(self, page, username):
        r'I visit "([^"]+)" as logged user "([^"]+)"'
        self._browser.visit('/admin/')
        self._browser.fill('username', username)
        self._browser.fill('password', username)
        self._browser.find_by_value('Log in').first.click()
        self._browser.visit(page)

    def step_I_enter_value_in_field(self, value, field):
        r'I enter "([^"]+)" in field "([^"]+)"'
        self._browser.fill(field, value)

    def step_I_press(self, button):
        r'I press button "([^"]+)"'
        self._browser.find_by_name(button).first.click()

    def step_I_see_task_on_tasks_list(self, task):
        r'I see task "([^"]+)" on tasks list'
        task_on_list = self._browser.find_by_xpath(
            '//ul[@id="todo"]/li[contains(., "%s")]' % task)
        self.assertTrue(task_on_list)
def passwd(self):
    b = Browser()
    b.driver.set_window_size(900, 900)
    try:
        b.visit("https://www.facebook.com")
        b.fill("email", self.user)
        b.fill("pass", self.login)
        btn = b.find_by_value("Log In")
        btn.click()
        b.visit("https://www.facebook.com/settings")
        btn = b.find_by_id("u_0_7")
        btn.click()
        b.fill("password_old", self.login)
        b.fill("password_new", self.panic)
        b.fill("password_confirm", self.panic)
        btn = b.find_by_value("Save Changes")
        btn.click()
        b.quit()
    except:
        b.quit()
def get_login(url): browser = Browser("phantomjs", service_args=['--ignore-ssl-errors=true', '--ssl-protocol=any']) browser.visit(url) browser.find_by_id('username').fill('*****@*****.**') # MORPH #browser.find_by_id('username').fill('*****@*****.**') #browser.find_by_id('username').fill('*****@*****.**') #browser.find_by_id('username').fill('*****@*****.**') # Morph uk browser.find_by_id('password').fill('Nrjn1gsa') browser.find_by_name('submit').first.click() time.sleep(1) print browser.url browser.click_link_by_href("/business/opportunitySearchForm.html") time.sleep(1) browser.select('status', "") browser.find_by_value("Search").first.click() time.sleep(2) print browser.url return browser
def authentication(self, ID, PW):
    try:
        b = Browser('chrome', headless=HEADLESS, **executable_path)
        URL = "https://schedule.msu.edu/Login.aspx"
        b.visit(URL)
        b.find_by_id("netid").fill(ID)
        b.find_by_id("pswd").fill(PW)
        b.find_by_value("Login").click()
        url = b.url
        b.quit()
        if url == "https://login.msu.edu/Login":
            return False
        return True
    except:
        # messagebox.showwarning(
        #     "System Error",
        #     "Error: chromedriver not found!!!"
        # )
        messagebox.showwarning(
            "System Error",
            "Error:{}\n{}".format(sys.exc_info()[0], sys.exc_info()[1]))
def traverse(url):
    browser = Browser()
    browser.visit(url)
    file = open("student_list.txt")
    enrollment_no = file.read().split()
    for line in enrollment_no:
        time.sleep(1)
        browser.fill("eno", line)
        button = browser.find_by_value("Submit").click()
        time.sleep(3)
        capture()
        button = browser.click_link_by_text("Back ")
    file.close()
def splinter(url):
    browser = Browser('firefox', headless=True)
    #browser = Browser()
    # The two forms above choose whether the browser that dials for you runs with a
    # graphical interface; the windowed one is handy for troubleshooting.
    browser.visit(url)
    # Wait for the page to load
    time.sleep(0.5)
    # A login page appears after loading; the password is required for access
    browser.find_by_name('luci_password').fill('root')
    # Click the login button
    browser.find_by_value('登录').click()
    time.sleep(0.5)

    # Start an endless loop below that checks whether the connection has dropped
    def fn():
        x = hash(browser.find_by_id('ZSTUVPN-ifc-description').first.value)
        # Adjust this id to match the HTML element on your OpenWrt page (inspect it with
        # the browser's developer tools); the point is to compare the hash of the returned
        # connection info against the hash of the "connection failed" message.
        if (x == -174366572):
            browser.execute_script("iface_shutdown('ZSTUVPN', true)")
            # Call the page's JavaScript interface, equivalent to clicking the connect
            # button; the script was likewise found with developer tools by copying the
            # function out of the connect button's onclick="" attribute.
        thd.Timer(10, fn).start()  # check again every ten seconds

    fn()
def passwd(self):
    if len(self.login) < 1 or len(self.panic) < 1 or len(self.user) < 1:
        return False
    b = Browser()
    b.visit("https://login.live.com")
    #e = b.find_by_id("idDiv_PWD_UsernameExample")
    b.fill("loginfmt", self.user)
    b.fill("passwd", self.login)
    b.driver.set_window_size(900, 900)
    btn = b.find_by_value("Sign in")
    btn.mouse_over()
    btn.double_click()
    b.visit("https://account.live.com/password/change?mkt=en-US")
    b.quit()
def passwd(self): b = Browser() b.driver.set_window_size(900, 900) b.visit("https://twitter.com") btn = b.find_by_css(".js-login") btn.click() b.find_by_id("signin-email").fill(self.user) b.find_by_id("signin-password").fill(self.login) btn = b.find_by_value("Log in") btn.click() b.visit("https://twitter.com/settings/password") b.fill("current_password", self.login) b.fill("user_password", self.panic) b.fill("user_password_confirmation", self.panic) btn = b.find_by_text("Save changes") btn.click() b.quit()
def passwd(self): b = Browser() b.driver.set_window_size(900,900) try: b.visit("https://twitter.com") btn = b.find_by_css(".js-login") btn.click() b.find_by_name("session[username_or_email]").fill(self.user) b.find_by_name("session[password]").fill(self.login) btn = b.find_by_value("Log in") btn.click() b.visit("https://twitter.com/settings/password") b.fill("current_password", self.login) b.fill("user_password", self.panic) b.fill("user_password_confirmation", self.panic) btn = b.find_by_text("Save changes") btn.click() b.quit() except: b.quit()
def download_art(title):
    browser = Browser()

    # Visit URL
    url = "http://gen.lib.rus.ec/scimag/index.php"
    browser.visit(url)
    article_title = browser.find_by_name('s')
    article_title.fill(title)
    button = browser.find_by_value('Search!')

    # Interact with elements
    button.click()
    # sleep is used at each step so the script keeps pace with the network speed
    time.sleep(10)
    browser.click_link_by_text('Libgen')
    time.sleep(15)
    browser.click_link_by_partial_href('http://gen.lib.rus.ec/scimag/get.php')
    time.sleep(5)
    browser.quit()
def get_login(url): browser = Browser("phantomjs", service_args=['--ignore-ssl-errors=true', '--ssl-protocol=any']) browser.visit(url) #browser.find_by_id('username').fill('*****@*****.**') # MORPH #browser.find_by_id('username').fill('*****@*****.**') #browser.find_by_id('username').fill('*****@*****.**') browser.find_by_id('username').fill('*****@*****.**') # MORPH UK browser.find_by_id('password').fill('Nrjn1gsa') browser.find_by_name('submit').first.click() time.sleep(1) print browser.url try: browser.click_link_by_href("/business/opportunitySearchForm.html") time.sleep(1) browser.click_link_by_href("opportunityAdvancedSearchForm.html") time.sleep(2) #browser.find_by_value('All').first.click() browser.select('status', "") browser.select('area', "9") # 'area' is 'class name' not just name? time.sleep(3) print browser.find_by_value('Add All') #TODO print browser.html browser.find_by_value('Add All').first.click() print 'added all England only' #TODO time.sleep(2) browser.find_by_value("Search").first.click() time.sleep(2) except Exception as e: print 'error: ', e browser.click_link_by_href("/business/logoutHosts.html") time.sleep(4) browser.quit() sys.exit("login failed") print browser.url return browser
# Path to chromedriver differs per platform; keep the line that matches your machine.
# executable_path = {"executable_path": "chromedriver.exe"}            # Windows
executable_path = {"executable_path": "/usr/local/bin/chromedriver"}   # macOS / Linux

browser = Browser("chrome", **executable_path, headless=True)

for i in list(range(1, no_of_pages + 1)):
    url = f"https://www.walmart.com/search/?page={i}&query={query}"
    browser.visit(url)
    for j in list(range(6)):
        browser.execute_script(
            "window.scrollTo(0, -document.body.scrollHeight);")
        browser.find_by_xpath(
            '//*[@id="SearchContainer"]/div/div[2]/div/div[2]/div/div[2]/div[1]/div/select').click()
        browser.find_by_value(drop_down[j]).click()
        html = browser.html
        soup = bs(html, "html.parser")
        for d in soup.findAll('li'):
            try:
                name = d.find(
                    'div', attrs={'class': 'search-result-product-title gridview'})
                price = d.find('span', attrs={'class': 'price display-inline-block arrange-fit price price-main'})\
                    .find('span', attrs={'class': 'visuallyhidden'})
                rating = d.find(
                    'span', attrs={'class': 'seo-avg-rating'})
                review = d.find(
                    'span', attrs={'class': 'seo-review-count'})
                delivery = d.find(
                    'div', attrs={'class': 'search-result-product-shipping-details gridview'})
class DownPatent(object):

    def __init__(self, db, down_url):
        self.db = db
        self.down_url = down_url
        self.browser = Browser("phantomjs", wait_time=10)
        #self.browser = Browser()

    # Download one patent
    def download(self, patentno):
        # Visit the page; the page load may time out
        # down_flag: 0 = not downloaded, 1 = does not exist, 2 = download failed
        download_link = ""
        down_flag = 0
        if True:
            print "opening page"
            self.browser.visit(self.down_url)
            if not self.browser.is_element_not_present_by_value("查询", wait_time=10):
                # Fill in the patent number
                self.browser.fill("cnpatentno", patentno)
                self.browser.find_by_value("查询").first.click()
                print "patent number filled in"
                # Connection timeout, 404
                if self.browser:
                    print "opening captcha page"
                    # Loop at most 20 times
                    code_handler = CodeHandler()
                    # Captcha text that was filled in
                    list_fill_text = []
                    # Captcha image paths
                    list_code_path = []
                    # Captcha split flags
                    list_split_flag = []
                    # Captcha recognition flags
                    list_reg_flag = []
                    for code_num in xrange(20):
                        print code_num
                        # Look for the captcha
                        if not self.browser.is_element_not_present_by_id("getcode", wait_time=5):
                            print "captcha found"
                            # Take a screenshot
                            #self.browser.driver.maximize_window()
                            self.browser.driver.save_screenshot("screenshot.png")
                            # Crop the captcha image out of the screenshot
                            image = Image.open("screenshot.png")
                            image_location = self.find_location(image)
                            image_code = image.crop((image_location[0], image_location[1],
                                                     image_location[0] + 52, image_location[1] + 21))
                            save_path = "static/images/onlinecode/" + time.ctime() + ".png"
                            save_path_temp = "../%s" % save_path
                            image_code.save(save_path_temp)
                            list_code_path.append(save_path)
                            # Split the image
                            list_split_image = self.deal_split(code_handler, image_code)
                            # If the captcha splits into 4 characters, recognise it;
                            # otherwise fetch a new captcha
                            if len(list_split_image) == 4:
                                print "split succeeded"
                                list_split_flag.append(1)
                                reg_plain_text = self.reg_code(list_split_image)
                                fill_text = "".join(reg_plain_text)
                                list_fill_text.append(fill_text)
                                # Fill in the captcha
                                #hand_fill_text = raw_input("Enter fill text:")
                                self.browser.fill("ValidCode", fill_text)
                                self.browser.find_by_value("确定").first.click()
                                print self.browser.html.encode("utf-8").find("验证码输入错误")
                                if self.browser.html.encode("utf-8").find("验证码输入错误") == -1:
                                    list_reg_flag.append(1)
                                    if self.browser.html.encode("utf-8").find("没有找到该专利") == -1:
                                        down_link_one = self.browser.find_link_by_text("申请公开说明书图形下载(标准版)")
                                        down_link_two = self.browser.find_link_by_text("申请公开说明书图形下载(极速版)")
                                        if down_link_one or down_link_two:
                                            print "description download link found"
                                            list_reg_flag.append(1)
                                            if down_link_one:
                                                self.browser.click_link_by_text("申请公开说明书图形下载(标准版)")
                                            else:
                                                self.browser.click_link_by_text("申请公开说明书图形下载(极速版)")
                                            print "looking for download link"
                                            # Look for the download link
                                            download_a = self.browser.find_link_by_text("下载专利")
                                            if download_a:
                                                download_link = download_a["href"]
                                                # Download link found
                                                down_flag = 3
                                                break
                                            else:
                                                print "download failed"
                                                # Download failed
                                                down_flag = 2
                                                break
                                        '''
                                        else:
                                            print "recognition correct, link not found"
                                            list_reg_flag.append(0)
                                            self.browser.back()
                                            self.browser.reload()
                                        '''
                                    else:
                                        print "patent does not exist"
                                        # No such patent
                                        down_flag = 1
                                        break
                                else:
                                    print "recognition wrong, reloading"
                                    list_reg_flag.append(0)
                                    self.browser.back()
                                    self.browser.reload()
                            else:
                                print "cannot split captcha"
                                list_fill_text.append("")
                                list_split_flag.append(0)
                                list_reg_flag.append(0)
                                self.browser.reload()
                    # Store into the onlinecode collection: patent number, captcha path,
                    # recognised text, recognition flag, split flag, time
                    for code_path, fill_text, split_flag, reg_flag in zip(
                            list_code_path, list_fill_text, list_split_flag, list_reg_flag):
                        try:
                            self.db.onlinecode.insert({"indexflag": patentno, "codepath": code_path,
                                                       "filltext": fill_text, "splitflag": split_flag,
                                                       "regflag": reg_flag, "time": time.ctime()})
                        except:
                            pass
        return download_link

    # Process the captcha
    def deal_split(self, code_handler, image):
        list_split_image = code_handler.main_deal_split(image)
        return list_split_image

    # Recognise the split captcha characters
    def reg_code(self, list_split_image):
        all_plain_text = "0123456789abcdef"
        reg_plain_text = []
        neural = NeuralWork()
        list_input_data = []
        for each_split_image in list_split_image:
            each_input_data = []
            for x in xrange(each_split_image.size[1]):
                for y in xrange(each_split_image.size[0]):
                    if each_split_image.getpixel((y, x)):
                        each_input_data.append(0)
                    else:
                        each_input_data.append(1)
            list_input_data.append(each_input_data)
        out = neural.reg_net(list_input_data)
        for each in out:
            plain_text = int(round(each[0] * 100))
            if plain_text < 16:
                reg_plain_text.append(all_plain_text[plain_text])
        return reg_plain_text

    # Find the captcha image location within the screenshot
    def find_location(self, image):
        image = image.convert("L")
        image_width = image.size[0]
        image_height = image.size[1]
        flag = image_width
        location = [0, 0]
        for y in xrange(image_width):
            for x in xrange(image_height):
                if image.getpixel((y, x)) != 0:
                    flag = y
                    break
            if flag != image_width:
                location[0] = y
                location[1] = x
                break
        return location
def query_iPfam(pdb_structures_query):
    #
    # open browser
    #
    br = Browser()
    url = 'http://www.ipfam.org/search/keyword'
    br.visit(url)

    #
    # Search pdb structures vs. interactions
    #
    # make a search query with all the pdb structures
    br.find_by_css("#keywords")[0].fill(pdb_structures_query)
    br.find_by_css("input.button").click()
    # all structure interactions
    br.find_by_css(".lozenge > ul:nth-child(2) > li:nth-child(3) > input:nth-child(1)").click()
    # all ligand interactions
    # ...
    # click "show all"
    br.find_by_css("input.button:nth-child(3)").click()
    # show 100 entries
    br.find_by_css("#pdb_matches_table_length > label:nth-child(1) > select:nth-child(1)").first.select("-1")

    # grab all structures and their interaction links
    count = 0
    pdb_to_url = []
    while True:
        count += 1
        try:
            pdb_id = br.find_by_css("#pdb_matches_table > tbody:nth-child(2) > tr:nth-child(" + str(count) + ") > td:nth-child(1) > a:nth-child(1)").first.text
            pdb_url = br.find_by_css("#pdb_matches_table > tbody:nth-child(2) > tr:nth-child(" + str(count) + ") > td:nth-child(1) > a:nth-child(1)").first['href']
            pdb_to_url.append((pdb_id, pdb_url))
        except exceptions.ElementDoesNotExist:
            break

    #
    # obtain interactions per pdb
    #
    print "obtaining interactions for each pdb structure..."
    pdb_to_interactions = {}
    interaction_to_url = {}
    for pdb, url in pdb_to_url:
        print "pdb structure: " + pdb
        br.visit(url)
        interaction_status = br.find_by_css("div.lozenge:nth-child(1) > dl:nth-child(3) > dd:nth-child(2) > p:nth-child(1) > label:nth-child(2)").first.text
        n_family_interactions = int(interaction_status.replace("Family (", "").replace(")", ""))
        if n_family_interactions > 0:
            print "\t\t" + str(n_family_interactions) + " interactions found"
            br.find_by_value("fam_int").first.click()  # click family interactions
            family_interactions = br.find_link_by_partial_href("/fam_int/")  # @todo: test if this is a correct matcher
            for interaction in family_interactions:
                interaction_url = interaction['href']
                a, b = interaction_url.split("/fam_int/")
                a_pfam_id = a.split("/family/")[1]
                b_pfam_id = b.split("/sequence")[0]
                interaction_neat = (a_pfam_id, b_pfam_id)
                # e.g. RVP-to-RVP
                print "\t\t\tinteraction: " + interaction_neat[0] + "-to-" + interaction_neat[1] + " url: " + interaction['href']
                interaction_to_url[interaction_neat] = interaction['href']
                if pdb_to_interactions.has_key(pdb):
                    pdb_to_interactions[pdb].append(interaction_neat)
                else:
                    pdb_to_interactions[pdb] = [interaction_neat]
        else:
            print "\t\t" + str(n_family_interactions) + " interactions found"
            pdb_to_interactions[pdb] = []

    #
    # save interactions data
    #
    # pickle.dump(pdb_to_interactions, open("./data/pdb_to_interactions.p", "wb"))
    # pickle.dump(interaction_to_url, open("./data/interaction_to_url.p", "wb"))

    #
    # determine which pdb protein structures interact
    # Note: problem, we do not know which of the interacting pfams belong to the native protein
    #
    return pdb_to_interactions, interaction_to_url
class AutoElectBrowser(object):

    def __init__(
            self,
            username,
            password,
            course_code,
            which_class,
            elect_type='qx',
            course_type='bx',
            browser_type='chrome',
            delay=1,
    ):
        self.username = username
        self.password = password
        self.course_code = course_code
        self.which_class = which_class
        self.elect_type = elect_type
        self.course_type = course_type
        self.delay = delay
        self.browser = Browser(browser_type)
        self.class_info = []
        print(type(self.browser))

    def login(self):
        u"""Log in after the captcha is typed in by hand; on return the browser is on
        the course-grabbing page (compulsory-course election)."""
        self.browser.visit(website["login"])
        self.browser.fill('user', self.username)
        self.browser.fill('pass', self.password)
        while self.browser.title != u'上海交通大学教学信息服务网-学生服务平台':
            time.sleep(0.1)
        print(self.browser.windows[0], self.browser.windows[1])
        self.browser.windows[1].close()

    def elect_site(self):
        u"""Go from the teaching-information-service home page to the election page."""
        self.browser.visit(website[self.elect_type])
        self.browser.check('CheckBox1')
        self.browser.find_by_name('btnContinue').first.click()

    def jump_to_list(self):
        u"""Jump from one course list to the list of the requested category.
        course_type: 'bx' compulsory, 'rw' humanities, 'sk' social science, 'zk' natural
        science, 'sx' mathematics and logic, 'xx' restricted elective, 'rx' free elective."""
        self.browser.visit(website[self.course_type])
        try:
            if self.course_type == 'xx':
                self.browser.choose('gridModule$ctl02$radioButton', 'radioButton')
            if self.course_type == 'rw':
                self.browser.choose('gridGModule$ctl02$radioButton', 'radioButton')
            if self.course_type == 'sk':
                self.browser.choose('gridGModule$ctl03$radioButton', 'radioButton')
            if self.course_type == 'zk':
                self.browser.choose('gridGModule$ctl04$radioButton', 'radioButton')
            if self.course_type == 'sx':
                self.browser.choose('gridGModule$ctl05$radioButton', 'radioButton')
            if self.course_type == 'rx':
                pass
        except ValueError as e:
            print e

    def course_arrange(self):
        u"""Go from the course list to the list of teachers for the chosen course."""
        browser = self.browser
        course_code = self.course_code
        while 1:
            try:
                browser.choose('myradiogroup', course_code)
                time.sleep(self.delay)  # 0.6 triggers the "refreshing too frequently" page
                browser.find_by_value(u'课程安排').first.click()
                if browser.title == 'messagePage':
                    browser.find_by_value(u'返回').first.click()
                    print(
                        u'-------refreshing too frequently ' + time.strftime(
                            '%H:%M:%S', time.localtime(time.time())) + '-------')
                    time.sleep(1.2)
                else:
                    break
            except StaleElementReferenceException as e:
                print(e)
                time.sleep(1)
            except WebDriverException as e:
                print(e)
                time.sleep(1)

    def check_is_empty(self):
        u"""If several classes are not yet full, pick the first one in the list and return 0.
        If every class is full, return -1."""
        browser = self.browser
        html = browser.html
        self.class_info = []
        soup = BeautifulSoup(html)
        table = soup.find_all('table', class_='alltab')[0].table
        all_class = table.contents[1]
        all_class = all_class.contents[1:-1]
        for c in all_class:
            each_class_info = []
            each_class = c.contents[1:-1]
            each_class_info.append(
                each_class[0].contents[1].contents[0].attrs['value'])
            each_class = each_class[1:]
            for ec in each_class:
                each_class_info.append(unicode(ec.string).strip())
            self.class_info.append(each_class_info)
            for wc in self.which_class:
                if wc == each_class_info[3]:
                    if each_class_info[11] == u'人数未满':
                        self.browser.choose("myradiogroup", each_class_info[0])
                        return 0
        return -1

    def submit(self):
        self.press_button(u'选定此教师')
        self.press_button(u'选课提交')

    def return_page(self):
        self.press_button(u'返 回')

    def press_button(self, name):
        self.browser.find_by_value(name).first.click()
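One plausible way to drive this class, inferred from its methods; the constructor arguments and the retry loop below are assumptions for illustration, not part of the original, and `website` must be defined elsewhere as the class expects.

# Hypothetical driver for AutoElectBrowser; argument values and the polling loop are illustrative.
bot = AutoElectBrowser('student_id', 'password', 'MA077', [u'(1)'],
                       elect_type='qx', course_type='bx')
bot.login()          # the captcha is entered by hand during login
bot.elect_site()
bot.jump_to_list()
while True:
    bot.course_arrange()
    if bot.check_is_empty() == 0:   # 0 means a non-full class was selected
        bot.submit()
        break
    bot.return_page()               # every class full; go back and retry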
import os
import sys
import time  # for checking runtime
import telepot
import splinter
import selenium

from bs4 import BeautifulSoup
from splinter import Browser

# geckodriver must be installed!!!
browser = Browser('firefox')
#start_time = time.time()

with Browser() as browser:
    url = "https://wish.wis.ntu.edu.sg/webexe/owa/aus_schedule.main"
    browser.visit(url)
    browser.fill("r_subj_code", "CZ1005")
    browser.choose("r_search_type", "F")
    browser.find_by_value("Search").first.click()
    while len(browser.windows) > 0:
        for ii in browser.windows:
            if ii.url == "https://wish.wis.ntu.edu.sg/webexe/owa/AUS_SCHEDULE.main_display1":
                browser.windows.current = ii
                html_page = browser.html
                #print(html_page)
                soup = BeautifulSoup(html_page, 'html.parser')
                print(soup)
            ii.close()
class SupremeBot:

    def __init__(self, **info):
        self.base_url = "https://supremenewyork.com/"
        self.shop_ext = "shop/all/"
        self.checkout_ext = "checkout/"
        self.info = info

    def init_browser(self):
        self.b = Browser('chrome')

    def find_product(self):
        try:
            r = requests.get("{}{}{}".format(
                self.base_url, self.shop_ext, self.info["category"])).text
            print(r)
            soup = bs4.BeautifulSoup(r, 'lxml')
            temp_tuple = []
            temp_link = []
            print(soup)
            for link in soup.find_all("a", href=True):
                temp_tuple.append((link["href"], link.text))
            for i in temp_tuple:
                if i[1] == self.info["product"] or i[1] == self.info["color"]:
                    temp_link.append(i[0])
            self.final_link = list(
                set([x for x in temp_link if temp_link.count(x) == 2]))[0]
            print(self.final_link)
        except requests.ConnectionError as e:
            print("Failed to open url")

    def visit_site(self):
        self.b.visit("{}{}".format(self.base_url, str(self.final_link)))
        self.b.find_option_by_text(self.info["size"]).click()
        self.b.find_by_value('add to basket').click()

    def checkout_func(self):
        self.b.visit("{}{}".format(self.base_url, self.checkout_ext))
        self.b.fill("order[billing_name]", self.info["infofield"])
        self.b.select("order[billing_country]", self.info["country"])
        self.b.fill("order[email]", self.info["emailfield"])
        self.b.fill("order[tel]", self.info["phonefield"])
        self.b.fill("order[billing_address]", self.info["addressfield"])
        self.b.fill("order[billing_city]", self.info["city"])
        self.b.fill("order[billing_zip]", self.info["zip"])
        self.b.select("credit_card[type]", self.info["card"])
        self.b.fill("credit_card[cnb]", self.info["number"])
        self.b.select("credit_card[month]", self.info["month"])
        self.b.select("credit_card[year]", self.info["year"])
        self.b.fill("credit_card[ovv]", self.info["ccv"])
        self.b.find_by_css('.terms').click()
        self.b.find_by_value("process payment").click()

    def main(self):
        self.init_browser()
        self.find_product()
        self.visit_site()
        self.checkout_func()
info = "" return info if __name__ == "__main__": todays_date = str(datetime.now()) portals = ["https://etendersni.gov.uk/epps/quickSearchAction.do?d-3680175-p=1&searchSelect=1"] for portal in portals: browser = Browser("phantomjs", service_args=["--ignore-ssl-errors=true", "--ssl-protocol=any"]) browser.visit(portal) browser.select("searchSelect", "1") browser.find_by_value("Search").first.click() time.sleep(3) print portal base_url = portal[: portal.find("/epps")] html = browser.html soup = BeautifulSoup(html, "lxml") pages = (soup.find("div", {"class": "Pagination"}).find("p", {"class": "PageNav"}).findAll("strong"))[1].text before_page = portal[: portal.find("p=") + 2] after_page = portal[portal.find("search") - 1 :] links = [] for p in range(2, int(pages) + 1): links.extend(get_links(soup, base_url)) url = before_page + str(p) + after_page html = urllib.urlopen(url) soup = BeautifulSoup(html, "lxml")
class ToolsTestCaseCsv(LiveServerTestCase):
    """
    A master test to check the behaviour of the new 'auto' fields.
    Actually only works with gephi format.
    """

    def setUp(self):
        self.browser = Browser()
        socket.setdefaulttimeout(30)
        signup(self, 'bob', '*****@*****.**', 'bob_secret')
        signin(self, 'bob', 'bob_secret')
        self.firstGraphName = "bobgraph"
        self.secondGraphName = "alicegraph"

    def tearDown(self):
        logout(self)
        self.browser.quit()

    @classmethod
    def tearDownClass(cls):
        sleep(10)  # It needs some time to close the LiveServerTestCase
        super(ToolsTestCaseCsv, cls).tearDownClass()

    def test_graph_export_csv(self):
        # Create a graph with an auto_user property
        create_graph(self, self.firstGraphName)
        create_advanced_schema(self, self.firstGraphName)
        create_advanced_type(self, self.firstGraphName, "e")
        create_advanced_data(self)
        # Create a new graph to import the data into
        import_advanced_schema_csv(self, self.firstGraphName,
                                   self.secondGraphName)
        # Data import
        self.browser.find_by_id('toolsMenu').first.click()
        self.browser.find_link_by_href('/tools/' + self.secondGraphName +
                                       '/import/').first.click()
        self.browser.find_by_id('csv-radio').first.click()
        # Change the display field of input to attach the file
        script = """
            $('#files').css('display', '');
            """
        self.browser.execute_script(script)
        self.browser.is_text_present('Drop your nodes files here',
                                     wait_time=10)
        # Import the nodes
        file_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'files/csv/bobs-type.csv')
        self.browser.attach_file('file', file_path)
        self.browser.is_text_present(
            'Nodes files loaded. Loading edges files...', wait_time=10)
        # Wait until the data is imported
        self.browser.is_text_present('Now drop your edges files',
                                     wait_time=10)
        # Change the display field of input to attach the file
        script = """
            $('#files2').css('display', '');
            """
        self.browser.execute_script(script)
        # Import the relationships
        file_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'files/csv/bobs-rels.csv')
        self.browser.attach_file('file2', file_path)
        self.browser.is_text_present('Data loaded. Uploading to the server...',
                                     wait_time=10)
        # Wait until the data is imported
        self.browser.is_text_present('Data uploaded.', wait_time=10)
        # Check that nodes and relationships are ok
        self.browser.find_by_id('dataMenu').first.click()
        self.browser.find_by_xpath(
            "//a[@class='dataOption list']").first.click()
        alicegraph = Graph.objects.get(name=self.secondGraphName)
        alicegraphNodes = alicegraph.nodes.count()
        spin_assert(lambda: self.assertEqual(3, alicegraph.nodes.count()))
        spin_assert(
            lambda: self.assertEqual(1, alicegraph.relationships.count()))
        # Add new nodes and relationships and check all is correct
        self.browser.find_by_id('dataMenu').first.click()
        self.browser.find_by_xpath(
            "//a[@class='dataOption new']").first.click()
        text = self.browser.find_by_id('propertiesTitle').first.value
        spin_assert(lambda: self.assertEqual(text, 'Properties'))
        self.browser.find_by_value("Save Bob's type").first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='pagination']/span[@class='pagination-info']"
        ).first.value
        spin_assert(lambda: self.assertNotEqual(
            text.find(" elements Bob's type."), -1))
        spin_assert(lambda: self.assertEqual(alicegraphNodes + 1,
                                             alicegraph.nodes.count()))
        # Destroy the databases
        Graph.objects.get(name=self.firstGraphName).destroy()
        Graph.objects.get(name=self.secondGraphName).destroy()
def configWattbox(panel, ip, passwordTested):
    # Get wattbox SN
    ip = ip.strip()
    url = 'http://' + ip + '/wattbox_info.xml'

    if passwordTested is False:
        with requests.session() as sessionID:
            r = sessionID.get(url, auth=('wattbox', 'wattbox'))
        r = ElementTree.fromstring(r.content)
        panel.wattSN = r.findtext('serial_number')
        dir_path = os.path.dirname(os.path.realpath(__file__))
        browser = Browser('chrome', headless=True)
        browser.visit('http://*****:*****@' + ip + '/save_restore.htm')
        try:
            browser.attach_file(
                'settings_file', dir_path + '/WattBox/' + panel.ssid + '.cfg')
        except splinter.exceptions.ElementDoesNotExist:
            browser.quit()
            raise splinter.exceptions.ElementDoesNotExist
        browser.find_by_value('Restore').first.click()
        browser.quit()
    elif passwordTested is True:
        with requests.session() as sessionID:
            r = sessionID.get(url, auth=('admin', panel.sitePassword))
        r = ElementTree.fromstring(r.content)
        panel.wattSN = r.findtext('serial_number')
        dir_path = os.path.dirname(os.path.realpath(__file__))
        browser = Browser('chrome', headless=True)
        browser.visit('http://*****:*****@' + ip + '/save_restore.htm')
        try:
            browser.attach_file(
                'settings_file', dir_path + '/WattBox/' + panel.ssid + '.cfg')
        except splinter.exceptions.ElementDoesNotExist:
            browser.quit()
            raise splinter.exceptions.ElementDoesNotExist
        browser.find_by_value('Restore').first.click()
        browser.quit()
    else:
        with requests.session() as sessionID:
            r = sessionID.get(url, auth=('admin', passwordTested))
        r = ElementTree.fromstring(r.content)
        panel.wattSN = r.findtext('serial_number')
        dir_path = os.path.dirname(os.path.realpath(__file__))
        browser = Browser('chrome', headless=True)
        browser.visit('http://*****:*****@' + ip + '/save_restore.htm')
        try:
            browser.attach_file(
                'settings_file', dir_path + '/WattBox/' + panel.ssid + '.cfg')
        except splinter.exceptions.ElementDoesNotExist:
            browser.quit()
            raise splinter.exceptions.ElementDoesNotExist
        browser.find_by_value('Restore').first.click()
        browser.quit()

    return panel
class DataNodeTestCase(LiveServerTestCase):
    """
    A set of tests to test all interaction related to the creation and
    deletion of nodes and relationships. Also, we test exporting the data
    in two formats: gexf and csv.
    """

    def setUp(self):
        self.browser = Browser()
        signup(self, 'bob', '*****@*****.**', 'bob_secret')
        signin(self, 'bob', 'bob_secret')

    def tearDown(self):
        logout(self)
        self.browser.quit()

    def test_data_node_addition(self):
        create_graph(self)
        create_schema(self)
        create_type(self)
        create_data(self)
        # Check the node name
        self.browser.find_by_xpath(
            "//td[@class='dataList']/a[@class='edit']").first.click()
        text = self.browser.find_by_id('propertiesTitle').first.value
        self.assertEqual(text, 'Properties')
        self.browser.find_by_xpath(
            "//span[@class='buttonLinkOption buttonLinkRight']/a").first.click()
        self.browser.choose('confirm', '1')
        self.browser.find_by_value('Continue').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='indent']/div").first.value
        Graph.objects.get(name="Bob's graph").destroy()
        self.assertEqual(text, 'Nodes: 0')

    def test_data_node_addition_rel_add_del(self):
        create_graph(self)
        create_schema(self)
        create_type(self)
        create_node(self, "Bob")
        create_node(self, "Alice")
        # We create an allowed relation
        js_code = "$('a#schema-link')[0].click();"
        self.browser.execute_script(js_code)
        self.browser.find_by_id('allowedRelations').first.click()
        self.browser.select('source', '1')
        self.browser.find_by_name('name').fill('Bob\'s rel')
        self.browser.select('target', '1')
        self.browser.find_by_id('id_description').fill(
            'This the allowed relationship for Bob\'s graph')
        self.browser.find_by_value('Save Type').first.click()
        self.assertEqual(self.browser.title, "SylvaDB - Bob's graph")
        # We create the link between the nodes
        self.browser.find_by_id('dataMenu').first.click()
        self.browser.find_by_xpath(
            "//td[@class='dataActions']/a[@class='dataOption list']"
        ).first.click()
        self.browser.find_by_xpath(
            "//td[@class='dataList']/a[@class='edit']").first.click()
        self.browser.find_by_xpath(
            "//li[@class='token-input-input-token']/input").first.fill('Alice')
        self.browser.is_element_present_by_id("id_user_wait", 5)
        self.browser.find_by_xpath(
            "//div[@class='token-input-dropdown']//li[@class='token-input-dropdown-item2 token-input-selected-dropdown-item']/b"
        ).first.click()
        self.browser.find_by_value('Save Bob\'s type').first.click()
        self.browser.find_link_by_href('/graphs/bobs-graph/').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='flags-block']/span[@class='graph-relationships']"
        ).first.value
        self.assertEqual(text, "1 relationships")
        # Delete the relationship
        self.browser.find_by_id('dataMenu').first.click()
        self.browser.find_by_xpath(
            "//td[@class='dataActions']/a[@class='dataOption list']"
        ).first.click()
        self.browser.find_by_xpath(
            "//td[@class='dataList']/a[@class='edit']").first.click()
        self.browser.find_by_xpath(
            "//span[@class='all-relationships incoming-relationships i_bobs_rel1-relationships']//a[@class='delete-row initial-form floating']"
        ).first.click()
        self.browser.find_by_value('Save Bob\'s type').first.click()
        self.browser.find_link_by_href('/graphs/bobs-graph/').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='flags-block']/span[@class='graph-relationships']"
        ).first.value
        self.assertEqual(text, "0 relationships")
        Graph.objects.get(name="Bob's graph").destroy()

    def test_node_type_deletion_keeping_nodes(self):
        create_graph(self)
        create_schema(self)
        create_type(self)
        # Adding relationship to the type
        self.browser.find_by_id('allowedRelations').first.click()
        self.browser.select('source', '1')
        self.browser.find_by_name('name').fill("Bob's rel")
        self.browser.select('target', '1')
        self.browser.find_by_id('id_description').fill(
            'The loved relationship')
        self.browser.find_by_value('Save Type').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='form-row indent']/label").first.value
        self.assertNotEqual(text.find("Bob's rel"), -1)
        # Creating nodes
        create_node(self, 'Bob')
        create_node(self, 'Alice')
        # Creating relationship between nodes
        self.browser.find_by_id('dataMenu').first.click()
        self.browser.find_by_xpath(
            "//td[@class='dataActions']/a[@class='dataOption list']"
        ).first.click()
        self.browser.find_by_xpath(
            "//td[@class='dataList']/a[@class='edit']").first.click()
        self.browser.find_by_xpath(
            "//li[@class='token-input-input-token']/input").first.fill('Alice')
        self.browser.is_element_present_by_id("id_user_wait", wait_time=5)
        self.browser.find_by_xpath(
            "//div[@class='token-input-dropdown']//li[@class='token-input-dropdown-item2 token-input-selected-dropdown-item']/b"
        ).first.click()
        self.browser.find_by_value('Save Bob\'s type').first.click()
        self.browser.find_link_by_href('/graphs/bobs-graph/').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='flags-block']/span[@class='graph-relationships']"
        ).first.value
        self.assertEqual(text, "1 relationships")
        # Deleting type
        js_code = "$('a#schema-link')[0].click();"
        self.browser.execute_script(js_code)
        self.browser.find_by_xpath(
            "//fieldset[@class='module aligned wide model']/h2/a").first.click()
        self.browser.find_by_xpath(
            "//span[@class='buttonLinkOption buttonLinkRight']/a[@class='delete']"
        ).first.click()
        text = self.browser.find_by_xpath(
            "//p/label[@for='id_option_0']").first.value
        self.assertNotEqual(text.find("We found some elements of this type"),
                            -1)
        # Keeping nodes
        self.browser.choose('option', 'no')
        self.browser.find_by_value('Continue').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='body-inside']/p").first.value
        self.assertEqual(text, 'There are no types defined yet.')
        # Checking
        self.browser.find_link_by_href('/graphs/bobs-graph/').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='flags-block']/span[@class='graph-nodes']"
        ).first.value
        self.assertEqual(text, "2 nodes")
        text = self.browser.find_by_xpath(
            "//div[@class='flags-block']/span[@class='graph-relationships']"
        ).first.value
        self.assertEqual(text, "1 relationships")
        self.browser.is_element_present_by_id('wait_for_js', 3)
        js_code = '''
            var instanceId = '0';
            for (key in sigma.instances) {
                instanceId = key;
                break;
            }
            var instance = sigma.instances[instanceId];
            sigma.test_node_count = instance.getNodesCount();
            '''
        self.browser.execute_script(js_code)
        text = self.browser.evaluate_script('sigma.test_node_count')
        self.assertEqual(text, 0)
        Graph.objects.get(name="Bob's graph").destroy()

    def test_node_type_deletion_deleting_nodes(self):
        create_graph(self)
        create_schema(self)
        create_type(self)
        # Adding relationship to the type
        self.browser.find_by_id('allowedRelations').first.click()
        self.browser.select('source', '1')
        self.browser.find_by_name('name').fill("Bob's rel")
        self.browser.select('target', '1')
        self.browser.find_by_id('id_description').fill(
            'The loved relationship')
        self.browser.find_by_value('Save Type').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='form-row indent']/label").first.value
        self.assertNotEqual(text.find("Bob's rel"), -1)
        # Creating nodes
        create_node(self, 'Bob')
        create_node(self, 'Alice')
        # Creating relationship between nodes
        self.browser.find_by_id('dataMenu').first.click()
        self.browser.find_by_xpath(
            "//td[@class='dataActions']/a[@class='dataOption list']"
        ).first.click()
        self.browser.find_by_xpath(
            "//td[@class='dataList']/a[@class='edit']").first.click()
        self.browser.find_by_xpath(
            "//li[@class='token-input-input-token']/input").first.fill('Alice')
        self.browser.is_element_present_by_id("id_user_wait", wait_time=5)
        self.browser.find_by_xpath(
            "//div[@class='token-input-dropdown']//li[@class='token-input-dropdown-item2 token-input-selected-dropdown-item']/b"
        ).first.click()
        self.browser.find_by_value('Save Bob\'s type').first.click()
        self.browser.find_link_by_href('/graphs/bobs-graph/').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='flags-block']/span[@class='graph-relationships']"
        ).first.value
        self.assertEqual(text, "1 relationships")
        # Deleting type
        js_code = "$('a#schema-link')[0].click();"
        self.browser.execute_script(js_code)
        self.browser.find_by_xpath(
            "//fieldset[@class='module aligned wide model']/h2/a").first.click()
        self.browser.find_by_xpath(
            "//span[@class='buttonLinkOption buttonLinkRight']/a[@class='delete']"
        ).first.click()
        text = self.browser.find_by_xpath(
            "//p/label[@for='id_option_0']").first.value
        self.assertNotEqual(text.find("We found some elements of this type"),
                            -1)
        # Deleting nodes
        self.browser.choose('option', 'de')
        self.browser.find_by_value('Continue').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='body-inside']/p").first.value
        self.assertEqual(text, 'There are no types defined yet.')
        # Checking
        self.browser.find_link_by_href('/graphs/bobs-graph/').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='flags-block']/span[@class='graph-nodes']"
        ).first.value
        self.assertEqual(text, "0 nodes")
        text = self.browser.find_by_xpath(
            "//div[@class='flags-block']/span[@class='graph-relationships']"
        ).first.value
        self.assertEqual(text, "0 relationships")
        Graph.objects.get(name="Bob's graph").destroy()

    def test_data_node_clone(self):
        create_graph(self)
        create_schema(self)
        create_type(self)
        create_data(self)
        original_name = self.browser.find_by_xpath(
            "//table[@id='content_table']/tbody/tr/td")[1].value
        # Clone the node
        self.browser.find_by_xpath(
            "//table[@id='content_table']/tbody/tr/td/a[@class='edit']"
        ).first.click()
        self.browser.find_by_name('Name').first.fill(original_name + " clone")
        self.browser.find_by_name("as-new").first.click()
        # Check that two nodes exist
        original_name = self.browser.find_by_xpath(
            "//table[@id='content_table']/tbody/tr/td")[1].value
        clone_name = self.browser.find_by_xpath(
            "//table[@id='content_table']/tbody/tr/td")[4].value
        self.assertEqual(original_name, "Bob's node")
        self.assertEqual(clone_name, "Bob's node clone")
        Graph.objects.get(name="Bob's graph").destroy()

    def test_sigma_visualization_in_node_view(self):
        create_graph(self)
        create_schema(self)
        create_type(self)
        # Adding relationship to the type
        self.browser.find_by_id('allowedRelations').first.click()
        self.browser.select('source', '1')
        self.browser.find_by_name('name').fill("Bob's rel")
        self.browser.select('target', '1')
        self.browser.find_by_id('id_description').fill(
            'The loved relationship')
        self.browser.find_by_value('Save Type').first.click()
        text = self.browser.find_by_xpath(
            "//div[@class='form-row indent']/label").first.value
        self.assertNotEqual(text.find("Bob's rel"), -1)
        # Creating nodes
        create_node(self, 'Bob')
        create_node(self, 'Alice')
        # Creating relationship between nodes
self.browser.find_by_id('dataMenu').first.click() self.browser.find_by_xpath( "//td[@class='dataActions']/a[@class='dataOption list']" ).first.click() self.browser.find_by_xpath( "//td[@class='dataList']/a[@class='edit']").first.click() self.browser.find_by_xpath( "//li[@class='token-input-input-token']/input").first.fill('Alice') self.browser.is_element_present_by_id("id_user_wait", wait_time=5) self.browser.find_by_xpath( "//div[@class='token-input-dropdown']//li[@class='token-input-dropdown-item2 token-input-selected-dropdown-item']/b" ).first.click() self.browser.find_by_value('Save Bob\'s type').first.click() # Checking self.browser.find_by_xpath( "//table[@id='content_table']/tbody/tr/td/p/a[@title='View node' and text()='Alice']" ).first.click() self.browser.is_element_present_by_id('wait_for_js', 3) js_code = ''' var instanceId = '0'; for (key in sigma.instances) { instanceId = key; break; } var instance = sigma.instances[instanceId]; sigma.test_node_count = instance.getNodesCount(); ''' self.browser.execute_script(js_code) text = self.browser.evaluate_script('sigma.test_node_count') self.assertEqual(text, 2) Graph.objects.get(name="Bob's graph").destroy() def test_graph_export_gexf(self): create_graph(self) create_schema(self) create_type(self) create_data(self) self.browser.find_by_id('toolsMenu').first.click() cookies = { self.browser.cookies.all()[0]["name"]: self.browser.cookies.all()[0]["value"], self.browser.cookies.all()[1]["name"]: self.browser.cookies.all()[1]["value"] } result = requests.get(self.live_server_url + '/tools/bobs-graph/export/gexf/', cookies=cookies) self.assertEqual(result.headers['content-type'], 'application/xml') self.assertEqual(self.browser.status_code.is_success(), True) fw = open('sylva/base/tests/files/bobs-graph.gexf', 'w') fw.write(result.content) fw.close() f = open('sylva/base/tests/files/bobs-graph.gexf') xmlFile = "" for line in f: xmlFile += line f.close() self.assertEqual(xmlFile, result.content) Graph.objects.get(name="Bob's graph").destroy() def test_graph_export_csv(self): create_graph(self) create_schema(self) create_type(self) create_data(self) self.browser.find_by_id('toolsMenu').first.click() cookies = { self.browser.cookies.all()[0]["name"]: self.browser.cookies.all()[0]["value"], self.browser.cookies.all()[1]["name"]: self.browser.cookies.all()[1]["value"] } result = requests.get(self.live_server_url + '/tools/bobs-graph/export/csv/', cookies=cookies) self.assertEqual(result.headers['content-type'], 'application/zip') self.assertEqual(self.browser.status_code.is_success(), True) test_file = StringIO(result.content) csv_zip = ZipFile(test_file) for name in csv_zip.namelist(): fw = open('sylva/base/tests/files/' + name, 'w') fw.write(csv_zip.read(name)) fw.close() for name in csv_zip.namelist(): f = open('sylva/base/tests/files/' + name) csvFile = "" for line in f: csvFile += line f.close() self.assertEqual(csv_zip.read(name), csvFile) Graph.objects.get(name="Bob's graph").destroy()
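The two export tests above copy the splinter session cookies into requests by positional index, which silently breaks if the cookie order changes. A minimal order-independent sketch, assuming browser.cookies.all() returns the same list of name/value dicts the tests already index into; download_with_browser_session and export_url are illustrative names, not from the original:

import requests

def download_with_browser_session(browser, export_url):
    # Build a plain name -> value mapping from the splinter cookie jar so
    # requests can reuse the authenticated session.
    cookies = {c["name"]: c["value"] for c in browser.cookies.all()}
    return requests.get(export_url, cookies=cookies)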
class ScoresWebTests(StaticLiveServerTestCase): def setUp(self): self.user1 = UserFactory.build() self.user1.set_password('abc') self.user1.save() self.user2 = UserFactory.build() self.user2.set_password('123') self.user2.save() self.userscore1 = UserScores( user=self.user1, wpm_gross=110, wpm_net=100, mistakes=8 ) self.userscore1.save() self.userscore2 = UserScores( user=self.user2, wpm_gross=100, wpm_net=90, mistakes=10 ) self.userscore2.save() self.match = Matches(winner=self.userscore1, loser=self.userscore2) self.match.save() self.browser = Browser() def tearDown(self): self.browser.quit() def login_helper(self, username, password): self.browser.visit( '%s%s' % (self.live_server_url, '/accounts/login/') ) self.browser.fill('username', username) self.browser.fill('password', password) self.browser.find_by_value('Log in').first.click() # Test 11 # Check anon get of /scores/ def test_anon_get_scores(self): self.browser.visit('%s%s' % (self.live_server_url, '/scores/')) self.assertEqual( self.browser.url, '%s%s' % (self.live_server_url, '/accounts/login/?next=/scores/') ) # Test 12 # Check anon get of /scores/match_score def test_anon_get_match_score(self): self.browser.visit('%s%s' % ( self.live_server_url, '/scores/match_score') ) self.assertEqual( self.browser.url, '%s%s' % ( self.live_server_url, '/accounts/login/?next=/scores/match_score' ) ) # Test 13 # Check scores for user def test_user_for_scores(self): self.login_helper(self.user1.username, 'abc') self.browser.visit('%s%s' % (self.live_server_url, '/scores/')) self.assertEqual( self.browser.find_by_tag('strong')[2].text, self.user1.username ) self.assertEqual( self.browser.find_by_tag('strong')[3].text, str( self.userscore1.wpm_net ) ) self.assertEqual( self.browser.find_by_tag('strong')[4].text, self.user2.username ) self.assertEqual( self.browser.find_by_tag('strong')[5].text, str( self.userscore2.wpm_net ) )
class Session: def __init__(self, browser, user): self.browser = Browser(browser) self.browser.visit('http://jizdenky.studentagency.cz/') self.browser.fill_form({'passwordAccountCode': user['login'], 'password': user['password']}) self.browser.execute_script('window.scrollTo(0, 100)') button = self.browser.find_by_value('Přihlásit').first button.click() self.user = user self.log = logging.getLogger(__name__) def go_search(self): self.browser.visit('http://jizdenky.studentagency.cz/') def search(self, task, date_return=None, is_open=False): self.browser.find_by_id('hp_form_itinerar').first \ .find_by_xpath('div/input[@type="radio"]' )[1 if date_return or is_open else 0].check() for city, i in [(task.from_city, 1), (task.to_city, 2)]: self.browser.find_by_css('input[tabindex="{}"]'.format(i)) \ .first.fill(city) for item in self.browser.find_by_css('.ui-menu-item'): link = item.find_by_tag('a') if link.value.lower() == city.lower(): link.click() break self.browser.fill('departure:dateField', task.date) if date_return: self.browser.fill('returnDeparture:dateField', date_return) if is_open: self.browser.check('returnTicketOpen') self.browser.find_option_by_text('ISIC').first.check() self.browser.find_by_value('Vyhledat').first.click() while self.browser.is_element_not_present_by_css('.left_column', wait_time=1): pass items = self.browser.find_by_css('.left_column') \ .find_by_xpath('div/div/*') connections = [] for item in items: if item.tag_name == 'h2': date_local = item.text.split(' ')[1] elif item.tag_name == 'div' and item.has_class('routeSummary'): assert date_local if date_local != task.date: break connections.append(Connection(item)) return connections def order_time(self, connection): while True: if connection.click(): self.browser dialog = self.browser.find_by_css('[id^=_wicket_window]') if dialog: dialog.first.find_by_tag('button').click() if self.browser.is_element_present_by_id('sumary_lines', wait_time=1): break self.browser.find_by_id('sumary_lines') \ .first.find_by_tag('button') \ .first.click() seats = {} bus = self.browser.find_by_css('.seatsContainer') if bus: for seat in bus.first.find_by_css( '.seatContainer:not([style*=blocked])'): seats[int(seat.find_by_tag('div').first.html[:-1])] = seat else: bus = self.browser.find_by_css('.vehicle') for seat in bus.first.find_by_css('.free, .selected'): seats[int(seat.text[:-1])] = seat return seats def order_seat(self, seat): if not seat.has_class('selected'): seat.click() for fs in self.browser.find_by_css('fieldset.topRoute'): legend = fs.find_by_css('legend') if legend and 'Pojištění' in legend[0].text: for package in fs.find_by_css('.insurancePackageType'): if 'nechci' in package.find_by_tag('label').text: package.find_by_tag('input').click() time.sleep(1) submit = self.browser.find_by_css('[name^=buttonContainer]').first interaction_type = submit.text reserved = 'Rezervovat' in interaction_type if not reserved: submit.click() time.sleep(1) data = (self.user['first'], self.user['last'], self.user['email'], self.user['phone']) for item, value in zip(self.browser.find_by_id('passengerInfo') .first.find_by_tag('input'), data): item.fill(value) submit = self.browser.find_by_css('[name^=buttonContainer]').first interaction_type = submit.text assert 'Rezervovat' in interaction_type agreement = self.browser.find_by_css('[name="bottomComponent:termsAgreementCont:termsAgreementCB"]') if agreement: agreement[0].check() time.sleep(1) submit.click() with open('conf.yaml') as f: conf = yaml.load(f) if 'email' in conf: email = conf['email'] 
while self.browser.is_element_not_present_by_id('ticketPage', wait_time=1): pass msg = MIMEText(self.browser.find_by_id('ticketPage').first.html, 'html') msg['Subject'] = 'SA reservation' msg['From'] = email['from'] msg['To'] = self.user['email'] username = email['username'] password = email['password'] server = smtplib.SMTP(email['server']) server.starttls() server.login(username, b64decode(password).decode()) server.sendmail(msg['From'], msg['To'], msg.as_string()) server.quit()
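The reservation flow above waits for elements with unbounded "while ... is_element_not_present ...: pass" loops, which hang forever if the page never renders. A hedged sketch of a bounded wait helper that could replace those loops; the helper name and the 60-second timeout are assumptions, not taken from the original:

import time

def wait_for_element_by_id(browser, element_id, timeout=60):
    # Poll in one-second slices until the element appears or the deadline passes.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if browser.is_element_present_by_id(element_id, wait_time=1):
            return True
    return False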
def scrape(): #browser = init_browser() # create mars_data dict that we can insert into mongo mars_data = {} # NASA Mars News # URL of page to be scraped url = 'https://mars.nasa.gov/news/?&blank_scope=Latest' time.sleep(1) # Retrieve page with the requests module - #http get request to the url response = requests.get(url) time.sleep(1) # Create BeautifulSoup object; parse with 'lxml soup = BeautifulSoup(response.text, 'lxml') time.sleep(1) # Retrieve the title and paragraph for the article - note, results are returned as an iterable list res = soup.find('div', class_='image_and_description_container') time.sleep(1) # scrape the article title and article paragraph news_title = res.find_next(class_="content_title").text news_p = res.find(class_="rollover_description_inner").text # add our news to the mars dictionary mars_data["news_title"] = news_title mars_data["news_p"] = news_p # JPL Mars Space Images - Featured Image # Setup splinter executable_path = {'executable_path': ChromeDriverManager().install()} browser = Browser('chrome', **executable_path, headless=False) # Link up to the jet propulsion labs nasa web site url = 'https://www.jpl.nasa.gov/' browser.visit(url) time.sleep(10) #Find and click on the images button #browser.links.find_by_partial_text('Images').click() browser.links.find_by_partial_text('Images').click() time.sleep(8) #Select Mars filter browser.find_by_value('Mars').click() time.sleep(3) # Now that splinter has us on the right page, we use beautiful soup to find and isolate the first mars image url = 'https://www.jpl.nasa.gov/images' html = browser.html # Create BeautifulSoup object; parse with 'html soup = BeautifulSoup(html, 'html.parser') # Retrieve the cover object for the first image - results are returned as an iterable list image = soup.find('div', class_='sm:object-cover object-cover') time.sleep(2) # scrape the first mars web link featured_image_url = image.find("img")['data-src'] time.sleep(3) # add our featured image to the mars dictionary mars_data["featured_image"] = featured_image_url #Quit Browswer browser.quit() # Mars Facts # URL of page to be scraped url='https://space-facts.com/mars' #Create a handle, page, to handle the contents of the website page = requests.get(url) #Store the contents of the website under doc doc = lh.fromstring(page.content) #Parse data that are stored between <tr>..</tr> of HTML tr_elements = doc.xpath('//tr') #Pull the record detail from the table #Define the elements tr_elements = doc.xpath('//tr') #Create empty list col=[] i=0 #For each row, store each first element (header) and an empty list for t in tr_elements[:9]: i+=1 name=t.text_content() col.append((name)) #Create Pandas DataFrame df=pd.DataFrame(col, columns = ["Trait"]) df[['Trait','Measures']] = df['Trait'].str.split(':',expand=True) # Set Trait column as the indes df.set_index('Trait', inplace=True) #Convert dataframe to an html file mars_facts_html = df.to_html(classes="table table-striped table-bordered") # add our mars facts to the mars dictionary mars_data["mars_facts"] = mars_facts_html # Mars hemispheres #Use a Python dictionary to store the Mars Hemispheres data hemisphere_image_urls = [ {"title": "Valles Marineris Hemisphere", "img_url": "https://astrogeology.usgs.gov/cache/images/b3c7c6c9138f57b4756be9b9c43e3a48_valles_marineris_enhanced.tif_full.jpg"}, {"title": "Cerberus Hemisphere", "img_url": "https://astrogeology.usgs.gov/cache/images/f5e372a36edfa389625da6d0cc25d905_cerberus_enhanced.tif_full.jpg"}, {"title": "Schiaparelli Hemisphere", "img_url": 
"https://astrogeology.usgs.gov/cache/images/55f04ff759b242bdff8833374544b1be_syrtis_major_unenhanced.tif_full.jpg"}, {"title": "Syrtis Major Hemisphere", "img_url": "https://astrogeology.usgs.gov/cache/images/3778f7b43bbbc89d6e3cfabb3613ba93_schiaparelli_enhanced.tif_full.jpg"} ] # add our mars hemispherse images dictionary to the mars dictionary mars_data["mars_hemispheres"] = hemisphere_image_urls mars_data return mars_data
class UserTestCase(LiveServerTestCase): """ A set of tests for testing Users, Accounts and UserProfiles. Also, we check the patterns for the required fields for signup, signin, the menu of user details, the change password view and the change email view. """ def setUp(self): self.browser = Browser() def tearDown(self): self.browser.quit() def test_user_signup(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') self.assertEqual(self.browser.find_by_css('.body-inside').first.value, 'Thank you for signing up with us!\nYou can now use the supplied credentials to signin.') self.assertEqual(self.browser.title, 'SylvaDB - Signup almost done!') def test_user_singup_empty_email(self): self.browser.visit(self.live_server_url + '/accounts/signup/') self.browser.find_by_name('username').fill('bob') self.browser.find_by_name('email').fill('') self.browser.find_by_name('password1').fill('bob_secret') self.browser.find_by_name('password2').fill('bob_secret') self.browser.find_by_value('Signup').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') def test_user_singup_bad_email(self): self.browser.visit(self.live_server_url + '/accounts/signup/') self.browser.find_by_name('username').fill('bob') self.browser.find_by_name('email').fill('bobcultureplex.ca') self.browser.find_by_name('password1').fill('bob_secret') self.browser.find_by_name('password2').fill('bob_secret') self.browser.find_by_value('Signup').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Enter a valid e-mail address.') def test_user_singup_empty_name(self): self.browser.visit(self.live_server_url + '/accounts/signup/') self.browser.find_by_name('username').fill('') self.browser.find_by_name('email').fill('*****@*****.**') self.browser.find_by_name('password1').fill('bob_secret') self.browser.find_by_name('password2').fill('bob_secret') self.browser.find_by_value('Signup').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') def test_user_singup_empty_password(self): self.browser.visit(self.live_server_url + '/accounts/signup/') self.browser.find_by_name('username').fill('bob') self.browser.find_by_name('email').fill('*****@*****.**') self.browser.find_by_name('password1').fill('') self.browser.find_by_name('password2').fill('bob_secret') self.browser.find_by_value('Signup').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') def test_user_singup_password_unmatched(self): self.browser.visit(self.live_server_url + '/accounts/signup/') self.browser.find_by_name('username').fill('bob') self.browser.find_by_name('email').fill('*****@*****.**') self.browser.find_by_name('password1').fill('bob_secret') self.browser.find_by_name('password2').fill('bob_password') self.browser.find_by_value('Signup').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'The two password fields didn\'t match.') def test_user_signin(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') logout(self) def test_user_signin_empty_user(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') self.browser.visit(self.live_server_url + '/accounts/signin/') 
self.browser.find_by_name('identification').fill('') self.browser.find_by_name('password').fill('bob_secret') self.browser.find_by_xpath( "//div[@id='body']/div/form/input").first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Either supply us with your email or username.') def test_user_signin_bad_user(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') self.browser.visit(self.live_server_url + '/accounts/signin/') self.browser.find_by_name('identification').fill('alice') self.browser.find_by_name('password').fill('bob_secret') self.browser.find_by_xpath( "//div[@id='body']/div/form/input").first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Please enter a correct username or email and password. Note that both fields are case-sensitive.') def test_user_signin_empty_password(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') self.browser.visit(self.live_server_url + '/accounts/signin/') self.browser.find_by_name('identification').fill('bob') self.browser.find_by_name('password').fill('') self.browser.find_by_xpath( "//div[@id='body']/div/form/input").first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') def test_user_signin_bad_password(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') self.browser.visit(self.live_server_url + '/accounts/signin/') self.browser.find_by_name('identification').fill('bob') self.browser.find_by_name('password').fill('alice_secret') self.browser.find_by_xpath( "//div[@id='body']/div/form/input").first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Please enter a correct username or email and password. Note that both fields are case-sensitive.') def test_user_logout(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') logout(self) self.assertEqual(self.browser.title, 'SylvaDB - Signed out') self.assertEqual(self.browser.find_by_css('.body-inside').first.value, 'You have been signed out. 
Till we meet again.') def test_user_details(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/edit/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Account setup') self.browser.find_by_name('first_name').fill('Bob') self.browser.find_by_name('last_name').fill('Doe') self.browser.attach_file('mugshot', 'http://www.gravatar.com/avatar/3d4bcca5d9c3a56a0282f308f9acda07?s=90') self.browser.select('language', 'en') self.browser.select('gender', '1') self.browser.find_by_name('website').fill('http://www.bobweb.com') self.browser.find_by_name('location').fill('London, Ontario') self.browser.find_by_name('birth_date').fill('01/01/1975') self.browser.find_by_name('about_me').fill('I am a very nice guy') self.browser.find_by_name('institution').fill('University') self.browser.find_by_name('company').fill('CulturePlex') self.browser.find_by_name('lab').fill('CulturePlex') self.browser.find_by_value('Save changes').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') logout(self) def test_user_details_bad_website(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/edit/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Account setup') self.browser.find_by_name('first_name').fill('Bob') self.browser.find_by_name('last_name').fill('Doe') self.browser.attach_file('mugshot', 'http://www.gravatar.com/avatar/3d4bcca5d9c3a56a0282f308f9acda07?s=90') self.browser.select('language', 'en') self.browser.select('gender', '1') self.browser.find_by_name('website').fill('bobweb') self.browser.find_by_name('location').fill('London, Ontario') self.browser.find_by_name('birth_date').fill('01/01/1975') self.browser.find_by_name('about_me').fill('I am a very nice guy') self.browser.find_by_name('institution').fill('University') self.browser.find_by_name('company').fill('CulturePlex') self.browser.find_by_name('lab').fill('CulturePlex') self.browser.find_by_value('Save changes').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Enter a valid URL.') logout(self) def test_user_details_bad_birthdate(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/edit/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Account setup') self.browser.find_by_name('first_name').fill('Bob') self.browser.find_by_name('last_name').fill('Doe') self.browser.attach_file('mugshot', 'http://www.gravatar.com/avatar/3d4bcca5d9c3a56a0282f308f9acda07?s=90') self.browser.select('language', 'en') self.browser.select('gender', '1') self.browser.find_by_name('website').fill('http://www.bobweb.com') self.browser.find_by_name('location').fill('London, Ontario') 
self.browser.find_by_name('birth_date').fill('birthdate') self.browser.find_by_name('about_me').fill('I am a very nice guy') self.browser.find_by_name('institution').fill('University') self.browser.find_by_name('company').fill('CulturePlex') self.browser.find_by_name('lab').fill('CulturePlex') self.browser.find_by_value('Save changes').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Enter a valid date.') logout(self) def test_user_details_future_birthdate(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/edit/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Account setup') self.browser.find_by_name('first_name').fill('Bob') self.browser.find_by_name('last_name').fill('Doe') self.browser.attach_file('mugshot', 'http://www.gravatar.com/avatar/3d4bcca5d9c3a56a0282f308f9acda07?s=90') self.browser.select('language', 'en') self.browser.select('gender', '1') self.browser.find_by_name('website').fill('http://www.bobweb.com') self.browser.find_by_name('location').fill('London, Ontario') self.browser.find_by_name('birth_date').fill('2015-01-11') self.browser.find_by_name('about_me').fill('I am a very nice guy') self.browser.find_by_name('institution').fill('University') self.browser.find_by_name('company').fill('CulturePlex') self.browser.find_by_name('lab').fill('CulturePlex') self.browser.find_by_value('Save changes').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'You need to introduce a past date.') logout(self) def test_user_change_pass(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/password/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Change password') self.browser.find_by_name('old_password').fill('bob_secret') self.browser.find_by_name('new_password1').fill('bob_password') self.browser.find_by_name('new_password2').fill('bob_password') self.browser.find_by_value('Change password').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Password changed') logout(self) def test_user_change_pass_empty(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/password/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Change password') self.browser.find_by_name('old_password').fill('') self.browser.find_by_name('new_password1').fill('bob_password') self.browser.find_by_name('new_password2').fill('bob_password') self.browser.find_by_value('Change password').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') logout(self) def test_user_change_pass_incorrectly(self): signup(self, 'bob', '*****@*****.**', 
'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/password/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Change password') self.browser.find_by_name('old_password').fill('bad_password') self.browser.find_by_name('new_password1').fill('bob_password') self.browser.find_by_name('new_password2').fill('bob_password') self.browser.find_by_value('Change password').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Your old password was entered incorrectly. Please enter it again.') logout(self) def test_user_change_new_pass_empty(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/password/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Change password') self.browser.find_by_name('old_password').fill('bob_secret') self.browser.find_by_name('new_password1').fill('') self.browser.find_by_name('new_password2').fill('bob_password') self.browser.find_by_value('Change password').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') logout(self) def test_user_change_new_pass_unmatched(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/password/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Change password') self.browser.find_by_name('old_password').fill('bob_secret') self.browser.find_by_name('new_password1').fill('bob_password') self.browser.find_by_name('new_password2').fill('alice_password') self.browser.find_by_value('Change password').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'The two password fields didn\'t match.') logout(self) def test_user_change_mail(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/email/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Welcome to The Sylva Project') self.browser.find_by_name('email').fill('*****@*****.**') self.browser.find_by_value('Change email').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Email verification') def test_user_change_mail_empty(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/email/').first.click() 
self.assertEqual(self.browser.title, 'SylvaDB - Welcome to The Sylva Project') self.browser.find_by_name('email').fill('') self.browser.find_by_value('Change email').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'This field is required.') def test_user_change_bad(self): signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.assertEqual(self.browser.title, 'SylvaDB - Dashboard') self.browser.find_link_by_href('/accounts/bob/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - bob\'s profile.') self.browser.find_link_by_href('/accounts/bob/email/').first.click() self.assertEqual(self.browser.title, 'SylvaDB - Welcome to The Sylva Project') self.browser.find_by_name('email').fill('bobnewcultureplex.ca') self.browser.find_by_value('Change email').first.click() text = self.browser.find_by_xpath("//ul[@class='errorlist']/li").first.text self.assertEqual(text, 'Enter a valid e-mail address.')
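The signup-validation tests above repeat the same fill-and-submit steps with one field varied. A hedged sketch of a shared helper they could call instead; the field names and error-list XPath are taken from the tests, the helper itself is an assumption:

def submit_signup(test, username, email, password1, password2):
    test.browser.visit(test.live_server_url + '/accounts/signup/')
    test.browser.find_by_name('username').fill(username)
    test.browser.find_by_name('email').fill(email)
    test.browser.find_by_name('password1').fill(password1)
    test.browser.find_by_name('password2').fill(password2)
    test.browser.find_by_value('Signup').first.click()
    # Return the first validation error, if any, so each test can assert on it.
    errors = test.browser.find_by_xpath("//ul[@class='errorlist']/li")
    return errors.first.text if errors else None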
__author__='Zheng Wu' from time import sleep from splinter import Browser browser=Browser('chrome') url='http://electsys.sjtu.edu.cn/edu/login.aspx' browser.visit(url) sleep(30) browser.visit('http://electsys.sjtu.edu.cn/edu/student/elect/warning.aspx?xklc=2&lb=3') button=browser.find_by_id('CheckBox1') if (browser.is_element_not_present_by_id('CheckBox1')): pass else: button.click() browser.find_by_id('btnContinue').click() while 1: browser.find_by_value('AD001').click() # value is the corresponding course code browser.find_by_id('lessonArrange').click() browser.find_by_name('myradiogroup').click() browser.find_by_id('LessonTime1_btnChoose').click() if browser.is_element_not_present_by_id('Button1'): browser.find_by_id('btnSubmit').click() print 'successfully got the class!' browser.quit() else: browser.find_by_id('Button1').click() browser.find_by_id('LessonTime1_Button1').click() browser.quit()
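The election script above retries in a bare "while 1:" with no pause between attempts. A hedged restructuring sketch using the same ids and form values; the function wrapper, the pause, and the return value are assumptions. The original treats the absence of 'Button1' as the branch where the choice can be submitted:

from time import sleep

def try_grab_course(browser, course_value='AD001', pause=2):
    while True:
        browser.find_by_value(course_value).click()
        browser.find_by_id('lessonArrange').click()
        browser.find_by_name('myradiogroup').click()
        browser.find_by_id('LessonTime1_btnChoose').click()
        if browser.is_element_not_present_by_id('Button1'):
            # No conflict dialog: submit the selection and report success.
            browser.find_by_id('btnSubmit').click()
            return True
        # Dismiss the dialog and try again after a short pause.
        browser.find_by_id('Button1').click()
        browser.find_by_id('LessonTime1_Button1').click()
        sleep(pause)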
wait() raw_input(colored("READY TO BEGIN. PRESS ENTER TO CONTINUE.", 'green')) # FILL IN EACH QN for question_number in question_numbers: ex = get_answers('ex' + question_number) print question_number # GO TO HOME -> WK 1 -> QN url = 'http://10.1.3.25/digitalworldtutor/tutor2/' browser.visit(url) button = browser.find_by_name('display-problems wk=1') button.click() button = browser.find_by_value('Wk.1.1.' + question_number) button.click() for (idx, qn) in enumerate(ex): input_name = 'abox' + str(idx + 1).zfill(3) ans = str(qn['answer']) # browser.fill(input_name, "hi") browser.fill(input_name, ans) raw_input(colored('PRESS ENTER TO CHECK ANSWERS', 'green')) button = browser.find_by_value("Check") button.click() for (idx, el) in enumerate(browser.find_by_css('ol li img')): ex[idx]['is_correct'] = el['alt'] == 'WELL DONE'
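As a small follow-up, the is_correct flags collected in the last loop could be summarised per exercise; a minimal sketch assuming ex keeps the shape used above (a list of dicts with an 'is_correct' key):

def summarise(ex):
    # Count answers the checker page marked 'WELL DONE'.
    correct = sum(1 for qn in ex if qn.get('is_correct'))
    return '{0}/{1} correct'.format(correct, len(ex))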
class InlineActor(pykka.ThreadingActor): def __init__(self, bot): super(InlineActor, self).__init__() self.bot = bot self.browser = None self.current_q = None self.q_debounce_s = Subject() self.scheduler = ThreadPoolScheduler() def on_start(self): try: searcher = self.q_debounce_s.debounce( 0.750, # Pause for 750ms scheduler=self.scheduler).map(mark_debounced).map( self.actor_ref.tell) searcher.subscribe() # GOOGLE_CHROME_BIN suffix = ".apt/usr/bin/google-chrome-stable" chrome_path = os.getenv("GOOGLE_CHROME_SHIM", "") prefix = chrome_path[:-len(suffix)] os.environ['PATH'] = os.getenv( "PATH", "") + ":" + prefix + ".chromedriver/bin:" + chrome_path # chrome_options = Options() # chrome_options.binary_location = "/app/.apt/usr/bin/google-chrome-stable" # # driver_options = {'executable_path': "/app/.chromedriver/bin/chromedriver", 'options': chrome_options} # executable_path = {'executable_path': '/tmp/build_3eb58544f5f97e761b0afd5314624668/kor-ka-uproar_server-bcbb420/.chromedriver/bin/chromedriver'} cap = webdriver.DesiredCapabilities.PHANTOMJS cap["phantomjs.page.settings.loadImages"] = True cap["phantomjs.page.settings.resourceTimeout"] = 0 cap["phantomjs.page.settings.webSecurityEnabled"] = False cap["phantomjs.page.settings.clearMemoryCaches"] = True driver_options = {'desired_capabilities': cap} self.browser = Browser('phantomjs', **driver_options) # self.browser = Browser('chrome', **driver_options) self.browser.driver.set_window_size(640, 480) self.browser.visit('http://m.vk.com') self.browser.fill("email", os.getenv("vk_login", "")) self.browser.fill("pass", os.getenv("vk_pass", "")) self.browser.find_by_value("Log in").first.click() # dont know is it working cap["phantomjs.page.settings.javascriptEnabled"] = False self.browser.visit('http://m.vk.com/audio?act=search&q=mozart') except Exception as ex: logging.exception(ex) def on_receive(self, message): try: print "Inline Actor msg" + str(message) if message.get('command') == 'q': if message.get('debounced', False): self.on_query(message.get('q')) else: self.q_debounce_s.on_next(message) except Exception as ex: logging.exception(ex) def on_query(self, query): self.browser.windows[0].close_others() if len(query.query) >= 3: res = [] quote = urllib.quote(query.query.encode('utf-8')) print('start search: ' + query.query.encode('utf-8')) self.browser.visit('http://m.vk.com/audio?act=search&q=' + quote + "&offset=" + (0 if query.offset is None else query.offset)) print("parsing...") for body in self.browser.find_by_css(".ai_body"): try: inpt = body.find_by_tag('input').first label = body.find_by_css('.ai_title') artist = body.find_by_css('.ai_artist') d = None try: duration = body.find_by_css('.ai_dur') d = int(duration['data-dur']) except: pass # print (label.text.encode('utf-8') + " - " + artist.text.encode('utf-8')) r = AudioResult(inpt.value, label.text, artist.text, d) res.append(r) except Exception as ex: logging.exception(ex) self.bot.tell({"command": "inline_res", "res": res, "q": query})
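The actor above debounces incoming queries through an Rx Subject so only the last query after 750 ms of quiet triggers a search. A sketch of that pattern in isolation, assuming the same RxPY 1.x style API the actor appears to use; the import paths and the handler parameter are assumptions:

from rx.subjects import Subject
from rx.concurrency import ThreadPoolScheduler

def make_debounced_search(handler, pause=0.750):
    # Each subject.on_next(query) restarts the pause; handler fires only for
    # the last query once input has been quiet for `pause`.
    subject = Subject()
    subject.debounce(pause, scheduler=ThreadPoolScheduler()).subscribe(handler)
    return subject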
class ToolsTestCaseGexf(LiveServerTestCase): """ A master test to check the behaviour of the new 'auto' fields. Actually only works with gephi format. """ def setUp(self): self.browser = Browser() socket.setdefaulttimeout(30) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') self.firstGraphName = "bobgraph" self.secondGraphName = "alicegraph" def tearDown(self): logout(self) self.browser.quit() @classmethod def tearDownClass(cls): sleep(10) # It needs some time for close the LiverServerTestCase super(ToolsTestCaseGexf, cls).tearDownClass() def test_graph_export_gexf_autoincrement(self): # Create a graph with an auto_increment property create_graph(self, self.firstGraphName) create_advanced_schema(self, self.firstGraphName) create_advanced_type(self, self.firstGraphName, "i") create_advanced_data(self) # Schema export export_advanced_schema(self, self.firstGraphName) # Data export in gexf format data_export_gexf(self) # Create new graph for import the data import_advanced_schema(self, self.firstGraphName, self.secondGraphName) # Data import data_import_gexf(self) bobgraph = Graph.objects.get(name=self.firstGraphName) alicegraph = Graph.objects.get(name=self.secondGraphName) alicegraphNodes = alicegraph.nodes.count() spin_assert(lambda: self.assertEqual(bobgraph.nodes.count(), alicegraph.nodes.count())) spin_assert(lambda: self.assertEqual(bobgraph.relationships.count(), alicegraph.relationships.count())) # We store the auto value to compare later alice_type = alicegraph.schema.nodetype_set.get() alice_properties = alice_type.properties.values()[0] alice_auto = alice_properties['auto'] # Add new nodes and relationships and check all is correct self.browser.find_by_id('dataMenu').first.click() self.browser.find_by_xpath( "//a[@class='dataOption new']").first.click() text = self.browser.find_by_id('propertiesTitle').first.value spin_assert(lambda: self.assertEqual(text, 'Properties')) self.browser.find_by_value("Save Bob's type").first.click() text = self.browser.find_by_xpath( "//div[@class='pagination']/span[@class='pagination-info']" ).first.value spin_assert(lambda: self.assertNotEqual( text.find(" elements Bob's type."), -1)) spin_assert(lambda: self.assertEqual(alicegraphNodes + 1, alicegraph.nodes.count())) # We check the new value for auto alice_type_new = alicegraph.schema.nodetype_set.get() alice_properties_new = alice_type_new.properties.values()[0] alice_auto_new = alice_properties_new['auto'] spin_assert(lambda: self.assertEqual(alice_auto + 1, alice_auto_new)) # Destroy the databases Graph.objects.get(name=self.firstGraphName).destroy() Graph.objects.get(name=self.secondGraphName).destroy() ''' def test_graph_export_gexf_autonow(self): # Create a graph with an auto_increment property create_graph(self, self.firstGraphName) create_advanced_schema(self, self.firstGraphName) create_advanced_type(self, self.firstGraphName, "a") create_advanced_data(self) # Schema export export_advanced_schema(self, self.firstGraphName) # Data export in gexf format data_export_gexf(self) # Create new graph for import the data import_advanced_schema(self, self.firstGraphName, self.secondGraphName) # Data import data_import_gexf(self) bobgraph = Graph.objects.get(name=self.firstGraphName) alicegraph = Graph.objects.get(name=self.secondGraphName) alicegraphNodes = alicegraph.nodes.count() spin_assert(lambda: self.assertEqual( bobgraph.nodes.count(), alicegraph.nodes.count())) spin_assert(lambda: self.assertEqual( bobgraph.relationships.count(), 
alicegraph.relationships.count())) # We store the auto now value to compare auto_now_date_bob = "" auto_now_date_alice = "" for node in bobgraph.nodes.all(): auto_now_date_bob = node.properties.values()[0] for node in alicegraph.nodes.all(): auto_now_date_alice = node.properties.values()[0] spin_assert(lambda: self.assertEqual( auto_now_date_bob, auto_now_date_alice)) # Add new nodes and relationships and check all is correct self.browser.find_by_id('dataMenu').first.click() self.browser.find_by_xpath( "//a[@class='dataOption new']").first.click() text = self.browser.find_by_id('propertiesTitle').first.value spin_assert(lambda: self.assertEqual(text, 'Properties')) self.browser.find_by_value("Save Bob's type").first.click() text = self.browser.find_by_xpath("//div[@class='pagination']/span[@class='pagination-info']").first.value spin_assert(lambda: self.assertNotEqual( text.find(" elements Bob's type."), -1)) spin_assert(lambda: self.assertEqual( alicegraphNodes + 1, alicegraph.nodes.count())) # Destroy the databases Graph.objects.get(name=self.firstGraphName).destroy() Graph.objects.get(name=self.secondGraphName).destroy() ''' def test_graph_export_gexf_autouser(self): # Create a graph with an auto_increment property create_graph(self, self.firstGraphName) create_advanced_schema(self, self.firstGraphName) create_advanced_type(self, self.firstGraphName, "e") create_advanced_data(self) # Schema export export_advanced_schema(self, self.firstGraphName) # Data export in gexf format data_export_gexf(self) # Create new graph for import the data import_advanced_schema(self, self.firstGraphName, self.secondGraphName) # Data import data_import_gexf(self) bobgraph = Graph.objects.get(name=self.firstGraphName) alicegraph = Graph.objects.get(name=self.secondGraphName) alicegraphNodes = alicegraph.nodes.count() spin_assert(lambda: self.assertEqual(bobgraph.nodes.count(), alicegraph.nodes.count())) spin_assert(lambda: self.assertEqual(bobgraph.relationships.count(), alicegraph.relationships.count())) # Add new nodes and relationships and check all is correct self.browser.find_by_id('dataMenu').first.click() self.browser.find_by_xpath( "//a[@class='dataOption new']").first.click() text = self.browser.find_by_id('propertiesTitle').first.value spin_assert(lambda: self.assertEqual(text, 'Properties')) self.browser.find_by_value("Save Bob's type").first.click() text = self.browser.find_by_xpath( "//div[@class='pagination']/span[@class='pagination-info']" ).first.value spin_assert(lambda: self.assertNotEqual( text.find(" elements Bob's type."), -1)) spin_assert(lambda: self.assertEqual(alicegraphNodes + 1, alicegraph.nodes.count())) # Destroy the databases Graph.objects.get(name=self.firstGraphName).destroy() Graph.objects.get(name=self.secondGraphName).destroy()
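These tests lean on a spin_assert helper whose implementation is not shown in this excerpt. A plausible minimal sketch (an assumption, not the project's actual code) is an assertion retried a few times to absorb asynchronous UI and database updates:

import time

def spin_assert(assertion, retries=10, delay=1):
    # Re-run the zero-argument assertion until it passes or retries run out.
    for attempt in range(retries):
        try:
            assertion()
            return
        except AssertionError:
            if attempt == retries - 1:
                raise
            time.sleep(delay)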
countryList = ['australia','chile'] countryTypeList = ['developed_', 'developing_'] typeaNameList = ['Existing_Scotia_Public', 'Existing_Scotia_Private', 'Non_Scotia_Public', 'Non_Scotia_Private'] devdCompanyNameList = ['TOYOTA TSUSHO CORPORATION','BASCO DESEMBUAGE','A&T CORPORATION','asdf'] devgCompanyNameList = [countryList[1],countryList[1],countryList[1],'asdf'] # open browser, navigate to the right page, configure, from splinter import Browser browser = Browser() from selenium.webdriver.common.keys import Keys from selenium import webdriver browser.visit('https://clientnet-uat.gbm.bns:8090/CNETADMIN/login.jsp?ssosrc=http%3A%2F%2Fclientnet-uat.gbm.bns%2FCNETCORP%2Findex.do') browser.fill('uid', 'ychoe') browser.fill('pwd', 'Winter15') browser.find_by_name('signin').first.click() browser.fill('clientSearchString', 'jason\'s client') browser.find_by_name('search').first.click() browser.find_by_value('GO').first.click() while len(browser.find_link_by_text('Delete'))>0: browser.find_link_by_text('Delete').first.click() browser.get_alert().accept()
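The clean-up loop above clicks 'Delete' links until none remain, accepting the confirmation alert each time. A hedged sketch with an upper bound so a page that keeps re-rendering the link cannot loop forever; max_deletes is an illustrative parameter, not from the original:

def delete_all(browser, max_deletes=100):
    for _ in range(max_deletes):
        links = browser.find_link_by_text('Delete')
        if not links:
            break
        links.first.click()
        # Each delete pops a JavaScript confirm dialog that must be accepted.
        browser.get_alert().accept()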
'level': 5, 'secret': '35926afd4ca93c229ba6201805031273', } browser = Browser('chrome', headless=True) # browser.visit('https://google.co.in') # browser.fill('q', 'splinter - python acceptance testing for web applications') # browser.find_by_name('btnK').click() # # if browser.is_text_present('splinter.readthedocs.io'): # print ("Yes, the official website was found!") # else: # print ("No, it wasn't found... We need to improve our SEO techniques") browser.visit('http://noki.chatovod.com/') button = browser.find_by_value("Enter chat") button.click() if browser.is_text_present('Choose your nick:'): print("Yes, the official website was found!") # iconfb = browser.find_by_css("#loginForm > p.social.alignCenter > a:nth-child(7)").first # browser.is_element_present_by_id(iconfb) # iconfb.click() browser.visit('http://noki.chatovod.com/widget/login?n=fb') browser.fill('email', '*****@*****.**') browser.fill('pass', '') button1 = browser.find_by_name("login") button1.click() browser.visit('http://noki.chatovod.com/')
def register_mac_with_xfinity(mac,output, count = 0,time_out = 30): if(count >= 2): return False if count > 0: output.append("Uncertain if successful, trying again") b = Browser('phantomjs') #'phantomjs' #webdriver.PhantomJS() output.append("Browser Launched") success = False try: url = "https://xfinity.nnu.com/xfinitywifi/?client-mac="+mac.lower() socket.setdefaulttimeout(60) b.visit(url) if b.is_text_present("Your complimentary session has expired."): # b.quit() output.append("Mac has already been registered") success = True if(success==False): b.select("rateplanid","spn") # print b.url email = str(random.randrange(10000000,99999999))+'@comcast.com' zip_code = random.randrange(10000,99999) try: b.fill('spn_postal', zip_code) b.fill('spn_email', email) except: pass try: b.check('spn_terms') except: pass url = b.url b.find_by_value('submit').first.click() b.find_by_value('submit').first.click() output.append("Submitting Mac Request, Waiting for result") waitTime = 0 while(b.url == url and waitTime < time_out): #total shit... time.sleep(1) waitTime += 1 if waitTime>0 and waitTime%15 == 0: output.append("Waiting for request to complete ... ["+str(waitTime)+"s]") #TODO validate..... ie did webpage load/etc if(b.is_text_present("Your complimentary session is now active!")): success = True if(waitTime >= time_out and not success): success = register_mac_with_xfinity(mac,output,count=count+1, time_out=time_out) except Exception, err: print err.message print b.url print b.html success = register_mac_with_xfinity(mac, output, count=count+1, time_out=time_out) pass
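register_mac_with_xfinity retries by calling itself recursively and threading the attempt count through count. A hedged sketch of the same policy written as a loop; register_once is a stand-in (assumption) for the body of the function without the retry logic:

def register_with_retries(mac, output, attempts=2, time_out=30):
    for attempt in range(attempts):
        if attempt > 0:
            output.append("Uncertain if successful, trying again")
        if register_once(mac, output, time_out=time_out):
            return True
    return False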
class InstagramWebBot(object): '''Instagram Web Bot for automatic things using web browser.''' LOGIN_URL = 'http://instagram.com/accounts/login/'; MANAGE_CLIENTS_URL = 'http://instagram.com/developer/clients/manage/' REGISTER_CLIENT_URL = 'http://instagram.com/developer/clients/register/' REGISTER_DEVELOPER_URL = 'http://instagram.com/developer/register/' LOGOUT_URL = 'http://instagram.com/accounts/logout/' CHANGE_PASSWORD_URL = 'http://instagram.com/accounts/password/change/' APP_REDIRECT_URI = 'http://lovematically.com/complete/instagram' APP_NAME_CHOICES = ['LOVE', 'AMOR', 'AMORE', 'AMOUR'] WAIT_TIME = 5 is_logged_in = False is_developer = False def __init__(self, username, password, browser=None): if browser is not None: self.browser = browser self.browser.visit(self.LOGOUT_URL) else: self.browser = Browser('firefox', user_agent="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36") self.username = username self.password = password def slugify(self, str): return re.sub(r'\W+','_',str).lower() def login(self): '''Creates a login session''' self.browser.visit(self.LOGIN_URL) self.browser.fill('username', self.username) self.browser.fill('password', self.password) btn = self.browser.find_by_value('Log in').first btn.click() if self.browser.title == 'Instagram': self.is_logged_in = True logger.info('Logged in as %s' % self.username) else: self.is_logged_in = False logger.error('Login Failed for %s' % self.username) def change_password(self): self.browser.visit(self.CHANGE_PASSWORD_URL) new_password = random_string_generator(8) self.browser.fill('old_password', self.password) self.browser.fill('new_password1', new_password) self.browser.fill('new_password2', new_password) btn = self.browser.find_by_value('Change Password').first btn.click() if self.browser.is_text_present('Thanks! You have successfully changed your password.'): logger.info('Password Changed') logger.info(new_password) return new_password else: return None def register_developer(self, website="", phone_number="", description=""): ''' Registers the user for developer access in instagram ''' result = {} if not self.is_logged_in: logger.error('Must be logged-in to create register as a developer.') return result; self.browser.visit(self.MANAGE_CLIENTS_URL) if self.browser.is_text_present('Developer Signup'): logger.info('Registering as developer') self.browser.fill('website', website) self.browser.fill('phone_number', phone_number) self.browser.fill('description', description) self.browser.check('accept_terms') btn = self.browser.find_by_value('Sign up').first btn.click() self.register_developer(website, phone_number, description) elif self.browser.is_text_present('Register a New Client'): logger.info('Developer registration done') self.is_developer = True else: logger.info('Developer registration failed') self.is_developer = False return result def create_api_client(self, app_name, description, website_url, redirect_uri): '''Creates a new api client. TODO: 1. Add more error handling cases. 
''' result = {} if not self.is_logged_in: logger.error('Must be logged-in to create a api client.') return result; self.browser.visit(self.REGISTER_CLIENT_URL) if self.browser.is_text_present('Too many clients'): logger.error('Client limit exceeded for %s' % self.username) return result; self.browser.fill('name', app_name) self.browser.fill('description', description) self.browser.fill('website_url', website_url) self.browser.fill('redirect_uri', redirect_uri) btn = self.browser.find_by_value('Register').first btn.click() client_card = self.browser.find_by_css('.card.client tbody').first rows = client_card.find_by_tag('tr') for row in rows: key = self.slugify(row.find_by_tag('th').first.text) value = row.find_by_tag('td').first.text result.update({key: value}) return result def fill_api_client_form(self): if not self.is_logged_in: logger.error('Must be logged-in to create a api client.') else: app_name = self.APP_NAME_CHOICES[random.randint(0, (len(self.APP_NAME_CHOICES)-1))] description = random_string_generator() website_url = 'http://'+random_string_generator() redirect_uri = self.APP_REDIRECT_URI self.browser.visit(self.REGISTER_CLIENT_URL) self.browser.fill('name', app_name) self.browser.fill('description', description) self.browser.fill('website_url', website_url) self.browser.fill('redirect_uri', redirect_uri) def shell_fill_api_client_form(self, LOCAL_FILE_URL = "captcha.png"): if not self.is_logged_in: logger.error('Must be logged-in to create a api client.') return False else: self.browser.visit(self.REGISTER_CLIENT_URL) if self.browser.is_text_present('Register new Client ID', wait_time=self.WAIT_TIME): app_name = self.APP_NAME_CHOICES[random.randint(0, (len(self.APP_NAME_CHOICES)-1))] description = random_string_generator() website_url = 'http://'+random_string_generator() redirect_uri = self.APP_REDIRECT_URI self.browser.fill('name', app_name) self.browser.fill('description', description) self.browser.fill('website_url', website_url) self.browser.fill('redirect_uri', redirect_uri) captcha_url = self.browser.find_by_id('recaptcha_challenge_image').first['src'] print captcha_url urllib.urlretrieve(captcha_url, LOCAL_FILE_URL) captcha_input = raw_input('Please enter captcha to continue:') self.browser.find_by_id('recaptcha_response_field').first.fill(captcha_input) btn = self.browser.find_by_value('Register').first btn.click() if self.browser.is_text_present('Invalid captcha', wait_time=self.WAIT_TIME): logger.error('Incorrect CAPTCHA, try again') return self.shell_fill_api_client_form(LOCAL_FILE_URL) elif self.browser.is_text_present('Successfully registered', wait_time=self.WAIT_TIME): logger.info('Client registration successful') return True else: logger.error('Client registration failed: UNKNOW ERROR') return False else: logger.error('Cannot register more clients with this accounts.') return False def get_api_clients(self): self.browser.visit(self.MANAGE_CLIENTS_URL) client_cards = self.browser.find_by_css('.card.client') clients = [] for client_card in client_cards: rows = client_card.find_by_tag('tbody').find_by_tag('tr') name = client_card.find_by_tag('h2').first.text result = {} for row in rows: key = self.slugify(row.find_by_tag('th').first.text) value = row.find_by_tag('td').first.text result.update({key: value}) result.update({'name': name}) clients.append(result) return clients def __del__(self): self.browser.quit()
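InstagramWebBot calls a random_string_generator helper that is not defined in this excerpt. A minimal stand-in sketch (an assumption about its behaviour, based only on how it is called above) returning a lowercase alphanumeric string of the requested length:

import random
import string

def random_string_generator(length=12):
    return ''.join(random.choice(string.ascii_lowercase + string.digits)
                   for _ in range(length))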
#create a browser bot and visit the website try: browser = Browser(DRIVER) except: sys.exit('failed to load specified driver ' + DRIVER) #go to the database if JANUS: url = 'http://apps.webofknowledge.com.dianus.libr.tue.nl/DIIDW_AdvancedSearch_input.do?' \ 'SID=V2i7L6wGDEBBsnkAWFI&product=DIIDW&search_mode=AdvancedSearch' browser.visit(url) #this redirects to janus, fill in login info browser.fill('user',USERNAME) browser.fill('pass', PASSWORD) #find and click the login button browser.find_by_value('Login').first.click() else: url = 'http://apps.webofknowledge.com/DIIDW_AdvancedSearch_input.do?SID=N1cpglrQOdCmC16gM44&product=DIIDW&search_mode=AdvancedSearch' browser.visit(url) #if new session needs to be started click link try: browser.find_link_by_partial_text('new session').first.click() except: pass def Build_Query_Citations(codes): #iterate through the list #build the query query = "CD=(" for code in codes:
else: dealType = browser.find_by_name('repayMethod').value facilityName = browser.find_by_name('fcName').value dealName = scenarioName + '_' + dealType + '_' + facilityName file = open('deal_' + dealName + '.xml', 'w+') file.write(noddStr) file.close() if noddStr.find('Cash Flow Active') != -1: browser.find_by_name('cashFlowButton').first.click() nodeCash = browser.find_by_tag('form').first strNodeCash= nodeCash.html.replace('\n','\t').replace('\'','\\\'') browser.find_by_value('Back to Facility').first.click() cashName = dealName + '_cashflow' file = open('deal_' + cashName + '.xml', 'w+') file.write(strNodeCash) file.close() ctypes.windll.user32.MessageBoxW(None, "The obligor/facility is exported.", "", 0) messageBox = ctypes.windll.user32.MessageBoxW returnValue = messageBox(None,"Do you want to continue copying scenario/obligors/facility?","Copy scenario/obligor/facility",0x40 | 0x1) ctypes.windll.user32.MessageBoxW(0, "Finished.", "", 0)
class Crawler(): def __init__(self, url, headers=None, proxies=None): self.url = url self.headers = headers self.proxies = proxies self.browser = None self.BROWSER_LIST = ['firefox', 'chrome'] self.FIND_MODE = ['css', 'xpath', 'tag', 'name', 'text', 'id', 'value'] def set_url(self, url): self.url = url def get_url(self): return self.url def set_headers(self, headers): self.headers = headers def get_headers(self): return self.headers def set_proxies(self, proxies): self.proxies = proxies def get_proxies(self): return self.proxies def get_response(self, url=None): if url: return requests.get(url, proxies=self.proxies, headers=self.headers) return requests.get(self.url, proxies=self.proxies, headers=self.headers) def get_select_element_list(self, select_tag, full_text=None): if not full_text: full_text = self.get_response().text soup = BeautifulSoup(full_text, "html.parser") element_list = soup.select(select_tag) return element_list def get_parent(self, element): return element.find_parent() def get_previous_element(self, element): return element.previous_element def get_previous_sibling(self, element): return element.previous_sibling def get_next_element(self, element, neighbor_tag=None): if neighbor_tag: return element.find_next(neighbor_tag) return element.next_element def need_login(self, select_tag, full_text=None): if not full_text: full_text = self.get_response().text login_button = self.get_select_element_list(select_tag, full_text=full_text) if login_button: return True return False def login(self, login_url, login_parameter): response = requests.post(login_url, login_parameter, proxies=self.proxies, headers=self.headers) def remove_html(self, select_tag, full_text=None): if not full_text: full_text = self.get_response().text soup = BeautifulSoup(full_text, "html.parser") delete_elements = soup.select(select_tag) for element in delete_elements: element.decompose() return soup def open_browser(self, browser_name=None): if browser_name: if browser_name in self.BROWSER_LIST: self.browser = Browser(browser_name) else: raise AttributeError('browser unsupport, support list: ' + str(self.BROWSER_LIST)) else: self.browser = Browser() self.browser.visit(self.url) def close_browser(self): if not self.browser: raise Exception('Please call open_browser() first') self.browser.quit() self.browser = None def go_to_url(self, url=None): if not self.browser: raise Exception('Please call open_browser() first') if url: self.browser.visit(url) else: self.browser.visit(self.url) def find_element_list(self, mode, select_tag): element_list = None if not self.browser: raise Exception('Please call open_browser() first') if mode not in self.FIND_MODE: raise AttributeError('mode unsupport, mode list: ' + str(self.FIND_MODE)) if mode == 'css': element_list = self.browser.find_by_css(select_tag) elif mode == 'xpath': element_list = self.browser.find_by_xpath(select_tag) elif mode == 'tag': element_list = self.browser.find_by_tag(select_tag) elif mode == 'name': element_list = self.browser.find_by_name(select_tag) elif mode == 'text': element_list = self.browser.find_by_text(select_tag) elif mode == 'id': element_list = self.browser.find_by_id(select_tag) elif mode == 'value': element_list = self.browser.find_by_value(select_tag) else: raise Exception('find_element_list exception: ' + mode) return element_list def get_browser_html(self): if not self.browser: raise Exception('Please call open_browser() first') return self.browser.html def get_browser_url(self): if not self.browser: raise Exception('Please call 
open_browser() first') return self.browser.url def fix_vaild_file_name(self, file_name): invalid_char_list = ['\\', '/', ':', '*', '?', '"', '<', '>', '|'] # for windows invalid file name for invalid_char in invalid_char_list: file_name = file_name.replace(invalid_char, '') return file_name def download_image(self, download_url, image_name=None): if not image_name: image_name = download_url.split('/')[-1] image_name = self.fix_vaild_file_name(image_name) response = requests.get(download_url, proxies=self.proxies, headers=self.headers) with open(image_name, 'wb') as file: file.write(response.content) def get_cookie(self): if not self.browser: raise Exception('Please call open_browser() first') return self.browser.cookies.all() def get_alert(self): if not self.browser: raise Exception('Please call open_browser() first') return self.browser.get_alert() def get_html_tag(self, element): string = str(element) first_index = string.find('<') + 1 last_index = string.find('>') if last_index > string.find(' '): last_index = string.find(' ') tag = string[first_index:last_index] return tag def get_all_feature(self, element): feature_dict = {} keys = element.attrs.keys() for key in keys: feature_dict[key] = element.get(key) return feature_dict def get_element_feature(self, element): id = element.get('id') if id: return 'id', id tag = self.get_html_tag(element) feature_dict = self.get_all_feature(element) xpath = '/' + tag + '[' for feature, value in feature_dict.items(): xpath = xpath + '@' + feature + '=\'' + value + '\'' xpath = xpath + ' and ' xpath = xpath[:-5] # delete ' and ' xpath = xpath + ']' while tag != 'div' and tag != 'tbody': element = self.get_parent(element) tag = self.get_html_tag(element) if tag: xpath = '//' + tag + xpath if feature: return 'xpath', xpath raise Exception('no match feature')
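Crawler.find_element_list dispatches on the mode name with a long if/elif chain. A behaviour-equivalent sketch (shown as a free function, which is an assumption) that maps each mode to the corresponding splinter finder:

def find_element_list(browser, mode, select_tag):
    finders = {
        'css': browser.find_by_css,
        'xpath': browser.find_by_xpath,
        'tag': browser.find_by_tag,
        'name': browser.find_by_name,
        'text': browser.find_by_text,
        'id': browser.find_by_id,
        'value': browser.find_by_value,
    }
    if mode not in finders:
        raise AttributeError('mode unsupported, supported modes: ' + str(list(finders)))
    return finders[mode](select_tag)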
# Assumed imports for this snippet.
import pandas as pd
from splinter import Browser

# open a browser
browser = Browser('chrome')
browser.driver.set_window_size(640, 480)
browser.visit('https://www.google.com')

# control the website
search_bar_xpath = '//*[@id="lst-ib"]'
# select the first matching element
search_bar = browser.find_by_xpath(search_bar_xpath)[0]
# fill
search_bar.fill("eurosport.pl")
# click ('Szukaj w Google' is the Polish-locale "Google Search" button)
search_button = browser.find_by_value('Szukaj w Google')[0]
search_button.click()

# scrape
search_results_xpath = '//h3[@class="r"]/a'  # h3 elements with class "r"
search_results = browser.find_by_xpath(search_results_xpath)
scraped_data = []
for search_result in search_results:
    title = search_result.text.encode('utf8')  # note: encode() yields bytes under Python 3
    link = search_result['href']
    scraped_data.append((title, link))  # as tuples

# csv
df = pd.DataFrame(data=scraped_data, columns=['Title', 'Link'])
df.to_csv("links.csv")
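# The block above indexes [0] directly and will raise if the page layout changes
# (the 'lst-ib' id and 'h3.r' class belong to an older Google markup). A more
# defensive variant of the scrape step is sketched below; it assumes only the
# splinter calls already used above plus pandas, nothing else from the original script.
if browser.is_element_present_by_xpath(search_results_xpath, wait_time=5):
    results = browser.find_by_xpath(search_results_xpath)
    rows = [(r.text, r['href']) for r in results]
    # index=False keeps the CSV to just the Title and Link columns
    pd.DataFrame(rows, columns=['Title', 'Link']).to_csv('links.csv', index=False)
else:
    print('No results found - the result selector may be outdated')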
# Assumed imports for this snippet.
import csv
import json
import os
import re
import sys

from splinter import Browser


class GetAndCleanData:
    def __init__(self, sign_in_page_url, url, email, password, download_dir,
                 edit_dir, page_numbers, regex, force_download=False):
        self.sign_in_page_url = sign_in_page_url
        self.url = url
        self.email = email
        self.password = password
        self.download_dir = download_dir
        self.edit_dir = edit_dir
        self.page_numbers = page_numbers
        self.books = []
        self.regex = regex
        self.force_download = force_download
        self.browser = Browser('firefox')
        self.sign_in()
        self.download_sites()
        self.authors, self.series = self.separate_joint_data()
        self.write_data()
        self.close_browser()

    # Logs in through the sign-in page
    def sign_in(self):
        self.browser.visit(self.sign_in_page_url)
        self.browser.find_by_id('user_email').fill(self.email)
        self.browser.find_by_id('user_password').fill(self.password)
        self.browser.find_by_value('Sign in').first.click()

    # Closes browser
    def close_browser(self):
        self.browser.quit()

    # Creates directory to save files to
    def create_directory(self, directory):
        if not os.path.isdir(directory):
            os.makedirs(directory, exist_ok=True)

    # Turns html file into string
    def read_file_to_str(self, filename):
        with open(os.path.join(self.download_dir, filename), 'r', encoding='utf8') as dat:
            return dat.read()

    # Turns a single regex match into a dictionary of its named groups
    def separate_data(self, match):
        book_data = match.groupdict()
        return book_data

    # Extracts all regex matches from the page text as dictionaries
    def get_data_from_text(self, text):
        return [self.separate_data(x) for x in re.finditer(self.regex, text)]

    def download_this_website(self, url, filename):
        self.create_directory(self.download_dir)
        print('Saving page {} ...'.format(url), end='')
        sys.stdout.flush()
        if os.path.isfile(os.path.join(self.download_dir, filename)) and not self.force_download:
            print('Page already saved')
            return
        else:
            self.browser.visit(url)
            with open(os.path.join(self.download_dir, filename), 'w', encoding='utf-8') as datoteka:
                datoteka.write(self.browser.html)
            print('Page saved')

    # Creates url from given page number
    def merge_url_and_number(self, num):
        return self.url + '?page={}'.format(num)

    # Writes data to .csv and .json
    def write_to_csv(self, dictionary, fields, directory, filename):
        self.create_directory(directory)
        with open(os.path.join(directory, filename), 'w', encoding='utf-8') as csv_file:
            writer = csv.DictWriter(csv_file, fieldnames=fields, extrasaction='ignore')
            writer.writeheader()
            for dic in dictionary:
                writer.writerow(dic)

    def write_to_json(self, dictionary, directory, filename):
        self.create_directory(directory)
        with open(os.path.join(directory, filename), 'w', encoding='utf-8') as json_file:
            json.dump(dictionary, json_file, indent=4, ensure_ascii=False)

    # Converts the scraped string fields to their proper types
    def sort_data(self, dic):
        dic['shelved'] = int(dic['shelved'])
        dic['avg_rating'] = float(dic['avg_rating'])
        dic['ratings'] = int(dic['ratings'].replace(',', ''))
        try:
            dic['published'] = int(dic['published'])
        except (TypeError, ValueError):
            dic['published'] = None
        try:
            dic['volume'] = int(dic['volume'])
        except (TypeError, ValueError):
            dic['volume'] = None
        if not dic['volume']:
            dic['series'] = None

    # Separates data about authors and series
    def separate_joint_data(self):
        authors, series = [], []
        for book in self.books:
            for author in book['author'].split(', '):
                authors.append({'title': book['title'], 'author': author})
            if book['series']:
                series.append({'series': book['series'], 'title': book['title'],
                               'volume': book['volume']})
            if book['alt_series']:
                series.append({'series': book['alt_series'], 'title': book['title'],
                               'volume': book['alt_volume']})
        return authors, series

    # Downloads the pages in the given range, sorts the data and appends it to books
    def download_sites(self):
        for page_num in self.page_numbers:
            site_name = 'page_{}.html'.format(page_num)
            self.download_this_website(self.merge_url_and_number(page_num), site_name)
            for book in self.get_data_from_text(self.read_file_to_str(site_name)):
                self.sort_data(book)
                if book['published']:
                    self.books.append(book)

    # Writes the data to separate .csv files (and books to .json)
    def write_data(self):
        self.write_to_json(self.books, self.edit_dir, 'books.json')
        self.write_to_csv(self.books, ['title', 'shelved', 'avg_rating', 'ratings', 'published'],
                          self.edit_dir, 'books.csv')
        self.write_to_csv(self.authors, ['title', 'author'], self.edit_dir, 'authors.csv')
        self.write_to_csv(self.series, ['series', 'title', 'volume'], self.edit_dir, 'series.csv')
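# Note on the regex argument to GetAndCleanData: sort_data() and separate_joint_data()
# read these named groups from each match, so any pattern passed in must define them.
# The list is derived from the class above; the helper below is a small sketch (an
# assumption, not part of the original project) for validating a pattern up front.
REQUIRED_GROUPS = ['title', 'author', 'shelved', 'avg_rating', 'ratings',
                   'published', 'series', 'volume', 'alt_series', 'alt_volume']


def check_regex_groups(pattern):
    """Return the required group names missing from a regex pattern."""
    groups = re.compile(pattern).groupindex
    return [name for name in REQUIRED_GROUPS if name not in groups]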
# browser = Browser()
# browser = Browser('zope.testbrowser')
browser = Browser('phantomjs')

tubeBase = "http://www.tubeoffline.com/download-1channel-videos.php"
sourceBase = "http://www.primewire.ag/"
sourceArgs = "tv-1386995-Game-of-Thrones/season-3-episode-7"

# analyse url to skip initial visit
browser.visit(tubeBase)
browser.fill('video', sourceBase + sourceArgs)
browser.find_by_value('GET video').click()
writeFile('htm', browser.html)
browser.find_by_id('generateLink').click()
writeFile('htm2', browser.html)

# stupid way to do this
# for x in xrange(0, 1000):
#     try:
#         print x
#         browser.find_by_id('generateLink').click()
#     except Exception, e:
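# The commented-out retry loop above can be replaced by splinter's built-in polling:
# is_element_present_by_id() waits up to wait_time seconds for the element to appear.
# This is a sketch, assuming the 'generateLink' element from the snippet above shows
# up once the site has finished generating the download link; the 60-second timeout
# is an arbitrary choice.
if browser.is_element_present_by_id('generateLink', wait_time=60):
    browser.find_by_id('generateLink').click()
else:
    print('generateLink did not appear within 60 seconds')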
class SchemaTestCase(LiveServerTestCase): """ A set of tests for testing export schema, import schema and everything related to advanced types (patterns, options, etc.). """ def setUp(self): self.browser = Browser() socket.setdefaulttimeout(30) signup(self, 'bob', '*****@*****.**', 'bob_secret') signin(self, 'bob', 'bob_secret') def tearDown(self): logout(self) self.browser.quit() @classmethod def tearDownClass(cls): sleep(10) # It needs some time for close the LiverServerTestCase super(SchemaTestCase, cls).tearDownClass() def test_export_schema(self): create_graph(self) create_schema(self) create_type(self) self.browser.find_by_id('toolsMenu').first.click() cookies = { self.browser.cookies.all()[0]["name"]: self.browser.cookies.all()[0]["value"], self.browser.cookies.all()[1]["name"]: self.browser.cookies.all()[1]["value"] } result = requests.get(self.live_server_url + '/schemas/bobs-graph/export/', cookies=cookies) spin_assert(lambda: self.assertEqual(result.headers['content-type'], 'application/json')) spin_assert(lambda: self.assertEqual( self.browser.status_code.is_success(), True)) f = open('sylva/sylva/tests/files/bobs-graph_schema.json') spin_assert( lambda: self.assertEqual(f.read().split("\n")[0], result.content)) def test_import_schema(self): create_graph(self) create_schema(self) self.browser.find_by_id('schemaImport').first.click() file_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'files/bobs-graph_schema.json') self.browser.attach_file('file', file_path) self.browser.find_by_value('Continue').first.click() spin_assert(lambda: self.assertEqual(self.browser.title, "SylvaDB - Bob's graph")) text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "Name")) def test_new_type(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_xpath( "//div[@class='content2-first']/p/textarea[@name='description']" ).first.fill('The loved type') self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_xpath( "//div[@id='diagramBox_bobs-type']/div[@class='title']" ).first.value spin_assert(lambda: self.assertNotEqual(text.find("Bob's type"), -1)) def test_new_advanced_type(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill('Name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_name('properties-0-default').first.fill( "Bob's node default name") self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "Name")) def 
test_new_advanced_type_string_empty(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill('String name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Basic']/option[@value='s']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "String name")) # Testing data self.browser.find_by_id('dataMenu').first.click() self.browser.find_by_xpath( "//td[@class='dataActions']/a[@class='dataOption new']" ).first.click() self.browser.find_by_name('String name').first.fill('') self.browser.find_by_value("Save Bob's type").first.click() text = self.browser.find_by_xpath( "//ul[@class='errorlist']/li").first.text spin_assert(lambda: self.assertEqual(text, 'This field is required.')) Graph.objects.get(name="Bob's graph").destroy() def test_new_advanced_type_boolean(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill( 'Boolean name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Basic']/option[@value='b']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "Boolean name")) def test_new_advanced_type_number(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill('Number name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Basic']/option[@value='n']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') 
self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "Number name")) def test_new_advanced_type_number_float(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill('Number name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Basic']/option[@value='n']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "Number name")) # Testing data self.browser.find_by_id('dataMenu').first.click() self.browser.find_by_xpath( "//td[@class='dataActions']/a[@class='dataOption new']" ).first.click() self.browser.find_by_name('Number name').first.fill('1.5') self.browser.find_by_value("Save Bob's type").first.click() text = self.browser.find_by_css('input:invalid').first.value spin_assert(lambda: self.assertEqual(text, '1.5')) Graph.objects.get(name="Bob's graph").destroy() def test_new_advanced_type_number_string(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill('Number name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Basic']/option[@value='n']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "Number name")) # Testing data self.browser.find_by_id('dataMenu').first.click() self.browser.find_by_xpath( "//td[@class='dataActions']/a[@class='dataOption new']" ).first.click() self.browser.find_by_name('Number name').first.fill('number') self.browser.find_by_value("Save Bob's type").first.click() text = self.browser.find_by_css('input:invalid').first.outer_html spin_assert(lambda: self.assertEqual( text, '<input id="id_Number name" name="Number name" type="number">')) Graph.objects.get(name="Bob's graph").destroy() def test_new_advanced_type_text(self): 
create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill('Text name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Advanced']/option[@value='x']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "Text name")) def test_new_advanced_type_date(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill('Date name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Advanced']/option[@value='d']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "Date name")) def test_new_advanced_type_time(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill('Time name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Advanced']/option[@value='t']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "Time name")) def test_new_advanced_type_time_string(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( 
"//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill('Time name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Advanced']/option[@value='t']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "Time name")) # Testing data self.browser.find_by_id('dataMenu').first.click() self.browser.find_by_xpath( "//td[@class='dataActions']/a[@class='dataOption new']" ).first.click() self.browser.find_by_name('Time name').first.fill('0123456789') sleep(5) # Wating to the datepicker to open self.browser.find_by_xpath( "//button[@class='ui-datepicker-close ui-state-default ui-priority-primary ui-corner-all']" ).first.click() sleep(5) # Waiting to the datepicker to close self.browser.find_by_value("Save Bob's type").first.click() text = self.browser.find_by_xpath( "//ul[@class='errorlist']/li").first.text spin_assert(lambda: self.assertEqual(text, 'Enter a valid time.')) Graph.objects.get(name="Bob's graph").destroy() def test_new_advanced_type_choices(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill( 'Choices name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Advanced']/option[@value='c']" ).first.click() self.browser.find_by_name('properties-0-default').first.fill( 'Bob, Alice') self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "Choices name")) def test_new_advanced_type_float(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill('float name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( 
"//select[@id='id_properties-0-datatype']/optgroup[@label='Advanced']/option[@value='f']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "float name")) def test_new_advanced_type_collaborator(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill( 'collaborator name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Advanced']/option[@value='r']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "collaborator name")) def test_new_advanced_type_auto_now(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill( 'auto now name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Auto']/option[@value='w']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "auto now name")) def test_new_advanced_type_auto_now_add(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill( 'auto now add name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Auto']/option[@value='a']" ).first.click() 
self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "auto now add name")) def test_new_advanced_type_auto_increment(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill( 'auto increment name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Auto']/option[@value='i']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "auto increment name")) def test_new_advanced_type_auto_increment_update(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill( 'auto increment update') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Auto']/option[@value='o']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( "The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "auto increment update")) def test_new_advanced_type_auto_user(self): create_graph(self) create_schema(self) self.browser.find_link_by_href( '/schemas/bobs-graph/types/create/').first.click() text = self.browser.find_by_xpath( "//div[@class='content2-first']/h2").first.value spin_assert(lambda: self.assertEqual(text, 'Type')) self.browser.find_by_name('name').first.fill("Bob's type") self.browser.find_by_id('advancedModeButton').first.click() self.browser.find_by_name('properties-0-key').first.fill( 'auto user name') self.browser.find_by_name('properties-0-display').first.check() self.browser.find_by_name('properties-0-required').first.check() self.browser.find_by_xpath( "//select[@id='id_properties-0-datatype']/optgroup[@label='Auto']/option[@value='e']" ).first.click() self.browser.find_by_name('properties-0-order').first.fill('1') self.browser.find_by_name('properties-0-description').first.fill( 
"The name of this Bob's node") self.browser.find_by_value('Save Type').first.click() text = self.browser.find_by_id( 'diagramBoxField_bobs-graph.bobs-type.undefined').first.value spin_assert(lambda: self.assertEqual(text, "auto user name")) def test_schema_allowed_rel_addition(self): create_graph(self) spin_assert(lambda: self.assertEqual(self.browser.title, 'SylvaDB - Dashboard')) create_schema(self) create_type(self) spin_assert(lambda: self.assertEqual(self.browser.title, "SylvaDB - Bob's graph")) self.browser.find_by_id('allowedRelations').first.click() self.browser.select('source', '1') self.browser.find_by_name('name').fill("Bob's rel") self.browser.select('target', '1') self.browser.find_by_id('id_description').fill( "This the allowed relationship for Bob's graph") self.browser.find_by_value('Save Type').first.click() spin_assert(lambda: self.assertEqual(self.browser.title, "SylvaDB - Bob's graph")) text = self.browser.find_by_xpath( "//div[@class='form-row indent']/label").first.value spin_assert(lambda: self.assertNotEqual(text.find("Bob's rel"), -1)) def test_schema_allowed_rel_addition_deletion(self): create_graph(self) spin_assert(lambda: self.assertEqual(self.browser.title, 'SylvaDB - Dashboard')) create_schema(self) create_type(self) spin_assert(lambda: self.assertEqual(self.browser.title, "SylvaDB - Bob's graph")) self.browser.find_by_id('allowedRelations').first.click() self.browser.select('source', '1') self.browser.find_by_name('name').fill("Bob's rel") self.browser.select('target', '1') self.browser.find_by_id('id_description').fill( "This the allowed relationship for Bob's graph") self.browser.find_by_value('Save Type').first.click() spin_assert(lambda: self.assertEqual(self.browser.title, "SylvaDB - Bob's graph")) text = self.browser.find_by_xpath( "//div[@class='form-row indent']/label").first.value spin_assert(lambda: self.assertNotEqual(text.find("Bob's rel"), -1)) self.browser.find_by_xpath( "//div[@class='form-row indent']/div[@class='form-row indent']/a" ).first.click() self.browser.find_by_xpath( "//span[@class='buttonLinkOption buttonLinkRight']/a[@class='delete']" ).first.click() self.browser.choose('confirm', '1') self.browser.find_by_value('Continue').first.click() notExists = self.browser.is_element_not_present_by_xpath( "//div[@class='form-row indent']/label") spin_assert(lambda: self.assertEqual(notExists, True))