def append_path(url, url_path):
    """Append *url_path* (a path, optionally with a query string) to *url*.

    An absolute path (leading ``/``) replaces the existing path of *url*;
    a relative one is appended as an extra segment.  Any query string on
    *url_path* replaces the query of *url*.  Returns the URL as a string.
    """
    parsed = URL(url_path)
    if url_path.startswith('/'):
        url = url.path(parsed.path())
    else:
        url = url.add_path_segment(parsed.path())
    if parsed.query():
        url = url.query(parsed.query())
    return url.as_string()
def append_path(url, path):
    """Append *path* (with optional query string) to the purl URL *url*.

    Path segments are added individually via ``add_multi_path_segments``;
    a query string on *path* replaces the query of *url*.  Returns the
    resulting URL as a string.
    """
    parsed = URL(path)
    if parsed.path():
        url = add_multi_path_segments(url, parsed.path())
    if parsed.query():
        url = url.query(parsed.query())
    return url.as_string()
def get_bib(args):
    """Yield ``Document`` objects for every record in the Dogon bibliography.

    First builds a mapping from bare filenames to their CDSTAR objects
    (for all uploaded hashes in the texts/docs/data and edmond listings),
    then walks the BibTeX records: URLs hosted on dogonlanguages.org,
    github.com or with no host are replaced by (filename, cdstar-object)
    file attachments; all other URLs are kept on the record.
    """
    uploaded = load(args.data_file('repos', 'cdstar.json'))
    fname_to_cdstar = {}
    for type_ in ['texts', 'docs', 'data']:
        for hash_, paths in load(args.data_file('repos', type_ + '.json')).items():
            if hash_ in uploaded:
                for path in paths:
                    fname_to_cdstar[path.split('/')[-1]] = uploaded[hash_]
    for hash_, paths in load(args.data_file('repos', 'edmond.json')).items():
        if hash_ in uploaded:
            for path in paths:
                fname_to_cdstar[path.split('/')[-1]] = uploaded[hash_]
    db = Database.from_file(args.data_file('repos', 'Dogon.bib'), lowercase=True)
    for rec in db:
        doc = Document(rec)
        newurls = []
        for url in rec.get('url', '').split(';'):
            if not url.strip():
                continue
            # URLs ending in 'sequence=1' are kept verbatim (not re-parsed).
            if url.endswith('sequence=1'):
                newurls.append(url)
                continue
            url = URL(url.strip())
            if url.host() in ['dogonlanguages.org', 'github.com', '']:
                fname = url.path().split('/')[-1]
                doc.files.append((fname, fname_to_cdstar[fname]))
            else:
                newurls.append(url.as_string())
        doc.rec['url'] = '; '.join(newurls)
        yield doc
def cc_link(req, license_url, button="regular"):
    """Return an HTML anchor with a CC license button image for *license_url*.

    Returns ``None`` when the URL is not a recognizable creativecommons.org
    license.  ``button='small'`` selects the compact 80x15 icon variant.
    """
    # Wikipedia's public-domain page is treated as an alias for CC0.
    if license_url == "http://en.wikipedia.org/wiki/Public_domain":
        license_url = "http://creativecommons.org/publicdomain/zero/1.0/"
    license_url = URL(license_url)
    if license_url.host() != "creativecommons.org":
        return
    parts = license_url.path().split("/")
    if len(parts) < 3:
        return  # pragma: no cover
    labels = {
        "zero": "Public Domain",
        "by": "Creative Commons Attribution License",
        "by-nc": "Creative Commons Attribution-NonCommercial License",
        "by-nc-nd": "Creative Commons Attribution-NonCommercial-NoDerivatives License",
        "by-nc-sa": "Creative Commons Attribution-NonCommercial-ShareAlike License",
        "by-nd": "Creative Commons Attribution-NoDerivatives License",
        "by-sa": "Creative Commons Attribution-ShareAlike License",
    }
    if parts[2] not in labels:
        return
    icon = "cc-" + parts[2] + ("-small" if button == "small" else "") + ".png"
    img_attrs = dict(
        alt=labels[parts[2]],
        src=req.static_url("clld:web/static/images/" + icon))
    height, width = (15, 80) if button == "small" else (30, 86)
    img_attrs.update(height=height, width=width)
    return HTML.a(HTML.img(**img_attrs), href=license_url, rel="license")
def cc_link(req, license_url, button='regular'):
    """Render a Creative Commons license button linking to *license_url*.

    Non-creativecommons.org URLs yield ``None``.  The ``button`` argument
    picks between the regular (88x31-ish) and small (80x15) icon.
    """
    if license_url == 'https://en.wikipedia.org/wiki/Public_domain':
        license_url = 'https://creativecommons.org/publicdomain/zero/1.0/'  # pragma: no cover
    license_url = URL(license_url)
    if license_url.host() != 'creativecommons.org':
        return
    segments = license_url.path().split('/')
    if len(segments) < 3:
        return  # pragma: no cover
    names = {
        'zero': 'Public Domain',
        'by': 'Creative Commons Attribution License',
        'by-nc': 'Creative Commons Attribution-NonCommercial License',
        'by-nc-nd': 'Creative Commons Attribution-NonCommercial-NoDerivatives License',
        'by-nc-sa': 'Creative Commons Attribution-NonCommercial-ShareAlike License',
        'by-nd': 'Creative Commons Attribution-NoDerivatives License',
        'by-sa': 'Creative Commons Attribution-ShareAlike License'}
    if segments[2] not in names:
        return
    icon = 'cc-' + segments[2] + ('-small' if button == 'small' else '') + '.png'
    img_attrs = dict(
        alt=names[segments[2]],
        src=req.static_url('clld:web/static/images/' + icon))
    height, width = (15, 80) if button == 'small' else (30, 86)
    img_attrs.update(height=height, width=width)
    return HTML.a(HTML.img(**img_attrs), href=license_url, rel='license')
def cc_link(req, license_url, button='regular'):
    """Return an ``<a rel="license">`` element with a CC button for *license_url*.

    ``None`` is returned for URLs that are not creativecommons.org licenses
    or whose license code is unknown.
    """
    # Map Wikipedia's public-domain article onto the CC0 license URL.
    if license_url == 'https://en.wikipedia.org/wiki/Public_domain':
        license_url = 'https://creativecommons.org/publicdomain/zero/1.0/'
    license_url = URL(license_url)
    if license_url.host() != 'creativecommons.org':
        return
    path_parts = license_url.path().split('/')
    if len(path_parts) < 3:
        return  # pragma: no cover
    license_names = {
        'zero': 'Public Domain',
        'by': 'Creative Commons Attribution License',
        'by-nc': 'Creative Commons Attribution-NonCommercial License',
        'by-nc-nd': 'Creative Commons Attribution-NonCommercial-NoDerivatives License',
        'by-nc-sa': 'Creative Commons Attribution-NonCommercial-ShareAlike License',
        'by-nd': 'Creative Commons Attribution-NoDerivatives License',
        'by-sa': 'Creative Commons Attribution-ShareAlike License'}
    code = path_parts[2]
    if code not in license_names:
        return
    icon = 'cc-' + code + ('-small' if button == 'small' else '') + '.png'
    img_attrs = dict(
        alt=license_names[code],
        src=req.static_url('clld:web/static/images/' + icon))
    height, width = (15, 80) if button == 'small' else (30, 86)
    img_attrs.update(height=height, width=width)
    return HTML.a(HTML.img(**img_attrs), href=license_url, rel='license')
def __call__(self, data: str):
    """Validate *data* as a redis / unix-socket connection URL.

    Raises ``ValidationError`` with a machine-readable ``code`` for every
    malformed input; returns nothing on success.
    """
    try:
        url = URL(data)
    except ValueError:
        raise ValidationError(_("URL cannot be parsed"), code="parse_error")
    # NOTE(review): this validates the 'db' query parameter but reports an
    # "Invalid port" error -- looks like a copy/paste slip; confirm intent
    # before changing the message, since callers may match on the code.
    if url.has_query_param('db'):
        if not url.query_param('db').isdigit():
            raise ValidationError(_("Invalid port specified"), code="invalid_port")
    if url.scheme() == "unix":
        # Unix domain sockets are addressed purely by filesystem path.
        if url.host():
            raise ValidationError(
                _("Hostname not supported for unix domain sockets"),
                code="unix_domain_socket_hostname")
        if url.port():
            raise ValidationError(
                _("Port not supported for unix domain sockets"),
                code="unix_domain_socket_port")
        if not url.path():
            raise ValidationError(
                _("No path specified for unix domain socket"),
                code="unix_domain_socket_path")
    if url.scheme() in ("redis", "redis+tls"):
        if not url.host():
            raise ValidationError(_("No host specified"), code="host_missing")
def get_data(q_link):
    """Scrape a Quora question page and return question + top answers as JSON.

    Fixes: the two bare ``except:`` clauses also swallowed SystemExit and
    KeyboardInterrupt -- narrowed to ``except Exception``.  ``author`` is
    initialized before the try block so the fallback branch can never hit
    an unbound name.

    :param q_link: URL of a Quora question (quora.com / www.quora.com).
    :returns: flask ``jsonify`` response, or an error string for non-Quora
        links.
    """
    url = URL(q_link)
    if url.domain() not in ['quora.com', 'www.quora.com']:
        return 'error, not quora'
    # Normalize to the canonical https host and request the shareable view.
    url = URL(scheme='https', host='www.quora.com', path=url.path(),
              query='share=1').as_string()
    soup = BeautifulSoup(requests.get(url).text)
    question = {}
    question['url'] = url
    question['title'] = soup.find("div", {"class": "question_text_edit"}).text
    question['topics'] = [topic.text for topic in
                          soup.find_all("div", {"class": "topic_list_item"})]
    question['details'] = soup.find("div", {"class": "question_details_text"}).text
    answers = []
    divs = soup.find_all("div", {"class": "pagedlist_item"})
    try:
        ans_count = soup.find("div", {"class": "answer_header_text"}).text.strip()
        count = int(re.match(r'(\d+) Answers', ans_count).groups()[0])
    except Exception:
        # No answer-count header: return the question with no answers.
        return jsonify(question=question, answers=answers)
    question['answer_count'] = count
    # Scrape at most 6 answers; the last pagedlist item is not an answer.
    count = len(divs) - 1 if count < 6 else 6
    for i in range(count):
        one_answer = {'votes': '-1', 'rank': 0, 'answer': ''}
        author = {}
        try:
            author['name'] = divs[i].find("div", {"class": "answer_user"}).find("span", {"class": "answer_user_wrapper"}).find("a", {"class": "user"}).string
            author['bio'] = divs[i].find("div", {"class": "answer_user"}).find("span", {"class": "answer_user_wrapper"}).find_all("span", {"class": "rep"})[1].find("span", {"class": "hidden"}).text
        except Exception:
            # Anonymous answers have no user markup.
            author['name'] = 'Anonymous'
            author['bio'] = ''
        one_answer['author'] = author
        one_answer['votes'] = divs[i].find("span", {"class": "numbers"}).text
        html_block = divs[i].find("div", {"id": re.compile("(.*)_container")}).contents
        answer_html = ''
        for p in range(len(html_block) - 1):
            answer_html += str(html_block[p])
        one_answer['answer_html'] = answer_html
        one_answer['answer'] = divs[i].find("div", {"class": "answer_content"}).text
        one_answer['rank'] = i + 1
        answers.append(one_answer)
    return jsonify(question=question, answers=answers)
def test_facebook_login_url():
    """The Facebook login URI round-trips through our OAuth callback view."""
    client = FacebookClient(local_host='localhost')
    login_url = URL(client.get_login_uri())
    query = login_url.query_params()
    callback_url = URL(query['redirect_uri'][0])
    view, _args, view_kwargs = resolve(callback_url.path())
    assert view is oauth_callback
    assert view_kwargs['service'] == FACEBOOK
    assert query['scope'][0] == FacebookClient.scope
    assert query['client_id'][0] == str(FacebookClient.client_id)
def test_google_login_url(google_client, settings):
    """The Google login URI resolves back to our OAuth callback view."""
    settings.GOOGLE_CLIENT_ID = '112233'
    login_url = URL(google_client.get_login_uri())
    query = login_url.query_params()
    callback = URL(query['redirect_uri'][0])
    view, _args, view_kwargs = resolve(callback.path())
    assert view is oauth_callback
    assert view_kwargs['service'] == GOOGLE
    assert query['scope'][0] == google_client.scope
    assert query['client_id'][0] == str(google_client.client_id)
def test_facebook_login_url(facebook_client, settings):
    """Facebook login URI carries the right callback, scope and client id."""
    settings.FACEBOOK_APP_ID = '112233'
    login_url = URL(facebook_client.get_login_uri())
    params = login_url.query_params()
    callback = URL(params['redirect_uri'][0])
    view, _args, view_kwargs = resolve(callback.path())
    assert view is oauth_callback
    assert view_kwargs['service'] == FACEBOOK
    assert params['scope'][0] == facebook_client.scope
    assert params['client_id'][0] == str(facebook_client.client_id)
def test_google_login_url():
    """Google login URI targets oauth_callback with the expected params."""
    client = GoogleClient(local_host='local_host')
    login_url = URL(client.get_login_uri())
    query = login_url.query_params()
    callback = URL(query['redirect_uri'][0])
    view, _args, view_kwargs = resolve(callback.path())
    assert view is oauth_callback
    assert view_kwargs['service'] == GOOGLE
    assert query['scope'][0] == GoogleClient.scope
    assert query['client_id'][0] == str(GoogleClient.client_id)
def test_facebook_login_url(facebook_client, settings):
    """The redirect_uri embedded in the login URI resolves to oauth_callback."""
    settings.FACEBOOK_APP_ID = '112233'
    login_url = URL(facebook_client.get_login_uri())
    query_params = login_url.query_params()
    redirect = URL(query_params['redirect_uri'][0])
    view, _args, view_kwargs = resolve(redirect.path())
    assert view is oauth_callback
    assert view_kwargs['service'] == FACEBOOK
    assert query_params['scope'][0] == facebook_client.scope
    assert query_params['client_id'][0] == str(facebook_client.client_id)
def test_google_login_url(google_client, settings):
    """Google login URI query params match the configured client."""
    settings.GOOGLE_CLIENT_ID = '112233'
    login_url = URL(google_client.get_login_uri())
    query_params = login_url.query_params()
    redirect = URL(query_params['redirect_uri'][0])
    view, _args, view_kwargs = resolve(redirect.path())
    assert view is oauth_callback
    assert view_kwargs['service'] == GOOGLE
    assert query_params['scope'][0] == google_client.scope
    assert query_params['client_id'][0] == str(google_client.client_id)
def get_questions(s_link):
    """Scrape a Quora topic page and return its questions as a JSON response.

    Fixes: ``requests.Response`` has no ``get_text`` method -- the body is
    ``response.text``, so every call previously raised AttributeError.
    The bare ``except`` is narrowed to ``Exception`` and a stray debug
    ``print`` is removed.

    :param s_link: URL of a Quora topic (quora.com / www.quora.com).
    :returns: flask ``jsonify`` response, or an error string for
        non-Quora links.
    """
    url = URL(s_link)
    if url.domain() not in ['quora.com', 'www.quora.com']:
        return 'error, not quora'
    quora_url = URL(scheme='https', host='www.quora.com', path=url.path(),
                    query='share=1').as_string()
    # BUG FIX: was requests.get(quora_url).get_text() (no such method).
    soup = BeautifulSoup(requests.get(quora_url).text)
    topic = {}
    topic['url'] = quora_url
    topic['title'] = url.path().split('/')[-1]
    questions = []
    divs = soup.find_all("div", {"class": "pagedlist_item"})
    # The last pagedlist item is not a question entry.
    count = len(divs) - 1
    for i in range(count):
        one_question = {'url': '', 'title': ''}
        try:
            one_question['url'] = divs[i].find(
                "a", {"class": "question_link"})['href']
            one_question['title'] = divs[i].find("a", {
                "class": "question_link"
            }).find("span", {
                "class": "link_text"
            }).text
        except Exception:
            # Malformed item: report what we have plus the failing entry.
            return jsonify(topic=topic, questions=questions,
                           parse_failure=one_question)
        one_question['url'] = URL(scheme='https', host='www.quora.com',
                                  path=one_question['url']).as_string()
        if one_question['title'] != "":
            questions.append(one_question)
    return jsonify(topic=topic, questions=questions)
def test_facebook_login_url(self):
    """Facebook login url is properly generated"""
    facebook_client = FacebookClient(local_host='localhost')
    facebook_login_url = URL(facebook_client.get_login_uri())
    query = facebook_login_url.query_params()
    callback_url = URL(query['redirect_uri'][0])
    func, _args, kwargs = resolve(callback_url.path())
    # assertEquals is a deprecated alias of assertEqual (removed in 3.12).
    self.assertEqual(func, oauth_callback)
    self.assertEqual(kwargs['service'], FACEBOOK)
    self.assertEqual(query['scope'][0], FacebookClient.scope)
    # Query parameter values are strings; coerce client_id so a numeric
    # configured id still compares equal.
    self.assertEqual(query['client_id'][0], str(FacebookClient.client_id))
def test_google_login_url(self):
    """Google login url is properly generated"""
    google_client = GoogleClient(local_host='local_host')
    google_login_url = URL(google_client.get_login_uri())
    params = google_login_url.query_params()
    callback_url = URL(params['redirect_uri'][0])
    func, _args, kwargs = resolve(callback_url.path())
    # assertEquals is a deprecated alias of assertEqual (removed in 3.12).
    self.assertEqual(func, oauth_callback)
    self.assertEqual(kwargs['service'], GOOGLE)
    self.assertIn(params['scope'][0], GoogleClient.scope)
    # Query parameter values are strings; coerce client_id so a numeric
    # configured id still compares equal.
    self.assertEqual(params['client_id'][0], str(GoogleClient.client_id))
def test_facebook_login_url(self):
    """Facebook login url is properly generated"""
    facebook_client = FacebookClient(local_host='localhost')
    facebook_login_url = URL(facebook_client.get_login_uri())
    query = facebook_login_url.query_params()
    callback_url = URL(query['redirect_uri'][0])
    func, _args, kwargs = resolve(callback_url.path())
    # assertEquals is a deprecated alias of assertEqual (removed in 3.12).
    self.assertEqual(func, oauth_callback)
    self.assertEqual(kwargs['service'], FACEBOOK)
    self.assertEqual(query['scope'][0], FacebookClient.scope)
    self.assertEqual(query['client_id'][0], str(FacebookClient.client_id))
def test_google_login_url(self):
    """Google login url is properly generated"""
    google_client = GoogleClient(local_host='local_host')
    google_login_url = URL(google_client.get_login_uri())
    params = google_login_url.query_params()
    callback_url = URL(params['redirect_uri'][0])
    func, _args, kwargs = resolve(callback_url.path())
    # assertEquals is a deprecated alias of assertEqual (removed in 3.12).
    self.assertEqual(func, oauth_callback)
    self.assertEqual(kwargs['service'], GOOGLE)
    # assertIn gives a clearer failure message than assertTrue(x in y).
    self.assertIn(params['scope'][0], GoogleClient.scope)
    self.assertEqual(params['client_id'][0], str(GoogleClient.client_id))
def license_name(license_url):
    """Return a human-readable name for a license URL.

    Creative Commons URLs map to short names ('Public Domain' for CC0,
    '(CC) BY-...' otherwise); anything else is returned unchanged.
    """
    if license_url == "http://commons.wikimedia.org/wiki/GNU_Free_Documentation_License":
        return 'GNU Free Documentation License'
    # Wikipedia's public-domain article is an alias for CC0.
    if license_url == 'http://en.wikipedia.org/wiki/Public_domain':
        license_url = 'http://creativecommons.org/publicdomain/zero/1.0/'
    parsed = URL(license_url)
    if parsed.host() != 'creativecommons.org':
        return license_url
    parts = parsed.path().split('/')
    if len(parts) < 3:
        return license_url
    return {'zero': 'Public Domain'}.get(parts[2], '(CC) %s' % parts[2].upper())
def license_name(license_url):
    """Translate a license URL into a display name.

    Only creativecommons.org URLs are translated; other URLs (including
    unparseable CC paths) come back unchanged.
    """
    if license_url == "http://commons.wikimedia.org/wiki/GNU_Free_Documentation_License":
        return 'GNU Free Documentation License'
    if license_url == 'http://en.wikipedia.org/wiki/Public_domain':
        # Normalize the Wikipedia public-domain page to the CC0 URL.
        license_url = 'http://creativecommons.org/publicdomain/zero/1.0/'
    cc_url = URL(license_url)
    if cc_url.host() != 'creativecommons.org':
        return license_url
    segments = cc_url.path().split('/')
    if len(segments) < 3:
        return license_url
    special = {'zero': 'Public Domain'}
    return special.get(segments[2], '(CC) %s' % segments[2].upper())
class SplinterTestCase(LiveServerTestCase):
    """Live-server test case driving a Splinter browser.

    Subclasses flip the class flags to control whether a user is created
    and logged in to the Django admin before each test.
    """
    username = '******'
    email = '*****@*****.**'
    password = '******'
    is_anonymous = True
    is_staff = False
    is_logged_in = True

    def setUp(self):
        settings.DEBUG = True
        super(SplinterTestCase, self).setUp()
        self.user = None
        self.base_url = URL(self.live_server_url)
        self.browser = Browser(SPLINTER_WEBDRIVER)
        if self.is_anonymous and not self.is_staff:
            return
        self.user = factories.UserFactory(
            username=self.username,
            email=self.email,
            password=self.password,
            is_staff=self.is_staff,
        )
        if self.is_logged_in:
            # Log in through the admin form like a real user would.
            self.goto(reverse('admin:index'))
            self.browser.fill_form({
                'username': self.username,
                'password': self.password,
            })
            self.browser.find_by_css("input[type='submit']").first.click()
            self.assertIn('Log out', self.browser.html)

    def tearDown(self):
        super(SplinterTestCase, self).tearDown()
        self.browser.quit()

    def goto(self, path):
        """Visit *path* relative to the live-server root."""
        destination = self.base_url.path(path)
        return self.browser.visit(destination.as_string())
class SplinterTestCase(LiveServerTestCase):
    """Browser-driven test case; optionally creates and logs in a user.

    The class-level flags determine the fixture user created in setUp.
    """
    username = '******'
    email = '*****@*****.**'
    password = '******'
    is_anonymous = True
    is_staff = False
    is_logged_in = True

    def setUp(self):
        settings.DEBUG = True
        super(SplinterTestCase, self).setUp()
        self.user = None
        self.base_url = URL(self.live_server_url)
        self.browser = Browser(SPLINTER_WEBDRIVER)
        if self.is_anonymous and not self.is_staff:
            # Anonymous tests need no fixture user.
            return
        self.user = factories.UserFactory(
            username=self.username,
            email=self.email,
            password=self.password,
            is_staff=self.is_staff,
        )
        if self.is_logged_in:
            self.goto(reverse('admin:index'))
            self.browser.fill_form({
                'username': self.username,
                'password': self.password,
            })
            self.browser.find_by_css("input[type='submit']").first.click()
            self.assertIn('Log out', self.browser.html)

    def tearDown(self):
        super(SplinterTestCase, self).tearDown()
        self.browser.quit()

    def goto(self, path):
        """Navigate the browser to *path* on the live server."""
        target = self.base_url.path(path)
        return self.browser.visit(target.as_string())
def get_questions(s_link):
    """Scrape a Quora topic page and return its questions as a JSON response.

    Fixes: the bare ``except`` built a ``jsonify`` payload and then
    discarded it (no ``return``), after which the loop body ran on with
    empty data -- the entry was never appended anyway, so the parse
    failure is now skipped explicitly with a narrowed ``except``.

    :param s_link: URL of a Quora topic (quora.com / www.quora.com).
    :returns: flask ``jsonify`` response, or an error string for
        non-Quora links.
    """
    url = URL(s_link)
    if url.domain() not in ['quora.com', 'www.quora.com']:
        return 'error, not quora'
    url = URL(scheme='https', host='www.quora.com', path=url.path(),
              query='share=1').as_string()
    soup = BeautifulSoup(requests.get(url).text)
    topic = {}
    topic['url'] = url
    topic['title'] = soup.find("span", {"class": "TopicName"}).text
    questions = []
    divs = soup.find_all("div", {"class": "pagedlist_item"})
    # The last pagedlist item is not a question entry.
    count = len(divs) - 1
    for i in range(count):
        one_question = {'url': '', 'title': ''}
        try:
            one_question['url'] = divs[i].find("a", {"class": "question_link"})['href']
            one_question['title'] = divs[i].find("a", {"class": "question_link"}).find("span", {"class": "link_text"}).text
        except Exception:
            # Malformed item; its title would be empty and it would never be
            # appended below, so skip it outright.
            continue
        one_question['url'] = URL(scheme='https', host='www.quora.com',
                                  path=one_question['url']).as_string()
        if one_question['title'] != "":
            questions.append(one_question)
    return jsonify(topic=topic, questions=questions)
class SplinterTestCase(LiveServerTestCase):
    """Splinter-driven live-server tests, optionally run on Sauce Labs.

    Fixes applied:
    * Python-2 ``print "..."`` statement (a SyntaxError on Python 3) is now
      a ``print(...)`` call.
    * ``requests.exceptions.RequestsExceptions`` does not exist; the real
      base class is ``requests.exceptions.RequestException``.
    * After that except branch, ``response`` was unbound and the trailing
      ``return response.status_code ...`` raised NameError -- the error
      path now returns early.
    """
    username = '******'
    email = '*****@*****.**'
    password = '******'
    is_anonymous = True
    is_staff = False
    is_logged_in = True
    # Run against Sauce Labs on CI or when explicitly requested.
    use_remote = os.getenv('TRAVIS', False) or os.getenv('USE_REMOTE', False)

    def get_remote_browser(self):
        """Build a Sauce Labs remote browser (used on CI)."""
        remote_url = "http://{}:{}@localhost:4445/wd/hub".format(
            SAUCE_USERNAME, SAUCE_ACCESS_KEY)
        caps = {
            'name': getattr(self, 'name', self.__class__.__name__),
            'browser': 'firefox',
            'platform': "Linux",
            'version': "29"}
        if os.getenv('TRAVIS', False):
            caps['tunnel-identifier'] = os.environ['TRAVIS_JOB_NUMBER']
            caps['build'] = os.environ['TRAVIS_BUILD_NUMBER']
            caps['tags'] = [os.environ['TRAVIS_PYTHON_VERSION'], 'CI']
        return Browser(driver_name='remote', url=remote_url, **caps)

    def get_local_browser(self):
        """Build a local Splinter browser."""
        return Browser(SPLINTER_WEBDRIVER)

    def setUp(self):
        super(SplinterTestCase, self).setUp()
        self.user = None
        self.base_url = URL(self.live_server_url)
        if self.use_remote:
            self.browser = self.get_remote_browser()
        else:
            self.browser = self.get_local_browser()
        if self.is_anonymous and not self.is_staff:
            return
        self.user = factories.UserFactory(
            username=self.username,
            email=self.email,
            password=self.password,
            is_staff=self.is_staff,
        )
        if self.is_logged_in:
            self.goto(reverse('admin:index'))
            self.browser.fill_form({
                'username': self.username,
                'password': self.password,
            })
            self.browser.find_by_css("input[type='submit']").first.click()
            exists = self.browser.is_text_present('Log out', wait_time=2)
            self.assertTrue(exists)

    def report_test_result(self):
        """Report pass/fail of the current test to the Sauce Labs REST API.

        Returns True when Sauce Labs acknowledged the update, None when the
        request itself failed.
        """
        result = {'passed': sys.exc_info() == (None, None, None)}
        url = 'https://saucelabs.com/rest/v1/{username}/jobs/{job}'.format(
            username=SAUCE_USERNAME, job=self.browser.driver.session_id)
        try:
            response = requests.put(url, data=json.dumps(result),
                                    auth=(SAUCE_USERNAME, SAUCE_ACCESS_KEY))
        except requests.exceptions.RequestException:
            print("Could not set test status in Sauce Labs.")
            return
        return response.status_code == requests.codes.ok

    def tearDown(self):
        super(SplinterTestCase, self).tearDown()
        if not os.getenv('SPLINTER_DEBUG'):
            self.browser.quit()
        if self.use_remote:
            self.report_test_result()

    def goto(self, path):
        """Visit *path* relative to the live-server root."""
        url = self.base_url.path(path)
        return self.browser.visit(url.as_string())

    def wait_for_editor_reload(self, wait_for=3):
        """Fixed sleep while the editor reloads; remote runs get extra time."""
        if self.use_remote:
            wait_for += 5
        time.sleep(wait_for)

    def ensure_element(self, element_or_list, index=0):
        """
        Selects either the element with *index* from the list of elements
        given in *element_or_list* or returns the single element if it is
        not a list.

        This make it possible to handle an element and a list of elements
        where only a single element is required.

        :param element: ``Element`` instance or ``ElementList``.
        :parem int index: Index of element to be returned if a list.
            (Default: 0)
        :rtype: Element
        """
        if isinstance(element_or_list, ElementList):
            return element_or_list[index]
        return element_or_list

    def find_and_click_by_css(self, browser, selector, wait_time=3):
        """Wait for *selector* to appear, then click its first match."""
        browser.is_element_present_by_css(selector, wait_time)
        elem = self.ensure_element(browser.find_by_css(selector))
        return elem.click()
class RestSession():
    """Authenticated HTTPS session against an appliance REST API.

    Credentials are exchanged for an auth key via ``/api/v1.0/auth``; the
    resulting ``HTTPBasicAuth`` object is cached per host in a shared
    ``RestSessionContainer``.
    """

    def __init__(self, host, user, password, need_auth, parent):
        # Weak reference avoids a cycle with the owning object.
        self.parent = weakref.ref(parent)
        self._host = host
        self._user = user
        self._password = password
        self._need_auth = need_auth
        self._rest_session_container = RestSessionContainer()
        self._url = URL(scheme='https', host=host)
        self._auth_url = self._url.path('/api/v1.0/auth')
        self._commit_url = self._url.path('/api/v1.0/commit')
        self._rollback_url = self._url.path('/api/v1.0/rollback')
        self._check_auth_url = self._url.path('/api/v1.0/operation/chassis/epi')
        if self._need_auth:
            self._login()

    def _check_auth(self):
        """Return True when a cached auth key for this host still works."""
        if self._host not in self._rest_session_container.session.keys():
            return False
        response_obj = requests.request(
            'GET', self._check_auth_url, headers=DEFAULT_HEADER,
            auth=self._rest_session_container.session[self._host],
            verify=False)
        response_json = response_obj.json()
        if 'response' in response_json.keys() \
                and isinstance(response_json['response'], str) \
                and 'Authentication failed' in response_json['response']:
            return False
        return True

    def _login(self):
        """Fetch a fresh auth key and cache it for this host."""
        if not self._check_auth():
            auth = HTTPBasicAuth(self._user, self._password)
            response_obj = requests.request(
                'POST', self._auth_url, headers=DEFAULT_HEADER,
                auth=auth, verify=False)
            response_json = response_obj.json()
            # A 'status' key inside 'auth' signals a failed login.
            if 'auth' in response_json.keys() and 'status' in response_json['auth']:
                raise RequestException
            auth_key = response_json['auth']
            self._rest_session_container.session[self._host] = \
                HTTPBasicAuth(self._user, auth_key)

    def get_auth(self):
        """Return auth usable for requests, re-logging-in when necessary."""
        if not self._need_auth:
            return HTTPBasicAuth(self._user, self._password)
        if not self._check_auth():
            self._login()
        return self._rest_session_container.session[self._host]

    def del_auth(self):
        """Invalidate the cached auth key on the appliance."""
        if self._need_auth and self._check_auth():
            response_obj = requests.request(
                'DELETE', self._auth_url, headers=DEFAULT_HEADER,
                auth=self._rest_session_container.session[self._host],
                verify=False)

    def commit(self):
        """POST /commit; raise ``RequestException`` on any non-ok status."""
        if not self._check_auth():
            raise RequestException
        response_obj = requests.request(
            'POST', self._commit_url, headers=DEFAULT_HEADER,
            auth=self._rest_session_container.session[self._host],
            verify=False)
        response_json = response_obj.json()
        if 'status' in response_json.keys() and not (
                'ok' in response_json['status'] or '200' in response_json['status']):
            raise RequestException

    def rollback(self):
        """POST /rollback, discarding uncommitted changes."""
        if not self._check_auth():
            raise RequestException
        requests.request(
            'POST', self._rollback_url, headers=DEFAULT_HEADER,
            auth=self._rest_session_container.session[self._host],
            verify=False)
def test_add_path_segment(self):
    """Chained add_path_segment calls build up the path in order."""
    url = (URL('http://example.com')
           .add_path_segment('one')
           .add_path_segment('two')
           .add_path_segment('three'))
    self.assertEqual('/one/two/three', url.path())
def test_app_path_segment(self):
    """Segments added via add_path_segment appear in the path in order.

    NOTE(review): the method name looks like a typo for
    ``test_add_path_segment``; kept as-is so the test id is unchanged.
    """
    url = (URL('http://example.com')
           .add_path_segment('one')
           .add_path_segment('two')
           .add_path_segment('three'))
    self.assertEqual('/one/two/three', url.path())
def append_path(url, url_path_segment):
    """Append *url_path_segment* to *url* as a single extra path segment.

    A query string on *url_path_segment* replaces the query of *url*.
    Returns the resulting URL as a string.
    """
    parsed = URL(url_path_segment)
    url = url.add_path_segment(parsed.path())
    if parsed.query():
        url = url.query(parsed.query())
    return url.as_string()
# Demonstrate the purl URL API on a few example URLs.
print(argument_url)
print(argument_url.as_string())

# Build a URL fluently from its parts.
inline_url = (URL()
              .scheme('https')
              .domain('www.google.com')
              .path('search')
              .query_param('q', 'google'))
print(inline_url)
print(inline_url.as_string())

# Inspect every component of a parsed URL.
u = URL('postgres://*****:*****@localhost:1234/test?ssl=true')
print(u.scheme())
print(u.host())
print(u.domain())
print(u.username())
print(u.password())
print(u.netloc())
print(u.port())
print(u.path())
print(u.query())
print(u.path_segments())
print(u.query_param('ssl'))
print(u.query_param('ssl', as_list=True))
print(u.query_params())
print(u.has_query_param('ssl'))
print(u.subdomains())

# purl URLs are immutable; add_path_segment returns a new instance.
u = URL.from_string('https://github.com/minwook-shin')
print(u.path_segment(0))
new_url = u.add_path_segment('minwook-shin.github.com')
print(new_url.as_string())

from purl import expand
def url(self, path):
    """Return a purl URL for *path* on this object's scheme and host."""
    root = URL(scheme=self.scheme, host=self.host)
    return root.path(path)
class SplinterTestCase(LiveServerTestCase):
    """Base class for Splinter browser tests against the live server.

    Class flags control whether a fixture user is created and logged in
    to the admin during setUp.
    """
    username = '******'
    email = '*****@*****.**'
    password = '******'
    is_anonymous = True
    is_staff = False
    is_logged_in = True

    def setUp(self):
        super(SplinterTestCase, self).setUp()
        self.user = None
        self.base_url = URL(self.live_server_url)
        self.browser = Browser(SPLINTER_WEBDRIVER)
        self.browser.driver.set_window_size(1280, 1024)
        if self.is_anonymous and not self.is_staff:
            return
        self.user = factories.UserFactory(
            username=self.username,
            email=self.email,
            password=self.password,
            is_staff=self.is_staff,
        )
        if self.is_logged_in:
            # Log in through the admin form like a real user would.
            self.goto(reverse('admin:index'))
            self.browser.fill_form({
                'username': self.username,
                'password': self.password,
            })
            self.browser.find_by_css("input[type='submit']").first.click()
            exists = self.browser.is_text_present('Log out', wait_time=2)
            self.assertTrue(exists)

    def tearDown(self):
        super(SplinterTestCase, self).tearDown()
        # Keep the browser open for inspection when SPLINTER_DEBUG is set.
        if not os.getenv('SPLINTER_DEBUG'):
            self.browser.quit()

    def goto(self, path):
        """Visit *path* relative to the live-server base URL."""
        target = self.base_url.path(path)
        return self.browser.visit(target.as_string())

    def wait_for_editor_reload(self, wait_for=3):
        """Fixed sleep while the editor reloads."""
        time.sleep(wait_for)

    def ensure_element(self, element_or_list, index=0):
        """
        Selects either the element with *index* from the list of elements
        given in *element_or_list* or returns the single element if it is
        not a list.

        This make it possible to handle an element and a list of elements
        where only a single element is required.

        :param element: ``Element`` instance or ``ElementList``.
        :parem int index: Index of element to be returned if a list.
            (Default: 0)
        :rtype: Element
        """
        if isinstance(element_or_list, ElementList):
            return element_or_list[index]
        return element_or_list

    def find_and_click_by_css(self, browser, selector, wait_time=3):
        """Wait for *selector* to appear, then click its first match."""
        browser.is_element_present_by_css(selector, wait_time)
        elem = self.ensure_element(browser.find_by_css(selector))
        return elem.click()
def url(self, path):
    """Build a purl URL with this object's scheme/host and the given path."""
    base_url = URL(scheme=self.scheme, host=self.host)
    return base_url.path(path)
class VaResturlfactory:
    """Build purl URL objects for the VA REST API endpoints of one director."""

    api = '/api/v1.0'
    auth = '/auth'
    micro_segmentation = '/operation/micro-segmentation'
    microsegmentation = '/operation/orchestration/inventory/microsegmentation'
    enable_auto_microsegment = '/operation/orchestration/inventory/microsegmentation/auto-convert-enable'
    disable_auto_microsegment = '/operation/orchestration/inventory/microsegmentation/auto-convert-disable'
    configMicro_segmentation = '/config/micro-segmentation'
    configChassisEpi = '/config/chassis/epi'
    segmentationInfo = '/operation/orchestration/inventory/segmentation/info'
    revert = '/operation/orchestration/inventory/microsegmentation/revert'
    segmentationStats = '/operation/orchestration/inventory/segmentation/stats'
    inventory = '/operation/orchestration/inventory'
    inventory_updatestatus = '/operation/orchestration/inventory/updatestatus'
    inventoryHost = '/operation/orchestration/inventory/host'
    chassisEpi = '/operation/chassis/epi'
    configOrchestration = '/config/orchestration'
    checkOrchestration = '/operation/orchestration'
    operation_deployment_deploy = '/operation/orchestration/inventory/deployment/deploy'
    operation_deployment_remove = '/operation/orchestration/inventory/deployment/remove'
    operation_deployment_info = '/operation/orchestration/inventory/deployment/info'
    commit = '/commit'
    rollback = '/rollback'

    def __init__(self, ip):
        """Remember an https base URL for the director at *ip*.

        :param ip: director ip
        """
        self.__url = URL(scheme='https', host=ip)

    def va_get_url(self, feature=None):
        """Return the URL object for *feature*.

        :param feature (str): va rest feature
        :return (URL): url object (KeyError for unknown features, as before)
        """
        cls = VaResturlfactory
        suffixes = {
            'auth': cls.auth,
            'microsegmentation': cls.microsegmentation,
            'micro_segmentation': cls.micro_segmentation,
            'enable_auto_microsegment': cls.enable_auto_microsegment,
            'disable_auto_microsegment': cls.disable_auto_microsegment,
            'configMicro_segmentation': cls.configMicro_segmentation,
            'configChassisEpi': cls.configChassisEpi,
            'revert': cls.revert,
            'segmentationInfo': cls.segmentationInfo,
            'segmentationStats': cls.segmentationStats,
            'inventory': cls.inventory,
            'inventoryHost': cls.inventoryHost,
            'inventoryUpdatestatus': cls.inventory_updatestatus,
            'chassisEpi': cls.chassisEpi,
            'configOrchestration': cls.configOrchestration,
            'checkOrchestration': cls.checkOrchestration,
            'operationDeployment': cls.operation_deployment_deploy,
            'operationRemoveDeployment': cls.operation_deployment_remove,
            'operationDeploymentInfo': cls.operation_deployment_info,
            'commit': cls.commit,
            'rollback': cls.rollback,
        }
        # Build only the requested URL instead of constructing all of them
        # on every call; lookup failure still raises KeyError.
        return self.__url.path(cls.api + suffixes[feature])
# coding=utf-8
# Exercise basic purl.URL usage.
from purl import URL

# Three construction styles: a full URL string, keyword arguments,
# and chaining onto a base URL.
from_str = URL('https://www.google.com/search?q=testing')
from_kwargs = URL(scheme='https', host='www.google.com', path='/search', query='q=testing')
from_combo = URL('https://www.google.com').path('search')

# BUG FIX: 'print type(...)' is Python-2 syntax and a SyntaxError on
# Python 3; use the print() function.  purl URLs are immutable, so
# .path("haha") returns a new URL instance.
print(type(from_combo), type(from_combo.path("haha")))
def test_add_path_segment(self):
    """add_path_segment appends each segment to the URL path in order."""
    result = (URL("http://example.com")
              .add_path_segment("one")
              .add_path_segment("two")
              .add_path_segment("three"))
    self.assertEqual("/one/two/three", result.path())
class SplinterTestCase(LiveServerTestCase):
    """Splinter browser test case with optional fixture user and login."""
    username = '******'
    email = '*****@*****.**'
    password = '******'
    is_anonymous = True
    is_staff = False
    is_logged_in = True

    def setUp(self):
        super(SplinterTestCase, self).setUp()
        self.user = None
        self.base_url = URL(self.live_server_url)
        self.browser = Browser(SPLINTER_WEBDRIVER)
        self.browser.driver.set_window_size(1280, 1024)
        if self.is_anonymous and not self.is_staff:
            # Anonymous tests need no fixture user.
            return
        self.user = factories.UserFactory(
            username=self.username,
            email=self.email,
            password=self.password,
            is_staff=self.is_staff,
        )
        if self.is_logged_in:
            self.goto(reverse('admin:index'))
            self.browser.fill_form({
                'username': self.username,
                'password': self.password,
            })
            self.browser.find_by_css("input[type='submit']").first.click()
            exists = self.browser.is_text_present('Log out', wait_time=2)
            self.assertTrue(exists)

    def tearDown(self):
        super(SplinterTestCase, self).tearDown()
        # SPLINTER_DEBUG keeps the browser window open after the test.
        if not os.getenv('SPLINTER_DEBUG'):
            self.browser.quit()

    def goto(self, path):
        """Visit *path* relative to the live server root."""
        destination = self.base_url.path(path)
        return self.browser.visit(destination.as_string())

    def wait_for_editor_reload(self, wait_for=3):
        """Fixed sleep that gives the editor time to reload."""
        time.sleep(wait_for)

    def ensure_element(self, element_or_list, index=0):
        """
        Selects either the element with *index* from the list of elements
        given in *element_or_list* or returns the single element if it is
        not a list.

        This make it possible to handle an element and a list of elements
        where only a single element is required.

        :param element: ``Element`` instance or ``ElementList``.
        :parem int index: Index of element to be returned if a list.
            (Default: 0)
        :rtype: Element
        """
        if isinstance(element_or_list, ElementList):
            return element_or_list[index]
        return element_or_list

    def find_and_click_by_css(self, browser, selector, wait_time=3):
        """Wait until *selector* is present, then click its first match."""
        browser.is_element_present_by_css(selector, wait_time)
        elem = self.ensure_element(browser.find_by_css(selector))
        return elem.click()