def visit_loginurl(aggregate):
    """Check for a configured login URL and visit it.

    Visits the login page, submits the login form, stores the resulting
    cookies on the aggregate and queues the post-login URL for checking.
    Logs a warning and returns early on any HTTP failure.
    """
    config = aggregate.config
    url = config["loginurl"]
    if not url:
        return
    # The login visit is implemented on top of twill; warn and skip if absent.
    if not fileutil.has_module("twill"):
        msg = strformat.format_feature_warning(module=u'twill',
                                               feature=u'login URL visit',
                                               url=u'http://twill.idyll.org/')
        log.warn(LOG_CHECK, msg)
        return
    from twill import commands as tc
    log.debug(LOG_CHECK, u"Visiting login URL %s", url)
    configure_twill(tc)
    tc.go(url)
    if tc.get_browser().get_code() != 200:
        log.warn(LOG_CHECK,
                 _("Error visiting login URL %(url)s.") % {"url": url})
        return
    submit_login_form(config, url, tc)
    if tc.get_browser().get_code() != 200:
        log.warn(LOG_CHECK,
                 _("Error posting form at login URL %(url)s.") % {"url": url})
        return
    store_cookies(tc.get_browser().cj, aggregate.cookies, url)
    resulturl = tc.get_browser().get_url()
    log.debug(LOG_CHECK, u"URL after POST is %s" % resulturl)
    # add result URL to check list
    from ..checker import get_url_from
    aggregate.urlqueue.put(get_url_from(resulturl, 0, aggregate))
def visit_loginurl(aggregate):
    """Check for a configured login URL and visit it.

    Visits the login page and submits the login form, then queues the
    post-login URL for checking.  Logs a warning and returns early on
    any HTTP failure.
    """
    config = aggregate.config
    url = config["loginurl"]
    if not url:
        return
    # The login visit is implemented on top of twill; warn and skip if absent.
    if not fileutil.has_module("twill"):
        msg = strformat.format_feature_warning(module=u'twill',
                                               feature=u'login URL visit',
                                               url=u'http://twill.idyll.org/')
        log.warn(LOG_CHECK, msg)
        return
    from twill import commands as tc
    log.debug(LOG_CHECK, u"Visiting login URL %s", url)
    configure_twill(tc)
    tc.go(url)
    if tc.get_browser().get_code() != 200:
        log.warn(LOG_CHECK,
                 _("Error visiting login URL %(url)s.") % {"url": url})
        return
    submit_login_form(config, url, tc)
    if tc.get_browser().get_code() != 200:
        log.warn(LOG_CHECK,
                 _("Error posting form at login URL %(url)s.") % {"url": url})
        return
    # NOTE(review): cookie storage is disabled here (XXX) — confirm whether
    # it should be re-enabled as in the sibling implementation.
    #XXX store_cookies(tc.get_browser().cj, aggregate.cookies, url)
    resulturl = tc.get_browser().get_url()
    log.debug(LOG_CHECK, u"URL after POST is %s" % resulturl)
    # add result URL to check list
    from ..checker import get_url_from
    aggregate.urlqueue.put(get_url_from(resulturl, 0, aggregate))
def require(what):
    """
    >> require <what>

    After each page is loaded, require that 'what' be satisfied.

    'what' can be:
      * 'success' -- HTTP return code is 200
      * 'links_ok' -- all of the links on the page load OK (see 'check_links'
        extension module)
    """
    global _requirements
    from twill import commands

    # Install the post-load hook function exactly once.
    browser = commands.get_browser()
    if _require_post_load_hook not in browser._post_load_hooks:
        if DEBUG:
            print>>commands.OUT, 'INSTALLING POST-LOAD HOOK'
        browser._post_load_hooks.append(_require_post_load_hook)

    # Record the requirement, avoiding duplicates.
    if what not in _requirements:
        if DEBUG:
            print>>commands.OUT, 'Adding requirement', what
        _requirements.append(what)
def download_project_export():
    """Download the currently open project export (a .zip) and stash it.

    Reads the page body as zip data and stores a (ZipFile, filename) tuple
    in the twill global namespace under '__project_export__'.
    """
    url = get_browser().get_url()
    assert url.endswith(".zip")
    zipcontents = get_browser().get_html()
    buf = StringIO()
    buf.write(zipcontents)
    z = zipfile.ZipFile(buf, "r")
    name = url.split("/")[-1]
    # Renamed from globals/locals: the originals shadowed the builtins.
    twill_globals, _twill_locals = get_twill_glocals()
    twill_globals["__project_export__"] = (z, name)
def download_project_export():
    """Download the currently open project export (a .zip) and stash it.

    Stores a (ZipFile, filename) tuple in the twill global namespace
    under '__project_export__'.
    """
    url = get_browser().get_url()
    assert url.endswith(".zip")
    zipcontents = get_browser().get_html()
    output = StringIO()
    output.write(zipcontents)
    archive = zipfile.ZipFile(output, 'r')
    filename = url.split('/')[-1]
    globals, locals = get_twill_glocals()
    globals['__project_export__'] = (archive, filename)
def fetch_disease_model(id):
    """Fetch the disease model with the given id and return a DiseaseJson."""
    from twill import set_output
    # Silence twill's console output for the duration of the fetch.
    set_output(open('/dev/null', 'w'))
    dismod_server_login()
    twc.go(DISMOD_DOWNLOAD_URL % id)
    result_json = twc.show()
    # end the connection, so that apache doesn't get upset
    twc.get_browser()._browser._response.close()
    dm = DiseaseJson(result_json)
    return dm
def fetch_disease_model(id):
    """Fetch the disease model with the given id and return a DiseaseJson."""
    from twill import set_output
    # Silence twill's console output during the fetch.
    set_output(open("/dev/null", "w"))
    dismod_server_login()
    twc.go(DISMOD_DOWNLOAD_URL % id)
    result_json = twc.show()
    # end the connection, so that apache doesn't get upset
    twc.get_browser()._browser._response.close()
    return DiseaseJson(result_json)
def no_require():
    """
    >> no_require

    Remove all post-load requirements.
    """
    from twill import commands
    browser = commands.get_browser()
    # Drop our hook from the browser's post-load hook list.
    browser._post_load_hooks = [
        fn for fn in browser._post_load_hooks
        if fn != _require_post_load_hook
    ]
    global _requirements
    _requirements = []
def wyloguj(commands):
    """Log out by submitting the logout form found on the current page."""
    forms = commands.showforms()  # fetch all forms on the page
    logout_form = None
    for form in forms:
        # The logout form is identified by its action URL.
        if form.action == 'https://edukacja.pwr.wroc.pl/EdukacjaWeb/logOutUser.do':
            logout_form = form
    #print(logout_form)
    # NOTE(review): if no matching form was found, logout_form is None and
    # the loop below raises AttributeError — presumably the caller only
    # invokes this while logged in; confirm.
    for control in logout_form.controls:
        if control.type == 'submit':
            # Click the submit button and post the form.
            commands.get_browser().clicked(logout_form, control.attrs['name'])
            commands.get_browser().submit()
def test_raw():
    """Test parsing of raw, unfixed HTML."""
    b = commands.get_browser()
    commands.config('use_tidy', '0')
    commands.config('use_BeautifulSoup', '0')
    commands.config('allow_parse_errors', '0')
    commands.go(url)

    ###
    # Apparently, mechanize is more tolerant than it used to be.
    # commands.go('/tidy_fixable_html')
    # forms = [ i for i in b._browser.forms() ]
    # logging.info("forms: %s", forms)
    # assert len(forms) == 0, "there should be no correct forms on this page"
    ###

    commands.go('/BS_fixable_html')
    forms = list(b._browser.forms())
    assert len(forms) == 1, "there should be one mangled form on this page"
def test_tidy():
    """Test parsing of tidy-processed HTML."""
    b = commands.get_browser()
    commands.config('use_tidy', '1')
    commands.config('use_BeautifulSoup', '0')
    commands.config('allow_parse_errors', '0')
    commands.go(url)

    ###
    commands.go('/tidy_fixable_html')
    forms = list(b._browser.forms())
    assert len(forms) == 1, \
        "you must have 'tidy' installed for this test to pass"

    ###
    commands.go('/BS_fixable_html')
    forms = list(b._browser.forms())
    assert len(forms) == 1, \
        "there should be one mangled form on this page"
def _run_xpath(xpath):
    """Evaluate ``xpath`` against the current page and return the result.

    All matches are serialized to HTML and joined with newlines; an empty
    string is returned when nothing matched.  The result and expression are
    cached in the twill local namespace.  Raises TwillException for an
    invalid xpath expression.
    """
    _, twill_locals = get_twill_glocals()
    browser = get_browser()
    html = browser.get_html()
    tree = lxml.html.document_fromstring(html)
    try:
        results = tree.xpath(xpath)
    except XPathEvalError:
        err_msg = "Invalid xpath expression: '%s'" % xpath
        log_error(err_msg)
        raise TwillException(err_msg)
    # XXX we aggregate all the values together and warn when there is more than
    # one result
    if results:
        if len(results) > 1:
            log_warn("xpath '%s' found multiple results: using all of them"
                     % xpath)
        result = "\n".join(lxml.html.tostring(r) for r in results)
    else:
        # BUG FIX: the format string previously had no % argument, so the
        # log message contained a literal '%s'.
        log_error("xpath '%s' found no results" % xpath)
        result = ""
    # in case we want to cache it at some point
    twill_locals["__xpath_result__"] = result
    twill_locals["__xpath_expr__"] = xpath
    return result
def remove_students(ssid, students_list):
    """POST a request removing the given students from student set ssid."""
    mech_browser = cmd.get_browser()._browser
    params = [
        ('setID', ssid),
        ('remove', 'Remove'),
        # the server expects a single comma-separated id list
        ('id_list[]', ','.join(str(d) for d in students_list)),
    ]
    r = mech_browser.open('/py/teacher/studentset/student_remove/',
                          urllib.urlencode(params))
def test_effed_up_forms2():
    """should always succeed; didn't back ~0.7."""
    commands.config('use_tidy', '1')
    commands.config('use_BeautifulSoup', '1')
    commands.config('allow_parse_errors', '0')
    commands.go(url)
    commands.go('/effed_up_forms2')
    b = commands.get_browser()

    first_form = b.get_all_forms()[0]
    inputs = list(first_form.inputs)
    assert len(inputs) == 3, \
        "you must have 'tidy' installed for this test to pass"

    # with a more correct form parser this would work like the above.
    commands.config('use_tidy', '0')
    commands.reload()
    first_form = b.get_all_forms()[0]
    inputs = list(first_form.inputs)
    assert len(inputs) == 3, "lxml should find 3 form inputs"
def get_cert(cert): try: if not completed_certs.has_key(cert): reset_browser() go('http://www2.fdic.gov/idasp/ExternalConfirmation.asp?inCert1=%s' % (cert)) log.write(cert) html = get_browser().get_html() bhc_links = bhc_cert_href.findall(html) if len(bhc_links) > 0: get_bhc(cert) log.write(' holding\n') else: get_bank(cert) log.write(' bank\n') log.flush() except Exception, e: print e
def get_or_create_studentset(self, name, students, access_type='formal'):
    """create studentset (if not exists) and return info dict"""
    try:
        find(name)
    except TwillAssertionError:
        # below code can be simplified
        assert access_type in ['formal', 'casual']
        cmd.add_extra_header('Content-Type', 'application/xml')
        cmd.add_extra_header('charset', 'UTF-8')
        mech_browser = cmd.get_browser()._browser
        r = mech_browser.open(
            '/py/teacher/studentset/new/ajax/?',
            urllib.urlencode(dict(setname=name,
                                  namelist=students,
                                  access_type=access_type,
                                  nameorder='dir',
                                  submit='Submit')))
    # # return info
    # cmd.go('/py/teacher/studentset/list/')
    # id = int(soup().findAll(text=name)[0].parent['href'].split('/')[-2])
    # return get_studentset_info(id)
    return dict(name=name, )
def test_BeautifulSoup():
    """Test parsing of BS-processed HTML."""
    b = commands.get_browser()
    commands.config('use_tidy', '0')
    commands.config('use_BeautifulSoup', '1')
    commands.config('allow_parse_errors', '0')
    commands.go(url)

    ###
    commands.go('/tidy_fixable_html')
    forms = list(b._browser.forms())
    assert len(forms) == 0, \
        "there should be no correct forms on this page"

    ###
    commands.go('/BS_fixable_html')
    forms = list(b._browser.forms())
    assert len(forms) == 1, \
        "there should be one mangled form on this page"
def wyloguj(commands):
    """Log out by submitting the logout form found on the current page."""
    # Collect the page's forms and locate the one whose action is the
    # logout endpoint.
    logout_form = None
    for form in commands.showforms():
        if form.action == 'https://edukacja.pwr.wroc.pl/EdukacjaWeb/logOutUser.do':
            logout_form = form
    #print(logout_form)
    # NOTE(review): logout_form is None when the page has no logout form,
    # in which case the attribute access below raises — confirm callers
    # only invoke this while logged in.
    for ct in logout_form.controls:
        if ct.type == 'submit':
            # click the submit button, then post the form
            commands.get_browser().clicked(logout_form, ct.attrs['name'])
            commands.get_browser().submit()
def test_BeautifulSoup():
    """Test parsing of BS-processed HTML."""
    b = commands.get_browser()
    commands.config('use_tidy', '0')
    commands.config('use_BeautifulSoup', '1')
    commands.config('allow_parse_errors', '0')
    commands.go(url)

    ###
    commands.go('/tidy_fixable_html')
    assert len(b.get_all_forms()) == 1, \
        "lxml should find one form on this page"

    ###
    commands.go('/BS_fixable_html')
    assert len(b.get_all_forms()) == 1, \
        "there should be one mangled form on this page"
def _run_xpath(xpath):
    """Evaluate ``xpath`` against the current page and return the result.

    All matches are serialized to HTML and joined with newlines; an empty
    string is returned when nothing matched.  The result and expression are
    cached in the twill local namespace.  Raises TwillException for an
    invalid xpath expression.
    """
    _, twill_locals = get_twill_glocals()
    browser = get_browser()
    html = browser.get_html()
    tree = lxml.html.document_fromstring(html)
    try:
        results = tree.xpath(xpath)
    except XPathEvalError:
        err_msg = "Invalid xpath expression: '%s'" % xpath
        log_error(err_msg)
        raise TwillException(err_msg)
    #XXX we aggregate all the values together and warn when there is more than
    #one result
    if results:
        if len(results) > 1:
            log_warn("xpath '%s' found multiple results: using all of them"
                     % xpath)
        result = '\n'.join(lxml.html.tostring(r) for r in results)
    else:
        # BUG FIX: the format string previously had no % argument, so the
        # log message contained a literal '%s'.
        log_error("xpath '%s' found no results" % xpath)
        result = ''
    # in case we want to cache it at some point
    twill_locals['__xpath_result__'] = result
    twill_locals['__xpath_expr__'] = xpath
    return result
def test_tidy():
    """Test parsing of tidy-processed HTML."""
    b = commands.get_browser()
    commands.config('use_tidy', '1')
    commands.config('use_BeautifulSoup', '0')
    commands.config('allow_parse_errors', '0')
    commands.go(url)

    ###
    commands.go('/tidy_fixable_html')
    assert len(b.get_all_forms()) == 1, \
        "you must have 'tidy' installed for this test to pass"

    ###
    commands.go('/BS_fixable_html')
    assert len(b.get_all_forms()) == 1, \
        "there should be one mangled form on this page"
def test_BeautifulSoup():
    """Test parsing of BS-processed HTML."""
    browser = commands.get_browser()
    commands.config('use_tidy', '0')
    commands.config('use_BeautifulSoup', '1')
    commands.config('allow_parse_errors', '0')
    commands.go(url)

    ###
    commands.go('/tidy_fixable_html')
    forms = list(browser._browser.forms())
    assert len(forms) == 0, \
        "there should be no correct forms on this page"

    ###
    commands.go('/BS_fixable_html')
    forms = list(browser._browser.forms())
    assert len(forms) == 1, \
        "there should be one mangled form on this page"
def get_bank(cert, get_bhc=False): try: if not completed_certs.has_key(cert): completed_certs[cert] = True go('http://www2.fdic.gov/idasp/confirmation.asp?inCert1=%s&AsOf=9/30/2008' % (cert)) html = get_browser().get_html() bhc_links = bhc_cert_href.findall(html) if bhc_links is not None: for bhc_link in bhc_links: pending_certs.append(bhc_link) save_html('%s_bank.html' % (cert)) fv('1', 'ReportName', '99') submit() fv('2', 'ReportName', '99') submit() save_html('%s_report.html' % (cert)) go('http://www2.fdic.gov/sod/sodInstBranchRpt.asp?rCert=%s&baritem=1&ryear=2008' % (cert)) save_html('%s_sod.html' % (cert)) except Exception, e: print e
def leave_all_studentsets_named(studentset_name):
    """Leave every student set whose link text equals studentset_name."""
    b = cmd.get_browser()
    cmd.go("/py/teacher/studentset/list/")
    soup = BeautifulSoup(cmd.show())
    for link in soup.findAll('a', text=studentset_name):
        # the set id is the second-to-last path component of the link href
        stsid = int(link.parent['href'].split('/')[-2])
        studentset_leave(stsid)
def authAndRedirect(username, password):
    """Log in through the system redirect page and return the resulting HTML."""
    tw.reset_browser()
    tw.go(SYS_REDIRECT_URL)
    # Fill in the first form's credentials, point it at the auth endpoint
    # and submit.
    tw.fv('1', "username", username)
    tw.fv('1', "password", password)
    tw.formaction('1', AUTH_URL)
    tw.submit()
    return tw.get_browser().get_html()
def url_qs(what):
    """Assert that CGI parameter ``what`` appears in the current URL's
    query string.

    Raises TwillAssertionError when the parameter is absent.
    """
    browser = get_browser()
    qs = urllib.splitquery(browser.get_url())[-1]
    qs = qs.split('&')
    qsdict = {}
    for q in qs:
        # Split on the first '=' only, so values containing '=' survive;
        # also tolerates valueless parameters (previously an IndexError).
        key, _, value = q.partition('=')
        qsdict[key] = value
    if what not in qsdict:
        raise TwillAssertionError("no match to '%s' in %s" % (what, qs))
def test_global_form():
    """test the handling of global form elements"""
    browser = commands.get_browser()
    commands.config('use_tidy', '0')
    commands.go(url)
    commands.go('/effed_up_forms')
    assert len(browser.get_all_forms()) == 2
def split(what):
    """
    >> split <regexp>

    Sets __matchlist__ to re.split(regexp, page).
    """
    page = get_browser().get_html()
    pieces = re.split(what, page)
    global_dict, local_dict = get_twill_glocals()
    local_dict['__matchlist__'] = pieces
def try_posting_disease_model(dm, ntries):
    """Post a disease model, retrying on failure.

    Tries up to ``ntries`` times, sleeping a random interval between
    attempts.  Returns the posted URL on success, "" after all attempts
    fail (or when ntries == 0).
    """
    # error handling: in case post fails try again, but stop after 3 tries
    from twill.errors import TwillAssertionError
    import random
    import time
    url = ""
    for ii in range(ntries):
        try:
            url = post_disease_model(dm)
            break
        except TwillAssertionError:
            pass
        if ii < ntries - 1:
            debug("posting disease model failed, retrying in a bit")
            time.sleep(random.random() * 30)
        else:
            debug("posting disease model failed %d times, giving up" % (ii + 1))
            # end the connection, so that apache doesn't get upset
            twc.get_browser()._browser._response.close()
            return ""
    # BUG FIX: the success path previously fell off the end of the function,
    # returning None instead of the posted URL.
    return url
def provide_formname(self, prefix):
    """Return the ids/names of all forms on the page starting with prefix."""
    names = []
    for form in commands.get_browser()._browser.forms():
        # prefer the form's id attribute over its name
        form_id = form.attrs.get('id')
        if form_id and form_id.startswith(prefix):
            names.append(form_id)
            continue
        form_name = form.name
        if form_name and form_name.startswith(prefix):
            names.append(form_name)
    return names
def create_user(self, user, pw):
    """Create a new user account via the /user/create form.

    Raises Exception when the account already exists.
    """
    self.get("/user/create")
    tc.fv("1", "email", user)
    tc.fv("1", "password", pw)
    tc.fv("1", "confirm", pw)
    tc.submit("Create")
    tc.code(200)
    # A remaining form means the create page was re-rendered with errors.
    if len(tc.get_browser().get_all_forms()) > 0:
        p = userParser()
        p.feed(tc.browser.get_html())
        if p.already_exists:
            # FIX: call form instead of the Python-2-only raise statement,
            # consistent with the sibling implementation.
            raise Exception('The user you were trying to create already exists')
def create_user(self, user, pw):
    """Create a new user account via the /user/create form.

    Raises Exception when the account already exists.
    """
    self.get("/user/create")
    tc.fv("1", "email", user)
    tc.fv("1", "password", pw)
    tc.fv("1", "confirm", pw)
    tc.submit("Submit")
    tc.code(200)
    # A remaining form means the create page was re-rendered with errors.
    if len(tc.get_browser().get_all_forms()) > 0:
        parser = userParser()
        parser.feed(tc.browser.get_html())
        if parser.already_exists:
            raise Exception('The user you were trying to create already exists')
def test_global_form():
    """test the handling of global form elements"""
    browser = commands.get_browser()
    commands.config('use_tidy', '0')
    commands.go(url)
    commands.go('/effed_up_forms')
    assert len(list(browser._browser.forms())) == 1
    assert browser._browser.global_form()
def studentset_leave(id, teacher_id=221):
    """Remove a teacher (default 221) from student set ``id``."""
    cmd.go('/py/teacher/studentset/%d/' % id)
    cmd.code(200)
    mech_browser = cmd.get_browser()._browser
    # BUG FIX: id_list was hard-coded to '221', silently ignoring the
    # teacher_id argument; the default preserves the old behavior.
    r = mech_browser.open(
        '/py/teacher/studentset/teacher_remove/',
        urllib.urlencode({
            'setID': id,
            'remove': 'Remove',
            'id_list[]': str(teacher_id)
        }))
def css_len(css, num):
    """Assert that the CSS selector matches exactly ``num`` elements."""
    num = int(num)
    _, twill_locals = get_twill_glocals()
    browser = get_browser()
    tree = lxml.html.document_fromstring(browser.get_html())
    matches = CSSSelector(css)(tree)
    actual_num = len(matches)
    if actual_num != num:
        raise TwillAssertionError('Expected %s matches to "%s"; found %s' %
                                  (num, css, actual_num))
def search_formname(fieldnames, tc):
    """Search form that has all given CGI fieldnames.

    Returns the matching form's name, its id, or its index in the form
    list; None when no form contains every field.
    """
    browser = tc.get_browser()
    for index, candidate in enumerate(browser.get_all_forms()):
        for fieldname in fieldnames:
            try:
                browser.get_form_field(candidate, fieldname)
            except tc.TwillException:
                break
        else:
            # every requested field exists in this form
            return candidate.name or candidate.attrs.get('id') or index
    # none found
    return None
def try_posting_disease_model(dm, ntries):
    """Post a disease model, retrying on failure.

    Tries up to ``ntries`` times, sleeping a random interval between
    attempts.  Returns the posted URL on success, '' after all attempts
    fail (or when ntries == 0).
    """
    from twill.errors import TwillAssertionError
    import random
    import time
    url = ''
    for ii in range(ntries):
        try:
            url = post_disease_model(dm)
            break
        except TwillAssertionError:
            pass
        if ii < ntries-1:
            debug('posting disease model failed, retrying in a bit')
            time.sleep(random.random()*30)
        else:
            debug('posting disease model failed %d times, giving up' % (ii+1))
            # end the connection, so that apache doesn't get upset
            twc.get_browser()._browser._response.close()
            return ''
    # BUG FIX: the success path previously fell off the end of the function,
    # returning None instead of the posted URL.
    return url
def has_link(text, url=''):
    """Assert that the current page has a link with the given text.

    When ``url`` is non-empty, the link's href must match it as well;
    raises AssertionError when no such link is found.
    """
    browser = twc.get_browser()
    html = browser.get_html()
    if html:
        for match in _LINK.finditer(html):
            linkurl, linktext = match.group(1), match.group(2)
            if linktext != text:
                continue
            # if url is specified linkurl must match
            if url and linkurl != url:
                continue
            return
    raise AssertionError('link %s (%s) not found' % (text, url))
def search_formname(fieldnames, tc):
    """Search form that has all given CGI fieldnames.

    Returns the matching form's name, its id, or (as a fallback) its index
    in the form list; None when no form contains every field.
    """
    browser = tc.get_browser()
    for formcounter, form in enumerate(browser.get_all_forms()):
        for name in fieldnames:
            try:
                browser.get_form_field(form, name)
            except tc.TwillException:
                break
        else:
            # FIX: fall back to the form's index when it has neither a name
            # nor an id — previously such a form yielded None, which was
            # indistinguishable from "not found".  Consistent with the
            # sibling implementation.
            return form.name or form.attrs.get('id') or formcounter
    # none found
    return None
def findall(what):
    """
    >> findall <regexp>

    Sets __matchlist__ to re.findall(regexp, page).
    """
    page = get_browser().get_html()
    # DOTALL so '.' also matches newlines in the page source
    matches = re.compile(what, re.DOTALL).findall(page)
    global_dict, local_dict = get_twill_glocals()
    local_dict['__matchlist__'] = matches
def test_allow_parse_errors():
    """test nice parsing."""
    browser = commands.get_browser()
    commands.config('use_tidy', '0')
    commands.config('use_BeautifulSoup', '1')
    commands.config('allow_parse_errors', '1')
    commands.go(url)
    commands.go('/unfixable_html')
    # just check that form parsing doesn't raise
    browser._browser.forms()
def css_len(css, num):
    """Assert that the CSS selector matches exactly ``num`` elements."""
    num = int(num)
    _, twill_locals = get_twill_glocals()
    page_html = get_browser().get_html()
    tree = lxml.html.document_fromstring(page_html)
    selector = CSSSelector(css)
    actual_num = len(selector(tree))
    if actual_num != num:
        raise TwillAssertionError("Expected %s matches to \"%s\"; found %s"
                                  % (num, css, actual_num))
def provide_field(self, formname, prefix):
    """Return ids/names of controls in ``formname`` starting with prefix."""
    form = commands.get_browser().get_form(formname)
    if not form:
        return []
    names = []
    for control in form.controls:
        # prefer the control's id attribute over its name
        control_id = control.id
        if control_id and control_id.startswith(prefix):
            names.append(control_id)
            continue
        control_name = control.name
        if control_name and control_name.startswith(prefix):
            names.append(control_name)
    return names
def test_allow_parse_errors():
    """test nice parsing."""
    browser = commands.get_browser()
    commands.config('use_tidy', '0')
    commands.config('use_BeautifulSoup', '1')
    commands.config('allow_parse_errors', '1')
    commands.go(url)
    commands.go('/unfixable_html')
    # just check that form extraction doesn't raise
    browser.get_all_forms()
def get_mail():
    """Return the list of mails recorded in the debug-mail-location cookie.

    Returns an empty list when the cookie is absent.
    """
    browser = get_browser()
    mails = None
    for cookie in browser.cj:
        if cookie.name != 'debug-mail-location':
            continue
        mails = cookie.value
        # some cookie jars keep the value quoted
        if mails.startswith('"'):
            mails = mails.strip('"')
        break
    return [] if mails is None else mails.split(';')
def login(self, user, pw):
    """Log in via the /user/login form and persist the session cookies.

    Raises Exception when the password is wrong or login otherwise fails.
    """
    self.get("/user/login")
    tc.fv("1", "login", user)
    tc.fv("1", "password", pw)
    tc.submit("Login")
    tc.code(200)
    if len(tc.get_browser().get_all_forms()) > 0:
        # uh ohs, fail
        parser = userParser()
        parser.feed(tc.browser.get_html())
        if parser.bad_pw:
            raise Exception("Password is incorrect")
        else:
            raise Exception("Unknown error logging in")
    tc.save_cookies(self.cookie_jar)
def test_effed_up_forms2():
    """should always succeed; didn't back ~0.7."""
    commands.config('use_tidy', '1')
    commands.config('use_BeautifulSoup', '1')
    commands.config('allow_parse_errors', '0')
    commands.go(url)
    commands.go('/effed_up_forms2')
    browser = commands.get_browser()
    first_form = list(browser._browser.forms())[0]
    assert len(first_form.controls) == 3, \
        "you must have 'tidy' installed for this test to pass"
def send(self, msg, *send_to):
    """Open the SMS compose page, solving the captcha by hand when shown.

    When a captcha is present, the image is saved to a temp file, opened
    for the user, and the answer read from stdin.
    """
    web.go(self.SERVICE_URL)
    self._retry_find("editableSmsComposeForm", 5)
    try:
        page = web.get_browser().get_html()
        web.notfind("inputCaptcha")
    # FIX: 'as' form instead of the Python-2-only 'except X, e' syntax
    # (valid from Python 2.6 onwards).
    except twill.errors.TwillAssertionError as e:
        found = re.search("(/composer/public/jcaptcha\?id=.*)", page)
        assert found
        web.go(found.groups()[0])
        with tempfile.NamedTemporaryFile(suffix=".jpeg") as captcha:
            web.save_html(captcha.name)
            web.back()
            os.system("open %s " % captcha.name)
            web.formvalue("editableSmsComposeForm", "inputCaptcha",
                          raw_input("Captcha: "))
def __init__(self):
    """Set up server URL, tool name, and a persistent twill cookie jar."""
    self.server = server
    self.handler = handler
    self.waited = -1
    self.tool = 'echo_' + handler
    self._hda_id = None
    self._hda_state = None
    self._history_id = None
    # normalize the server address to a full URL
    if not self.server.startswith('http'):
        self.server = 'http://' + self.server
    self.cookie_jar = os.path.join(var_dir, "cookie_jar")
    dprint("cookie jar path: %s" % self.cookie_jar)
    # create the jar on first run so load_cookies below always succeeds
    if not os.access(self.cookie_jar, os.R_OK):
        dprint("no cookie jar at above path, creating")
        tc.save_cookies(self.cookie_jar)
    tc.load_cookies(self.cookie_jar)
    # urllib opener sharing twill's cookie jar
    self.opener = build_opener(HTTPCookieProcessor(tc.get_browser().cj))
def login(self, user, pw):
    """Log in via the /user/login form, creating the user if missing.

    Persists the session cookies on success; raises Exception on a wrong
    password or any other login failure.
    """
    self.get("/user/login")
    tc.fv("1", "email", user)
    tc.fv("1", "password", pw)
    tc.submit("Login")
    tc.code(200)
    if len(tc.get_browser().get_all_forms()) > 0:
        # uh ohs, fail
        parser = userParser()
        parser.feed(tc.browser.get_html())
        if parser.no_user:
            dprint("user does not exist, will try creating")
            self.create_user(user, pw)
        elif parser.bad_pw:
            raise Exception("Password is incorrect")
        else:
            raise Exception("Unknown error logging in")
    tc.save_cookies(self.cookie_jar)
def url_qs(what, val=None):
    """Assert that query-string parameter ``what`` exists in the current URL,
    and (when ``val`` is given) that it has that value.

    Raises TwillAssertionError on either failure.
    """
    browser = get_browser()
    qs = urllib.splitquery(browser.get_url())[-1]
    qs = qs.split('&')
    qsdict = {}
    for q in qs:
        # Split on the first '=' only, so values containing '=' survive;
        # also tolerates valueless parameters (previously an IndexError).
        key, _, value = q.partition('=')
        qsdict[key] = value
    if what not in qsdict:
        raise TwillAssertionError("no match to '%s' in %s" % (what, qs))
    if val is None:
        return
    if qsdict[what] != val:
        raise TwillAssertionError("Expected query_string argument %s to be %s, but it's %s instead" % (what, val, qsdict[what]))
def login(self, user, pw):
    """Log in via the /user/login form, creating the user if missing.

    Persists the session cookies on success; raises Exception on a wrong
    password or any other login failure.
    """
    self.get("/user/login")
    tc.fv("1", "email", user)
    tc.fv("1", "password", pw)
    tc.submit("Login")
    tc.code(200)
    if len(tc.get_browser().get_all_forms()) > 0:
        # uh ohs, fail
        p = userParser()
        p.feed(tc.browser.get_html())
        if p.no_user:
            dprint("user does not exist, will try creating")
            self.create_user(user, pw)
        elif p.bad_pw:
            # FIX: call form instead of the Python-2-only raise statement,
            # consistent with the sibling login implementation.
            raise Exception("Password is incorrect")
        else:
            raise Exception("Unknown error logging in")
    tc.save_cookies(self.cookie_jar)
def assign(ssid, content):
    """Create a new assignment and post it to student set ``ssid``.

    Walks the category links named by content[0] and content[1], selects
    the task titled content[2], and submits the assignment form.
    """
    cmd.go('/py/teacher/assignment/new/')
    cmd.find(content[0])
    # follow the two category links down to the task listing
    soup = BeautifulSoup(cmd.show())
    cmd.go(soup.find('a', text=content[0]).parent['href'])
    soup = BeautifulSoup(cmd.show())
    cmd.go(soup.find('a', text=content[1]).parent['href'])
    soup = BeautifulSoup(cmd.show())
    assignment = soup.find(
        'b', text=content[2]).parent.parent.parent.find('input')['value']
    cmd.fv(2, 'task', assignment)
    # the control is read-only; allow writing before forcing the next step
    cmd.config('readonly_controls_writeable', '+')
    cmd.fv(2, 'mainform_force_next', 'yes')
    cmd.submit()
    b = cmd.get_browser()
    r = b._browser.open(b.get_url(), 'studentset_id=%d' % ssid)
    cmd.submit()
def start(args): parser = OptionParser(version=VERSION) parser.add_option("-p", "--password", help="account password") parser.add_option("-u", "--username", help="account user name") parser.add_option("-g", "--groupname", help="group name") (options, args) = parser.parse_args(args) password = options.password username = options.username yahoo_group = options.groupname if not (username and yahoo_group): parser.print_help() sys.exit(1) if not password: password = getpass.getpass() if not password: parser.print_help() sys.exit(1) yg_url = "%s/%s/"%(YG_BASE_URL, yahoo_group) tc.go(yg_url) tc.follow("Sign In") tc.formvalue(1, 'login', username) tc.formvalue(1, 'passwd', password) tc.submit() tc.follow("Messages") b = tc.get_browser() browser_title = b.get_title() m = re.search(r'Messages : (\d+)-(\d+) of (\d+)', browser_title) if not m: print "regular expression failed" sys.exit(1) start, end, total_messages = m.groups() for i in range(int(total_messages)): message_id = i + 1 message_path = '%s/%s'%(yahoo_group, message_id) download_message(message_id, message_path, yahoo_group)
def url_qs(what, val=None):
    """Assert that query-string parameter ``what`` exists in the current URL,
    and (when ``val`` is given) that it has that value."""
    browser = get_browser()
    query = urllib.splitquery(browser.get_url())[-1]
    query = query.split('&')
    qsdict = {}
    for pair in query:
        pair = pair.split('=')
        qsdict[pair[0]] = pair[1]
    if what not in qsdict:
        raise TwillAssertionError("no match to '%s' in %s" % (what, query))
    if val is None:
        return
    if qsdict[what] != val:
        raise TwillAssertionError(
            "Expected query_string argument %s to be %s, but it's %s instead"
            % (what, val, qsdict[what]))
def find_in_css(what, css):
    """Search (case-insensitively) for regexp ``what`` inside the elements
    matched by CSS selector ``css``; store the match in __match__.

    Raises TwillAssertionError when nothing matches.
    """
    _, twill_locals = get_twill_glocals()
    browser = get_browser()
    tree = lxml.html.document_fromstring(browser.get_html())
    selected = CSSSelector(css)(tree)
    # serialize the selected elements into one searchable string
    results = '\n'.join(lxml.html.tostring(r) for r in selected)
    m = re.compile(what, re.IGNORECASE).search(results)
    if not m:
        raise TwillAssertionError("no match to '%s' in '%s'" % (what, results))
    match_str = m.group(1) if m.groups() else m.group(0)
    twill_locals['__match__'] = match_str
def check_redir(self, url):
    """Return True when ``url`` answers with a 302 redirect, else False.

    Exits the process when Galaxy reports 502 (server down).
    """
    try:
        # disable redirect following so the 302 itself is observable
        tc.get_browser()._browser.set_handle_redirect(False)
        tc.go(url)
        tc.code(302)
        tc.get_browser()._browser.set_handle_redirect(True)
        dprint("%s is returning redirect (302)" % url)
        return True
    except twill.errors.TwillAssertionError as e:
        # restore redirect handling before reporting
        tc.get_browser()._browser.set_handle_redirect(True)
        dprint("%s is not returning redirect (302): %s" % (url, e))
        code = tc.browser.get_code()
        if code == 502:
            sys.exit("Galaxy is down (code 502)")
        return False