def download_system_cases(self, session: Optional[requests.Session] = None) -> List[TestCase]:
    """Download the system test cases of a HackerRank challenge as a zip archive.

    Returns an empty list when the download endpoint rejects the request.
    """
    session = session or utils.new_default_session()
    # example: https://www.hackerrank.com/rest/contests/hourrank-1/challenges/beautiful-array/download_testcases
    url = 'https://www.hackerrank.com/rest/contests/{}/challenges/{}/download_testcases'.format(self.contest_slug, self.challenge_slug)
    resp = utils.request('GET', url, session=session, raise_for_status=False)
    if resp.status_code != 200:
        log.error('response: %s', resp.content.decode())
        return []
    # the archive layout is input/inputNN.txt paired with output/outputNN.txt
    entry_pattern = re.compile(r'(in|out)put/\1put(\d+).txt')
    with zipfile.ZipFile(io.BytesIO(resp.content)) as archive:
        # collect the numeric suffixes of the input files
        indices = []  # type: List[str]
        for entry in sorted(archive.namelist()):  # "input" < "output"
            if entry.endswith('/'):
                continue  # skip directory entries
            log.debug('filename: %s', entry)
            matched = entry_pattern.match(entry)
            assert matched
            if matched.group(1) == 'in':
                indices.append(matched.group(2))
        # pair each input file with its output file
        cases = []  # type: List[TestCase]
        for index in indices:
            in_name = 'input/input{}.txt'.format(index)
            out_name = 'output/output{}.txt'.format(index)
            cases.append(TestCase(
                LabeledString(in_name, archive.read(in_name).decode()),
                LabeledString(out_name, archive.read(out_name).decode()),
            ))
        return cases
def generate_scanner(args: 'argparse.Namespace') -> None:
    """Generate input-scanner code for the problem at ``args.url`` and print it.

    Exits with status 1 when the URL is not recognized or no input format is found.
    """
    if not args.silent:
        log.warning('This feature is ' + log.red('experimental') + '.')
    if args.silent:
        # fixed: iterate over a copy; removing handlers from the live list while
        # iterating it skips every other handler
        # NOTE(review): the call is log.removeHandler (module level), not
        # log.logger.removeHandler -- confirm the log module exposes it
        for handler in list(log.logger.handlers):
            log.removeHandler(handler)
    problem = onlinejudge.dispatch.problem_from_url(args.url)
    if problem is None:
        sys.exit(1)
    with utils.with_cookiejar(utils.new_default_session(), path=args.cookie) as sess:
        it = problem.get_input_format(session=sess)  # type: Any
        if not it:
            log.error('input format not found')
            sys.exit(1)
    # pipeline: tokenize -> parse -> postprocess -> export
    try:
        log.debug('original data: %s', repr(it))
        it = list(tokenize(it))
        log.debug('tokenized: %s', str(it))
        it = list(parse(it))
        log.debug('parsed: %s', str(it))
        it = postprocess(it)
        log.debug('postprocessed: %s', str(it))
        it = export(it, use_scanf=args.scanf, repeat_macro=args.repeat_macro)
        log.debug('result: %s', repr(it))
    except Exception:  # narrowed from a bare except; still logged and re-raised
        log.error('something wrong')
        raise
    log.success('success:')
    print(log.bold(it.rstrip()))  # to stdout
def get_language_dict(self, session=None):
    """Scrape the language selector on the AtCoder submit page.

    Returns a mapping from language value to ``{'description': ...}``, or an
    empty dict when not logged in or an unexpected message is reported.
    """
    session = session or utils.new_default_session()
    url = 'http://{}.contest.atcoder.jp/submit'.format(self.contest_id)
    resp = utils.request('GET', url, session=session)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    if AtCoderService._report_messages(msgs, unexpected=True):
        return {}
    # the submit page redirects to /login when the session is not authenticated
    path = utils.normpath(urllib.parse.urlparse(resp.url).path)
    if path.startswith('/login'):
        log.error('not logged in')
        return {}
    # NOTE: AtCoder can vary languages depending on tasks, even in one contest.
    # here, ignores this fact.
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    selector = soup.find('select', class_='submit-language-selector')
    return {
        option.attrs['value']: {'description': option.string}
        for option in selector.find_all('option')
    }
def _get_task_id(self, session: Optional[requests.Session] = None) -> int:
    """Resolve and cache the task screen name from the problem page.

    Raises SubmissionError when the link to the submit page cannot be found.
    NOTE(review): the annotation says ``int`` but the cached value is
    ``m.group(2)``, a string -- confirm against the callers.
    """
    if self._task_id is None:
        session = session or utils.new_default_session()
        # hand-modified: old-style AtCoder problem URLs end with a digit rather
        # than a letter, so on failure retry once with 'a'..'z' mapped to '1'..'9'
        url = self.get_url()
        for _ in range(2):
            resp = _request('GET', url, session=session)
            msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
            if AtCoderService._report_messages(msgs, unexpected=True):
                raise SubmissionError
            # look for the link to the submit page
            soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
            submit = soup.find('a', href=re.compile(r'submit\?taskScreenName='))
            if submit is not None:  # fixed: compare to None with `is not`, not `!=`
                break
            url = url[:-1] + chr(ord(url[-1]) - ord('a') + ord('1'))
        if not submit:
            log.error('link to submit not found')
            raise SubmissionError
        # hand-modified section: extract the screen name from the href
        regex = re.compile(r'(.+)/submit\?taskScreenName=(.+)$')
        m = regex.search(submit.attrs['href'])
        assert m
        self._task_id = m.group(2)
    return self._task_id
def download_samples(self, session=None):
    """Scrape sample cases from the problem page.

    A sample is a ``<pre>`` whose preceding ``<h2>``/``<h3>`` heading contains
    one of the sample keywords (English or Japanese).
    """
    session = session or utils.new_default_session()
    # get
    resp = utils.request('GET', self.get_url(), session=session)
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    samples = utils.SampleZipper()
    for pre in soup.find_all('pre'):
        log.debug('pre: %s', str(pre))
        hn = utils.previous_sibling_tag(pre)
        if hn is None:
            # no heading right before the <pre>; look before its parent <div>
            div = pre.parent
            if div is not None:
                log.debug('div: %s', str(div))  # fixed: was logging hn, which is always None here
                hn = utils.previous_sibling_tag(div)
        log.debug('hN: %s', str(hn))
        log.debug(hn)
        keywords = ['sample', 'example', '入力例', '出力例']
        # fixed idiom: generator expression instead of any(filter(lambda ...))
        if hn and hn.name in ['h2', 'h3'] and hn.string and any(keyword in hn.string.lower() for keyword in keywords):
            s = utils.textfile(pre.string.lstrip())
            name = hn.string
            samples.add(s, name)
    return samples.get()
def login(self, get_credentials: onlinejudge.service.CredentialsProvider, session: Optional[requests.Session] = None) -> bool:
    """Sign in to Codeforces.

    Returns True on success (including an already-active session).
    """
    session = session or utils.new_default_session()
    url = 'https://codeforces.com/enter'
    # the login page redirects elsewhere when a session is already active
    resp = utils.request('GET', url, session=session)
    if resp.url != url:
        log.info('You have already signed in.')
        return True
    # fill in the login form
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    raw_form = soup.find('form', id='enterForm')
    log.debug('form: %s', str(raw_form))
    username, password = get_credentials()
    sender = utils.FormSender(raw_form, url=resp.url)
    sender.set('handleOrEmail', username)
    sender.set('password', password)
    sender.set('remember', 'on')
    # submit; success is signalled by a redirect away from the login page
    resp = sender.request(session)
    resp.raise_for_status()
    if resp.url == url:
        log.failure('Invalid handle or password.')
        return False
    log.success('Welcome, %s.', username)
    return True
def login_with_github(self, get_credentials, session=None):
    """Sign in to yukicoder via GitHub OAuth.

    Returns True on success (including an already-active session).
    """
    session = session or utils.new_default_session()
    url = "https://yukicoder.me/auth/github"
    # get
    resp = utils.request("GET", url, session=session)
    if urllib.parse.urlparse(resp.url).hostname == "yukicoder.me":
        # not redirected to github.com: the session is already authenticated
        log.info("You have already signed in.")
        return True
    # parse the GitHub login form we were redirected to
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find("form")
    if not form:
        log.error("form not found")
        log.info("Did you log in?")  # fixed grammar (was: "Did you logged in?")
        return False
    log.debug("form: %s", str(form))
    # post the credentials; on success GitHub redirects back to yukicoder.me
    username, password = get_credentials()
    form = utils.FormSender(form, url=resp.url)
    form.set("login", username)
    form.set("password", password)
    resp = form.request(session)
    resp.raise_for_status()
    if urllib.parse.urlparse(resp.url).hostname == "yukicoder.me":
        log.success("You signed in.")
        return True
    else:
        log.failure("You failed to sign in. Wrong user ID or password.")
        return False
def download_system(self, session: Optional[requests.Session] = None) -> List[TestCase]:
    """Download system test cases from the old AOJ testcase.jsp endpoint.

    Fetches case 1, 2, 3, ... until the server stops returning 200, or until it
    becomes clear the server ignores the ``case`` parameter.
    """
    session = session or utils.new_default_session()
    get_url = lambda case, type: 'http://analytic.u-aizu.ac.jp:8080/aoj/testcase.jsp?id={}&case={}&type={}'.format(self.problem_id, case, type)
    testcases: List[TestCase] = []
    for case in itertools.count(1):
        # input
        resp = utils.request('GET', get_url(case, 'in'), session=session, raise_for_status=False)
        if resp.status_code != 200:
            break  # no more cases
        in_txt = resp.text
        if case == 2 and testcases[0].input.data == in_txt:
            break  # if the querystring case=??? is ignored
        # output
        resp = utils.request('GET', get_url(case, 'out'), session=session)
        out_txt = resp.text
        testcases += [
            TestCase(
                LabeledString('in%d.txt' % case, in_txt),
                LabeledString('out%d.txt' % case, out_txt),
            )
        ]
    return testcases  # fixed: the return was missing, so the function always returned None
def login(self, get_credentials, session=None):
    """Sign in to AtCoder via the practice-contest login endpoint.

    Returns True when the final URL no longer points at the login page.
    """
    session = session or utils.new_default_session()
    url = "https://practice.contest.atcoder.jp/login"
    # fetch the login page; messages stored in the cookie may already decide the outcome
    resp = utils.request("GET", url, session=session, allow_redirects=False)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    for msg in msgs:
        log.status("message: %s", msg)
    if msgs:
        return "login" not in resp.url
    # post the credentials
    username, password = get_credentials()
    payload = {"name": username, "password": password}
    resp = utils.request("POST", url, session=session, data=payload, allow_redirects=False)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    AtCoderService._report_messages(msgs)
    # AtCoder redirects to the top page if success
    return "login" not in resp.url
def download_system(self, session: Optional[requests.Session] = None) -> List[TestCase]:
    """Download system test cases via the official AOJ judgedat API.

    Cases truncated by the API's size limitation are skipped with a warning.
    """
    session = session or utils.new_default_session()
    # get header
    # reference: http://developers.u-aizu.ac.jp/api?key=judgedat%2Ftestcases%2F%7BproblemId%7D%2Fheader_GET
    url = 'https://judgedat.u-aizu.ac.jp/testcases/{}/header'.format(self.problem_id)
    resp = utils.request('GET', url, session=session)
    header = json.loads(resp.content)
    # get testcases via the official API
    testcases: List[TestCase] = []
    for entry in header['headers']:  # fixed: loop variable no longer shadows `header`
        # reference: http://developers.u-aizu.ac.jp/api?key=judgedat%2Ftestcases%2F%7BproblemId%7D%2F%7Bserial%7D_GET
        url = 'https://judgedat.u-aizu.ac.jp/testcases/{}/{}'.format(self.problem_id, entry['serial'])
        resp = utils.request('GET', url, session=session)
        testcase = json.loads(resp.content)
        # the API truncates very large cases; a truncated case is useless, skip it
        skipped = False
        for kind in ('in', 'out'):  # renamed from `type` to avoid shadowing the builtin
            if testcase[kind].endswith('..... (terminated because of the limitation)\n'):
                log.error('AOJ API says: terminated because of the limitation')
                skipped = True
        if skipped:
            log.warning("skipped due to the limitation of AOJ API")
            continue
        testcases += [
            TestCase(
                LabeledString(entry['name'], testcase['in']),
                LabeledString(entry['name'], testcase['out']),
            )
        ]
    return testcases
def download_samples(self, session: Optional[requests.Session] = None) -> List[TestCase]:
    """Scrape sample cases from the problem page.

    A sample is a ``<pre>`` whose preceding ``<h2>``/``<h3>`` heading contains
    one of the sample keywords (English or Japanese).
    """
    session = session or utils.new_default_session()
    # get
    resp = utils.request('GET', self.get_url(), session=session)
    # parse
    soup = bs4.BeautifulSoup(
        resp.content, utils.html_parser
    )  # NOTE: resp.content is not decoded for workaround, see https://github.com/kmyk/online-judge-tools/pull/186
    samples = utils.SampleZipper()
    for pre in soup.find_all('pre'):
        log.debug('pre: %s', str(pre))
        hn = utils.previous_sibling_tag(pre)
        if hn is None:
            # no heading right before the <pre>; look before its parent <div>
            div = pre.parent
            if div is not None:
                log.debug('div: %s', str(div))  # fixed: was logging hn, which is always None here
                hn = utils.previous_sibling_tag(div)
        log.debug('hN: %s', str(hn))
        log.debug(hn)
        keywords = ['sample', 'example', '入力例', '出力例']
        # fixed idiom: generator expression instead of any(filter(lambda ...))
        if hn and hn.name in ['h2', 'h3'] and hn.string and any(keyword in hn.string.lower() for keyword in keywords):
            s = utils.textfile(pre.string.lstrip())
            name = hn.string
            samples.add(s, name)
    return samples.get()
def download_with_running_code(self, session: Optional[requests.Session] = None) -> List[TestCase]:
    """Obtain HackerRank sample cases by running a no-op program against them.

    Posts a trivial bash program (':') to the compile_tests endpoint, waits,
    then polls the result; the response echoes stdin/expected_output pairs.
    """
    session = session or utils.new_default_session()
    # get the challenge page (for the CSRF token)
    resp = utils.request('GET', self.get_url(), session=session)
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    csrftoken = soup.find('meta', attrs={'name': 'csrf-token'}).attrs['content']
    # post
    url = 'https://www.hackerrank.com/rest/contests/{}/challenges/{}/compile_tests'.format(self.contest_slug, self.challenge_slug)
    payload = {'code': ':', 'language': 'bash', 'customtestcase': False}
    log.debug('payload: %s', payload)
    resp = utils.request('POST', url, session=session, json=payload, headers={'X-CSRF-Token': csrftoken})
    # parse
    it = json.loads(resp.content.decode())
    log.debug('json: %s', it)
    if not it['status']:
        log.error('Run Code: failed')
        return []
    model_id = it['model']['id']
    # fixed: removed the unused `now` local; reuse model_id instead of re-indexing
    unixtime = int(datetime.datetime.now().timestamp() * 10**3)
    url = 'https://www.hackerrank.com/rest/contests/{}/challenges/{}/compile_tests/{}?_={}'.format(self.contest_slug, self.challenge_slug, model_id, unixtime)
    # sleep to give the run time to finish
    log.status('sleep(3)')
    time.sleep(3)
    # get the result
    resp = utils.request('GET', url, session=session, headers={'X-CSRF-Token': csrftoken})
    # parse
    it = json.loads(resp.content.decode())
    log.debug('json: %s', it)
    if not it['status']:
        log.error('Run Code: failed')
        return []
    samples: List[TestCase] = []
    for i, (inf, outf) in enumerate(zip(it['model']['stdin'], it['model']['expected_output'])):
        inname = 'Testcase {} Input'.format(i)
        outname = 'Testcase {} Expected Output'.format(i)
        samples += [
            TestCase(
                LabeledString(inname, utils.textfile(inf)),
                LabeledString(outname, utils.textfile(outf)),
            )
        ]
    return samples
def download_with_parsing_html(
        self, session: Optional[requests.Session] = None) -> List[TestCase]:
    """Placeholder: scrape sample cases by parsing the challenge page HTML.

    Not implemented yet; the REST URL below is kept as a starting point for a
    future implementation.
    """
    session = session or utils.new_default_session()
    # the challenge metadata endpoint that a real implementation would fetch
    url = 'https://www.hackerrank.com/rest/contests/{}/challenges/{}'.format(
        self.contest_slug, self.challenge_slug)
    raise NotImplementedError
def submit(self, code, language, session=None):
    """Submit code to a HackerRank challenge.

    Returns a CompatibilitySubmission pointing at the result page, or None on failure.
    """
    session = session or utils.new_default_session()
    # fetch the challenge page to obtain a CSRF token
    resp = utils.request("GET", self.get_url(), session=session)
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    csrftoken = soup.find("meta", attrs={"name": "csrf-token"}).attrs["content"]
    # post the submission
    url = "https://www.hackerrank.com/rest/contests/{}/challenges/{}/submissions".format(self.contest_slug, self.challenge_slug)
    payload = {"code": code, "language": language}
    log.debug("payload: %s", payload)
    resp = utils.request("POST", url, session=session, json=payload, headers={"X-CSRF-Token": csrftoken})
    # interpret the JSON response
    it = json.loads(resp.content.decode())
    log.debug("json: %s", it)
    if not it["status"]:
        log.failure("Submit Code: failed")
        return None
    model_id = it["model"]["id"]
    url = self.get_url().rstrip("/") + "/submissions/code/{}".format(model_id)
    log.success("success: result: %s", url)
    return onlinejudge.submission.CompatibilitySubmission(url, problem=self)
def login_with_github(
        self, get_credentials: onlinejudge.type.CredentialsProvider,
        session: Optional[requests.Session] = None) -> bool:
    """Sign in to yukicoder via GitHub OAuth.

    Returns True on success (including an already-active session).
    """
    session = session or utils.new_default_session()
    url = 'https://yukicoder.me/auth/github'
    # get
    resp = utils.request('GET', url, session=session)
    if urllib.parse.urlparse(resp.url).hostname == 'yukicoder.me':
        # not redirected to github.com: the session is already authenticated
        log.info('You have already signed in.')
        return True
    # parse the GitHub login form we were redirected to
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find('form')
    if not form:
        log.error('form not found')
        log.info('Did you log in?')  # fixed grammar (was: 'Did you logged in?')
        return False
    log.debug('form: %s', str(form))
    # post the credentials; on success GitHub redirects back to yukicoder.me
    username, password = get_credentials()
    form = utils.FormSender(form, url=resp.url)
    form.set('login', username)
    form.set('password', password)
    resp = form.request(session)
    resp.raise_for_status()
    if urllib.parse.urlparse(resp.url).hostname == 'yukicoder.me':
        log.success('You signed in.')
        return True
    else:
        log.failure('You failed to sign in. Wrong user ID or password.')
        return False
def get_standings(self, session: Optional[requests.Session] = None) -> onlinejudge.type.Standings:
    """Scrape the standings of a Topcoder marathon match, 100 rows per page.

    Returns a pair ``(header, rows)`` where ``header`` is the list of column
    titles and each row maps a title to its cell value (None for empty cells,
    int for all-digit cells, str otherwise).
    NOTE(review): the annotation says ``Standings`` while the value returned is
    a tuple, and the declared ``Dict[str, str]`` does not match the None/int
    values stored -- confirm the alias and the callers.
    """
    session = session or utils.new_default_session()
    header = None  # type: Optional[List[str]]
    rows = []  # type: List[Dict[str, str]]
    # the listing is paginated via the sr= (start rank) parameter, 100 at a time
    for start in itertools.count(1, 100):
        # get
        url = 'https://community.topcoder.com/longcontest/?sc=&sd=&nr=100&sr={}&rd={}&module=ViewStandings'.format(start, self.rd)
        resp = utils.request('GET', url, session=session)
        # parse
        soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
        table = soup.find('table', class_='statTable')
        trs = table.find_all('tr')
        if header is None:
            # row 0 is decoration; row 1 carries the column titles
            tr = trs[1]
            header = [td.text.strip() for td in tr.find_all('td')]
        # rows 2.. are the actual standings entries
        for tr in trs[2:]:
            row = collections.OrderedDict()  # type: Dict[str, str]
            for key, td in zip(header, tr.find_all('td')):
                value = td.text.strip()
                if not value:
                    value = None  # empty cell
                elif value.isdigit():
                    value = int(value)  # numeric cell (e.g. rank, score)
                row[key] = value
            rows += [row]
        # check whether the next page exists
        link = soup.find('a', text='next >>')
        if link is None:
            break
    assert header is not None
    return header, rows
def login_with_twitter(
        self, get_credentials: onlinejudge.type.CredentialsProvider,
        session: Optional[requests.Session] = None) -> bool:
    """Placeholder: sign in to yukicoder via Twitter OAuth.

    Not implemented yet; the OAuth entry URL is kept for a future implementation.
    """
    session = session or utils.new_default_session()
    url = 'https://yukicoder.me/auth/twitter'
    raise NotImplementedError
def get_language_dict(self, session=None):
    """Return the hard-coded language table for this judge.

    The mapping is language name -> {'value': form value, 'description': ...},
    snapshotted at 2017/09/21.
    """
    session = session or utils.new_default_session()
    # at 2017/09/21
    return {
        "Java": {
            "value": "1",
            "description": "Java 8"
        },
        "C++": {
            "value": "3",
            "description": "C++11"
        },
        "C#": {
            "value": "4",
            "description": ""
        },
        "VB": {
            "value": "5",
            "description": ""
        },
        "Python": {
            "value": "6",
            "description": "Python 2"  # fixed typo: was "Pyhton 2"
        },
    }
def download(self, session: Optional[requests.Session] = None) -> List[onlinejudge.problem.TestCase]:
    """Scrape sample cases from a Codeforces problem page.

    Codeforces marks samples with ``<div class="input">`` / ``<div class="output">``,
    each containing exactly a title element and a ``<pre>``.
    """
    session = session or utils.new_default_session()
    # get
    resp = utils.request('GET', self.get_url(), session=session)
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    samples = utils.SampleZipper()
    for tag in soup.find_all('div', class_=re.compile('^(in|out)put$')):  # Codeforces writes very nice HTML :)
        log.debug('tag: %s', str(tag))
        # fixed: the old `assert len(...)` only checked non-emptiness, but the
        # unpacking below requires exactly two children
        assert len(list(tag.children)) == 2
        title, pre = list(tag.children)
        assert 'title' in title.attrs['class']
        assert pre.name == 'pre'
        # <br> separates lines inside the <pre>
        s = ''
        for it in pre.children:
            if it.name == 'br':
                s += '\n'
            else:
                s += it.string
        s = s.lstrip()
        samples.add(s, title.string)
    return samples.get()
def login(args):
    """The ``login`` subcommand: sign in to the service that owns ``args.url``.

    Credentials not given on the command line are prompted for interactively.
    """
    # resolve the service from the URL
    service = onlinejudge.dispatch.service_from_url(args.url)
    if service is None:
        sys.exit(1)

    # configure service-specific options
    kwargs = {}
    if service.get_name() == 'yukicoder':
        if not args.method:
            args.method = 'github'  # the default login method for yukicoder
        if args.method not in ('github', 'twitter'):
            log.failure('login for yukicoder: invalid option: --method %s', args.method)
            sys.exit(1)
        kwargs['method'] = args.method
    elif args.method:
        # --method is meaningless for every other service
        log.failure('login for %s: invalid option: --method %s', service.get_name(), args.method)
        sys.exit(1)

    def get_credentials():
        # prompt only for values missing from the command line
        if args.username is None:
            args.username = input('Username: ')
        if args.password is None:
            args.password = getpass.getpass()
        return args.username, args.password

    # log in with a persistent cookie jar
    with utils.with_cookiejar(utils.new_default_session(), path=args.cookie) as sess:
        service.login(get_credentials, session=sess, **kwargs)
def login(self, get_credentials: onlinejudge.type.CredentialsProvider, session: Optional[requests.Session] = None) -> bool: session = session or utils.new_default_session() # 自分で書き換えた箇所 # url = 'https://practice.contest.atcoder.jp/login' url = 'https://atcoder.jp/login' # get resp = _request('GET', url, session=session, allow_redirects=False) msgs = AtCoderService._get_messages_from_cookie(resp.cookies) for msg in msgs: log.status('message: %s', msg) if msgs: return 'login' not in resp.url # post username, password = get_credentials() # 自分で書き換えた箇所(CSRF対策) soup = bs4.BeautifulSoup(resp.text, 'lxml') csrftoken = soup.find_all('input')[0]['value'] resp = _request('POST', url, session=session, data={ 'csrf_token': csrftoken, 'username': username, 'password': password }, allow_redirects=False) msgs = AtCoderService._get_messages_from_cookie(resp.cookies) AtCoderService._report_messages(msgs) return 'login' not in resp.url # AtCoder redirects to the top page if success
def generate_scanner(args):
    """Generate input-scanner code for the problem at ``args.url`` and print it.

    Exits with status 1 when the URL is not recognized or no input format is found.
    """
    if not args.silent:
        log.warning("This feature is " + log.red("experimental") + ".")
    if args.silent:
        # fixed: iterate over a copy; removing handlers from the live list while
        # iterating it skips every other handler
        # NOTE(review): the call is log.removeHandler (module level), not
        # log.logger.removeHandler -- confirm the log module exposes it
        for handler in list(log.logger.handlers):
            log.removeHandler(handler)
    problem = onlinejudge.dispatch.problem_from_url(args.url)
    if problem is None:
        sys.exit(1)
    with utils.with_cookiejar(utils.new_default_session(), path=args.cookie) as sess:
        it = problem.get_input_format(session=sess)
        if not it:
            log.error("input format not found")
            sys.exit(1)
    # pipeline: tokenize -> parse -> postprocess -> export
    try:
        log.debug("original data: %s", repr(it))
        it = list(tokenize(it))
        log.debug("tokenized: %s", str(it))
        it = list(parse(it))
        log.debug("parsed: %s", str(it))
        it = postprocess(it)
        log.debug("postprocessed: %s", str(it))
        it = export(it, use_scanf=args.scanf, repeat_macro=args.repeat_macro)
        log.debug("result: %s", repr(it))
    except Exception:  # narrowed from a bare except; still logged and re-raised
        log.error("something wrong")
        raise
    log.success("success:")
    print(log.bold(it.rstrip()))  # to stdout
def get_language_dict(
        self, session: Optional[requests.Session] = None
) -> Dict[str, Dict[str, str]]:
    """Return the hard-coded language table for this judge.

    The mapping is language name -> {'value': form value, 'description': ...},
    snapshotted at 2017/09/21.
    """
    session = session or utils.new_default_session()
    # at 2017/09/21
    return {
        'Java': {
            'value': '1',
            'description': 'Java 8'
        },
        'C++': {
            'value': '3',
            'description': 'C++11'
        },
        'C#': {
            'value': '4',
            'description': ''
        },
        'VB': {
            'value': '5',
            'description': ''
        },
        'Python': {
            'value': '6',
            'description': 'Python 2'  # fixed typo: was 'Pyhton 2'
        },
    }
def download_sample_cases(self, session: Optional[requests.Session] = None) -> List[onlinejudge.type.TestCase]:
    """Download the samples.zip attached to a Kattis problem statement.

    Returns an empty list when the archive does not exist (404).
    """
    session = session or utils.new_default_session()
    # get
    url = self.get_url(contests=False) + '/file/statement/samples.zip'
    resp = utils.request('GET', url, session=session, raise_for_status=False)
    if resp.status_code == 404:
        log.warning('samples.zip not found')
        log.info('this 404 happens in both cases: 1. no sample cases as intended; 2. just an error')
        return []
    resp.raise_for_status()
    # each sample is a pair of archive entries: foo.in / foo.ans
    samples = []  # type: List[TestCase]
    with zipfile.ZipFile(io.BytesIO(resp.content)) as archive:
        for inpath in sorted(archive.namelist()):
            log.debug('filename: %s', inpath)
            if not inpath.endswith('.in'):
                continue
            outpath = inpath[:-len('.in')] + '.ans'
            samples.append(TestCase(
                LabeledString(inpath, archive.read(inpath).decode()),
                LabeledString(outpath, archive.read(outpath).decode()),
            ))
    return samples
def download_system(self, session=None):
    """Download system test cases from the old AOJ testcase.jsp endpoint.

    Fetches case 1, 2, 3, ... until the server stops returning 200, or until it
    becomes clear the server ignores the ``case`` parameter.  Each case is a
    dict with 'input'/'output' entries of {'data': ..., 'name': ...}.
    """
    session = session or utils.new_default_session()
    get_url = lambda case, type: 'http://analytic.u-aizu.ac.jp:8080/aoj/testcase.jsp?id={}&case={}&type={}'.format(self.problem_id, case, type)
    testcases = []
    for case in itertools.count(1):
        # input
        resp = utils.request('GET', get_url(case, 'in'), session=session, raise_for_status=False)
        if resp.status_code != 200:
            break  # no more cases
        in_txt = resp.text
        if case == 2 and testcases[0]['input']['data'] == in_txt:
            break  # if the querystring case=??? is ignored
        # output
        resp = utils.request('GET', get_url(case, 'out'), session=session)
        out_txt = resp.text
        testcases += [{
            'input': {
                'data': in_txt,
                'name': 'in%d.txt' % case
            },
            'output': {
                'data': out_txt,
                'name': 'out%d.txt' % case
            },
        }]
    return testcases  # fixed: the return was missing, so the function always returned None
def login(self, get_credentials, session=None):
    """Sign in to Codeforces.

    Returns True on success (including an already-active session).
    """
    session = session or utils.new_default_session()
    url = "http://codeforces.com/enter"
    # the login page redirects elsewhere when a session is already active
    resp = utils.request("GET", url, session=session)
    if resp.url != url:
        log.info("You have already signed in.")
        return True
    # fill in the login form
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    raw_form = soup.find("form", id="enterForm")
    log.debug("form: %s", str(raw_form))
    username, password = get_credentials()
    sender = utils.FormSender(raw_form, url=resp.url)
    sender.set("handleOrEmail", username)
    sender.set("password", password)
    sender.set("remember", "on")
    # submit; success is signalled by a redirect away from the login page
    resp = sender.request(session)
    resp.raise_for_status()
    if resp.url == url:
        log.failure("Invalid handle or password.")
        return False
    log.success("Welcome, %s.", username)
    return True
def login(self, get_credentials, session=None):
    """Sign in to AtCoder via the practice-contest login endpoint.

    Returns True when the final URL no longer points at the login page.
    """
    session = session or utils.new_default_session()
    url = 'https://practice.contest.atcoder.jp/login'
    # fetch the login page; cookie messages may already decide the outcome
    resp = utils.request('GET', url, session=session, allow_redirects=False)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    for msg in msgs:
        log.status('message: %s', msg)
    if msgs:
        return 'login' not in resp.url
    # post the credentials
    username, password = get_credentials()
    credentials = {'name': username, 'password': password}
    resp = utils.request('POST', url, session=session, data=credentials, allow_redirects=False)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    AtCoderService._report_messages(msgs)
    # AtCoder redirects to the top page if success
    return 'login' not in resp.url
def download_system_cases(self, session: Optional[requests.Session] = None) -> List[TestCase]:
    """Download the system test cases of a yukicoder problem (testcase.zip).

    Archive entries under test_in/ and test_out/ are paired by basename;
    incomplete pairs are reported and dropped.
    """
    session = session or utils.new_default_session()
    # get
    url = 'https://yukicoder.me/problems/no/{}/testcase.zip'.format(self.problem_no)
    resp = utils.request('GET', url, session=session)
    # group the archive entries by basename
    grouped = collections.defaultdict(dict)  # type: Dict[str, Dict[str, LabeledString]]
    with zipfile.ZipFile(io.BytesIO(resp.content)) as archive:
        for path in sorted(archive.namelist()):  # "test_in" < "test_out"
            dirname = os.path.dirname(path)
            basename = os.path.basename(path)
            kind = {'test_in': 'input', 'test_out': 'output'}[dirname]
            content = archive.read(path).decode()
            name = basename
            if os.path.splitext(name)[1] == '.in':  # ".in" extension is confusing
                name = os.path.splitext(name)[0]
            grouped[basename][kind] = LabeledString(name, content)
    # keep only complete input/output pairs
    cases = []  # type: List[TestCase]
    for basename in sorted(grouped.keys()):
        pair = grouped[basename]
        if 'input' in pair and 'output' in pair and len(pair) == 2:
            cases.append(TestCase(pair['input'], pair['output']))
        else:
            log.error('dangling sample found: %s', str(pair))
    return cases
def download(self, session=None):
    """Scrape sample cases from an AtCoder problem page.

    When the page mixes languages (e.g. Japanese and English sections), only
    the samples written in the first language encountered are kept.
    """
    session = session or utils.new_default_session()
    # get
    resp = utils.request('GET', self.get_url(), session=session)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    if AtCoderService._report_messages(msgs, unexpected=True):
        # example message: "message: You cannot see this page."
        log.warning('are you logged in?')
        return []
    # parse the <pre>/<h3> pairs found by the helper
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    samples = utils.SampleZipper()
    lang = None
    for pre, h3 in self._find_sample_tags(soup):
        text = utils.textfile(utils.dos2unix(pre.string.lstrip()))
        name = h3.string
        tag_lang = self._get_tag_lang(pre)
        if lang is None:
            lang = tag_lang  # the first sample fixes the language to keep
        elif lang != tag_lang:
            log.info('skipped due to language: current one is %s, not %s: %s ', lang, tag_lang, name)
            continue
        samples.add(text, name)
    return samples.get()
def login(self, get_credentials: onlinejudge.type.CredentialsProvider, session: Optional[requests.Session] = None) -> bool:
    """Sign in to HackerRank.

    Returns True on success (including an already-active session).
    """
    session = session or utils.new_default_session()
    url = 'https://www.hackerrank.com/auth/login'
    # the login page redirects elsewhere when a session is already active
    resp = utils.request('GET', url, session=session)
    if resp.url != url:
        log.debug('redirected: %s', resp.url)
        log.info('You have already signed in.')
        return True
    # locate the CSRF token and the <form> enclosing the username field
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    csrftoken = soup.find('meta', attrs={'name': 'csrf-token'}).attrs['content']
    node = soup.find('input', attrs={'name': 'username'})
    while node.name != 'form':
        node = node.parent  # climb up to the enclosing form
    # post the credentials to the REST login endpoint
    username, password = get_credentials()
    sender = utils.FormSender(node, url=resp.url)
    sender.set('login', username)
    sender.set('password', password)
    sender.set('remember_me', 'true')
    sender.set('fallback', 'true')
    resp = sender.request(session, method='POST', action='/rest/auth/login', headers={'X-CSRF-Token': csrftoken})
    resp.raise_for_status()
    log.debug('redirected: %s', resp.url)
    # a URL still containing '/auth' means the login failed
    if '/auth' in resp.url:
        log.failure('You failed to sign in. Wrong user ID or password.')
        return False
    log.success('You signed in.')
    return True