def get_latest_version_from_pypi() -> str:
    """Return the latest released version of this package on PyPI, using an 8-hour file cache.

    On network failure '0.0.0' is returned so the caller's version check
    degrades silently instead of crashing.
    NOTE(review): the failure value '0.0.0' is also written to the cache, so a
    single failed lookup suppresses real lookups for the full 8 hours —
    confirm this is intended.
    """
    pypi_url = 'https://pypi.org/pypi/{}/json'.format(version.__package_name__)
    version_cache_path = cache_dir / "pypi.json"
    update_interval = 60 * 60 * 8  # 8 hours

    # load cache: reuse the stored version while it is still fresh
    if version_cache_path.exists():
        with version_cache_path.open() as fh:
            cache = json.load(fh)
        if time.time() < cache['time'] + update_interval:
            return cache['version']

    # get the latest version from the PyPI JSON API
    try:
        resp = request('GET', pypi_url, session=requests.Session())
        data = json.loads(resp.content.decode())
        value = data['info']['version']
    except requests.RequestException as e:
        log.error(str(e))
        value = '0.0.0'  # ignore since this failure is not important
    cache = {
        'time': int(time.time()),  # use timestamp because Python's standard datetime library is too weak to parse strings
        'version': value,
    }

    # store cache
    version_cache_path.parent.mkdir(parents=True, exist_ok=True)
    with version_cache_path.open('w') as fh:
        json.dump(cache, fh)

    return value
def submit(self, code, language, session=None):
    """Submit source code to the judge via the problem's submit form.

    :param code: the source code to submit
    :param language: a language id; must be a key of get_language_dict()
    :param session: an optional requests.Session to reuse
    :returns: the URL of the submissions page on success, or None on failure
    """
    assert language in self.get_language_dict(session=session)
    session = session or requests.Session()
    url = self.get_url() + '/submit'

    # get the page containing the submit form
    log.status('GET: %s', url)
    resp = session.get(url)
    log.status(utils.describe_status_code(resp.status_code))
    resp.raise_for_status()

    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find('form', action=re.compile(r'/submit$'))
    if not form:
        log.error('form not found')
        return None
    log.debug('form: %s', str(form))

    # post
    # NOTE: the source is sent as an uploaded file; a dead `if False:` branch
    # that set it as a plain form field instead has been removed.
    form = utils.FormSender(form, url=resp.url)
    form.set_file('file', ('source', code))
    form.set('lang', language)
    resp = form.request(session=session)
    resp.raise_for_status()

    # result
    if '/submissions/' in resp.url:
        log.success('success: result: %s', resp.url)
        return resp.url
    else:
        log.failure('failure')
        return None
def generate_scanner(args: 'argparse.Namespace') -> None:
    """Generate scanner code from the problem's input-format description and print it to stdout.

    Exits with status 1 when the problem URL is unrecognized or no input
    format is found.
    """
    if not args.silent:
        log.warning('This feature is ' + log.red('experimental') + '.')
    if args.silent:
        # iterate over a copy: removing handlers while iterating the live
        # list used to skip every other handler
        for handler in list(log.logger.handlers):
            log.removeHandler(handler)
    problem = onlinejudge.dispatch.problem_from_url(args.url)
    if problem is None:
        sys.exit(1)
    with utils.with_cookiejar(utils.new_default_session(), path=args.cookie) as sess:
        it = problem.get_input_format(session=sess)  # type: Any
    if not it:
        log.error('input format not found')
        sys.exit(1)
    try:
        log.debug('original data: %s', repr(it))
        it = list(tokenize(it))
        log.debug('tokenized: %s', str(it))
        it = list(parse(it))
        log.debug('parsed: %s', str(it))
        it = postprocess(it)
        log.debug('postprocessed: %s', str(it))
        it = export(it, use_scanf=args.scanf, repeat_macro=args.repeat_macro)
        log.debug('result: %s', repr(it))
    except Exception:
        # narrowed from a bare `except:` so Ctrl-C is not intercepted here
        log.error('something wrong')
        raise
    log.success('success:')
    print(log.bold(it.rstrip()))  # to stdout
def _get_task_id(self, session: Optional[requests.Session] = None) -> int:
    """Return the task identifier used by the legacy submit form, fetching and caching it on first use.

    NOTE(review): the annotation says ``int`` but the cached value is the
    task *screen name* string extracted from the link — confirm callers.
    :raises SubmissionError: when the page reports errors or no submit link exists
    """
    if self._task_id is None:
        session = session or utils.new_default_session()
        # get
        # (hand-patched) early AtCoder problem URLs end with a digit instead of
        # a letter, so retry once with the trailing letter remapped to a digit
        url = self.get_url()
        for _ in range(2):
            resp = _request('GET', url, session=session)
            msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
            if AtCoderService._report_messages(msgs, unexpected=True):
                raise SubmissionError
            # parse
            soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
            submit = soup.find('a', href=re.compile(r'submit\?taskScreenName='))
            if submit != None:
                break
            else:
                # e.g. ".../xyz_a" -> ".../xyz_1"
                url = url[:-1] + chr(ord(url[-1]) - ord('a') + ord('1'))
        if not submit:
            log.error('link to submit not found')
            raise SubmissionError
        # (hand-patched) extract the task screen name from the submit link
        regex = re.compile(r'(.+)/submit\?taskScreenName=(.+)$')
        m = regex.search(submit.attrs['href'])
        assert m
        self._task_id = m.group(2)
    return self._task_id
def generate_scanner(args):
    """Generate scanner code from the problem's input-format description and print it to stdout.

    Exits with status 1 when the problem URL is unrecognized or no input
    format is found.
    """
    if not args.silent:
        log.warning("This feature is " + log.red("experimental") + ".")
    if args.silent:
        # iterate over a copy: removing handlers while iterating the live
        # list used to skip every other handler
        for handler in list(log.logger.handlers):
            log.removeHandler(handler)
    problem = onlinejudge.dispatch.problem_from_url(args.url)
    if problem is None:
        sys.exit(1)
    with utils.with_cookiejar(utils.new_default_session(), path=args.cookie) as sess:
        it = problem.get_input_format(session=sess)
    if not it:
        log.error("input format not found")
        sys.exit(1)
    try:
        log.debug("original data: %s", repr(it))
        it = list(tokenize(it))
        log.debug("tokenized: %s", str(it))
        it = list(parse(it))
        log.debug("parsed: %s", str(it))
        it = postprocess(it)
        log.debug("postprocessed: %s", str(it))
        it = export(it, use_scanf=args.scanf, repeat_macro=args.repeat_macro)
        log.debug("result: %s", repr(it))
    except Exception:
        # narrowed from a bare `except:` so Ctrl-C is not intercepted here
        log.error("something wrong")
        raise
    log.success("success:")
    print(log.bold(it.rstrip()))  # to stdout
def download_system_cases(
        self, session: Optional[requests.Session] = None) -> List[TestCase]:
    """Download every system test case of this problem from the official testcase archive."""
    session = session or utils.new_default_session()
    # fetch the zip archive of all testcases
    url = 'https://yukicoder.me/problems/no/{}/testcase.zip'.format(self.problem_no)
    resp = utils.request('GET', url, session=session)
    # group archive members by basename; "test_in" sorts before "test_out"
    basenames = collections.defaultdict(dict)  # type: Dict[str, Dict[str, LabeledString]]
    with zipfile.ZipFile(io.BytesIO(resp.content)) as fh:
        for filename in sorted(fh.namelist()):
            dirname, basename = os.path.split(filename)
            kind = {'test_in': 'input', 'test_out': 'output'}[dirname]
            content = fh.read(filename).decode()
            name = basename
            if os.path.splitext(name)[1] == '.in':  # ".in" extension is confusing
                name = os.path.splitext(name)[0]
            basenames[basename][kind] = LabeledString(name, content)
    # pair each input with its output, complaining about unmatched halves
    samples = []  # type: List[TestCase]
    for basename in sorted(basenames):
        data = basenames[basename]
        if set(data.keys()) == {'input', 'output'}:
            samples.append(TestCase(data['input'], data['output']))
        else:
            log.error('dangling sample found: %s', str(data))
    return samples
def download_with_running_code(
        self, session: Optional[requests.Session] = None) -> List[TestCase]:
    """Obtain sample test cases by running a trivial program through HackerRank's "Run Code" API.

    Submits ``:`` as a bash program, polls the compile_tests endpoint once
    after a short sleep, and pairs each stdin with its expected output.
    :returns: list of TestCase; empty when either API call reports failure
    """
    session = session or utils.new_default_session()
    # get the challenge page to obtain a CSRF token
    resp = utils.request('GET', self.get_url(), session=session)
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    csrftoken = soup.find('meta', attrs={'name': 'csrf-token'}).attrs['content']
    # post a no-op bash program to trigger a test run
    url = 'https://www.hackerrank.com/rest/contests/{}/challenges/{}/compile_tests'.format(self.contest_slug, self.challenge_slug)
    payload = {'code': ':', 'language': 'bash', 'customtestcase': False}
    log.debug('payload: %s', payload)
    resp = utils.request('POST', url, session=session, json=payload, headers={'X-CSRF-Token': csrftoken})
    # parse
    it = json.loads(resp.content.decode())
    log.debug('json: %s', it)
    if not it['status']:
        log.error('Run Code: failed')
        return []
    model_id = it['model']['id']
    # cache-busting timestamp in milliseconds
    # (removed the unused `now` local; `model_id` is now actually used)
    unixtime = int(datetime.datetime.now().timestamp() * 10**3)
    url = 'https://www.hackerrank.com/rest/contests/{}/challenges/{}/compile_tests/{}?_={}'.format(self.contest_slug, self.challenge_slug, model_id, unixtime)
    # wait for the run to finish
    log.status('sleep(3)')
    time.sleep(3)
    # get the result
    resp = utils.request('GET', url, session=session, headers={'X-CSRF-Token': csrftoken})
    # parse
    it = json.loads(resp.content.decode())
    log.debug('json: %s', it)
    if not it['status']:
        log.error('Run Code: failed')
        return []
    samples: List[TestCase] = []
    for i, (inf, outf) in enumerate(zip(it['model']['stdin'], it['model']['expected_output'])):
        inname = 'Testcase {} Input'.format(i)
        outname = 'Testcase {} Expected Output'.format(i)
        samples += [
            TestCase(
                LabeledString(inname, utils.textfile(inf)),
                LabeledString(outname, utils.textfile(outf)),
            )
        ]
    return samples
def download_system(self, session: Optional[requests.Session] = None) -> List[TestCase]:
    """Download system test cases through the official AOJ testcase API.

    Cases truncated by the API's size limitation are skipped with a warning.
    :returns: list of TestCase
    """
    session = session or utils.new_default_session()
    # get header
    # reference: http://developers.u-aizu.ac.jp/api?key=judgedat%2Ftestcases%2F%7BproblemId%7D%2Fheader_GET
    url = 'https://judgedat.u-aizu.ac.jp/testcases/{}/header'.format(self.problem_id)
    resp = utils.request('GET', url, session=session)
    header = json.loads(resp.content)
    # get testcases via the official API
    testcases: List[TestCase] = []
    for entry in header['headers']:  # renamed: the loop used to clobber `header` itself
        # reference: http://developers.u-aizu.ac.jp/api?key=judgedat%2Ftestcases%2F%7BproblemId%7D%2F%7Bserial%7D_GET
        url = 'https://judgedat.u-aizu.ac.jp/testcases/{}/{}'.format(self.problem_id, entry['serial'])
        resp = utils.request('GET', url, session=session)
        testcase = json.loads(resp.content)
        skipped = False
        for kind in ('in', 'out'):  # renamed from `type`, which shadowed the builtin
            if testcase[kind].endswith('..... (terminated because of the limitation)\n'):
                log.error('AOJ API says: terminated because of the limitation')
                skipped = True
        if skipped:
            log.warning("skipped due to the limitation of AOJ API")
            continue
        testcases += [
            TestCase(
                LabeledString(entry['name'], testcase['in']),
                LabeledString(entry['name'], testcase['out']),
            )
        ]
    return testcases
def login_with_github(
        self, get_credentials: onlinejudge.type.CredentialsProvider,
        session: Optional[requests.Session] = None) -> bool:
    """Log in to yukicoder through its GitHub OAuth flow.

    :param get_credentials: callable returning (username, password) for github.com
    :returns: True when already signed in or the login succeeds
    """
    session = session or utils.new_default_session()
    url = 'https://yukicoder.me/auth/github'
    # get; staying on yukicoder.me means we are already authenticated
    resp = utils.request('GET', url, session=session)
    if urllib.parse.urlparse(resp.url).hostname == 'yukicoder.me':
        log.info('You have already signed in.')
        return True
    # redirect to github.com
    # parse the GitHub login form
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find('form')
    if not form:
        log.error('form not found')
        log.info('Have you logged in?')  # fixed message grammar (was "Did you logged in?")
        return False
    log.debug('form: %s', str(form))
    # post the credentials to GitHub
    username, password = get_credentials()
    form = utils.FormSender(form, url=resp.url)
    form.set('login', username)
    form.set('password', password)
    resp = form.request(session)
    resp.raise_for_status()
    # a redirect back to yukicoder.me means success
    if urllib.parse.urlparse(resp.url).hostname == 'yukicoder.me':
        log.success('You signed in.')
        return True
    else:
        log.failure('You failed to sign in. Wrong user ID or password.')
        return False
def download_system_cases(self, session: Optional[requests.Session] = None) -> List[TestCase]:
    """Download the official testcase archive of a HackerRank challenge.

    :returns: list of TestCase; empty when the archive endpoint does not return 200
    """
    session = session or utils.new_default_session()
    # get
    # example: https://www.hackerrank.com/rest/contests/hourrank-1/challenges/beautiful-array/download_testcases
    url = 'https://www.hackerrank.com/rest/contests/{}/challenges/{}/download_testcases'.format(self.contest_slug, self.challenge_slug)
    resp = utils.request('GET', url, session=session, raise_for_status=False)
    if resp.status_code != 200:
        log.error('response: %s', resp.content.decode())
        return []
    # parse
    with zipfile.ZipFile(io.BytesIO(resp.content)) as fh:
        # list names
        names = []  # type: List[str]
        pattern = re.compile(r'(in|out)put/\1put(\d+)\.txt')  # escaped the '.'; it used to match any character
        for filename in sorted(fh.namelist()):  # "input" < "output"
            if filename.endswith('/'):
                continue  # skip directory entries
            log.debug('filename: %s', filename)
            m = pattern.match(filename)
            assert m
            if m.group(1) == 'in':
                names += [m.group(2)]
        # zip samples
        samples = []  # type: List[TestCase]
        for name in names:
            inpath = 'input/input{}.txt'.format(name)
            outpath = 'output/output{}.txt'.format(name)
            indata = fh.read(inpath).decode()
            outdata = fh.read(outpath).decode()
            samples += [TestCase(LabeledString(inpath, indata), LabeledString(outpath, outdata))]
    return samples
def login_with_github(self, get_credentials, session=None):
    """Log in to yukicoder through its GitHub OAuth flow.

    :param get_credentials: callable returning (username, password) for github.com
    :returns: True when already signed in or the login succeeds
    """
    session = session or utils.new_default_session()
    url = "https://yukicoder.me/auth/github"
    # get; staying on yukicoder.me means we are already authenticated
    resp = utils.request("GET", url, session=session)
    if urllib.parse.urlparse(resp.url).hostname == "yukicoder.me":
        log.info("You have already signed in.")
        return True
    # redirect to github.com
    # parse the GitHub login form
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find("form")
    if not form:
        log.error("form not found")
        log.info("Have you logged in?")  # fixed message grammar (was "Did you logged in?")
        return False
    log.debug("form: %s", str(form))
    # post the credentials to GitHub
    username, password = get_credentials()
    form = utils.FormSender(form, url=resp.url)
    form.set("login", username)
    form.set("password", password)
    resp = form.request(session)
    resp.raise_for_status()
    # a redirect back to yukicoder.me means success
    if urllib.parse.urlparse(resp.url).hostname == "yukicoder.me":
        log.success("You signed in.")
        return True
    else:
        log.failure("You failed to sign in. Wrong user ID or password.")
        return False
def submit_code(self, code: bytes, language: str, session: Optional['requests.Session'] = None) -> onlinejudge.type.Submission:  # or SubmissionError
    """Submit *code* to Codeforces via the submit form on the problem page.

    :param code: the source code as bytes
    :param language: a Codeforces programTypeId value
    :raises SubmissionError: when not logged in or the submission is rejected
    :returns: a DummySubmission pointing at the "my submissions" page
    """
    session = session or utils.new_default_session()
    # get
    resp = utils.request('GET', self.get_url(), session=session)
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find('form', class_='submitForm')
    if form is None:
        # the submit form is only rendered for authenticated users
        log.error('not logged in')
        raise SubmissionError
    log.debug('form: %s', str(form))
    # make data
    form = utils.FormSender(form, url=resp.url)
    form.set('programTypeId', language)
    form.set_file('sourceFile', 'code', code)
    resp = form.request(session=session)
    resp.raise_for_status()
    # result: a redirect ending in /my indicates acceptance
    if resp.url.endswith('/my'):
        # example: https://codeforces.com/contest/598/my
        log.success('success: result: %s', resp.url)
        return onlinejudge.type.DummySubmission(resp.url)
    else:
        log.failure('failure')
        log.debug('redirected to %s', resp.url)
        # parse error messages
        soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
        for span in soup.findAll('span', class_='error'):
            log.warning('Codeforces says: "%s"', span.string)
        raise SubmissionError
def get_language_dict(self, session=None):
    """Scrape the language selector of the legacy submit page.

    :returns: mapping from language id to {'description': ...}; {} when not
        logged in or when the service reports an error
    """
    session = session or utils.new_default_session()
    # fetch the submit page
    url = 'http://{}.contest.atcoder.jp/submit'.format(self.contest_id)
    resp = utils.request('GET', url, session=session)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    if AtCoderService._report_messages(msgs, unexpected=True):
        return {}
    # a redirect to /login means we are not authenticated
    path = utils.normpath(urllib.parse.urlparse(resp.url).path)
    if path.startswith('/login'):
        log.error('not logged in')
        return {}
    # scrape the selector
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    select = soup.find('select', class_='submit-language-selector')  # NOTE: AtCoder can vary languages depending on tasks, even in one contest. here, ignores this fact.
    return {option.attrs['value']: {'description': option.string} for option in select.find_all('option')}
def call_generate_scanner(self, url, expected, options=None):
    """Run `generate-scanner` against *url* and assert its output equals *expected*.

    :param options: extra command-line options, or None for none
        (was a mutable default list, shared across calls)
    """
    options = options or []
    cmd = [self.ojtools, '-v', 'generate-scanner', url] + options
    output = subprocess.check_output(cmd, stderr=sys.stderr).decode()
    log.status('result:\n%s', output)
    if expected != output:
        log.error('expected:\n%s' % expected)
    self.assertEqual(expected, output)
    time.sleep(1)  # be polite to the judge server between scraping calls
def run_webdriver(webdriver, target_url, cookie_path, headless=True):
    """Generator: launch a Selenium webdriver, restore cookies, yield the driver, then persist cookies and close.

    :param webdriver: path or name of the webdriver binary (PhantomJS or Chrome/Chromium)
    :param target_url: a URL on the site whose cookies should be loaded/saved
    :param cookie_path: cookie file location, or None for a per-driver default
    :param headless: run Chrome headless when True
    """
    # launch
    if webdriver is None:
        log.error('webdriver is not specified')
        sys.exit(1)
    elif 'phantomjs' in os.path.basename(webdriver):
        kwargs = {}
        if '/' in webdriver:
            kwargs['executable_path'] = webdriver
        driver = selenium.webdriver.PhantomJS(**kwargs)
    elif 'chrom' in os.path.basename(webdriver):
        kwargs = {}
        if '/' in webdriver:
            kwargs['executable_path'] = webdriver
        kwargs['chrome_options'] = selenium.webdriver.ChromeOptions()
        if headless:
            kwargs['chrome_options'].add_argument('--headless')
            kwargs['chrome_options'].add_argument('--disable-gpu')
        driver = selenium.webdriver.Chrome(**kwargs)
    else:
        # NOTE(review): `parser` is not defined in this function, and
        # argparse's error() takes a single pre-formatted message (no
        # %-args) — confirm this branch was ever exercised
        parser.error('unknown webdriver: %s', webdriver)
    # workaround
    # NOTE: selenium can read/write only cookies of the current domain
    domain = '.'.join(urllib.parse.urlparse(target_url).netloc.split('.')[-2:])
    # default cookie path
    default_selenium_cookie_path = os.path.join(default_data_dir, 'cookie-' + driver.name + '-' + domain + '.jar')
    cookie_path = cookie_path or default_selenium_cookie_path
    # load cookie
    if os.path.exists(cookie_path):
        log.info('load cookie for %s from: %s', driver.name, cookie_path)
        driver.get(target_url)  # navigate first: add_cookie applies to the current domain
        time.sleep(1)
        with open(cookie_path) as fh:
            cookies = ast.literal_eval(fh.read())
        for cookie in cookies:
            log.debug('cookie: %s', repr(cookie))
            try:
                driver.add_cookie(cookie)
            except selenium.common.exceptions.WebDriverException as e:
                # some cookies may be rejected (e.g. wrong domain); skip them
                log.debug('exception:\n%s', str(e))
    yield driver
    # save cookie
    log.info('save cookie for %s to: %s', driver.name, cookie_path)
    driver.get(target_url)
    time.sleep(1)
    if os.path.dirname(cookie_path):
        os.makedirs(os.path.dirname(cookie_path), exist_ok=True)
    with open(cookie_path, 'w') as fh:
        fh.write(repr(driver.get_cookies()) + '\n')
    os.chmod(cookie_path, 0o600)  # NOTE: to make secure a little bit
    driver.close()
def submit_code(self, code: bytes, language: str, session: Optional[requests.Session] = None, kind: str = 'example') -> onlinejudge.type.Submission:
    """Submit *code* to a TopCoder Marathon Match.

    :param language: key of get_language_dict()
    :param kind: 'example' runs example tests only; 'full' is a full submission
    :raises onlinejudge.type.SubmissionError: when the submit link is missing
        or TopCoder reports an error
    """
    assert kind in ['example', 'full']
    session = session or utils.new_default_session()

    # module=MatchDetails: locate the Register/Submit page for this round
    url = 'https://community.topcoder.com/tc?module=MatchDetails&rd=%d' % self.rd
    resp = utils.request('GET', url, session=session)
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    path = soup.find('a', text='Register/Submit').attrs['href']
    assert path.startswith('/') and 'module=ViewReg' in path

    # module=ViewActiveContests: find the Submit link matching this round
    url = 'https://community.topcoder.com' + path
    resp = utils.request('GET', url, session=session)
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    path = [tag.attrs['href'] for tag in soup.find_all('a', text='Submit') if ('rd=%d' % self.rd) in tag.attrs['href']]
    if len(path) == 0:
        log.error('link to submit not found: Are you logged in? Are you registered? Is the contest running?')
        raise onlinejudge.type.SubmissionError
    assert len(path) == 1
    path = path[0]
    assert path.startswith('/') and 'module=Submit' in path
    # remember the contest (cd) and component (compid) ids embedded in the link
    query = dict(urllib.parse.parse_qsl(urllib.parse.urlparse(path).query))
    self.cd = query['cd']
    self.compid = query['compid']

    # module=Submit: load the submit page (also needed later for error messages)
    submit_url = 'https://community.topcoder.com' + path
    resp = utils.request('GET', submit_url, session=session)
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)

    # post the submission form
    url = 'https://community.topcoder.com/longcontest/'
    language_id = self.get_language_dict(session=session)[language]['value']
    data = {
        'module': 'Submit',
        'rd': self.rd,
        'cd': self.cd,
        'compid': self.compid,
        'Action': 'submit',
        'exOn': {'example': 'true', 'full': 'false'}[kind],
        'lid': language_id,
        'code': code,
    }
    resp = utils.request('POST', url, session=session, data=data)

    # check if module=SubmitSuccess
    if 'module=SubmitSuccess' in resp.content.decode(resp.encoding):
        url = 'http://community.topcoder.com/longcontest/?module=SubmitSuccess&rd={}&cd={}&compid={}'.format(self.rd, self.cd, self.compid)
        log.success('success: result: %s', url)
        return onlinejudge.type.CompatibilitySubmission(url, self)
    else:
        # module=Submit again to scrape the error-messages textarea
        resp = utils.request('GET', submit_url, session=session)
        soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
        messages = soup.find('textarea', {'name': 'messages'}).text
        log.failure('%s', messages)
        raise onlinejudge.type.SubmissionError
def download(args: 'argparse.Namespace') -> None:
    """Download sample (or, with --system, system) test cases of args.url and write them to files.

    Respects --format, --directory, --dry-run, and --overwrite.
    Exits with status 1 when the URL is unrecognized or --system is unsupported.
    """
    # prepare values
    problem = onlinejudge.dispatch.problem_from_url(args.url)
    if problem is None:
        sys.exit(1)
    kwargs = {}
    if args.system:
        # only these services expose system test cases
        supported_service_names = ['aoj', 'yukicoder']
        if problem.get_service().get_name() not in supported_service_names:
            log.error('--system for %s is not supported', problem.get_service().get_name())
            sys.exit(1)
        kwargs['is_system'] = True
    if args.format is None:
        # choose a default filename pattern depending on mode and service
        if kwargs.get('is_system'):
            if problem.get_service().get_name() == 'yukicoder':
                args.format = '%b.%e'
            else:
                args.format = '%i.%e'
        else:
            args.format = 'sample-%i.%e'

    # get samples from the server
    with utils.with_cookiejar(utils.new_default_session(), path=args.cookie) as sess:
        samples = problem.download(session=sess, **kwargs)  # type: ignore

    # write samples to files
    for i, sample in enumerate(samples):
        log.emit('')
        log.info('sample %d', i)
        for kind in ['input', 'output']:
            ext = kind[:-3]  # 'input' -> 'in', 'output' -> 'out'
            data = getattr(sample, kind).data
            name = getattr(sample, kind).name
            # substitution table for the %-style --format pattern
            table = {}
            table['i'] = str(i + 1)
            table['e'] = ext
            table['n'] = name
            table['b'] = os.path.basename(name)
            table['d'] = os.path.dirname(name)
            path = os.path.join(args.directory, utils.parcentformat(args.format, table))
            log.status('%sput: %s', ext, name)
            log.emit(colorama.Style.BRIGHT + data.rstrip() + colorama.Style.RESET_ALL)
            if args.dry_run:
                continue
            if os.path.exists(path):
                log.warning('file already exists: %s', path)
                if not args.overwrite:
                    log.warning('skipped')
                    continue
            os.makedirs(os.path.dirname(path), exist_ok=True)
            with open(path, 'w') as fh:
                fh.write(data)
            log.success('saved to: %s', path)
def download(args):
    """Download sample (or, with --system, system) test cases of args.url and write them to files.

    Respects --format, --directory, --dry-run, and --overwrite.
    Exits with status 1 when the URL is unrecognized or --system is unsupported.
    """
    # prepare values
    problem = onlinejudge.dispatch.problem_from_url(args.url)
    if problem is None:
        sys.exit(1)
    kwargs = {}
    if args.system:
        # only these services expose system test cases
        supported_service_names = ["aoj", "yukicoder"]
        if problem.get_service().get_name() not in supported_service_names:
            log.error("--system for %s is not supported", problem.get_service().get_name())
            sys.exit(1)
        kwargs["is_system"] = True
    if args.format is None:
        # choose a default filename pattern depending on mode and service
        if kwargs.get("is_system"):
            if problem.get_service().get_name() == "yukicoder":
                args.format = "%b.%e"
            else:
                args.format = "%i.%e"
        else:
            args.format = "sample-%i.%e"

    # get samples from the server
    with utils.with_cookiejar(utils.new_default_session(), path=args.cookie) as sess:
        samples = problem.download(session=sess, **kwargs)

    # write samples to files
    for i, sample in enumerate(samples):
        log.emit("")
        log.info("sample %d", i)
        for kind in ["input", "output"]:
            ext = kind[:-3]  # "input" -> "in", "output" -> "out"
            data = sample[kind]["data"]
            name = sample[kind]["name"]
            # substitution table for the %-style --format pattern
            table = {}
            table["i"] = str(i + 1)
            table["e"] = ext
            table["n"] = name
            table["b"] = os.path.basename(name)
            table["d"] = os.path.dirname(name)
            path = os.path.join(args.directory, utils.parcentformat(args.format, table))
            log.status("%sput: %s", ext, name)
            log.emit(colorama.Style.BRIGHT + data.rstrip() + colorama.Style.RESET_ALL)
            if args.dry_run:
                continue
            if os.path.exists(path):
                log.warning("file already exists: %s", path)
                if not args.overwrite:
                    log.warning("skipped")
                    continue
            os.makedirs(os.path.dirname(path), exist_ok=True)
            with open(path, "w", encoding="utf-8") as fh:
                fh.write(data)
            log.success("saved to: %s", path)
def _get_model(self, session: Optional[requests.Session] = None) -> Dict[str, Any]:
    """Fetch this challenge's JSON model via the HackerRank REST API.

    :raises onlinejudge.type.SubmissionError: when the API reports failure
    """
    session = session or utils.new_default_session()
    # query the REST endpoint for this contest/challenge pair
    url = 'https://www.hackerrank.com/rest/contests/{}/challenges/{}'.format(self.contest_slug, self.challenge_slug)
    resp = utils.request('GET', url, session=session)
    # decode and validate the JSON payload
    it = json.loads(resp.content.decode())
    log.debug('json: %s', it)
    if it['status']:
        return it['model']
    log.error('get model: failed')
    raise onlinejudge.type.SubmissionError
def submit(args):
    """Submit the file args.file to the problem at args.url.

    Validates the language, optionally waits and asks for confirmation, then
    submits; with --open, opens the result page in a browser.
    """
    problem = onlinejudge.dispatch.problem_from_url(args.url)
    if problem is None:
        sys.exit(1)

    # read the code as raw bytes
    # (was `open(args.file)` + `fh.buffer.read()`: text-mode file objects have
    # no `.buffer` attribute — that pattern only works for sys.stdin — so this
    # raised AttributeError; open in binary mode instead)
    with open(args.file, 'rb') as fh:
        code = fh.read()
    try:
        s = code.decode()  # for logging
    except UnicodeDecodeError as e:
        log.failure('%s: %s', e.__class__.__name__, str(e))
        s = repr(code)[1:]
    log.info('code:')
    log.emit(log.bold(s))

    # session
    with utils.session(cookiejar=args.cookie) as sess:
        # language: reject ids the judge does not list
        langs = problem.get_language_dict(session=sess)
        if args.language not in langs:
            log.error('language is unknown')
            log.info('supported languages are:')
            for lang in sorted(langs.keys()):
                log.emit('%s (%s)', lang, langs[lang]['description'])
            sys.exit(1)

        # confirm
        if args.wait:
            log.status('sleep(%.2f)', args.wait)
            time.sleep(args.wait)
        if not args.yes:
            sys.stdout.write('Are you sure? [y/N] ')
            sys.stdout.flush()
            c = sys.stdin.read(1)
            if c != 'y':
                log.info('terminated.')
                return

        # submit
        url = problem.submit(code, language=args.language, session=sess)

        # open the result page in a browser when requested
        if url and args.open:
            if not isinstance(args.open, str):
                # --open without an argument: fall back to a known browser
                args.open = None
                for browser in default_url_opener:
                    args.open = shutil.which(browser)
                    if args.open:
                        break
            if not args.open:
                log.failure('couldn\'t open the url. please specify a browser')
            else:
                log.info('open the submission page with: %s', args.open)
                subprocess.check_call([args.open, url], stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr)
def _get_model(self, session=None):
    """Fetch this challenge's JSON model via the HackerRank REST API; None on failure."""
    session = session or utils.new_default_session()
    # query the REST endpoint for this contest/challenge pair
    endpoint = 'https://www.hackerrank.com/rest/contests/{}/challenges/{}'.format(self.contest_slug, self.challenge_slug)
    resp = utils.request('GET', endpoint, session=session)
    # decode and validate the JSON payload
    it = json.loads(resp.content.decode())
    log.debug('json: %s', it)
    if it['status']:
        return it['model']
    log.error('get model: failed')
    return None
def _get_model(self, session=None):
    """Return the parsed challenge model from HackerRank's REST API, or None when the API reports failure."""
    session = session or utils.new_default_session()
    # build and fetch the REST URL
    url = "https://www.hackerrank.com/rest/contests/{}/challenges/{}".format(self.contest_slug, self.challenge_slug)
    resp = utils.request("GET", url, session=session)
    # parse the response body as JSON and check the status flag
    payload = json.loads(resp.content.decode())
    log.debug("json: %s", payload)
    if not payload["status"]:
        log.error("get model: failed")
        return None
    return payload["model"]
def submit_code(
        self, code: bytes, language: str,
        session: Optional[requests.Session] = None
) -> onlinejudge.type.DummySubmission:
    """Submit *code* through the legacy (contest.atcoder.jp) submit form.

    :param language: key of get_language_dict()
    :raises SubmissionError: on any failure (not logged in, form missing, rejected)
    :returns: a DummySubmission pointing at beta.atcoder.jp's "my submissions" page
    """
    assert language in self.get_language_dict(session=session)
    session = session or utils.new_default_session()
    # get
    url = 'http://{}.contest.atcoder.jp/submit'.format(self.contest_id)  # TODO: use beta.atcoder.jp
    resp = _request('GET', url, session=session)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    if AtCoderService._report_messages(msgs, unexpected=True):
        raise SubmissionError
    # check whether logged in
    path = utils.normpath(urllib.parse.urlparse(resp.url).path)
    if path.startswith('/login'):
        log.error('not logged in')
        raise SubmissionError
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find('form', action=re.compile(r'^/submit\?task_id='))
    if not form:
        log.error('form not found')
        raise SubmissionError
    log.debug('form: %s', str(form))
    # post; the language field name embeds the task id
    task_id = self._get_task_id(session=session)
    form = utils.FormSender(form, url=resp.url)
    form.set('task_id', str(task_id))
    form.set('source_code', code)
    form.set('language_id_{}'.format(task_id), language)
    resp = form.request(session=session)
    resp.raise_for_status()
    # result
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    AtCoderService._report_messages(msgs)
    if '/submissions/me' in resp.url:
        # example: https://practice.contest.atcoder.jp/submissions/me#32174
        # CAUTION: this URL is not a URL of the submission
        log.success('success: result: %s', resp.url)
        # NOTE: ignore the returned legacy URL and use beta.atcoder.jp's one
        url = 'https://beta.atcoder.jp/contests/{}/submissions/me'.format(self.contest_id)
        return onlinejudge.type.DummySubmission(url)
    else:
        log.failure('failure')
        log.debug('redirected to %s', resp.url)
        raise SubmissionError
def get_language_dict(self, session: Optional['requests.Session'] = None) -> Dict[str, onlinejudge.type.Language]:
    """Scrape the programTypeId language selector from the problem page.

    :returns: mapping from language id to {'description': ...}; {} when the
        selector is absent (i.e. not logged in)
    """
    session = session or utils.new_default_session()
    # fetch and parse the problem page
    resp = utils.request('GET', self.get_url(), session=session)
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    select = soup.find('select', attrs={'name': 'programTypeId'})
    if select is None:
        log.error('not logged in')
        return {}
    # one entry per <option> in the selector
    return {opt.attrs['value']: {'description': opt.string} for opt in select.findAll('option')}
def exec_command(command, timeout=None, **kwargs):
    """Run *command*, capturing stdout; stderr is passed through.

    :param command: argv list (or whatever subprocess.Popen accepts)
    :param timeout: seconds to wait, or None to wait forever
    :returns: a tuple (stdout_bytes, popen_object); stdout is b'' on timeout
    Exits the process with status 1 when the executable is missing or not executable.
    """
    try:
        proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=sys.stderr, **kwargs)
    except FileNotFoundError:
        log.error('No such file or directory: %s', command)
        sys.exit(1)
    except PermissionError:
        log.error('Permission denied: %s', command)
        sys.exit(1)
    try:
        answer, _ = proc.communicate(timeout=timeout)
    except subprocess.TimeoutExpired:
        # the original left the child running forever (process leak);
        # kill it and reap it, per the subprocess documentation's pattern
        proc.kill()
        proc.communicate()
        answer = b''
    return answer, proc
def main(args: Optional[List[str]] = None) -> None:
    """Entry point: configure logging, parse arguments, and dispatch to the selected subcommand.

    :param args: argv to parse, or None to use sys.argv
    """
    log.addHandler(log.logging.StreamHandler(sys.stderr))
    log.setLevel(log.logging.INFO)
    version_check()  # compare the installed version against the latest release
    parser = get_parser()
    namespace = parser.parse_args(args=args)
    try:
        run_program(namespace, parser=parser)
    except NotImplementedError as e:
        # report unsupported operations politely instead of dumping a traceback
        log.debug('\n' + traceback.format_exc())
        log.error('NotImplementedError')
        log.info('The operation you specified is not supported yet. Pull requests are welcome.')
        log.info('see: https://github.com/kmyk/online-judge-tools/blob/master/CONTRIBUTING.md')
def submit(self, code, language, session=None):
    """Submit *code* through the legacy (contest.atcoder.jp) submit form.

    :param language: key of get_language_dict()
    :returns: a CompatibilitySubmission pointing at beta.atcoder.jp's
        "my submissions" page on success, or None on any failure
    """
    assert language in self.get_language_dict(session=session)
    session = session or utils.new_default_session()
    # get
    url = "http://{}.contest.atcoder.jp/submit".format(self.contest_id)  # TODO: use beta.atcoder.jp
    resp = utils.request("GET", url, session=session)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    if AtCoderService._report_messages(msgs, unexpected=True):
        return None
    # check whether logged in
    path = utils.normpath(urllib.parse.urlparse(resp.url).path)
    if path.startswith("/login"):
        log.error("not logged in")
        return None
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find("form", action=re.compile(r"^/submit\?task_id="))
    if not form:
        log.error("form not found")
        return None
    log.debug("form: %s", str(form))
    # post; the language field name embeds the task id
    task_id = self._get_task_id(session=session)
    form = utils.FormSender(form, url=resp.url)
    form.set("task_id", str(task_id))
    form.set("source_code", code)
    form.set("language_id_{}".format(task_id), language)
    resp = form.request(session=session)
    resp.raise_for_status()
    # result
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    AtCoderService._report_messages(msgs)
    if "/submissions/me" in resp.url:
        # example: https://practice.contest.atcoder.jp/submissions/me#32174
        # CAUTION: this URL is not a URL of the submission
        log.success("success: result: %s", resp.url)
        # NOTE: ignore the returned legacy URL and use beta.atcoder.jp's one
        url = "https://beta.atcoder.jp/contests/{}/submissions/me".format(self.contest_id)
        return onlinejudge.submission.CompatibilitySubmission(url)
    else:
        log.failure("failure")
        return None
def _get_task_id(self, session: Optional[requests.Session] = None) -> int:
    """Return the numeric task id used by the legacy submit form, caching it after the first fetch.

    :raises SubmissionError: when the page reports errors or has no submit link
    """
    if self._task_id is not None:
        return self._task_id
    session = session or utils.new_default_session()
    # fetch the problem page
    resp = utils.request('GET', self.get_url(), session=session)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    if AtCoderService._report_messages(msgs, unexpected=True):
        raise SubmissionError
    # locate the submit link and pull the id out of its href
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    submit = soup.find('a', href=re.compile(r'^/submit\?task_id='))
    if not submit:
        log.error('link to submit not found')
        raise SubmissionError
    m = re.match(r'^/submit\?task_id=([0-9]+)$', submit.attrs['href'])
    assert m
    self._task_id = int(m.group(1))
    return self._task_id
def _get_task_id(self, session=None):
    """Fetch and cache the numeric task id used by the legacy submit form.

    NOTE(review): the error paths return ``{}`` and ``False`` instead of an
    int — callers must treat any non-int as failure. Confirm whether these
    should raise instead (cf. the typed variant, which raises SubmissionError).
    """
    if self._task_id is None:
        session = session or utils.new_default_session()
        # get
        resp = utils.request("GET", self.get_url(), session=session)
        msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
        if AtCoderService._report_messages(msgs, unexpected=True):
            return {}  # NOTE(review): inconsistent error value — see docstring
        # parse
        soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
        submit = soup.find("a", href=re.compile(r"^/submit\?task_id="))
        if not submit:
            log.error("link to submit not found")
            return False  # NOTE(review): inconsistent error value — see docstring
        m = re.match(r"^/submit\?task_id=([0-9]+)$", submit.attrs["href"])
        assert m
        self._task_id = int(m.group(1))
    return self._task_id
def download(self, session=None):
    """Scrape the source code of this submission from its page.

    :returns: the code as a string; None (with an error logged) when no
        "Source code" section exists, or [] when the service reports
        unexpected messages
    """
    session = session or utils.new_default_session()
    # fetch the submission page
    resp = utils.request('GET', self.get_url(), session=session)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    if AtCoderService._report_messages(msgs, unexpected=True):
        return []
    # the code lives in the <pre> directly after an <h3>Source code</h3>
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    code = None
    for pre in soup.find_all('pre'):
        log.debug('pre tag: %s', str(pre))
        prv = utils.previous_sibling_tag(pre)
        if prv and prv.name == 'h3' and 'Source code' in prv.text:
            code = pre.string
    if code is None:
        log.error('source code not found')
    return code