def main(args: Optional[List[str]] = None) -> None:
    """Entry point of the command-line tool.

    :param args: command-line arguments; ``None`` means to use ``sys.argv[1:]``.

    Sets up logging to stderr, runs the update check, parses the arguments,
    and dispatches to :func:`run_program`.  Any failure is logged and the
    process exits with status 1.
    """
    log.addHandler(log.logging.StreamHandler(sys.stderr))
    log.setLevel(log.logging.INFO)
    is_updated = update_checking.run()
    parser = get_parser()
    namespace = parser.parse_args(args=args)
    try:
        run_program(namespace, parser=parser)
    except NotImplementedError:
        # the requested operation exists but is not implemented yet; point users at the contribution guide
        log.debug('\n' + traceback.format_exc())
        log.error('NotImplementedError')
        log.info('The operation you specified is not supported yet. Pull requests are welcome.')
        log.info('see: https://github.com/kmyk/online-judge-tools/blob/master/CONTRIBUTING.md')
        if not is_updated:
            log.info('hint: try updating the version of online-judge-tools')
        sys.exit(1)
    except Exception as e:
        log.debug('\n' + traceback.format_exc())
        log.error(str(e))
        if not is_updated:
            log.info('hint: try updating the version of online-judge-tools')
        sys.exit(1)
def run_program(args: argparse.Namespace, parser: argparse.ArgumentParser) -> None:
    """Dispatch parsed arguments to the matching subcommand handler.

    :param args: the parsed namespace.
    :param parser: the parser, used to print help when no subcommand matches.
    """
    if args.version:
        print('online-judge-tools {} (+ online-judge-api-client {})'.format(version.__version__, api_version.__version__))
        sys.exit(0)
    if args.verbose:
        log.setLevel(log.logging.DEBUG)
    log.debug('args: %s', str(args))

    # map every alias of each subcommand to its handler function
    dispatch = {}
    for aliases, handler in [
            (['download', 'd', 'dl'], download),
            (['login', 'l'], login),
            (['submit', 's'], submit),
            (['test', 't'], test),
            (['test-reactive', 't/r'], test_reactive),
            (['generate-output', 'g/o'], generate_output),
            (['generate-input', 'g/i'], generate_input),
    ]:
        for alias in aliases:
            dispatch[alias] = handler

    selected = dispatch.get(args.subcommand)
    if selected is None:
        parser.print_help(file=sys.stderr)
        sys.exit(1)
    selected(args)
def exec_command(command_str: str, *, stdin: Optional[IO[Any]] = None, input: Optional[bytes] = None, timeout: Optional[float] = None, gnu_time: Optional[str] = None) -> Tuple[Dict[str, Any], subprocess.Popen]:
    """Run *command_str* as a subprocess and measure its elapsed time (and memory, if GNU time is given).

    :param stdin: a file object to connect to the child's stdin; mutually exclusive with *input*.
    :param input: bytes to feed to the child's stdin; mutually exclusive with *stdin*.
    :param timeout: seconds to wait in ``communicate()``; on expiry the child is killed and ``answer`` is ``None``.
    :param gnu_time: path to a GNU time binary; when given, it wraps the command to report peak memory.
    :returns: a pair of an info dict (keys ``answer``, ``elapsed``, ``memory``) and the ``Popen`` object.
    """
    if input is not None:
        assert stdin is None
        stdin = subprocess.PIPE  # type: ignore
    # GNU time writes its report to a file, so allocate a temporary one; otherwise use a no-op context
    if gnu_time is not None:
        context = tempfile.NamedTemporaryFile(delete=True)  # type: Any
    else:
        context = contextlib.ExitStack()  # TODO: we should use contextlib.nullcontext() if possible
    with context as fh:
        command = shlex.split(command_str)
        if gnu_time is not None:
            # '-f %M' reports the peak resident set size (in KB) into the temp file
            command = [gnu_time, '-f', '%M', '-o', fh.name, '--'] + command
        if os.name == 'nt':
            # HACK: without this encoding and decoding, something randomly fails with multithreading; see https://github.com/kmyk/online-judge-tools/issues/468
            command = command_str.encode().decode()  # type: ignore
        begin = time.perf_counter()

        # We need kill processes called from the "time" command using process groups. Without this, orphans spawn. see https://github.com/kmyk/online-judge-tools/issues/640
        preexec_fn = None
        if gnu_time is not None and os.name == 'posix':
            preexec_fn = os.setsid

        try:
            proc = subprocess.Popen(command, stdin=stdin, stdout=subprocess.PIPE, stderr=sys.stderr, preexec_fn=preexec_fn)
        except FileNotFoundError:
            log.error('No such file or directory: %s', command)
            sys.exit(1)
        except PermissionError:
            log.error('Permission denied: %s', command)
            sys.exit(1)
        answer = None  # type: Optional[bytes]
        try:
            answer, _ = proc.communicate(input=input, timeout=timeout)
        except subprocess.TimeoutExpired:
            pass
        finally:
            # clean up the child even on timeout: kill the whole process group when one was created,
            # otherwise just terminate the single child
            if preexec_fn is not None:
                try:
                    os.killpg(os.getpgid(proc.pid), signal.SIGTERM)
                except ProcessLookupError:
                    pass
            else:
                proc.terminate()

        end = time.perf_counter()
        memory = None  # type: Optional[float]
        if gnu_time is not None:
            with open(fh.name) as fh1:
                reported = fh1.read()
            log.debug('GNU time says:\n%s', reported)
            # the last line holds the '%M' value; earlier lines may contain the child's own noise
            if reported.strip() and reported.splitlines()[-1].isdigit():
                memory = int(reported.splitlines()[-1]) / 1000
    info = {
        'answer': answer,  # Optional[byte]
        'elapsed': end - begin,  # float, in second
        'memory': memory,  # Optional[float], in megabyte
    }
    return info, proc
def glob_with_format(directory: pathlib.Path, format: str) -> List[pathlib.Path]:
    """List the paths under *directory* matching the test-case *format* string.

    The placeholders ``%s`` and ``%e`` in *format* are both expanded to the
    glob wildcard ``*``.
    """
    if os.name == 'nt':
        format = format.replace('/', '\\')
    # both placeholders match anything
    table = {'s': '*', 'e': '*'}
    # escape glob metacharacters in the format, but keep '%' literal so percentformat can substitute
    escaped_format = glob.escape(format).replace(glob.escape('%'), '%')
    pattern = glob.escape(str(directory) + os.path.sep) + percentformat(escaped_format, table)
    paths = [pathlib.Path(matched) for matched in glob.glob(pattern)]
    for path in paths:
        log.debug('testcase globbed: %s', path)
    return paths
def check_gnu_time(gnu_time: str) -> bool:
    """Check whether *gnu_time* behaves like GNU time (accepts ``-f``/``-o`` and reports memory).

    :returns: ``True`` when a probe run succeeds and its report parses, ``False`` otherwise.
    """
    try:
        with tempfile.NamedTemporaryFile(delete=True) as fh:
            probe = subprocess.run([gnu_time, '-f', '%M KB', '-o', fh.name, '--', 'true'])
            assert probe.returncode == 0
            with open(fh.name) as fh1:
                report = fh1.read()
            # the last line should look like "1234 KB"; int() raises when it doesn't
            last_line = report.rstrip().splitlines()[-1]
            int(utils.remove_suffix(last_line, ' KB'))
            return True
    except NameError:
        raise  # NameError is not a runtime error caused by the environment, but a coding mistake
    except AttributeError:
        raise  # AttributeError is also a mistake
    except Exception:
        log.debug(traceback.format_exc())
        return False
def request(method: str, url: str, session: requests.Session, raise_for_status: bool = True, **kwargs) -> requests.Response:
    """Send an HTTP request through *session*, logging the request and its status.

    :param method: ``'GET'`` or ``'POST'``.
    :param raise_for_status: when true, raise on 4xx/5xx responses.
    :param kwargs: forwarded to ``session.request``; ``allow_redirects`` defaults to ``True``.
    """
    assert method in ('GET', 'POST')
    kwargs.setdefault('allow_redirects', True)
    log.status('%s: %s', method, url)
    if 'data' in kwargs:
        log.debug('data: %s', repr(kwargs['data']))
    resp = session.request(method, url, **kwargs)
    was_redirected = resp.url != url
    if was_redirected:
        log.status('redirected: %s', resp.url)
    log.status(describe_status_code(resp.status_code))
    if raise_for_status:
        resp.raise_for_status()
    return resp
def get(self, *, directory: pathlib.Path) -> List[str]:
    """Collect the URLs recorded in the history file for *directory*.

    Corrupted (non-JSON) lines are logged and skipped; duplicates are removed.
    :returns: the list of matching URLs, or ``[]`` when the history file does not exist.
    """
    if not self.path.exists():
        return []
    log.status('read history from: %s', self.path)
    found = set()
    with open(str(self.path)) as history_file:
        for line in history_file:
            try:
                record = json.loads(line)
            except json.decoder.JSONDecodeError:
                log.warning('corrupted line found in: %s', self.path)
                log.debug('%s', traceback.format_exc())
                continue
            if pathlib.Path(record['directory']) == directory:
                found.add(record['url'])
    log.status('found urls in history:\n%s', '\n'.join(found))
    return list(found)
def get_latest_version_from_pypi(package_name: str) -> str:
    """Return the newest released version of *package_name* on PyPI.

    Results are cached on disk for 8 hours.  Network failures are tolerated:
    ``'0.0.0'`` is returned (and cached) so the caller never sees an error.
    """
    pypi_url = 'https://pypi.org/pypi/{}/json'.format(package_name)
    version_cache_path = user_cache_dir / "pypi.json"
    update_interval = 60 * 60 * 8  # 8 hours

    # load cache
    cache = {}  # type: Dict[str, Any]
    if version_cache_path.exists():
        try:
            log.debug('load the cache for update checking: %s', str(version_cache_path))
            with version_cache_path.open() as cache_file:
                cache = json.load(cache_file)
            entry = cache[package_name]
            if time.time() < entry['time'] + update_interval:
                return entry['version']
        except Exception as e:
            # a missing key or unreadable file just means the cache is stale/absent
            log.warning('failed to load the cache in update checking: %s', e)

    # get
    try:
        response = request('GET', pypi_url, session=requests.Session())
        metadata = json.loads(response.content.decode())
        latest = metadata['info']['version']
    except requests.RequestException as e:
        log.error(str(e))
        latest = '0.0.0'  # ignore since this failure is not important
    cache[package_name] = {
        'time': int(time.time()),  # use timestamp because Python's standard datetime library is too weak to parse strings
        'version': latest,
    }

    # store cache
    log.debug('store the cache for update checking: %s', str(version_cache_path))
    version_cache_path.parent.mkdir(parents=True, exist_ok=True)
    with version_cache_path.open('w') as cache_file:
        json.dump(cache, cache_file)

    return latest
def guess_lang_ids_of_file(filename: pathlib.Path, code: bytes, language_dict, cxx_latest: bool = False, cxx_compiler: str = 'all', python_version: str = 'all', python_interpreter: str = 'all') -> List[str]:
    """Guess the judge's language ids suitable for submitting *filename*.

    :param filename: the path of the file to submit; only its extension is used.
    :param code: the file's content, used for Python 2/3 detection in ``'auto'`` mode.
    :param language_dict: a mapping from language id to its description string.
    :param cxx_latest: for C++, keep only the newest standard per compiler.
    :param cxx_compiler: ``'gcc'``, ``'clang'`` or ``'all'``.
    :param python_version: ``'2'``, ``'3'``, ``'auto'`` or ``'all'``.
    :param python_interpreter: ``'cpython'``, ``'pypy'`` or ``'all'``.
    :returns: a sorted list of candidate language ids (possibly empty).
    """
    assert cxx_compiler in ('gcc', 'clang', 'all')
    assert python_version in ('2', '3', 'auto', 'all')
    assert python_interpreter in ('cpython', 'pypy', 'all')

    ext = filename.suffix
    lang_ids = language_dict.keys()

    log.debug('file extension: %s', ext)
    ext = ext.lstrip('.')

    if ext in ('cpp', 'cxx', 'cc', 'C'):
        log.debug('language guessing: C++')  # memo: https://stackoverflow.com/questions/1545080/c-code-file-extension-cc-vs-cpp
        lang_ids = list(filter(lambda lang_id: is_cplusplus_description(language_dict[lang_id]), lang_ids))
        if not lang_ids:
            return []
        log.debug('all lang ids for C++: %s', lang_ids)

        # compiler: narrow by GCC/Clang only when both are actually offered
        found_gcc = False
        found_clang = False
        for lang_id in lang_ids:
            compiler = parse_cplusplus_compiler(language_dict[lang_id])
            if compiler == 'gcc':
                found_gcc = True
            elif compiler == 'clang':
                found_clang = True
        if found_gcc and found_clang:
            log.status('both GCC and Clang are available for C++ compiler')
            if cxx_compiler == 'gcc':
                log.status('use: GCC')
                # None means the description names no compiler, so it stays in either case
                lang_ids = list(filter(lambda lang_id: parse_cplusplus_compiler(language_dict[lang_id]) in ('gcc', None), lang_ids))
            elif cxx_compiler == 'clang':
                log.status('use: Clang')
                lang_ids = list(filter(lambda lang_id: parse_cplusplus_compiler(language_dict[lang_id]) in ('clang', None), lang_ids))
            else:
                assert cxx_compiler == 'all'
        log.debug('lang ids after compiler filter: %s', lang_ids)

        # version: optionally keep only the newest C++ standard per compiler
        if cxx_latest:
            saved_lang_ids = lang_ids
            lang_ids = []
            for compiler in ('gcc', 'clang'):  # use the latest for each compiler
                ids = list(filter(lambda lang_id: parse_cplusplus_compiler(language_dict[lang_id]) in (compiler, None), saved_lang_ids))
                if not ids:
                    continue
                ids.sort(key=lambda lang_id: (parse_cplusplus_version(language_dict[lang_id]) or '', language_dict[lang_id]))
                lang_ids += [ids[-1]]  # since C++11 < C++1y < ... as strings
        log.debug('lang ids after version filter: %s', lang_ids)

        assert lang_ids
        lang_ids = sorted(set(lang_ids))
        return lang_ids

    elif ext == 'py':
        log.debug('language guessing: Python')

        # interpreter: narrow to CPython/PyPy when requested
        lang_ids = list(filter(lambda lang_id: is_python_description(language_dict[lang_id]), lang_ids))
        if any([parse_python_interpreter(language_dict[lang_id]) == 'pypy' for lang_id in lang_ids]):
            log.status('PyPy is available for Python interpreter')
        if python_interpreter != 'all':
            lang_ids = list(filter(lambda lang_id: parse_python_interpreter(language_dict[lang_id]) == python_interpreter, lang_ids))

        # version: decide between Python 2 and 3 only when both are offered
        three_found = False
        two_found = False
        for lang_id in lang_ids:
            version = parse_python_version(language_dict[lang_id])
            log.debug('%s (%s) is recognized as Python %s', lang_id, language_dict[lang_id], str(version or 'unknown'))
            if version == 3:
                three_found = True
            if version == 2:
                two_found = True
        if two_found and three_found:
            log.status('both Python2 and Python3 are available for version of Python')
            if python_version in ('2', '3'):
                versions = [int(python_version)]  # type: List[Optional[int]]
            elif python_version == 'all':
                versions = [2, 3]
            else:
                assert python_version == 'auto'
                lines = code.splitlines()
                if code.startswith(b'#!'):
                    s = lines[0]  # use shebang
                else:
                    s = b'\n'.join(lines[:10] + lines[-5:])  # use modelines
                versions = []
                for version in (2, 3):
                    # look for e.g. "python3" or "python version: 2" in the shebang/modelines
                    if re.search(r'python *(version:? *)?%d'.encode() % version, s.lower()):
                        versions += [version]
                if not versions:
                    log.status('no version info in code')
                    versions = [3]
            log.status('use: %s', ', '.join(map(str, versions)))
            # None keeps ids whose description names no version
            lang_ids = list(filter(lambda lang_id: parse_python_version(language_dict[lang_id]) in versions + [None], lang_ids))

        lang_ids = sorted(set(lang_ids))
        return lang_ids

    else:
        log.debug('language guessing: others')
        # extension -> language-name lookup table; 'split' controls word-splitting in the matcher
        table = [
            {'names': ['awk'], 'exts': ['awk']},
            {'names': ['bash'], 'exts': ['sh']},
            {'names': ['brainfuck'], 'exts': ['bf']},
            {'names': ['c#'], 'exts': ['cs']},
            {'names': ['c'], 'exts': ['c'], 'split': True},
            {'names': ['ceylon'], 'exts': ['ceylon']},
            {'names': ['clojure'], 'exts': ['clj']},
            {'names': ['common lisp'], 'exts': ['lisp', 'lsp', 'cl']},
            {'names': ['crystal'], 'exts': ['cr']},
            {'names': ['d'], 'exts': ['d'], 'split': True},
            {'names': ['f#'], 'exts': ['fs']},
            {'names': ['fortran'], 'exts': ['for', 'f', 'f90', 'f95', 'f03']},
            {'names': ['go'], 'exts': ['go'], 'split': True},
            {'names': ['haskell'], 'exts': ['hs']},
            {'names': ['java'], 'exts': ['java']},
            {'names': ['javascript'], 'exts': ['js']},
            {'names': ['julia'], 'exts': ['jl']},
            {'names': ['kotlin'], 'exts': ['kt', 'kts']},
            {'names': ['lua'], 'exts': ['lua']},
            {'names': ['nim'], 'exts': ['nim']},
            {'names': ['moonscript'], 'exts': ['moon']},
            {'names': ['objective-c'], 'exts': ['m']},
            {'names': ['ocaml'], 'exts': ['ml']},
            {'names': ['octave'], 'exts': ['m']},
            {'names': ['pascal'], 'exts': ['pas']},
            {'names': ['perl6'], 'exts': ['p6', 'pl6', 'pm6']},
            {'names': ['perl'], 'exts': ['pl', 'pm'], 'split': True},
            {'names': ['php'], 'exts': ['php']},
            {'names': ['ruby'], 'exts': ['rb']},
            {'names': ['rust'], 'exts': ['rs']},
            {'names': ['scala'], 'exts': ['scala']},
            {'names': ['scheme'], 'exts': ['scm']},
            {'names': ['sed'], 'exts': ['sed']},
            {'names': ['standard ml'], 'exts': ['sml']},
            {'names': ['swift'], 'exts': ['swift']},
            {'names': ['text'], 'exts': ['txt']},
            {'names': ['typescript'], 'exts': ['ts']},
            {'names': ['unlambda'], 'exts': ['unl']},
            {'names': ['vim script'], 'exts': ['vim']},
            {'names': ['visual basic'], 'exts': ['vb']},
        ]  # type: List[Dict[str, Any]]  # yapf: disable
        lang_ids = []
        for data in table:
            if ext in data['exts']:
                for name in data['names']:
                    lang_ids += select_ids_of_matched_languages([name], language_dict.keys(), language_dict=language_dict, split=data.get('split', False))
        return sorted(set(lang_ids))
def new_session_with_our_user_agent(*, path: pathlib.Path) -> Iterator[requests.Session]:
    """Yield a :class:`requests.Session` whose User-Agent names this package.

    :param path: where the session's cookie jar is persisted.
    """
    session = requests.Session()
    user_agent = '{}/{} (+{})'.format(version.__package_name__, version.__version__, version.__url__)
    session.headers['User-Agent'] = user_agent
    log.debug('User-Agent: %s', user_agent)
    with utils.with_cookiejar(session, path=path) as session:
        yield session