def download_sample_cases(self, session: Optional[requests.Session] = None) -> List[onlinejudge.type.TestCase]:
    session = session or utils.new_default_session()
    # get
    url = self.get_url(contests=False) + '/file/statement/samples.zip'
    resp = utils.request('GET', url, session=session, raise_for_status=False)
    if resp.status_code == 404:
        log.warning('samples.zip not found')
        log.info('this 404 can mean either: 1. the problem intentionally has no sample cases; 2. a temporary error')
        return []
    resp.raise_for_status()
    # parse
    with zipfile.ZipFile(io.BytesIO(resp.content)) as fh:
        samples = []  # type: List[TestCase]
        for filename in sorted(fh.namelist()):
            log.debug('filename: %s', filename)
            if filename.endswith('.in'):
                inpath = filename
                outpath = filename[:-3] + '.ans'
                indata = fh.read(inpath).decode()
                outdata = fh.read(outpath).decode()
                samples += [TestCase(LabeledString(inpath, indata), LabeledString(outpath, outdata))]
        return samples

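# A minimal, self-contained sketch of the `.in`/`.ans` pairing done above, using only the
# standard library. The in-memory zip built here is illustrative; the code above assumes the
# real samples.zip simply contains files named like `1.in` / `1.ans`.
import io
import zipfile


def _demo_pair_samples() -> None:
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, 'w') as zh:
        zh.writestr('1.in', '1 2\n')
        zh.writestr('1.ans', '3\n')
    with zipfile.ZipFile(buf) as zh:
        for name in sorted(zh.namelist()):
            if name.endswith('.in'):
                indata = zh.read(name).decode()
                outdata = zh.read(name[:-3] + '.ans').decode()
                print(name, repr(indata), repr(outdata))
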
def download(self, session=None):
    session = session or utils.new_default_session()
    # get
    resp = utils.request('GET', self.get_url(), session=session)
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    if AtCoderService._report_messages(msgs, unexpected=True):
        # example message: "message: You cannot see this page."
        log.warning('are you logged in?')
        return []
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    samples = utils.SampleZipper()
    lang = None
    for pre, h3 in self._find_sample_tags(soup):
        s = utils.textfile(utils.dos2unix(pre.string.lstrip()))
        name = h3.string
        l = self._get_tag_lang(pre)
        if lang is None:
            lang = l
        elif lang != l:
            log.info('skipped due to language: current one is %s, not %s: %s ', lang, l, name)
            continue
        samples.add(s, name)
    return samples.get()

def login(self, get_credentials, session=None):
    driver = session  # rename
    assert isinstance(driver, selenium.webdriver.remote.webdriver.WebDriver)

    # go to the login page
    url = 'https://accounts.topcoder.com/member'
    driver.get(url)
    log.status('wait for redirect...')
    time.sleep(3)
    if driver.current_url != url:
        log.info('You have already logged in.')
        return True

    # type credentials and click login
    username, password = get_credentials()
    driver.find_element_by_xpath('//form[@name="vm.loginForm"]//input[@name="username"]').send_keys(username)
    driver.find_element_by_xpath('//form[@name="vm.loginForm"]//input[@id="current-password-input"]').send_keys(password)
    driver.find_element_by_xpath('//form[@name="vm.loginForm"]//button[@type="submit" and text()="Log In"]').click()

    # wait for the login to complete
    log.info('Logging in...')
    time.sleep(6)
    if driver.current_url != url:
        log.success('Success')
        return True
    else:
        log.failure('Failure')
        return False

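# A hedged alternative to the fixed `time.sleep(...)` calls above: Selenium's explicit waits.
# This is a sketch, not this repository's code; it assumes a Selenium 3.x+ WebDriver `driver`
# and the same login URL as above.
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait


def wait_for_redirect(driver, url, timeout=10.0):
    """Return True once the browser has navigated away from `url`, False on timeout."""
    try:
        WebDriverWait(driver, timeout).until(EC.url_changes(url))
        return True
    except TimeoutException:
        return False
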
def login(self, get_credentials, session=None):
    session = session or utils.new_default_session()
    url = "http://codeforces.com/enter"
    # get
    resp = utils.request("GET", url, session=session)
    if resp.url != url:  # redirected
        log.info("You have already signed in.")
        return True
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find("form", id="enterForm")
    log.debug("form: %s", str(form))
    username, password = get_credentials()
    form = utils.FormSender(form, url=resp.url)
    form.set("handleOrEmail", username)
    form.set("password", password)
    form.set("remember", "on")
    # post
    resp = form.request(session)
    resp.raise_for_status()
    if resp.url != url:  # redirected
        log.success("Welcome, %s.", username)
        return True
    else:
        log.failure("Invalid handle or password.")
        return False

def login(self, get_credentials, session=None):
    session = session or requests.Session()
    url = 'http://codeforces.com/enter'
    # get
    log.status('GET: %s', url)
    resp = session.get(url)
    log.status(utils.describe_status_code(resp.status_code))
    resp.raise_for_status()
    if resp.url != url:  # redirected
        log.info('You have already signed in.')
        return True
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find('form', id='enterForm')
    log.debug('form: %s', str(form))
    username, password = get_credentials()
    form = utils.FormSender(form, url=resp.url)
    form.set('handle', username)
    form.set('password', password)
    form.set('remember', 'on')
    # post
    resp = form.request(session)
    resp.raise_for_status()
    if resp.url != url:  # redirected
        log.success('Welcome, %s.', username)
        return True
    else:
        log.failure('Invalid handle or password.')
        return False

def login(self, get_credentials: onlinejudge.service.CredentialsProvider, session: Optional[requests.Session] = None) -> bool:
    session = session or utils.new_default_session()
    url = 'https://codeforces.com/enter'
    # get
    resp = utils.request('GET', url, session=session)
    if resp.url != url:  # redirected
        log.info('You have already signed in.')
        return True
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find('form', id='enterForm')
    log.debug('form: %s', str(form))
    username, password = get_credentials()
    form = utils.FormSender(form, url=resp.url)
    form.set('handleOrEmail', username)
    form.set('password', password)
    form.set('remember', 'on')
    # post
    resp = form.request(session)
    resp.raise_for_status()
    if resp.url != url:  # redirected
        log.success('Welcome, %s.', username)
        return True
    else:
        log.failure('Invalid handle or password.')
        return False

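# A minimal, self-contained sketch of roughly what `utils.FormSender` does in the logins
# above: scrape a <form>, carry over its pre-filled inputs (hidden fields such as CSRF
# tokens), apply the caller's overrides, and POST to the form's action URL. The field names
# and target page here are placeholders, not any judge's confirmed form layout.
import urllib.parse

import bs4
import requests


def post_form(session, page_url, overrides):
    resp = session.get(page_url)
    resp.raise_for_status()
    soup = bs4.BeautifulSoup(resp.content, 'html.parser')
    form = soup.find('form')
    assert form is not None
    payload = {}
    for tag in form.find_all('input'):
        if tag.get('name'):
            payload[tag['name']] = tag.get('value', '')
    payload.update(overrides)
    action = urllib.parse.urljoin(resp.url, form.get('action') or resp.url)
    return session.post(action, data=payload)
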
def login_with_github(self, get_credentials, session=None):
    session = session or utils.new_default_session()
    url = "https://yukicoder.me/auth/github"
    # get
    resp = utils.request("GET", url, session=session)
    if urllib.parse.urlparse(resp.url).hostname == "yukicoder.me":
        log.info("You have already signed in.")
        return True
    # redirect to github.com
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find("form")
    if not form:
        log.error("form not found")
        log.info("Are you already logged in?")
        return False
    log.debug("form: %s", str(form))
    # post
    username, password = get_credentials()
    form = utils.FormSender(form, url=resp.url)
    form.set("login", username)
    form.set("password", password)
    resp = form.request(session)
    resp.raise_for_status()
    if urllib.parse.urlparse(resp.url).hostname == "yukicoder.me":
        log.success("You signed in.")
        return True
    else:
        log.failure("You failed to sign in. Wrong user ID or password.")
        return False

def code_statistics(args: 'argparse.Namespace') -> None:
    with open(args.file, 'rb') as fh:
        code = fh.read()
    stat = get_statistics(code)
    stat['size'] = len(code)
    for key in ('size', 'binary', 'alnum', 'symbol', 'whitespace'):
        log.info('%s = %d', key, stat[key])

def code_statistics(args):
    with open(args.file, "rb") as fh:
        code = fh.read()
    stat = get_statistics(code)
    stat["size"] = len(code)
    for key in ("size", "binary", "alnum", "symbol", "whitespace"):
        log.info("%s = %d", key, stat[key])

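# `get_statistics` is referenced above but not shown. This is a guess at its shape, not the
# repository's implementation: classify every byte of the source code into rough buckets so
# the counts printed by `code_statistics` make sense.
import string


def get_statistics(code):
    stat = {'binary': 0, 'alnum': 0, 'symbol': 0, 'whitespace': 0}
    for byte in code:
        ch = chr(byte)
        if byte >= 0x80 or ch not in string.printable:
            stat['binary'] += 1  # non-ASCII or non-printable control bytes
        elif ch in string.whitespace:
            stat['whitespace'] += 1
        elif ch.isalnum():
            stat['alnum'] += 1
        else:
            stat['symbol'] += 1
    return stat
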
def login(self, get_credentials: onlinejudge.type.CredentialsProvider, session: Optional[requests.Session] = None) -> bool:
    session = session or utils.new_default_session()
    url = 'https://www.hackerrank.com/auth/login'
    # get
    resp = utils.request('GET', url, session=session)
    if resp.url != url:
        log.debug('redirected: %s', resp.url)
        log.info('You have already signed in.')
        return True
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    csrftoken = soup.find('meta', attrs={'name': 'csrf-token'}).attrs['content']
    tag = soup.find('input', attrs={'name': 'username'})
    while tag.name != 'form':
        tag = tag.parent
    form = tag
    # post
    username, password = get_credentials()
    form = utils.FormSender(form, url=resp.url)
    form.set('login', username)
    form.set('password', password)
    form.set('remember_me', 'true')
    form.set('fallback', 'true')
    resp = form.request(session, method='POST', action='/rest/auth/login', headers={'X-CSRF-Token': csrftoken})
    resp.raise_for_status()
    log.debug('redirected: %s', resp.url)
    # result
    if '/auth' not in resp.url:
        log.success('You signed in.')
        return True
    else:
        log.failure('You failed to sign in. Wrong user ID or password.')
        return False

def download(self, session=None):
    session = session or requests.Session()
    url = self.get_url()
    # get
    log.status('GET: %s', url)
    resp = session.get(url)
    log.status(utils.describe_status_code(resp.status_code))
    resp.raise_for_status()
    msgs = AtCoderService._get_messages_from_cookie(resp.cookies)
    if AtCoderService._report_messages(msgs, unexpected=True):
        return []
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    samples = utils.SampleZipper()
    lang = None
    for pre, h3 in self._find_sample_tags(soup):
        s = utils.textfile(utils.dos2unix(pre.string.lstrip()))
        name = h3.string
        l = self._get_tag_lang(pre)
        if lang is None:
            lang = l
        elif lang != l:
            log.info('skipped due to language: current one is %s, not %s: %s ', lang, l, name)
            continue
        samples.add(s, name)
    return samples.get()

def login(self, get_credentials: onlinejudge.type.CredentialsProvider, session: Optional[requests.Session] = None) -> bool:
    session = session or utils.new_default_session()
    url = 'https://toph.co/login'
    # get
    resp = utils.request('GET', url, session=session)
    if resp.url != url:  # redirected
        log.info('You are already logged in.')
        return True
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find('form', class_='login-form')
    log.debug('form: %s', str(form))
    username, password = get_credentials()
    form['action'] = '/login'  # to avoid a KeyError inside form.request, since Toph's form has no action attribute
    form = utils.FormSender(form, url=resp.url)
    form.set('handle', username)
    form.set('password', password)
    # post
    resp = form.request(session)
    resp.raise_for_status()
    resp = utils.request('GET', url, session=session)  # Toph's Location header does not give the expected value, so re-fetch the login page to check
    if resp.url != url:
        log.success('Welcome, %s.', username)
        return True
    else:
        log.failure('Invalid handle/email or password.')
        return False

def split_input(args):
    with open(args.input) as fh:
        inf = fh.read()
    if args.footer == split_input_auto_footer:
        args.footer = inf.splitlines(keepends=True)[-1]
    with subprocess.Popen(args.command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=sys.stderr) as proc:
        index = 0
        acc = ''
        for line in inf.splitlines(keepends=True):
            if args.ignore:
                args.ignore -= 1
            else:
                acc += line
            proc.stdin.write(line.encode())
            proc.stdin.flush()
            time.sleep(args.time)
            if non_block_read(proc.stdout):  # if output exists
                index += 1
                path = utils.parcentformat(args.output, {'i': str(index)})
                log.info('case found: %d', index)
                if args.header:
                    acc = args.header + acc
                if args.footer:
                    acc = acc + args.footer
                log.emit(log.bold(acc))
                with open(path, 'w') as fh:
                    fh.write(acc)
                log.success('saved to: %s', path)
                acc = ''
                while non_block_read(proc.stdout):  # consume all
                    pass

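# `non_block_read` is referenced above but not defined here. A plausible POSIX-only sketch
# (not necessarily this repository's implementation): put the pipe into non-blocking mode
# and read whatever is currently available, returning b'' when there is nothing to read.
import fcntl
import os


def non_block_read(stream):
    fd = stream.fileno()
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
    try:
        return os.read(fd, 4096)
    except BlockingIOError:
        return b''
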
def login_with_github(self, get_credentials: onlinejudge.type.CredentialsProvider, session: Optional[requests.Session] = None) -> bool:
    session = session or utils.new_default_session()
    url = 'https://yukicoder.me/auth/github'
    # get
    resp = utils.request('GET', url, session=session)
    if urllib.parse.urlparse(resp.url).hostname == 'yukicoder.me':
        log.info('You have already signed in.')
        return True
    # redirect to github.com
    # parse
    soup = bs4.BeautifulSoup(resp.content.decode(resp.encoding), utils.html_parser)
    form = soup.find('form')
    if not form:
        log.error('form not found')
        log.info('Are you already logged in?')
        return False
    log.debug('form: %s', str(form))
    # post
    username, password = get_credentials()
    form = utils.FormSender(form, url=resp.url)
    form.set('login', username)
    form.set('password', password)
    resp = form.request(session)
    resp.raise_for_status()
    if urllib.parse.urlparse(resp.url).hostname == 'yukicoder.me':
        log.success('You signed in.')
        return True
    else:
        log.failure('You failed to sign in. Wrong user ID or password.')
        return False

def version_check() -> None:
    if utils.is_update_available_on_pypi():
        log.warning('update available: %s -> %s', version.__version__, utils.get_latest_version_from_pypi())
        log.info('run: $ pip3 install -U %s', version.__package_name__)
        log.info('see: https://github.com/kmyk/online-judge-tools/blob/master/CHANGELOG.md')

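# A sketch of how a helper like `utils.get_latest_version_from_pypi` could work, using
# PyPI's public JSON API (https://pypi.org/pypi/<package>/json). The real helper's name,
# caching, and error handling may differ; only the endpoint and JSON layout are standard.
import requests


def get_latest_version_from_pypi(package_name='online-judge-tools'):
    url = 'https://pypi.org/pypi/{}/json'.format(package_name)
    resp = requests.get(url, timeout=10)
    resp.raise_for_status()
    return resp.json()['info']['version']
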
def format_code(code, dos2unix=False, rstrip=False):
    if dos2unix:
        log.info("dos2unix...")
        code = code.replace(b"\r\n", b"\n")
    if rstrip:
        log.info("rstrip...")
        code = code.rstrip()
    return code

def run_webdriver(webdriver, target_url, cookie_path, headless=True):
    # launch
    if webdriver is None:
        log.error('webdriver is not specified')
        sys.exit(1)
    elif 'phantomjs' in os.path.basename(webdriver):
        kwargs = {}
        if '/' in webdriver:
            kwargs['executable_path'] = webdriver
        driver = selenium.webdriver.PhantomJS(**kwargs)
    elif 'chrom' in os.path.basename(webdriver):
        kwargs = {}
        if '/' in webdriver:
            kwargs['executable_path'] = webdriver
        kwargs['chrome_options'] = selenium.webdriver.ChromeOptions()
        if headless:
            kwargs['chrome_options'].add_argument('--headless')
            kwargs['chrome_options'].add_argument('--disable-gpu')
        driver = selenium.webdriver.Chrome(**kwargs)
    else:
        log.error('unknown webdriver: %s', webdriver)
        sys.exit(1)

    # workaround
    # NOTE: selenium can read/write only cookies of the current domain
    domain = '.'.join(urllib.parse.urlparse(target_url).netloc.split('.')[-2:])

    # default cookie path
    default_selenium_cookie_path = os.path.join(default_data_dir, 'cookie-' + driver.name + '-' + domain + '.jar')
    cookie_path = cookie_path or default_selenium_cookie_path

    # load cookie
    if os.path.exists(cookie_path):
        log.info('load cookie for %s from: %s', driver.name, cookie_path)
        driver.get(target_url)
        time.sleep(1)
        with open(cookie_path) as fh:
            cookies = ast.literal_eval(fh.read())
        for cookie in cookies:
            log.debug('cookie: %s', repr(cookie))
            try:
                driver.add_cookie(cookie)
            except selenium.common.exceptions.WebDriverException as e:
                log.debug('exception:\n%s', str(e))

    yield driver

    # save cookie
    log.info('save cookie for %s to: %s', driver.name, cookie_path)
    driver.get(target_url)
    time.sleep(1)
    if os.path.dirname(cookie_path):
        os.makedirs(os.path.dirname(cookie_path), exist_ok=True)
    with open(cookie_path, 'w') as fh:
        fh.write(repr(driver.get_cookies()) + '\n')
    os.chmod(cookie_path, 0o600)  # NOTE: make the file a little more secure

    driver.close()

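# A hedged companion sketch: copy cookies out of a Selenium WebDriver into a requests
# session, so scraping can continue without the browser. Selenium's `get_cookies()` returns
# dicts with at least 'name', 'value', and 'domain' keys; anything beyond that is ignored.
import requests


def requests_session_from_webdriver(driver):
    session = requests.Session()
    for cookie in driver.get_cookies():
        session.cookies.set(cookie['name'], cookie['value'], domain=cookie.get('domain', ''))
    return session
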
def download(args: 'argparse.Namespace') -> None:
    # prepare values
    problem = onlinejudge.dispatch.problem_from_url(args.url)
    if problem is None:
        sys.exit(1)
    kwargs = {}
    if args.system:
        supported_service_names = ['aoj', 'yukicoder']
        if problem.get_service().get_name() not in supported_service_names:
            log.error('--system for %s is not supported', problem.get_service().get_name())
            sys.exit(1)
        kwargs['is_system'] = True
    if args.format is None:
        if kwargs.get('is_system'):
            if problem.get_service().get_name() == 'yukicoder':
                args.format = '%b.%e'
            else:
                args.format = '%i.%e'
        else:
            args.format = 'sample-%i.%e'

    # get samples from the server
    with utils.with_cookiejar(utils.new_default_session(), path=args.cookie) as sess:
        samples = problem.download(session=sess, **kwargs)  # type: ignore

    # write samples to files
    for i, sample in enumerate(samples):
        log.emit('')
        log.info('sample %d', i)
        for kind in ['input', 'output']:
            ext = kind[:-3]
            data = getattr(sample, kind).data
            name = getattr(sample, kind).name
            table = {}
            table['i'] = str(i + 1)
            table['e'] = ext
            table['n'] = name
            table['b'] = os.path.basename(name)
            table['d'] = os.path.dirname(name)
            path = os.path.join(args.directory, utils.parcentformat(args.format, table))
            log.status('%sput: %s', ext, name)
            log.emit(colorama.Style.BRIGHT + data.rstrip() + colorama.Style.RESET_ALL)
            if args.dry_run:
                continue
            if os.path.exists(path):
                log.warning('file already exists: %s', path)
                if not args.overwrite:
                    log.warning('skipped')
                    continue
            os.makedirs(os.path.dirname(path), exist_ok=True)
            with open(path, 'w') as fh:
                fh.write(data)
            log.success('saved to: %s', path)

def download(args):
    # prepare values
    problem = onlinejudge.dispatch.problem_from_url(args.url)
    if problem is None:
        sys.exit(1)
    kwargs = {}
    if args.system:
        supported_service_names = ["aoj", "yukicoder"]
        if problem.get_service().get_name() not in supported_service_names:
            log.error("--system for %s is not supported", problem.get_service().get_name())
            sys.exit(1)
        kwargs["is_system"] = True
    if args.format is None:
        if kwargs.get("is_system"):
            if problem.get_service().get_name() == "yukicoder":
                args.format = "%b.%e"
            else:
                args.format = "%i.%e"
        else:
            args.format = "sample-%i.%e"

    # get samples from the server
    with utils.with_cookiejar(utils.new_default_session(), path=args.cookie) as sess:
        samples = problem.download(session=sess, **kwargs)

    # write samples to files
    for i, sample in enumerate(samples):
        log.emit("")
        log.info("sample %d", i)
        for kind in ["input", "output"]:
            ext = kind[:-3]
            data = sample[kind]["data"]
            name = sample[kind]["name"]
            table = {}
            table["i"] = str(i + 1)
            table["e"] = ext
            table["n"] = name
            table["b"] = os.path.basename(name)
            table["d"] = os.path.dirname(name)
            path = os.path.join(args.directory, utils.parcentformat(args.format, table))
            log.status("%sput: %s", ext, name)
            log.emit(colorama.Style.BRIGHT + data.rstrip() + colorama.Style.RESET_ALL)
            if args.dry_run:
                continue
            if os.path.exists(path):
                log.warning("file already exists: %s", path)
                if not args.overwrite:
                    log.warning("skipped")
                    continue
            os.makedirs(os.path.dirname(path), exist_ok=True)
            with open(path, "w", encoding="utf-8") as fh:
                fh.write(data)
            log.success("saved to: %s", path)

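# `utils.parcentformat` is used above but not shown. A small stand-in with the same idea
# (substitute `%i`, `%e`, `%n`, `%b`, `%d`, ... from a table, with `%%` as a literal `%`);
# the real helper may handle more cases.
def parcentformat(s, table):
    result = ''
    i = 0
    while i < len(s):
        if s[i] == '%' and i + 1 < len(s):
            key = s[i + 1]
            result += '%' if key == '%' else table.get(key, '%' + key)
            i += 2
        else:
            result += s[i]
            i += 1
    return result


# e.g. parcentformat('sample-%i.%e', {'i': '1', 'e': 'in'}) == 'sample-1.in'
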
def format_code(code: bytes, dos2unix: bool = False, rstrip: bool = False) -> bytes:
    if dos2unix:
        log.info('dos2unix...')
        code = code.replace(b'\r\n', b'\n')
    if rstrip:
        log.info('rstrip...')
        code = code.rstrip()
    return code

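# The transformation above, shown standalone on a concrete byte string (dos2unix first,
# then rstrip), so the expected result is easy to verify without the surrounding module:
code = b'int main() {}\r\n\r\n'
code = code.replace(b'\r\n', b'\n')  # dos2unix
code = code.rstrip()                 # strip trailing whitespace
assert code == b'int main() {}'
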
@contextlib.contextmanager
def session(cookiejar):
    s = requests.Session()
    s.cookies = http.cookiejar.LWPCookieJar(cookiejar)
    if os.path.exists(cookiejar):
        log.info('load cookie from: %s', cookiejar)
        s.cookies.load()
    yield s
    log.info('save cookie to: %s', cookiejar)
    if os.path.dirname(cookiejar):
        os.makedirs(os.path.dirname(cookiejar), exist_ok=True)
    s.cookies.save()
    os.chmod(cookiejar, 0o600)  # NOTE: make the file a little more secure

@contextlib.contextmanager
def with_cookiejar(session, path):
    path = path or default_cookie_path
    session.cookies = http.cookiejar.LWPCookieJar(path)
    if os.path.exists(path):
        log.info('load cookie from: %s', path)
        session.cookies.load()
    yield session
    log.info('save cookie to: %s', path)
    if os.path.dirname(path):
        os.makedirs(os.path.dirname(path), exist_ok=True)
    session.cookies.save()
    os.chmod(path, 0o600)  # NOTE: make the file a little more secure

@contextlib.contextmanager
def with_cookiejar(session: requests.Session, path: str) -> Generator[requests.Session, None, None]:
    path = path or default_cookie_path
    session.cookies = http.cookiejar.LWPCookieJar(path)  # type: ignore
    if os.path.exists(path):
        log.info('load cookie from: %s', path)
        session.cookies.load()  # type: ignore
    yield session
    log.info('save cookie to: %s', path)
    if os.path.dirname(path):
        os.makedirs(os.path.dirname(path), exist_ok=True)
    session.cookies.save()  # type: ignore
    os.chmod(path, 0o600)  # NOTE: make the file a little more secure

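# How the context manager above is used elsewhere in this code (see the `download` and
# `submit` commands): cookies persist across runs in an LWP-format jar. This usage sketch
# assumes the definition above (and its `log` helper) is importable; the path is only an
# example.
import requests

with with_cookiejar(requests.Session(), path='/tmp/oj-cookie.jar') as sess:
    resp = sess.get('https://example.com/')
    resp.raise_for_status()
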
def generate_output(args):
    if not args.test:
        args.test = glob_with_format(args.format)  # by default
    if args.ignore_backup:
        args.test = drop_backup_or_hidden_files(args.test)
    tests = construct_relationship_of_files(args.test, args.format)
    for name, it in sorted(tests.items()):
        log.emit('')
        log.info('%s', name)
        if 'out' in it:
            log.info('output file already exists.')
            log.info('skipped.')
            continue
        with open(it['in']) as inf:
            begin = time.perf_counter()
            answer, proc = utils.exec_command(args.command, shell=args.shell, stdin=inf)
            end = time.perf_counter()
            log.status('time: %f sec', end - begin)
        if proc.returncode != 0:
            log.failure(log.red('RE') + ': return code %d', proc.returncode)
            log.info('skipped.')
            continue
        log.emit(log.bold(answer.decode().rstrip()))
        path = path_from_format(args.format, match_with_format(args.format, it['in']).groupdict()['name'], 'out')
        with open(path, 'w') as fh:
            fh.buffer.write(answer)
        log.success('saved to: %s', path)

def generate_output(args: 'argparse.Namespace') -> None:
    if not args.test:
        args.test = cutils.glob_with_format(args.directory, args.format)  # by default
    if args.ignore_backup:
        args.test = cutils.drop_backup_or_hidden_files(args.test)
    tests = cutils.construct_relationship_of_files(args.test, args.directory, args.format)
    for name, it in sorted(tests.items()):
        log.emit('')
        log.info('%s', name)
        if 'out' in it:
            log.info('output file already exists.')
            log.info('skipped.')
            continue
        with it['in'].open() as inf:
            begin = time.perf_counter()
            answer, proc = utils.exec_command(args.command, shell=True, stdin=inf)
            end = time.perf_counter()
            log.status('time: %f sec', end - begin)
        if proc.returncode != 0:
            log.failure(log.red('RE') + ': return code %d', proc.returncode)
            log.info('skipped.')
            continue
        log.emit(log.bold(answer.decode().rstrip()))
        match_result = cutils.match_with_format(args.directory, args.format, it['in'])  # type: Optional[Match[Any]]
        if match_result is not None:
            matched_name = match_result.groupdict()['name']  # type: str
        else:
            assert False
        path = cutils.path_from_format(args.directory, args.format, name=matched_name, ext='out')
        if not path.parent.is_dir():
            os.makedirs(str(path.parent), exist_ok=True)
        with path.open('wb') as fh:
            fh.write(answer)
        log.success('saved to: %s', path)

def main(args: Optional[List[str]] = None) -> None:
    log.addHandler(log.logging.StreamHandler(sys.stderr))
    log.setLevel(log.logging.INFO)
    version_check()
    parser = get_parser()
    namespace = parser.parse_args(args=args)
    try:
        run_program(namespace, parser=parser)
    except NotImplementedError as e:
        log.debug('\n' + traceback.format_exc())
        log.error('NotImplementedError')
        log.info('The operation you specified is not supported yet. Pull requests are welcome.')
        log.info('see: https://github.com/kmyk/online-judge-tools/blob/master/CONTRIBUTING.md')

def submit(args):
    problem = onlinejudge.dispatch.problem_from_url(args.url)
    if problem is None:
        sys.exit(1)

    # code
    with open(args.file) as fh:
        code = fh.buffer.read()
    try:
        s = code.decode()  # for logging
    except UnicodeDecodeError as e:
        log.failure('%s: %s', e.__class__.__name__, str(e))
        s = repr(code)[1:]
    log.info('code:')
    log.emit(log.bold(s))

    # session
    with utils.session(cookiejar=args.cookie) as sess:
        # language
        langs = problem.get_language_dict(session=sess)
        if args.language not in langs:
            log.error('language is unknown')
            log.info('supported languages are:')
            for lang in sorted(langs.keys()):
                log.emit('%s (%s)', lang, langs[lang]['description'])
            sys.exit(1)

        # confirm
        if args.wait:
            log.status('sleep(%.2f)', args.wait)
            time.sleep(args.wait)
        if not args.yes:
            sys.stdout.write('Are you sure? [y/N] ')
            sys.stdout.flush()
            c = sys.stdin.read(1)
            if c != 'y':
                log.info('terminated.')
                return

        # submit
        url = problem.submit(code, language=args.language, session=sess)

        # open the submission page in a browser
        if url and args.open:
            if not isinstance(args.open, str):
                args.open = None
                for browser in default_url_opener:
                    args.open = shutil.which(browser)
                    if args.open:
                        break
            if not args.open:
                log.failure('couldn\'t open the url. please specify a browser')
            else:
                log.info('open the submission page with: %s', args.open)
                subprocess.check_call([args.open, url], stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr)

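# A hedged alternative to locating a browser binary with `shutil.which` as above: the
# standard library's `webbrowser` module picks a default browser itself. This is only a
# sketch of that fallback, not how this code base actually opens submission pages.
import webbrowser


def open_submission_page(url):
    """Return True if a browser (or browser tab) could be launched for `url`."""
    return webbrowser.open(url)
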
def download(args):
    problem = onlinejudge.dispatch.problem_from_url(args.url)
    if problem is None:
        sys.exit(1)
    kwargs = {}
    if problem.get_service().get_name() == 'yukicoder':
        for x in args.extra_option:
            if x == 'all':
                kwargs['is_all'] = True
    if args.format is None:
        if problem.get_service().get_name() == 'yukicoder' and kwargs.get('is_all'):
            args.format = 'test/%b.%e'
        else:
            args.format = 'test/sample-%i.%e'
    with utils.session(cookiejar=args.cookie) as sess:
        samples = problem.download(session=sess, **kwargs)
    for i, sample in enumerate(samples):
        log.emit('')
        log.info('sample %d', i)
        for ext, (s, name) in zip(['in', 'out'], sample):
            table = {}
            table['i'] = str(i + 1)
            table['e'] = ext
            table['n'] = name
            table['b'] = os.path.basename(name)
            table['d'] = os.path.dirname(name)
            path = utils.parcentformat(args.format, table)
            log.status('%sput: %s', ext, name)
            log.emit(colorama.Style.BRIGHT + s.rstrip() + colorama.Style.RESET_ALL)
            if args.dry_run:
                continue
            if os.path.exists(path):
                log.warning('file already exists: %s', path)
                if not args.overwrite:
                    log.warning('skipped')
                    continue
            os.makedirs(os.path.dirname(path), exist_ok=True)
            with open(path, 'w') as fh:
                fh.write(s)
            log.success('saved to: %s', path)

def construct_relationship_of_files(paths, format):
    tests = collections.defaultdict(dict)
    for path in paths:
        m = match_with_format(format, os.path.normpath(path))
        if not m:
            log.error('unrecognizable file found: %s', path)
            sys.exit(1)
        name = m.groupdict()['name']
        ext = m.groupdict()['ext']
        assert ext not in tests[name]
        tests[name][ext] = path
    for name in tests:
        if 'in' not in tests[name]:
            assert 'out' in tests[name]
            log.error('dangling output case: %s', tests[name]['out'])
            sys.exit(1)
    if not tests:
        log.error('no cases found')
        sys.exit(1)
    log.info('%d cases found', len(tests))
    return tests

def construct_relationship_of_files(paths, directory, format):
    tests = collections.defaultdict(dict)
    for path in paths:
        m = match_with_format(directory, format, os.path.normpath(path))
        if not m:
            log.error("unrecognizable file found: %s", path)
            sys.exit(1)
        name = m.groupdict()["name"]
        ext = m.groupdict()["ext"]
        assert ext not in tests[name]
        tests[name][ext] = path
    for name in tests:
        if "in" not in tests[name]:
            assert "out" in tests[name]
            log.error("dangling output case: %s", tests[name]["out"])
            sys.exit(1)
    if not tests:
        log.error("no cases found")
        sys.exit(1)
    log.info("%d cases found", len(tests))
    return tests

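# `match_with_format` is used above but not shown. A self-contained sketch of the idea:
# turn a format string into a regex with named groups, then group matched files by test-case
# name. The token meanings here (%s for the name, %e for 'in'/'out') are assumptions made
# for this sketch; the real helper's tokens and details may differ.
import collections
import re


def format_to_regex(format):
    table = {'s': r'(?P<name>.+)', 'e': r'(?P<ext>in|out)', '%': '%'}
    pattern = ''
    i = 0
    while i < len(format):
        if format[i] == '%' and i + 1 < len(format):
            pattern += table.get(format[i + 1], re.escape(format[i:i + 2]))
            i += 2
        else:
            pattern += re.escape(format[i])
            i += 1
    return re.compile('^' + pattern + '$')


def group_test_files(paths, format='test/%s.%e'):
    regex = format_to_regex(format)
    tests = collections.defaultdict(dict)
    for path in paths:
        m = regex.match(path)
        if m:
            tests[m.group('name')][m.group('ext')] = path
    return dict(tests)


# e.g. group_test_files(['test/sample-1.in', 'test/sample-1.out'])
#      -> {'sample-1': {'in': 'test/sample-1.in', 'out': 'test/sample-1.out'}}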