def generate(source_files):
    tmpdir = generate_into_tmpdir(source_files)
    try:
        if source_files:
            # Only replace the selftests for the specified files.
            for f in source_files:
                newfile = os.path.join(tmpdir, selftest_filename(f))
                target = os.path.join(
                    os.curdir, "selftest/auto_selftest", selftest_filename(f))
                if os.path.exists(newfile):
                    mkdir_p(os.path.dirname(target))
                    os.rename(newfile, target)
                else:
                    sys.stderr.write(
                        "error: '%s' isn't a valid source file.\n" % f)
                    return 1
        else:
            # Replace all selftests, deleting selftests for source files that
            # no longer exist.
            target = "%s/selftest/auto_selftest" % os.curdir
            if os.path.exists(target):
                shutil.rmtree(target)
            os.rename(tmpdir, target)
    finally:
        shutil.rmtree(tmpdir, ignore_errors=True)

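# Every snippet in this file leans on a small `mkdir_p` helper whose
# definition isn't captured here. A sketch of the conventional "mkdir -p"
# idiom it presumably follows (an assumption, not the project's actual code):
import errno
import os


def mkdir_p(path):
    """Create `path` and any missing parent directories; succeed silently if
    the directory already exists."""
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST or not os.path.isdir(path):
            raise
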
def generate(source_files):
    tmpdir = generate_into_tmpdir(source_files)
    try:
        if source_files:
            # Only replace the selftests for the specified files.
            for f in source_files:
                newfile = os.path.join(tmpdir, selftest_filename(f))
                target = os.path.join(
                    os.curdir, "selftest/auto_selftest", selftest_filename(f))
                if os.path.exists(newfile):
                    mkdir_p(os.path.dirname(target))
                    os.rename(os.path.join(tmpdir, selftest_filename(f)),
                              target)
                else:
                    sys.stderr.write(
                        "error: '%s' isn't a valid source file.\n" % f)
                    return 1
        else:
            # Replace all selftests, deleting selftests for source files that
            # no longer exist.
            target = "%s/selftest/auto_selftest" % os.curdir
            if os.path.exists(target):
                shutil.rmtree(target)
            os.rename(tmpdir, target)
    finally:
        shutil.rmtree(tmpdir, ignore_errors=True)

def setup_cache(filename=None):
    """Set up the cache. Typically called by stbt-run before running your
    test.

    This is safe to call if lmdb isn't installed; in that case it'll be a
    no-op.

    :param str filename: Defaults to $XDG_CACHE_HOME/stbt/cache.lmdb (or
        $HOME/.cache/stbt/cache.lmdb if XDG_CACHE_HOME isn't set).
    """
    if lmdb is None or os.environ.get('STBT_DISABLE_CACHING'):
        yield
        return
    global _cache
    global _cache_full_warning
    if filename is None:
        filename = default_filename
    mkdir_p(os.path.dirname(filename) or ".")
    with lmdb.open(filename, map_size=MAX_CACHE_SIZE_BYTES) as db:  # pylint: disable=no-member
        assert _cache is None
        try:
            _cache = db
            _cache_full_warning = False
            yield
        finally:
            _cache = None

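# setup_cache() yields exactly once, so at its real definition site it is
# presumably decorated with contextlib.contextmanager (the decorator isn't
# captured in these snippets, and the module globals it references are
# defined elsewhere). A minimal usage sketch under that assumption:
from contextlib import contextmanager

setup_cache = contextmanager(setup_cache)

with setup_cache():
    pass  # run the test here; lookups now go through the module-level _cache
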
def generate(source_files):
    tmpdir, modules = generate_into_tmpdir(source_files)
    target_dir = os.path.join(os.curdir, "selftest/auto_selftest")
    try:
        if source_files:
            # Only replace the selftests for the specified files.
            for f in modules:
                newfile = os.path.join(tmpdir, selftest_filename(f.filename))
                target = os.path.join(
                    target_dir, selftest_filename(f.filename))
                if os.path.exists(newfile):
                    mkdir_p(os.path.dirname(target))
                    os.rename(newfile, target)
                else:
                    rm_f(target)
                    if f.error:
                        sys.stderr.write(
                            "error: '%s' isn't a valid source file.\n"
                            % f.filename)
                        return 1
                    sys.stderr.write(
                        "warning: '%s' doesn't define any selftests.\n"
                        % f.filename)
            prune_empty_directories(target_dir)
        else:
            # Replace all selftests, deleting selftests for source files that
            # no longer exist.
            if os.path.exists(target_dir):
                shutil.rmtree(target_dir)
            os.rename(tmpdir, target_dir)
    finally:
        shutil.rmtree(tmpdir, ignore_errors=True)

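# generate() above expects each entry in `modules` to expose a `.filename`
# and an `.error` attribute. The real type isn't captured in these snippets;
# a hypothetical stand-in with that interface, for illustration only:
from collections import namedtuple

Module = namedtuple("Module", "filename error")
# e.g. Module(filename="tests/menu.py", error=False) for a file that parsed
# cleanly but may or may not define any selftests.
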
def __init__(self, video_generators, video_format):
    from textwrap import dedent
    from tempfile import NamedTemporaryFile
    from subprocess import CalledProcessError, check_output, STDOUT
    from random import randint

    self.lighttpd_pid = None
    self.video_generators = dict(video_generators)
    video_cache_dir = _gen_video_cache_dir()
    mkdir_p(video_cache_dir)

    lighttpd_config_file = NamedTemporaryFile(
        prefix='stbt-camera-lighttpd-', suffix='.conf', delete=False)
    pidfile = NamedTemporaryFile(prefix="stbt-camera-lighttpd-",
                                 suffix=".pidfile")

    # This is an awful way to start listening on a random port and not a
    # great way of tracking the sub-process.
    port = None
    while port is None:
        try:
            lighttpd_config_file.seek(0)
            lighttpd_config_file.truncate(0)
            try_port = randint(10000, 30000)
            lighttpd_config_file.write(dedent("""\
                # This file is generated automatically by stb-tester.
                # DO NOT EDIT.
                server.document-root = "%s"
                server.port = %i
                server.pid-file = "%s"
                mimetype.assign = (
                    ".png" => "image/png",
                    ".mp4" => "video/mp4",
                    ".ts" => "video/MP2T"
                )""") % (video_cache_dir, try_port, pidfile.name))
            lighttpd_config_file.flush()
            check_output(['lighttpd', '-f', lighttpd_config_file.name],
                         close_fds=True, stderr=STDOUT)
            port = try_port
        except CalledProcessError as e:
            if e.output.find('Address already in use') != -1:
                pass
            else:
                sys.stderr.write("lighttpd failed to start: %s\n" % e.output)
                raise

    # lighttpd writes its pidfile out after forking rather than before,
    # causing a race. The real fix is to patch lighttpd to support socket
    # passing and then open the listening socket ourselves.
    while os.fstat(pidfile.fileno()).st_size == 0:
        sleep(0.1)
    self.lighttpd_pid = int(pidfile.read())

    self.base_url = "http://%s:%i/" % (_get_external_ip(), port)
    self.video_format = video_format

def generate_into_tmpdir():
    start_time = time.time()
    selftest_dir = "%s/selftest" % os.curdir
    mkdir_p(selftest_dir)
    # We use this process pool for sandboxing rather than concurrency:
    pool = multiprocessing.Pool(
        processes=1, maxtasksperchild=1, initializer=init_worker)
    tmpdir = tempfile.mkdtemp(dir=selftest_dir, prefix="auto_selftest")
    try:
        filenames = []
        for module_filename in _recursive_glob('*.py'):
            if module_filename.startswith('selftest'):
                continue
            if not is_valid_python_identifier(
                    os.path.basename(module_filename)[:-3]):
                continue
            filenames.append(module_filename)

        perf_log = []
        test_file_count = 0
        for module_filename in iterate_with_progress(filenames):
            outname = os.path.join(
                tmpdir, re.sub('.py$', '_selftest.py', module_filename))
            barename = re.sub('.py$', '_bare.py', outname)
            mkdir_p(os.path.dirname(outname))

            module = pool.apply(inspect_module, (module_filename,))
            test_line_count = write_bare_doctest(module, barename)
            if test_line_count:
                test_file_count += 1
                perf_log.extend(pool.apply_async(
                    update_doctests,
                    (barename, outname)).get(timeout=60 * 60))
                os.unlink(barename)

        if test_file_count > 0:
            with open('%s/README' % tmpdir, 'w') as f:
                f.write("\n".join(wrap(
                    "This directory contains self-tests generated by `stbt "
                    "auto-selftest`. Do not modify by hand. Any files "
                    "modified or created in this directory may be overwritten "
                    "or deleted by `stbt auto-selftest`.")) + "\n")

        for x in _recursive_glob('*.pyc', tmpdir):
            os.unlink(os.path.join(tmpdir, x))
        prune_empty_directories(tmpdir)

        print_perf_summary(perf_log, time.time() - start_time)

        return tmpdir
    except:
        pool.terminate()
        pool.join()
        shutil.rmtree(tmpdir)
        raise

def generate_into_tmpdir():
    start_time = time.time()
    selftest_dir = "%s/selftest" % os.curdir
    mkdir_p(selftest_dir)
    # We use this process pool for sandboxing rather than concurrency:
    pool = multiprocessing.Pool(
        processes=1, maxtasksperchild=1, initializer=init_worker)
    tmpdir = tempfile.mkdtemp(dir=selftest_dir, prefix="auto_selftest")
    try:
        filenames = []
        for module_filename in _recursive_glob('*.py'):
            if module_filename.startswith('selftest'):
                continue
            if not is_valid_python_identifier(
                    os.path.basename(module_filename)[:-3]):
                continue
            filenames.append(module_filename)

        perf_log = []
        test_file_count = 0
        for module_filename in iterate_with_progress(filenames):
            outname = os.path.join(
                tmpdir, re.sub('.py$', '_selftest.py', module_filename))
            barename = re.sub('.py$', '_bare.py', outname)
            mkdir_p(os.path.dirname(outname))

            module = pool.apply(inspect_module, (module_filename,))
            test_line_count = write_bare_doctest(module, barename)
            if test_line_count:
                test_file_count += 1
                perf_log.extend(pool.apply_async(
                    update_doctests,
                    (barename, outname)).get(timeout=60 * 60))
                os.unlink(barename)

        if test_file_count > 0:
            with open('%s/README' % tmpdir, 'w') as f:
                f.write("\n".join(wrap(
                    "This directory contains self-tests generated by `stbt "
                    "auto-selftests`. Do not modify by hand. Any files "
                    "modified or created in this directory may be overwritten "
                    "or deleted by `stbt auto-selftests`.")) + "\n")

        for x in _recursive_glob('*.pyc', tmpdir):
            os.unlink(os.path.join(tmpdir, x))
        prune_empty_directories(tmpdir)

        print_perf_summary(perf_log, time.time() - start_time)

        return tmpdir
    except:
        pool.terminate()
        pool.join()
        shutil.rmtree(tmpdir)
        raise

def generate_into_tmpdir(source_files=None):
    start_time = time.time()

    # Importing stbt + gstreamer bindings + opencv is slow.
    # multiprocessing.Pool uses `fork` so by importing here, these modules
    # won't be imported from scratch in each subprocess.
    import stbt  # pylint:disable=unused-variable

    selftest_dir = "%s/selftest" % os.curdir
    mkdir_p(selftest_dir)
    # We use this process pool for sandboxing rather than concurrency:
    pool = multiprocessing.Pool(
        processes=1, maxtasksperchild=1, initializer=init_worker)
    tmpdir = tempfile.mkdtemp(dir=selftest_dir, prefix="auto_selftest")
    modules = []
    try:
        if not source_files:
            source_files = valid_source_files(_recursive_glob('*.py'))

        perf_log = []
        test_file_count = 0
        for module_filename in iterate_with_progress(source_files):
            outname = os.path.join(tmpdir, selftest_filename(module_filename))
            barename = re.sub('.py$', '_bare.py', outname)
            mkdir_p(os.path.dirname(outname))

            module = pool.apply(inspect_module, (module_filename,))
            modules.append(module)
            test_line_count = write_bare_doctest(module, barename)
            if test_line_count:
                test_file_count += 1
                perf_log.extend(pool.apply_async(
                    update_doctests,
                    (barename, outname)).get(timeout=60 * 60))
                os.unlink(barename)

        if test_file_count > 0:
            with open('%s/README' % tmpdir, 'w') as f:
                f.write("\n".join(wrap(
                    "This directory contains self-tests generated by `stbt "
                    "auto-selftest`. Do not modify by hand. Any files "
                    "modified or created in this directory may be overwritten "
                    "or deleted by `stbt auto-selftest`.")) + "\n")

        for x in _recursive_glob('*.pyc', tmpdir):
            os.unlink(os.path.join(tmpdir, x))
        prune_empty_directories(tmpdir)

        print_perf_summary(perf_log, time.time() - start_time)

        return tmpdir, modules
    except:
        pool.terminate()
        pool.join()
        shutil.rmtree(tmpdir)
        raise

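# selftest_filename() isn't defined in these snippets. Judging by the earlier
# revision above, which computed the output name with
# re.sub('.py$', '_selftest.py', module_filename), it presumably maps
# "tests/menu.py" to "tests/menu_selftest.py". A sketch under that assumption
# (with the extension dot escaped here):
import re


def selftest_filename(module_filename):
    return re.sub(r'\.py$', '_selftest.py', module_filename)
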
def setup_dirs(outputdir, tag):
    mkdir_p(outputdir)
    rundir = make_rundir(outputdir, tag)
    symlink_f(rundir, os.path.join(outputdir, "current" + tag))
    try:
        yield os.path.join(outputdir, rundir)
    finally:
        # Now the test has finished...
        symlink_f(rundir, os.path.join(outputdir, "latest" + tag))

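# setup_dirs() relies on a symlink_f() helper that isn't shown in these
# snippets. The name suggests "ln -sf" behaviour; a sketch under that
# assumption (not the project's actual implementation):
import errno
import os


def symlink_f(target, link_name):
    """Create, or atomically replace, a symlink at `link_name` pointing to
    `target`."""
    tmp = link_name + ".tmp"
    try:
        os.remove(tmp)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
    os.symlink(target, tmp)
    os.rename(tmp, link_name)  # renaming over an existing link is atomic on POSIX
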
def cache(filename=None):
    if os.environ.get('STBT_DISABLE_CACHING'):
        yield
        return
    global _cache
    global _cache_full_warning
    if filename is None:
        cache_home = os.environ.get('XDG_CACHE_HOME') \
            or '%s/.cache' % os.environ['HOME']
        mkdir_p(cache_home + "/stbt")
        filename = cache_home + "/stbt/cache.lmdb"
    with lmdb.open(filename, map_size=MAX_CACHE_SIZE_BYTES) as db:  # pylint: disable=no-member
        assert _cache is None
        try:
            _cache = db
            _cache_full_warning = False
            yield
        finally:
            _cache = None

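# Elsewhere the code presumably reads and writes the module-level _cache
# handle through lmdb transactions. Those helpers aren't captured in these
# snippets; the following is a generic py-lmdb usage sketch, not the
# project's actual get/put code:
import lmdb

env = lmdb.open("/tmp/example-cache.lmdb", map_size=1024 * 1024)
with env.begin(write=True) as txn:
    txn.put(b"key", b"value")       # keys and values are byte strings
with env.begin() as txn:
    assert txn.get(b"key") == b"value"
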
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument("socket", help="shmsrc socket")
    args = parser.parse_args(argv[1:])

    cache_root = (os.environ.get("XDG_CACHE_HOME", None) or
                  os.environ.get("HOME") + '/.cache')
    default_file = '%s/stbt/camera-video-cache/black.mp4' % cache_root
    if not os.path.exists(default_file):
        utils.mkdir_p(os.path.dirname(default_file))
        gst_utils.frames_to_video(
            default_file,
            [(bytearray([0, 0, 0]) * 1280 * 720, 5 * Gst.SECOND)],
            'video/x-raw,format=BGR,width=1280,height=720', 'mp4')
    default_uri = "file://" + default_file

    frame_bytes = 1280 * 720 * 3
    next_video = [default_uri]

    def about_to_finish(playbin):
        playbin.set_property('uri', next_video[0])
        next_video[0] = default_uri
        playbin.set_state(Gst.State.PLAYING)

    if USE_SHMSRC:
        pipeline_desc = (
            """\
            playbin name=pb audio-sink=fakesink uri=%s flags=0x00000791 \
            video-sink="videoconvert \
                ! video/x-raw,width=1280,height=720,format=RGB ! identity ! \
                shmsink wait-for-connection=true shm-size=%i max-lateness=-1 \
                qos=false socket-path=%s blocksize=%i sync=true \
                buffer-time=100000000" """
            % (default_uri, frame_bytes * 1000, args.socket, frame_bytes))
    else:
        pipeline_desc = (
            """playbin name=pb audio-sink=fakesink uri=%s flags=0x00000791 \
            video-sink="videoconvert ! timeoverlay ! xvimagesink sync=true" """
            % default_uri)

    playbin = Gst.parse_launch(pipeline_desc)
    playbin.connect("about-to-finish", about_to_finish)

    runner = gst_utils.PipelineRunner(playbin)
    gst_thread = threading.Thread(target=runner.run)
    gst_thread.daemon = True
    gst_thread.start()
    playbin.get_state(0)

    def set_uri(uri):
        print "=== Setting URI to", uri
        if uri == 'stop':
            next_video[0] = default_uri
        else:
            next_video[0] = uri
        playbin.seek(
            1.0, Gst.Format.TIME,
            Gst.SeekFlags.FLUSH | Gst.SeekFlags.KEY_UNIT,
            Gst.SeekType.END, 0, Gst.SeekType.NONE, 0)

    while True:
        uri = sys.stdin.readline()
        if uri == '':
            break
        elif len(uri.strip()) > 0:
            set_uri(uri.strip())

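# main() above is driven over stdin: each non-empty line is the URI to play
# next, and the literal line "stop" falls back to the default black clip.
# A sketch of a hypothetical caller (the script name and socket path below
# are made up for illustration):
import subprocess

proc = subprocess.Popen(
    ["python", "camera_video_server.py", "/tmp/stbt-shm-socket"],
    stdin=subprocess.PIPE)
proc.stdin.write("file:///tmp/my-test-clip.mp4\n")
proc.stdin.flush()
# ...later, fall back to the default black video:
proc.stdin.write("stop\n")
proc.stdin.flush()
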
def main(argv):
    parser = argparse.ArgumentParser(
        usage=("\n stbt batch run [options] test.py [test.py ...]"
               "\n stbt batch run [options] test.py arg [arg ...] -- "
               "test.py arg [arg ...] [-- ...])"))
    parser.add_argument(
        '-1', '--run-once', action="store_true", help=(
            'Run once. The default behaviour is to run the test repeatedly as '
            'long as it passes.'))
    parser.add_argument(
        '-k', '--keep-going', action="count", help=(
            'Continue running after failures. Provide this argument once to '
            'continue running after "uninteresting" failures, and twice to '
            'continue running after any failure (except those that would '
            'prevent any further test from passing).'))
    parser.add_argument(
        '-d', '--debug', action="store_true",
        help='Enable "stbt-debug" dump of intermediate images.')
    parser.add_argument(
        '-v', '--verbose', action="count", default=0, help=(
            'Verbose. Provide this argument once to print stbt standard '
            'output. Provide this argument twice to also print stbt stderr '
            'output.'))
    parser.add_argument(
        '-o', '--output', default=os.curdir, help=(
            'Output directory to save the report and test-run logs under '
            '(defaults to the current directory).'))
    parser.add_argument(
        '-t', '--tag', help=(
            'Tag to add to test-run directory names (useful to differentiate '
            'directories when you intend to merge test results from multiple '
            'machines).'))
    parser.add_argument(
        '--shuffle', action="store_true", help=(
            "Run the test cases in a random order attempting to spend the same "
            "total amount of time executing each test case."))
    parser.add_argument(
        '--no-html-report', action='store_false', dest='do_html_report',
        help="""Don't generate an HTML report after each test-run; generating
            the report can be slow if there are many results in the output
            directory. You can still generate the HTML reports afterwards
            with 'stbt batch report'.""")
    parser.add_argument(
        '--no-save-video', action='store_false', dest='do_save_video',
        help="""Don't generate a video recording of each test-run. Use this
            if you are saving video another way.""")
    parser.add_argument('test_name', nargs=argparse.REMAINDER)
    args = parser.parse_args(argv[1:])

    if args.tag is not None:
        tag_suffix = '-' + args.tag
    else:
        tag_suffix = ""

    os.environ['PYTHONUNBUFFERED'] = 'x'

    term_count = [0]

    def on_term(_signo, _frame):
        term_count[0] += 1
        if term_count[0] == 1:
            sys.stderr.write(
                "\nReceived interrupt; waiting for current test to "
                "complete.\n")
        else:
            sys.stderr.write("Received interrupt; exiting.\n")
            sys.exit(1)

    signal.signal(signal.SIGINT, on_term)
    signal.signal(signal.SIGTERM, on_term)

    failure_count = 0
    last_exit_status = 0

    test_cases = parse_test_args(args.test_name)

    run_count = 0
    if args.shuffle:
        test_generator = shuffle(test_cases, repeat=not args.run_once)
    else:
        test_generator = loop_tests(test_cases, repeat=not args.run_once)

    mkdir_p(args.output)

    # We assume that all test-cases are in the same git repo:
    git_info = read_git_info(os.path.dirname(test_cases[0][0]))

    for test_name, test_args in test_generator:
        if term_count[0] > 0:
            break
        run_count += 1
        last_exit_status = run_test(
            args, tag_suffix, test_name, test_args, git_info)
        if last_exit_status != 0:
            failure_count += 1

        if os.path.exists("%s/latest%s/unrecoverable-error"
                          % (args.output, tag_suffix)):
            break

        if last_exit_status == 0:
            continue
        elif last_exit_status >= 2 and args.keep_going > 0:
            # "Uninteresting" failures due to the test infrastructure
            continue
        elif args.keep_going >= 2:
            continue
        else:
            break

    if run_count == 1:
        # If we only run a single test a single time propagate the result
        # through
        return last_exit_status
    elif failure_count == 0:
        return 0
    else:
        return 1

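# parse_test_args() isn't shown in these snippets. From the usage string and
# the way test_cases is consumed above (tuples of test name plus extra
# arguments, with test_cases[0][0] being a path), it presumably splits the
# remaining argv at "--" into (test_script, extra_args) pairs. A sketch under
# that assumption:
def parse_test_args(args):
    test_cases = []
    current = []
    for arg in args + ["--"]:
        if arg == "--":
            if current:
                test_cases.append((current[0], current[1:]))
            current = []
        else:
            current.append(arg)
    return test_cases

# parse_test_args(["a.py", "x", "--", "b.py"]) == [("a.py", ["x"]), ("b.py", [])]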