def compile(self, a, terminal=False):
    try:
        file = open(a, 'r', encoding="utf-8").read()
        file = file.replace("</main>", "")
        run(file, terminal)
    except FileNotFoundError:
        print(f"File not found: {a} does not exist")
def do_read_expr(crashDev, canRead):
    '''
    Assemble a walb device -> make the log or data device fail -> try to read.

    crashDev :: str  - crash device.
    canRead :: bool  - True if the read must succeed, False if it must fail.
    '''
    print_line()
    print 'TEST_INIT read_test', crashDev, canRead
    run(BIN + 'wdevc format-ldev %s %s > /dev/null' % (LDEV, DDEV))
    run(BIN + 'wdevc create-wdev %s %s -n %s > /dev/null' % (LDEV, DDEV, WDEV_NAME))
    ret = read_first_block(WDEV)
    if not ret:
        raise Exception('TEST_FAILURE read_test read must succeed')
    run(BIN + 'crashblkc io-error %s r' % crashDev)
    ret = read_first_block(WDEV)
    if ret != canRead:
        raise Exception('TEST_FAILURE read_test read must %s'
                        % ('succeed' if canRead else 'fail'))
    run(BIN + 'crashblkc recover %s' % crashDev)
    ret = read_first_block(WDEV)
    if not ret:
        raise Exception('TEST_FAILURE read_test read must succeed')
    run(BIN + 'wdevc delete-wdev -f %s' % WDEV)
    print 'TEST_SUCCESS read_test', crashDev, canRead
    print_line()
def prepare_bdev(devPath, devFile):
    # devPath ex. /dev/loop0, devFile ex. ldev64M.0
    if USE_LOOP_DEV == 1:
        run("losetup %s %s" % (devPath, devFile))
    else:
        run("dd oflag=direct if=%s of=%s bs=1M" % (devFile, devPath))
def main(
    config_path: Optional[str] = None,
    config_section_name: Optional[str] = None,
) -> int:
{%- elif cookiecutter.command_line_interface|lower == 'argparse' %}
def main() -> int:
    parser = get_arguments_parser()
    args = parser.parse_args()
    {% if 'no' not in cookiecutter.config_file_format|lower -%}
    config_path = args.config_path
    config_section_name = args.config_section_name
    {%- endif %}
{%- endif %}
    {% if 'no' in cookiecutter.config_file_format|lower -%}
    config = try_to_load_config()
    {% else %}
    config = try_to_load_config(config_path, config_section_name)
    {%- endif %}
    setup_logging(config['logging'])

    try:
        LOG.info("{{ cookiecutter.executable_name }} started")
        run()
    except Exception:
        LOG.exception("{{ cookiecutter.executable_name }} failed")
        exit_code = -1
    else:
        LOG.info("{{ cookiecutter.executable_name }} ended")
        exit_code = 0
    finally:
        return exit_code
def do_write_read_expr():
    '''
    (1) Put the data device into the read-error state.
    (2) Write a block.
    (3) Wait until the block data has been evicted from the pending data.
    (4) Try to read the block and verify that the read fails.
    '''
    print_line()
    print 'TEST_INIT write_read_test'
    run(BIN + 'wdevc format-ldev %s %s > /dev/null' % (LDEV, DDEV))
    run(BIN + 'wdevc create-wdev %s %s -n %s > /dev/null' % (LDEV, DDEV, WDEV_NAME))
    run(BIN + 'crashblkc io-error %s r' % DDEV)
    write_first_block(WDEV)
    wait_for_written(WDEV_NAME)
    ret = read_first_block(WDEV)
    if ret:
        raise Exception('TEST_FAILURE write_read_test read must fail')
    run(BIN + 'crashblkc recover %s' % DDEV)
    run(BIN + 'wdevc delete-wdev -f %s' % WDEV)
    print 'TEST_SUCCESS write_read_test'
    print_line()
def test_invalid_server_call(self):
    loop = asyncio.get_event_loop()
    with pytest.raises(Exception) as excinfo1:
        run(server_call(None, None), loop=loop)
    with pytest.raises(Exception) as excinfo2:
        run(server_call('GetVersion', None), loop=loop)
    assert 'method' in str(excinfo1.value)
    assert 'server' in str(excinfo2.value)
def write_first_block(devPath):
    '''
    devPath :: str
    return :: bool
    '''
    try:
        run('sudo dd if=/dev/zero of=%s oflag=direct bs=4096 count=1' % devPath)
        return True
    except Exception:
        return False
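# read_first_block() is used by do_read_expr() and do_write_read_expr() above
# but is not shown in this section. A minimal sketch, assuming it simply
# mirrors write_first_block() with a direct 4KiB read of the first block:
def read_first_block(devPath):
    '''
    devPath :: str
    return :: bool
    '''
    try:
        run('sudo dd if=%s of=/dev/null iflag=direct bs=4096 count=1' % devPath)
        return True
    except Exception:
        return False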
def log_diff_equality_test():
    print "#################### Log/diff equality test ####################"
    for i in xrange(1, 5):
        make_zero_image(0, 1, 2)
        run(BIN + ("/wlog-redo -z ddev32M.0 < {}.wlog".format(i)))
        run(BIN + ("/wdiff-redo -z ddev32M.1 -i {}.s.wdiff".format(i)))
        run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.1")
        check_result("log/diff equality test {}th wlog/s.wdiff.".format(i))
        run(BIN + ("/wdiff-redo -z ddev32M.2 -i {}.i.wdiff".format(i)))
        run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.2")
        check_result("log/diff equality test {}th wlog/i.wdiff.".format(i))
def run_two(cfg: configuration.Configuration):
    print(cfg)
    movie = Movie()
    draw_cfg(movie, cfg)
    cheerios, result = run(cfg)
    cfg.rolling = False
    cheerios2, result = run(cfg)
    to_draw = [[(circle.Circle(cheerio, cfg.cheerio_radius), "green"),
                (circle.Circle(cheerio2, cfg.cheerio_radius), "blue")]
               for cheerio, cheerio2 in zip(cheerios, cheerios2)]
    movie.run_animation(to_draw, 0)
    exit(0)
def set_motors(self, left, right, speed=None):
    if self.is_mocked():
        self.stop(reset_motors=False)
    if speed is None:
        speed = self.saved_speed
    self.odometry.speed = speed
    self._L = left
    self._R = right
    self._d = float(max(abs(left), abs(right)))
    delay = 0.1
    self._start_motors = time.time() + delay
    self.send('SetMotor LWheelDist %i RWheelDist %i Speed %i' % (left, right, speed),
              delay=delay)

    def resetLR():
        #debug('Finished Movement')
        if self.is_mocked():
            self.stop(reset_motors=False)
        elif hasattr(self, '_resetLR_timer'):
            self._resetLR_timer.cancel()

    self._resetLR_timer = run(resetLR, delay=max(self.time_to_complete() - delay, 0))
def test_session_expired(self, async_populated_api):
    credentials = async_populated_api.credentials
    credentials.password = PASSWORD
    credentials.session_id = 'abc123'
    test_api = API.from_credentials(credentials, loop=async_populated_api.loop)
    users = run(test_api.get_async('User'), loop=async_populated_api.loop)
    assert len(users) >= 1
def main():
    """
    Command line interface for the package, if desired. If not, remove the
    `entry_points...` line from `setup.py`.
    """
    PARSER = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )

    # parse arguments from command line
    ARGS = PARSER.parse_args()

    # validate command line arguments
    # import packages after parsing to speed up command line responsiveness
    validated_args = validate(ARGS)

    # run genome-interval-similarity function
    from run import run
    run(**validated_args)
def consolidation_test2():
    print "##################### Consolidation test ####################"
    run("cp ddev32M ddev32M.0")
    for i in xrange(1, 5):
        run(BIN + ("/wdiff-redo -z ddev32M.0 < %d.wdiff" % i))
    run(BIN + "/virt-full-cat -stat -i ddev32M -o ddev32M.1 -d 1.wdiff 2.wdiff 3.wdiff 4.wdiff")
    run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.1")
    check_result("consolidation test 3.")
def prepare_test():
    # Generate wlog/wdiff files for test.
    print "#################### Generate wlog/wdiff files for test ####################"
    run("dd if=/dev/urandom of=./ddev32M bs=1048576 count=32")
    for i in xrange(1, 5):
        #BIN/wlog-gen --nodiscard -s 32M -o ${i}.wlog
        run(BIN + ("/wlog-gen -s 32M -z 32M --minDiscardSize 512 --maxDiscardSize 1M -o {}.wlog"
                   .format(i)))
        run(BIN + ("/wlog-to-wdiff -o {}.s.wdiff -i {}.wlog".format(i, i)))
        run(BIN + ("/wlog-to-wdiff -indexed -o {}.i.wdiff -i {}.wlog".format(i, i)))
def run_thread(token, mab_name, policy_name, file_id):
    np.random.seed(file_id + token)
    res = 1000
    reward_function = np.sin(np.linspace(0, 20, res)) * (np.linspace(0, 20, res) - 20) ** 2
    reward_function = (reward_function - reward_function.min()) / (
        reward_function.max() * 1.5 - reward_function.min()) + 0.11

    mab = {}
    mab['gaussian'] = GaussianMAB([(mu, 0.1) for mu in np.arange(1, 11) * 0.06 + 0.2])
    mab['nonstationary_trunc_gauss'] = NonStationaryTruncatedGaussianMAB(
        [(mu, 0.1) for mu in np.arange(1, 11) * 0.06 + 0.2])
    mab['gaussian_lipschitz'] = GaussianMAB(
        list(zip(reward_function, [0.1] * len(reward_function))))
    mab['nonstationary_lipschitz'] = NonStationaryTruncatedGaussianMAB(
        list(zip(reward_function, [0.1] * len(reward_function))))
    mab['correlated'] = CorrelatedNonstationaryGaussianMAB(
        [0.9] * 100, alpha=0.6, forgetting_rate=0.001)

    alpha = lambda n: 1 / n
    policies = {
        'zooming3': {'policy': adaptive_zooming3, 'label': 'Adaptive Zooming - 3rd attempt', 'data': [],
                     'params': {'alpha': np.concatenate((np.arange(1, 10) * 0.1, np.arange(1, 10), np.arange(1, 10) * 10)),
                                'beta': np.concatenate((np.arange(1, 10) * 0.1, np.arange(1, 10), np.arange(1, 10) * 10)),
                                'active_set': []}},
        'eps': {'policy': epsilonGreedy, 'label': '$\epsilon$-greedy', 'data': [],
                'params': {'epsilon': np.concatenate(([0], np.arange(1, 10) * 0.01, np.arange(1, 10) * 0.01 + 0.1)),
                           'alpha': alpha}},
        'exp3': {'policy': exp3, 'label': 'Exp3', 'data': [],
                 'params': {'eta': np.linspace(0, 1, 41), 'epsilon': np.linspace(0, 1, 41)}},
        'exp3_loss_est': {'policy': exp3_loss_est, 'label': 'Exp3_loss_est', 'data': [],
                          'params': {'eta': np.linspace(0, 1, 41), 'epsilon': np.linspace(0, 1, 41)}},
        'exp3_s': {'policy': exp3_s, 'label': 'Exp3_S', 'data': [],
                   'params': {'eta': np.linspace(0, 1, 41), 'beta': np.linspace(0, 0.5, 11),
                              'epsilon': np.linspace(0, 1, 41)}},
        'zooming3_a': {'policy': adaptive_zooming3, 'label': 'Adaptive Zooming', 'data': [],
                       'params': {'alpha': np.linspace(0, 1000, 41), 'beta': np.linspace(0, 100, 41),
                                  'active_set': []}},
        'zooming3_b': {'policy': adaptive_zooming3, 'label': 'Adaptive Zooming', 'data': [],
                       'params': {'alpha': np.linspace(0, 1000, 41), 'beta': np.linspace(0, 5, 41),
                                  'active_set': []}},
    }

    steps = 1000
    for _ in range(2000):
        # sample from hyperparameters randomly
        params = {k: (np.random.choice(v) if isinstance(v, Iterable) and len(v) > 0 else v)
                  for k, v in policies[policy_name]['params'].items()}
        result = run(mab[mab_name], policy=policies[policy_name]['policy'], steps=steps, **params)
        output = {'mab': mab_name, 'name': policy_name, 'result': result[-1, 2],
                  'result2': np.sum(np.cumsum(result[:, 1])) / steps}
        output.update(params)
        print(output)
        with open(path + "mab_hyperopt_{}_{}.json".format(file_id, token), "a") as f:
            json.dump(output, f, sort_keys=True, default=json_parse_objects)
            f.write("\n")
def full_image_test():
    print "#################### Full image test ####################"
    run(BIN + "/wdiff-full < ddev32M > 0.wdiff")
    make_zero_image(0)
    run(BIN + "/wdiff-redo -z ddev32M.0 < 0.wdiff")
    run(BIN + "/bdiff -b 512 ddev32M ddev32M.0")
    check_result("full image test")
def test_add_edit_remove(self, async_populated_api_entity):
    def get_trailer():
        trailers = run(async_populated_api_entity.get_async('Trailer', name=TRAILER_NAME),
                       loop=async_populated_api_entity.loop)
        assert len(trailers) == 1
        assert len(trailers[0]) == 1
        return trailers[0][0]

    user = async_populated_api_entity.get('User', name=USERNAME)[0]
    trailer = {
        'name': TRAILER_NAME,
        'groups': user['companyGroups']
    }
    trailer_id = run(async_populated_api_entity.add_async('Trailer', trailer),
                     loop=async_populated_api_entity.loop)
    assert len(trailer_id) == 1
    trailer['id'] = trailer_id[0]
    trailer = get_trailer()
    assert trailer['name'] == TRAILER_NAME
    comment = 'some comment'
    trailer['comment'] = comment
    run(async_populated_api_entity.set_async('Trailer', trailer),
        loop=async_populated_api_entity.loop)
    trailer = get_trailer()
    assert trailer['comment'] == comment
    run(async_populated_api_entity.remove_async('Trailer', trailer),
        loop=async_populated_api_entity.loop)
    trailers = run(async_populated_api_entity.get_async('Trailer', name=TRAILER_NAME),
                   loop=async_populated_api_entity.loop)
    assert len(trailers) == 1
    assert len(trailers[0]) == 0
def log_diff_equality_test():
    print "#################### Log/diff equality test ####################"
    for i in xrange(1, 5):
        make_zero_image(0, 1)
        run(BIN + ("/wlog-redo -z ddev32M.0 < %d.wlog" % i))
        run(BIN + ("/wdiff-redo -z ddev32M.1 < %d.wdiff" % i))
        run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.1")
        check_result("log/diff equality test %dth wlog/wdiff." % i)
def format_ldev():
    run("dd if=/dev/zero of=%s bs=1M count=128" % LDEV)
    run("dd if=/dev/zero of=%s bs=1M count=64" % DDEV_0)
    prepare_bdev(LOOP0, LDEV)
    prepare_bdev(LOOP1, DDEV_0)
    run("%s format-ldev %s %s" % (CTL2, LOOP0, LOOP1))
    #RING_BUFFER_SIZE=$(${BIN}/wldev-info $LOOP0 |grep ringBufferSize |awk '{print $2}')
    run("%s/wldev-info %s > %s" % (BIN, LOOP0, TMP_FILE))
    v = getKeyValue(TMP_FILE, "ringBufferSize", 1)
    global RING_BUFFER_SIZE
    RING_BUFFER_SIZE = int(v)
    print "RING_BUFFER_SIZE=", RING_BUFFER_SIZE
    time.sleep(1)
    finalize_bdev(LOOP0, LDEV)
    finalize_bdev(LOOP1, DDEV_0)
def prepare_test():
    # Generate wlog/wdiff files for test.
    print "#################### Generate wlog/wdiff files for test ####################"
    run("dd if=/dev/urandom of=./ddev32M bs=1048576 count=32")
    for i in xrange(1, 5):
        #BIN/wlog-gen --nodiscard -s 32M -o ${i}.wlog
        run(BIN + ("/wlog-gen -s 32M -z 32M --minDiscardSize 512 --maxDiscardSize 1M -o %d.wlog" % i))
        run(BIN + ("/wlog-to-wdiff > %d.wdiff < %d.wlog" % (i, i)))
def test_func(self):
    """
    Usage:
        export CUDA_VISIBLE_DEVICES=0,1
        export PORT=6006
        export TIME_STR=1
        export PYTHONPATH=./submodule
        python detectron2_exp/tests/run_detectron2.py \
            --config ./detectron2_exp/configs/detectron2.yaml \
            --command train_scratch_mask_rcnn_dense_R_50_FPN_3x_gn_2gpu \
            --outdir results/Detectron2/train_scratch_mask_rcnn_dense_R_50_FPN_3x_gn_2gpu
    :return:
    """
    if 'CUDA_VISIBLE_DEVICES' not in os.environ:
        os.environ['CUDA_VISIBLE_DEVICES'] = '0'
    if 'PORT' not in os.environ:
        os.environ['PORT'] = '6006'
    if 'TIME_STR' not in os.environ:
        os.environ['TIME_STR'] = '0' if utils.is_debugging() else '1'
    # func name
    assert sys._getframe().f_code.co_name.startswith('test_')
    command = sys._getframe().f_code.co_name[5:]
    class_name = self.__class__.__name__[7:] \
        if self.__class__.__name__.startswith('Testing') else self.__class__.__name__
    class_name = class_name.strip('_')
    outdir = f'results/{class_name}/{command}'
    argv_str = f"""
        --config domain_adaptive_faster_rcnn_pytorch_exp/configs/domain_faster_rcnn.yaml
        --command {command}
        --outdir {outdir}
        --overwrite_opts False
        """
    from run import run
    run(argv_str)
def create_heat_map(times, file_name, uniquely=False):
    stones, cheerio, nest, path = init_stones(file_name)
    print("Ants:", path.length())
    h = HeatMap()
    total = 0
    for i in range(times):
        cheerios, result = run(cheerio, nest, stones, max_frames, verbose=False)
        total += MotionPath(cheerios).length()
        h.add_to_map(cheerios, uniquely)
        if i % 100 == 0 and i:
            print(i)
    print("Simulation:", total / times)
    if not file_name:
        file_name = "tmp"
    h.save_map("data/" + file_name + "_heatmap.txt")
def test_multi_call(self, async_populated_api):
    calls = [
        ['Get', dict(typeName='User', search=dict(name='{0}'.format(USERNAME)))],
        ['GetVersion']
    ]
    results = run(async_populated_api.multi_call_async(calls), loop=async_populated_api.loop)
    assert len(results) == 1
    results = results[0]
    assert len(results) == 2
    assert results[0] is not None
    assert len(results[0]) == 1
    assert results[0][0]['name'] == USERNAME
    assert results[1] is not None
    version_split = results[1].split('.')
    assert len(version_split) == 4
def test_resume(run_trainings_not_dry: str, cfg_all_not_dry: DictConfig,
                tmp_path_factory: TempPathFactory) -> None:
    old_checkpoint_path = get_checkpoint_path(run_trainings_not_dry)

    new_cfg = OmegaConf.create(cfg_all_not_dry)
    new_storage_dir = tmp_path_factory.mktemp("resumed_training")
    new_cfg.core.storage_dir = str(new_storage_dir)
    new_cfg.train.trainer.max_steps = 2 * TRAIN_MAX_NSTEPS
    new_cfg.train.restore.ckpt_or_run_path = str(old_checkpoint_path)
    new_cfg.train.restore.mode = "hotstart"

    new_training_dir = run(new_cfg)

    old_checkpoint = NNCheckpointIO.load(path=old_checkpoint_path)
    new_checkpoint = load_checkpoint(new_training_dir)

    assert old_checkpoint["run_path"] != new_checkpoint["run_path"]
    assert old_checkpoint["global_step"] * 2 == new_checkpoint["global_step"]
    assert new_checkpoint["epoch"] == 2
def Initialization():
    global endLsid0
    global nPacks0
    global totalPadding0
    format_ldev()
    #run("%s/wlog-gen -s 64M -z 16M --maxPackSize 4M -o %s" % (BIN, WLOG_0))
    run("%s/wlog-gen -s 64M -z 32M --minIoSize 512 --maxIoSize 1M --minDiscardSize 1M --maxDiscardSize 64M --maxPackSize 4M -o %s" % (BIN, WLOG_0))
    #${BIN}/wlog-gen -s 64M -z 16M --minIoSize 512 --maxIoSize 512 --maxPackSize 1M -o WLOG_0
    endLsid0 = echo_wlog_value(WLOG_0, "reallyEndLsid")
    nPacks0 = echo_wlog_value(WLOG_0, "nrPacks")
    totalPadding0 = echo_wlog_value(WLOG_0, "paddingPb")
    shutil.copyfile(DDEV_0, DDEV_0z)
    run("%s/wlog-redo %s < %s" % (BIN, DDEV_0, WLOG_0))
    run("%s/wlog-redo %s -z < %s" % (BIN, DDEV_0z, WLOG_0))
def on_button_click(self):
    self.filename = self.entry_file.get()
    self.clear_canvas()
    if os.path.isfile(self.filename):
        query_wavsound = wavsound(self.filename)
        self.dbroot = self.entry_db.get()
        samples = self.samples
        partition = int(len(query_wavsound.get_data()) / self.samplelength)
        max_split = self.max_split
        # repository query time
        start_time = time.time()
        result_lst = run(self.filename, self.samplelength, samples, self.dbroot, max_split)
        # output
        output = "Search Result: \n"
        # Tabulate % match (wav files with 0% match are excluded from the result)
        for pair in result_lst:
            output += pair[0] + " : " + (40 - len(pair[0])) * " " + pair[1] + "% match" + "\n"
        # Show search time
        timelapse_parallel = time.time() - start_time
        output = output + str(timelapse_parallel) + "seconds"
        self.text_result.insert('1.0', output + "\n")
        self.draw_wavform(query_wavsound.get_data(), "cyan", "query")
        top_match_wavsoundfile = output.split()[2]
        print(output.split())
        print(top_match_wavsoundfile)
        top_match_wavsound = wavsound(top_match_wavsoundfile)
        self.draw_wavform(top_match_wavsound.get_data(), "white", "result")
def experiment(cfg: configuration.Configuration, times, randomize_stones):
    count = 0
    count_ok = 0
    total_time = 0
    total_path_length = 0
    while count < times:
        if randomize_stones:
            cfg.rerandom_stones()
        cfg.reset_runseed(None)
        cheerios, result = run(cfg, verbose=False)
        if result != -1:
            count += 1
            if result > 0:
                count_ok += 1
                total_time += result
            else:
                total_time += cfg.max_frames
            total_path_length += MotionPath(cheerios).length()
    avg_time = int(total_time / times)
    success_percent = int(count_ok * 100.0 / times)
    avg_length = int(1000 * total_path_length / times)
    ants_length = int(1000 * cfg.path.length())
    print("{} finished = {}% time={} antslength={} length={}".format(
        cfg, success_percent, avg_time, ants_length, avg_length))
def max_io_blocks_test():
    print "#################### MaxIoBlocks test #################### "
    make_zero_image(0, 1, 2)
    for i in xrange(1, 5):
        run(BIN + ("/wlog-to-wdiff -x 4K < %d.wlog > %d-4K.wdiff" % (i, i)))
        run(BIN + ("/wlog-to-wdiff -x 16K < %d.wlog > %d-16K.wdiff" % (i, i)))
        run(BIN + ("/wdiff-redo -z ddev32M.0 < %d.wdiff" % i))
        run(BIN + ("/wdiff-redo -z ddev32M.1 < %d-4K.wdiff" % i))
        run(BIN + ("/wdiff-redo -z ddev32M.2 < %d-16K.wdiff" % i))
        run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.1")
        check_result("maxIoBlocks test %dth wdiff 4K" % i)
        run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.2")
        check_result("maxIoBlocks test %dth wdiff 16K" % i)
def do_write_expr(crashDev, mode, isOverlap, lostPct, reorder):
    '''
    This test checks that the checkpointing and redo functionality of walb
    devices works correctly.

    crashDev :: str   - crash device.
    mode :: str       - 'crash', 'write-error', or 'rw-error'.
    isOverlap :: bool - True to run the overlap test.
    lostPct :: int    - lost percentage on crash, in [0, 100].
    reorder :: bool   - True if you want to reorder IOs.
    '''
    print_line()
    print 'TEST_INIT write_test', crashDev, mode, isOverlap, lostPct, reorder
    for bdev in [LDEV, DDEV]:
        run(BIN + 'crashblkc set-reorder %s %d' % (bdev, 1 if reorder else 0))
    run(BIN + 'crashblkc set-lost-pct %s %d' % (crashDev, lostPct))
    run(BIN + 'wdevc format-ldev %s %s > /dev/null' % (LDEV, DDEV))
    run(BIN + 'wdevc create-wdev %s %s -n %d > /dev/null' % (LDEV, DDEV, WDEV_NAME))
    #run(BIN + 'wdevc set-checkpoint-interval %s %d' % (WDEV, 1000))
    if isOverlap:
        opt = '-ol'
    else:
        opt = '-nr %d' % (NR_THREADS_REORDER if reorder else NR_THREADS)
    proc = run_async(BIN + 'crash-test write %s -to %d -bs %s -ii %d -fi %d %s > %s'
                     % (opt, CRASHTEST_TIMEOUT_S, BLOCK_SIZE, IO_INTERVAL_MS,
                        FLUSH_INTERVAL_MS, WDEV, 'write.log'))
    print 'sleep %d sec...' % CRASHTEST_RUNNING_S
    time.sleep(CRASHTEST_RUNNING_S)
    if mode == 'crash':
        run(BIN + 'crashblkc crash %s' % crashDev)
    else:
        run(BIN + 'crashblkc io-error %s %s'
            % (crashDev, ('w' if mode == 'write-error' else 'rw')))
    if not proc.wait():
        raise EnvironmentError(proc.args)
    run(BIN + 'wdevc delete-wdev -f %s' % WDEV)
    run(BIN + 'crashblkc recover %s' % crashDev)
    run(BIN + 'wdevc create-wdev %s %s > /dev/null' % (LDEV, DDEV))
    run(BIN + 'crash-test read %s -bs %s %s > read.log' % (opt, BLOCK_SIZE, WDEV))
    run(BIN + 'wdevc delete-wdev -f %s' % WDEV)
    if isOverlap:
        opt = '-ol'
    else:
        opt = ''
    proc = run_async(BIN + 'crash-test verify %s %s %s' % (opt, 'write.log', 'read.log'))
    if not proc.wait():
        raise Exception('TEST_FAILURE write_test', crashDev, mode, isOverlap, lostPct, reorder)
    print 'TEST_SUCCESS write_test', crashDev, mode, isOverlap, lostPct, reorder
    print_line()
import os
import sys

from run import *

if __name__ == "__main__":
    pa = sys.argv[1]
    os.system('rm ../course/test')
    s = 'ln -s ' + os.path.abspath('.') + "/" + pa + " " + os.path.abspath('../course/test')
    os.system(s)
    truehash = run(pa)
    os.chdir('../course')
    res = os.popen('./run').read()
    ll = res.split('\n')
    myhash = []
    for l in ll:
        if l[:8] == "Checksum":
            myhash.append(l[10:])
        elif l[:14] == "Response time:":
            print "time: ", l[15:]
    print myhash == truehash
def consolidation_test1():
    print "##################### Consolidation test ####################"
    make_zero_image(0, 1, 2, 3, 4)
    for i in xrange(1, 5):
        run(BIN + ("/wlog-redo -z ddev32M.0 < {}.wlog".format(i)))
        run(BIN + ("/wdiff-redo -z ddev32M.1 -i {}.s.wdiff".format(i)))
        run(BIN + ("/wdiff-redo -z ddev32M.2 -i {}.i.wdiff".format(i)))
    run(BIN + "/wdiff-merge -stat -x 16K -i {} -o all.s.wdiff".format(' '.join(
        '{}.s.wdiff'.format(i) for i in xrange(1, 5))))
    run(BIN + "/wdiff-redo -z ddev32M.3 -i all.s.wdiff")
    run(BIN + "/wdiff-merge -stat -x 16K -i {} -o all.i.wdiff".format(' '.join(
        '{}.i.wdiff'.format(i) for i in xrange(1, 5))))
    run(BIN + "/wdiff-redo -z ddev32M.4 -i all.i.wdiff")
    run("sha1sum {}".format(' '.join('ddev32M.{}'.format(i) for i in xrange(0, 5))))
    for i in xrange(1, 5):
        run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.{}".format(i))
        check_result("consolidation test 1-{}.".format(i))
def consolidation_test1():
    print "##################### Consolidation test ####################"
    make_zero_image(0, 1, 2)
    for i in xrange(1, 5):
        run(BIN + ("/wdiff-redo -z ddev32M.0 < %d.wdiff" % i))
        run(BIN + ("/wlog-redo -z ddev32M.1 < %d.wlog" % i))
    run(BIN + "/wdiff-merge -stat -x 16K -i 1.wdiff 2.wdiff 3.wdiff 4.wdiff -o all.wdiff")
    run(BIN + "/wdiff-redo -z ddev32M.2 < all.wdiff")
    run("sha1sum ddev32M.0 ddev32M.1 ddev32M.2")
    run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.1")
    check_result("consolidation test 1a.")
    run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.2")
    check_result("consolidation test 1b.")
"/beegfs/lfi.mipt.su/scratch/MadGraph/HH_bbWW/HH_bbWW_samples_2/delphes_{0}.root" .format(i), "events": "/beegfs/lfi.mipt.su/scratch/MadGraph/HH_bbWW/HH_bbWW_samples_2/GF_HH_{0}_cmsgrid_final_decayed.lhe" .format(i) } if __name__ == "__main__": try: sm = [] bsm = [] bsm2 = [] for i in range(1, 3): sm.append(run(SM(i))) bsm.append(run(BSM(i))) bsm2.append(run(BSM2(i))) print() for i in range(3, 13): # sm.append(run(SM(i))) bsm.append(run(BSM(i))) bsm2.append(run(BSM2(i))) print() m = lambda x: x["Non-resonant criteria"] print("SM: ", list(map(m, sm))) print("BSM: ", list(map(m, bsm))) print("BSM2: ", list(map(m, bsm2)))
def finalize_bdev(devPath, devFile):
    if USE_LOOP_DEV == 1:
        run("losetup -d %s" % devPath)
    else:
        run("dd oflag=direct if=%s of=%s bs=1M" % (devPath, devFile))
from os import environ
from datetime import datetime, timedelta

from notify import Notify
import config
from run import run

with open('.env') as file_:
    for line in file_:
        variable = [x.strip() for x in line.split('=')]
        environ['{0}'.format(variable[0])] = '{0}'.format(variable[1])

config.HOUR = datetime.utcnow() + timedelta(hours=11)
port = int(environ.get('PORT', 5000))
run()
debug = True
def restore_test(testId, lsidDiff, invalidLsid):
    run("dd if=/dev/zero of=%s bs=1M count=64" % DDEV_1)
    run("dd if=/dev/zero of=%s bs=1M count=64" % DDEV_1z)
    run("dd if=/dev/zero of=%s bs=1M count=64" % DDEV_2)
    run("dd if=/dev/zero of=%s bs=1M count=64" % DDEV_3)
    run("%s/wlog-restore %s --verify -d %d -i %d < %s" % (BIN, LDEV, lsidDiff, invalidLsid, WLOG_0))
    run("%s/wlog-cat %s -v -o %s" % (BIN, LDEV, WLOG_1))
    prepare_bdev(LOOP0, LDEV)
    run("%s/wlog-cat -noaio -v %s -o %s" % (BIN, LOOP0, WLOG_2))
    time.sleep(1)
    finalize_bdev(LOOP0, LDEV)

    if invalidLsid == 0xffffffffffffffff:
        endLsid0a = endLsid0 + lsidDiff - nPacks0 - totalPadding0
        endLsid1 = echo_wlog_value(WLOG_1, "reallyEndLsid")
        endLsid2 = echo_wlog_value(WLOG_2, "reallyEndLsid")
        nPacks1 = echo_wlog_value(WLOG_1, "nrPacks")
        nPacks2 = echo_wlog_value(WLOG_2, "nrPacks")
        totalPadding1 = echo_wlog_value(WLOG_1, "paddingPb")
        totalPadding2 = echo_wlog_value(WLOG_2, "paddingPb")
        endLsid1a = endLsid1 - nPacks1 - totalPadding1
        endLsid2a = endLsid2 - nPacks2 - totalPadding2
        if endLsid0a != endLsid1a:
            print "endLsid0a", endLsid0a, "does not equal to endLsid1a", endLsid1a
            print "TEST" + str(testId) + "_FAILURE"
            exit(1)
        if endLsid0a != endLsid2a:
            print "endLsid0a", endLsid0a, " does not equal to endLsid2a", endLsid2a
            print "TEST" + str(testId) + "_FAILURE"
            exit(1)

    normalLb1 = echo_wlog_value(WLOG_1, 'normalLb')
    normalLb2 = echo_wlog_value(WLOG_2, 'normalLb')
    if normalLb1 != normalLb2:
        print "normalLb1", normalLb1, "and", "normalLb2", normalLb2, "differ."
        print "TEST" + str(testId) + "_FAILURE"
    discardLb1 = echo_wlog_value(WLOG_1, 'discardLb')
    discardLb2 = echo_wlog_value(WLOG_2, 'discardLb')
    if discardLb1 != discardLb2:
        print "discardLb1", discardLb1, "and", "discardLb2", discardLb2, "differ."
        print "TEST" + str(testId) + "_FAILURE"

    run("%s/bdiff -b 512 %s %s" % (BIN, WLOG_1, WLOG_2))
    run("%s/wlog-redo %s < %s" % (BIN, DDEV_1, WLOG_1))
    run("%s/wlog-redo %s -z < %s" % (BIN, DDEV_1z, WLOG_1))
    prepare_bdev(LOOP1, DDEV_2)
    run("%s/wlog-redo -noaio %s < %s" % (BIN, LOOP1, WLOG_1))
    time.sleep(1)
    finalize_bdev(LOOP1, DDEV_2)
    time.sleep(1)
    prepare_bdev(LOOP0, LDEV)
    prepare_bdev(LOOP1, DDEV_3)
    run("%s/wdev-redo %s %s" % (BIN, LOOP0, LOOP1))
    time.sleep(1)
    finalize_bdev(LOOP0, LDEV)
    finalize_bdev(LOOP1, DDEV_3)
    if invalidLsid == 0xffffffffffffffff:
        run("%s/bdiff -b 512 %s %s" % (BIN, DDEV_0, DDEV_1))
        run("%s/bdiff -b 512 %s %s" % (BIN, DDEV_0, DDEV_2))
        run("%s/bdiff -b 512 %s %s" % (BIN, DDEV_0, DDEV_3))
        run("%s/bdiff -b 512 %s %s" % (BIN, DDEV_0z, DDEV_1z))
    else:
        run("%s/bdiff -b 512 %s %s" % (BIN, DDEV_1, DDEV_2))
        run("%s/bdiff -b 512 %s %s" % (BIN, DDEV_1, DDEV_3))
def SimpleTest():
    run("%s/wlog-restore --verify %s < %s" % (BIN, LDEV, WLOG_0))
    run("%s/wlog-cat %s -v -o %s" % (BIN, LDEV, WLOG_1))
    run("%s/bdiff -b 512 %s %s" % (BIN, WLOG_0, WLOG_1))
    run("%s/wlog-show -pack -stat %s > %s" % (BIN, WLOG_0, TMP_FILE0))
    run("%s/wldev-show -pack -stat %s > %s" % (BIN, LDEV, TMP_FILE1))
    run("/usr/bin/diff %s %s" % (TMP_FILE0, TMP_FILE1))
from run import *

run(fi='D31.dat', sep=' ')
while good_file == 0:
    query = input("Submit .wav file to search against repository (Example: button.wav): ")
    if os.path.isfile(query):
        good_file = 1
        query_wavsound = wavsound(query)

print("\n**Higher number of partitions increases false positive rates, \nwhile lower number of partitions increases false negative rates\n")
samplelength = input("Set word size (sample length) (5 ~ 100) : ")
samples = input("Set number of samples (n) of partitions from 1 to "
                + str(int(len(query_wavsound.get_data()) / float(samplelength))) + ": ")

# repository look up directory
dbdir = input("Enter repository directory to search (example: 'db') : ")
max_split = int(input("Set maximum allowable number of split repositories : "))

# repository query time
start_time = time.time()
result_lst = run(query, int(samplelength), samples, dbdir, max_split)

# output
output = "Search Result: \n"
# Tabulate % match (wav files with 0% match are excluded from the result)
for pair in result_lst:
    output += pair[0] + " : " + (40 - len(pair[0])) * " " + pair[1] + "% match" + "\n"

# Show search time
timelapse_parallel = time.time() - start_time
output = output + str(timelapse_parallel) + "seconds"
print(output)
from run import *
from plotADCvBX import *
from capIDtest import *
from ROOT import *

#dataFile = open("internalChargeInjection_00A.txt")
fiber3 = run("internalChargeInjection_00A.txt", 3)
fiber4 = run("internalChargeInjection_00A.txt", 4)
fiber6 = run("internalChargeInjection_00A.txt", 6)

#fiber3.saveToROOT("test.root")
#fiber4.saveToROOT("test.root")
#fiber6.saveToROOT("test.root")
#rootFile = ROOT.TFile("test.root","READ")
#rootTree = rootFile.Get("fiber4")
#rootTree.Show(0)

plotADCvBX(fiber3, "internalChargeInjectionTest_fiber3")
plotADCvBX(fiber4, "internalChargeInjectionTest_fiber4")
capIDtest(fiber3, "capIDtest_fiber3")
capIDtest(fiber4, "capIDtest_fiber4")
def echo_wlog_value(wlogFile, keyword):
    # $CTL show_wlog < $wlogFile |grep $keyword |awk '{print $2}'
    # run($CTL + " show_wlog < " + wlogFile + " > " + TMP_FILE)
    run("%s/wlog-show -stat %s > %s" % (BIN, wlogFile, TMP_FILE))
    v = getKeyValue(TMP_FILE, keyword, 1)
    return int(v)
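# getKeyValue() is used by echo_wlog_value() and format_ldev() but is not
# defined in this section. Judging from the shell pipeline in the comment
# above (grep keyword | awk '{print $2}'), it looks up the first matching line
# in the captured output file and returns one whitespace-separated field.
# A minimal sketch under that assumption:
def getKeyValue(fileName, keyword, idx):
    # Return field `idx` (0-based) of the first line containing `keyword`.
    with open(fileName, 'r') as f:
        for line in f:
            if keyword in line:
                return line.split()[idx]
    raise KeyError('keyword not found: %s' % keyword)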
from {{cookiecutter.package_name}} import run

if __name__ == "__main__":
    run()
def consolidation_test2():
    print "##################### Consolidation test ####################"
    run("cp ddev32M ddev32M.0")
    for i in xrange(1, 5):
        run(BIN + ("/wdiff-redo -z ddev32M.0 -i {}.s.wdiff".format(i)))
    run(BIN + "/virt-full-cat -stat -i ddev32M -o ddev32M.1 -d {}".format(' '.join(
        '{}.s.wdiff'.format(i) for i in xrange(1, 5))))
    run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.1")
    check_result("consolidation test 2s.")
    run(BIN + "/virt-full-cat -stat -i ddev32M -o ddev32M.2 -d {}".format(' '.join(
        '{}.i.wdiff'.format(i) for i in xrange(1, 5))))
    run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.2")
    check_result("consolidation test 2i.")
def make_zero_image(*args):
    for i in args:
        run("dd if=/dev/zero of=./ddev32M.%d bs=1048576 count=32" % i)
def delete_crashblk_devices():
    for bdev in [LDEV, DDEV]:
        run(BIN + 'crashblkc delete %s' % bdev)
def full_image_test():
    print "#################### Full image test ####################"
    run(BIN + "/wdiff-full -i ddev32M -o 0.s.wdiff")
    run(BIN + "/wdiff-full -indexed -i ddev32M -o 0.i.wdiff")
    make_zero_image(0, 1)
    run(BIN + "/wdiff-redo -z ddev32M.0 -i 0.s.wdiff")
    run(BIN + "/bdiff -b 512 ddev32M ddev32M.0")
    check_result("full image test (s.wdiff)")
    run(BIN + "/wdiff-redo -z ddev32M.1 -i 0.i.wdiff")
    run(BIN + "/bdiff -b 512 ddev32M ddev32M.1")
    check_result("full image test (i.wdiff)")
import os
import subprocess

__author__ = 'serg'

from run import *

prefix = "../reads/"
kmers = [21, 23, 25, 27, 29, 31, 35, 37, 39]
# kmers = [23]
_reads = ["frag_2.fastq, frag_1.fastq, 1.fastq, norm.fastq, 2.fastq"]
assemblers = ["Abyss"]

i = 0
for read in _reads:
    i += 1
    for k in kmers:
        for assembler in assemblers:
            if not os.path.exists("test" + assembler + "/" + str(i)):
                os.mkdir("test" + assembler + "/" + str(i) + "/")
            args = [assembler,
                    "--o", "test" + assembler + "/" + str(i) + "/" + str(k),
                    "--k", str(k),
                    "--i", prefix + read,
                    "--f", "fastq",
                    "--r", "long"]
            run(args)

# assemblers = ["SPAdes"]
# args = [assembler, "--o", "test" + assembler + "/" + str(k), "--k", str(k),
#         "--i", reads, "--f", "fastq", "--r", "long"]
def make_zero_image(*args):
    for i in args:
        run("dd if=/dev/zero of=./ddev32M.{} bs=1048576 count=32".format(i))
def tp_test():
    run(BIN + 'lvm-mgr exists-tp -vg %s -tp %s' % (VG, TP))
    run(BIN + 'lvm-mgr create -vg %s -tp %s -s 12M %s' % (VG, TP, VOL))
    run('sudo dd if=/dev/zero of=%s oflag=direct bs=1M count=12' % get_lvm_path(VOL))
    run(BIN + 'lvm-mgr snap -vg %s -lv %s %s' % (VG, VOL, SNAP))
    run(BIN + 'lvm-mgr resize -vg %s -s 24M %s' % (VG, VOL))
    run(BIN + 'lvm-mgr resize -vg %s -s 24M %s' % (VG, SNAP))
    run(BIN + 'lvm-mgr remove -vg %s %s' % (VG, SNAP))
    run(BIN + 'lvm-mgr remove -vg %s %s' % (VG, VOL))
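# get_lvm_path() used by tp_test() is not defined in this section. A minimal
# sketch, assuming the conventional /dev/<vg>/<lv> symlink layout created by
# LVM; the real helper may resolve the path differently (e.g. via /dev/mapper).
def get_lvm_path(lvName):
    # Build the block-device path for a logical volume in volume group VG.
    return '/dev/%s/%s' % (VG, lvName)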
def max_io_blocks_test():
    print "#################### MaxIoBlocks test #################### "
    make_zero_image(0, 1, 2)
    for i in xrange(1, 5):
        run(BIN + ("/wlog-to-wdiff -x 4K < {}.wlog > {}-4K.wdiff".format(i, i)))
        run(BIN + ("/wlog-to-wdiff -x 16K < {}.wlog > {}-16K.wdiff".format(i, i)))
        run(BIN + ("/wdiff-redo -z ddev32M.0 < {}.s.wdiff".format(i)))
        run(BIN + ("/wdiff-redo -z ddev32M.1 < {}-4K.wdiff".format(i)))
        run(BIN + ("/wdiff-redo -z ddev32M.2 < {}-16K.wdiff".format(i)))
        run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.1")
        check_result("maxIoBlocks test {}th wdiff 4K".format(i))
        run(BIN + "/bdiff -b 512 ddev32M.0 ddev32M.2")
        check_result("maxIoBlocks test {}th wdiff 16K".format(i))
def create_crashblk_device(sizeStr):
    tmpFileName = 'device.id'
    run(BIN + 'crashblkc create %s > %s' % (sizeStr, tmpFileName))
    with open(tmpFileName, 'r') as f:
        iD = int(f.read().strip())
    return iD