def __init__(self, dir, authorIds=None, context=None, start=True, baseDir=None):
    """Open (creating if necessary) a Whoosh index directory and set up the
    indexer thread plus its working directories.

    Args:
        dir: directory holding the Whoosh index; created if missing.
        authorIds: optional mapping of author ids (unused in this body;
            presumably consumed elsewhere — TODO confirm).  BUG FIX: the
            default was the mutable literal ``{}``, shared across all calls;
            it is now ``None`` with an equivalent per-call fallback.
        context: opaque context object (not used here).
        start: when truthy, start the indexer thread immediately.
        baseDir: root for the failed/incoming/log paths; defaults to the
            parent of ``dir``.
    """
    if authorIds is None:
        authorIds = {}
    if not os.path.isdir(dir):
        os.mkdir(dir)
    # An empty directory means no index exists yet: create a fresh one.
    if not list(os.listdir(dir)):
        self.ix = create_in(dir, Index.schema)
    if not baseDir:
        # NOTE(review): os.path.join with a single argument is a no-op;
        # this just takes the parent directory of `dir`.
        baseDir = os.path.join(os.path.split(dir)[0])
    # Always (re)open the index, whether it was just created or pre-existing.
    self.ix = whoosh.index.open_dir(dir)
    self.searchers = []
    self.failedDir = os.path.join(baseDir, "failed")
    utils.ensureDir(self.failedDir)
    self.incomingDir = os.path.join(baseDir, "incoming")
    utils.ensureDir(self.incomingDir)
    # Background thread running Index.indexLoop against this instance.
    self.indexer = threading.Thread(target=Index.indexLoop, args=[self])
    self.logger = open(os.path.join(baseDir, "index.log"), "a")
    self.stopping = False
    if start:
        self.startIndexer()
    self.counts = {}
def start(self):
    """Spawn a mongod for the smoke tests and wait until it is usable.

    Builds the mongod argv from ``self.kwargs`` (oplog, replication, journal,
    auth, SSL flags), launches it through buildlogger/_start, verifies startup,
    optionally creates the admin user, and for slave mode blocks until the
    slave reports an initial sync.  Python 2 code (print statements).
    """
    global mongod_port
    global mongod
    # Guard against double-start: _start() sets self.proc.
    if self.proc:
        print >> sys.stderr, "probable bug: self.proc already set in start()"
        return
    self.ensure_test_dirs()
    dir_name = smoke_db_prefix + "/data/db/sconsTests/"
    self.port = int(mongod_port)
    self.slave = False
    if 'slave' in self.kwargs:
        # Slave gets its own dbpath and the next port; master stays on mongod_port.
        dir_name = smoke_db_prefix + '/data/db/sconsTestsSlave/'
        srcport = mongod_port
        self.port += 1
        self.slave = True
    if os.path.exists(dir_name):
        # Clean any leftover data from a previous run via cleanbb.py.
        if 'slave' in self.kwargs:
            argv = [utils.find_python(), "buildscripts/cleanbb.py", '--nokill', dir_name]
        else:
            argv = [utils.find_python(), "buildscripts/cleanbb.py", dir_name]
        call(argv)
    utils.ensureDir(dir_name)
    argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name]
    # This should always be set for tests
    argv += ['--setParameter', 'enableTestCommands=1']
    if self.kwargs.get('small_oplog'):
        argv += ["--master", "--oplogSize", "511"]
    if self.kwargs.get('small_oplog_rs'):
        argv += ["--replSet", "foo", "--oplogSize", "511"]
    if self.slave:
        argv += ['--slave', '--source', 'localhost:' + str(srcport)]
    if self.kwargs.get('no_journal'):
        argv += ['--nojournal']
    if self.kwargs.get('no_preallocj'):
        argv += ['--nopreallocj']
    if self.kwargs.get('auth'):
        argv += ['--auth']
        self.auth = True
    if self.kwargs.get('use_ssl'):
        argv += ['--sslOnNormalPorts', '--sslPEMKeyFile', 'jstests/libs/server.pem', '--sslCAFile', 'jstests/libs/ca.pem']
    print "running " + " ".join(argv)
    self.proc = self._start(buildlogger(argv, is_global=True))
    if not self.did_mongod_start(self.port):
        raise Exception("Failed to start mongod")
    if self.auth:
        self.setup_admin_user(self.port)
    if self.slave:
        # Poll local.sources until every source reports a truthy syncedTo.
        local = Connection(port=self.port, slave_okay=True).local
        synced = False
        while not synced:
            synced = True
            for source in local.sources.find(fields=["syncedTo"]):
                synced = synced and "syncedTo" in source and source["syncedTo"]
def run_finer_lrs(init_param='kaiming', device='cpu'):
    """Refine learning-rate sweeps around the previously found best lr.

    For each (optimizer, distribution, bias-init) combination, loads the
    ``*_bestlr.pkl`` file produced by an earlier coarse sweep and re-runs
    training on a fine lr grid (factor 2**(1/8) steps) around it.  Results
    are serialized to per-lr pickle files; existing files are skipped.

    Args:
        init_param: weight initialization scheme name used in folder names.
        device: torch device string passed to the trainer.
    """
    dist_grid = [ExampleDistribution()
                 ] + [RadialDataDistribution(d=2**k) for k in range(7)]
    std_grid = [0.1, 0.5, 1.0, 2.0]
    # bi_grid = [('zero', 0.0), ('he+5', 0.0), ('he+1', 0.0), ('kink_uniform', 0.0)] \
    #           + [(bim, big) for big in std_grid for bim in ['normal', 'uniform']] \
    #           + [('pos-unif', 1.0), ('neg-unif', 1.0), ('kink-unif', 1.0), ('kink-neg-unif', 1.0),
    #              ('kink-neg-point', 0.0)]
    bi_grid = [('zero', 0.0), ('unif', 1.0), ('unif-pos', 1.0),
               ('unif-neg', 1.0), ('kink-neg-unif', 1.0), ('pytorch', 1.0),
               ('kink-neg-point', 0.0)]
    for opt in ['gd', 'gd-mom', 'adam']:
        for dist in dist_grid:
            d = dist.get_x_dim()
            for bim, big in bi_grid:
                folder_name = f'{init_param}_{opt}_{dist.get_name()}_{bim}-{big:g}'
                path = Path(custom_paths.get_results_path()) / 'nn_comparison' / folder_name
                best_lr_file = Path(custom_paths.get_results_path()) / 'nn_comparison' / f'{folder_name}_bestlr.pkl'
                if not utils.existsFile(best_lr_file):
                    # BUG FIX: this message was a plain string literal, so the
                    # text "{best_lr_file}" was written verbatim; it is now a
                    # proper f-string.
                    sys.stderr.write(f'best lr file {best_lr_file} does not exist!\n')
                    continue
                best_lr = utils.deserialize(best_lr_file)
                # Seven lrs spanning roughly [best_lr / 2**(3/8), best_lr * 2**(3/8)].
                lr_grid = [best_lr * (2**(k / 8)) for k in range(-3, 4)]
                for lr in lr_grid:
                    print(f'Running combination {folder_name} with lr {lr:g}')
                    file = path / f'{lr:g}.pkl'
                    utils.ensureDir(file)
                    if utils.existsFile(file):
                        continue  # result already computed
                    # Larger input dims get two repetitions with fewer parallel nets.
                    n_rep = 2 if d == 64 else 1
                    trainer = SimpleParallelTrainer(n_parallel=100 // n_rep, n_train=256 * d,
                                                    n_valid=1024, n_test=1024,
                                                    data_distribution=dist, lr=lr,
                                                    bias_init_gain=big, batch_size=256,
                                                    bias_init_mode=bim, init_param=init_param,
                                                    n_epochs=8192 // d, seed=0, device=device,
                                                    n_hidden=512, opt=opt,
                                                    valid_epoch_interval=64 // d, n_rep=n_rep)
                    results = trainer.fit(do_plot=False, verbose=False)
                    if results is None:
                        print('Got NaN values')
                    # Serialize even on NaN so the combination is not retried.
                    utils.serialize(file, {'trainer': trainer, 'results': results})
def execute_mc(offset_str, base_dir='./mc-data/'):
    """Run every Monte-Carlo parameter combination through a process pool.

    Results land under ``<base_dir>/mc-data-<offset_str>/``; the worker is a
    picklable ``KerasMCRunner`` callable mapped over all combinations.
    """
    combos = get_param_combinations()
    # Use half the available cores (but at least one) to leave headroom.
    worker_count = max(1, multiprocessing.cpu_count() // 2)
    utils.ensureDir(base_dir + 'mc-data-{}/'.format(offset_str))
    runner = KerasMCRunner(offset_str, base_dir)
    pool = multiprocessing.Pool(processes=worker_count)
    pool.map(runner, combos, chunksize=1)
    pool.terminate()
    pool.join()
def start(self):
    """Spawn a mongod for the smoke tests and wait until it is usable.

    Minimal variant: no test-command parameter and no SSL support.  Builds
    the argv from ``self.kwargs``, launches via buildlogger/_start, verifies
    startup, optionally sets up the admin user, and for slave mode waits for
    the initial sync.  Python 2 code (print statements).
    """
    global mongod_port
    global mongod
    # Guard against double-start: _start() sets self.proc.
    if self.proc:
        print >> sys.stderr, "probable bug: self.proc already set in start()"
        return
    self.ensure_test_dirs()
    dir_name = smoke_db_prefix + "/data/db/sconsTests/"
    self.port = int(mongod_port)
    self.slave = False
    if 'slave' in self.kwargs:
        # Slave gets its own dbpath and the next port number.
        dir_name = smoke_db_prefix + '/data/db/sconsTestsSlave/'
        srcport = mongod_port
        self.port += 1
        self.slave = True
    if os.path.exists(dir_name):
        # Clean leftovers from a previous run via cleanbb.py.
        if 'slave' in self.kwargs:
            argv = [utils.find_python(), "buildscripts/cleanbb.py", '--nokill', dir_name]
        else:
            argv = [utils.find_python(), "buildscripts/cleanbb.py", dir_name]
        call(argv)
    utils.ensureDir(dir_name)
    argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name]
    if self.kwargs.get('small_oplog'):
        argv += ["--master", "--oplogSize", "511"]
    if self.kwargs.get('small_oplog_rs'):
        argv += ["--replSet", "foo", "--oplogSize", "511"]
    if self.slave:
        argv += ['--slave', '--source', 'localhost:' + str(srcport)]
    if self.kwargs.get('no_journal'):
        argv += ['--nojournal']
    if self.kwargs.get('no_preallocj'):
        argv += ['--nopreallocj']
    if self.kwargs.get('auth'):
        argv += ['--auth']
        self.auth = True
    print "running " + " ".join(argv)
    self.proc = self._start(buildlogger(argv, is_global=True))
    if not self.did_mongod_start(self.port):
        raise Exception("Failed to start mongod")
    if self.auth:
        self.setup_admin_user(self.port)
    if self.slave:
        # Poll local.sources until every source reports a truthy syncedTo.
        local = Connection(port=self.port, slave_okay=True).local
        synced = False
        while not synced:
            synced = True
            for source in local.sources.find(fields=["syncedTo"]):
                synced = synced and "syncedTo" in source and source["syncedTo"]
def start(self):
    """Spawn a mongod for the smoke tests and wait until it is usable.

    Same minimal variant as the single-quoted version elsewhere in this file
    (double-quoted string style).  Python 2 code (print statements).
    """
    global mongod_port
    global mongod
    # Guard against double-start: _start() sets self.proc.
    if self.proc:
        print >> sys.stderr, "probable bug: self.proc already set in start()"
        return
    self.ensure_test_dirs()
    dir_name = smoke_db_prefix + "/data/db/sconsTests/"
    self.port = int(mongod_port)
    self.slave = False
    if "slave" in self.kwargs:
        # Slave gets its own dbpath and the next port number.
        dir_name = smoke_db_prefix + "/data/db/sconsTestsSlave/"
        srcport = mongod_port
        self.port += 1
        self.slave = True
    if os.path.exists(dir_name):
        # Clean leftovers from a previous run via cleanbb.py.
        if "slave" in self.kwargs:
            argv = [utils.find_python(), "buildscripts/cleanbb.py", "--nokill", dir_name]
        else:
            argv = [utils.find_python(), "buildscripts/cleanbb.py", dir_name]
        call(argv)
    utils.ensureDir(dir_name)
    argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name]
    if self.kwargs.get("small_oplog"):
        argv += ["--master", "--oplogSize", "511"]
    if self.kwargs.get("small_oplog_rs"):
        argv += ["--replSet", "foo", "--oplogSize", "511"]
    if self.slave:
        argv += ["--slave", "--source", "localhost:" + str(srcport)]
    if self.kwargs.get("no_journal"):
        argv += ["--nojournal"]
    if self.kwargs.get("no_preallocj"):
        argv += ["--nopreallocj"]
    if self.kwargs.get("auth"):
        argv += ["--auth"]
        self.auth = True
    print "running " + " ".join(argv)
    self.proc = self._start(buildlogger(argv, is_global=True))
    if not self.did_mongod_start(self.port):
        raise Exception("Failed to start mongod")
    if self.auth:
        self.setup_admin_user(self.port)
    if self.slave:
        # Poll local.sources until every source reports a truthy syncedTo.
        local = Connection(port=self.port, slave_okay=True).local
        synced = False
        while not synced:
            synced = True
            for source in local.sources.find(fields=["syncedTo"]):
                synced = synced and "syncedTo" in source and source["syncedTo"]
def execute_mc(offset_str, base_dir='./mc-data/', use_sgd=False, use_early_stopping=False,
               use_sufficient_stopping=True, use_small_lr=False, initialize_custom=False):
    """Fan all MC parameter combinations out over a process pool.

    Each combination is handled by an ``OffsetMCRunner`` configured with the
    given training options; outputs go to ``<base_dir>/mc-data-<offset_str>/``.
    """
    combos = get_param_combinations()
    # Half the cores (minimum one) so the machine stays responsive.
    worker_count = max(1, multiprocessing.cpu_count() // 2)
    utils.ensureDir(base_dir + 'mc-data-{}/'.format(offset_str))
    runner = OffsetMCRunner(offset_str, base_dir, use_sgd, use_early_stopping,
                            use_sufficient_stopping, use_small_lr, initialize_custom)
    pool = multiprocessing.Pool(processes=worker_count)
    pool.map(runner, combos, chunksize=1)
    pool.terminate()
    pool.join()
def run_old(init_param='kaiming', device='cpu'):
    """Coarse learning-rate sweep over distributions and bias initializations.

    For each optimizer a geometric lr grid (factor sqrt(2), 17 values) around
    an optimizer/init dependent base lr is tried for every (distribution,
    bias-init-mode, bias-init-gain) combination.  Each run is serialized to
    its own pickle file; files that already exist are skipped.
    """
    distributions = [ExampleDistribution()]
    distributions += [RBFDataDistribution(d=2**k) for k in range(7)]
    gains = [0.1, 0.5, 1.0, 2.0]
    bias_init_grid = [('zero', 0.0), ('he+5', 0.0), ('he+1', 0.0), ('kink_uniform', 0.0)]
    for gain in gains:
        for mode in ['normal', 'uniform']:
            bias_init_grid.append((mode, gain))
    for opt in ['gd', 'gd-mom', 'adam']:
        # Base lr depends on the optimizer and the parameterization.
        if opt == 'adam':
            base_lr = 1e-2
        elif init_param == 'ntk':
            base_lr = 4e-1
        else:
            base_lr = 8e-3
        lr_values = [base_lr * np.sqrt(2)**k for k in range(-8, 9)]
        for dist in distributions:
            for bim, big in bias_init_grid:
                folder_name = f'{init_param}_{opt}_{dist.get_name()}_{bim}-{big:g}'
                out_dir = Path(custom_paths.get_results_path()) / 'nn_comparison' / folder_name
                for lr in lr_values:
                    print(f'Running combination {folder_name} with lr {lr:g}')
                    out_file = out_dir / f'{lr:g}.pkl'
                    utils.ensureDir(out_file)
                    if utils.existsFile(out_file):
                        continue  # already computed
                    torch.cuda.empty_cache()
                    trainer = SimpleParallelTrainer(n_parallel=100, n_train=256, n_valid=1024,
                                                    n_test=1024, data_distribution=dist, lr=lr,
                                                    bias_init_gain=big, bias_init_mode=bim,
                                                    init_param=init_param, n_epochs=10000,
                                                    seed=0, device=device, n_hidden=256, opt=opt)
                    results = trainer.fit(do_plot=False, verbose=False)
                    if results is None:
                        print('Got NaN values')
                    # Serialize even on failure so the run is not repeated.
                    utils.serialize(out_file, {'trainer': trainer, 'results': results})
def start(self):
    """Spawn a mongod for the smoke tests and wait until it is usable.

    Variant with --httpinterface, user-supplied --setParameter values,
    configurable auth mechanism, and SSL support.  Builds the argv from
    ``self.kwargs``, launches via buildlogger/_start, verifies startup,
    optionally creates the admin user, and for slave mode waits for the
    initial sync.  Python 2 code (print statements).
    """
    global mongod_port
    global mongod
    # Guard against double-start: _start() sets self.proc.
    if self.proc:
        print >> sys.stderr, "probable bug: self.proc already set in start()"
        return
    self.ensure_test_dirs()
    dir_name = smoke_db_prefix + "/data/db/sconsTests/"
    self.port = int(mongod_port)
    self.slave = False
    if 'slave' in self.kwargs:
        # Slave gets its own dbpath and the next port number.
        dir_name = smoke_db_prefix + '/data/db/sconsTestsSlave/'
        srcport = mongod_port
        self.port += 1
        self.slave = True
    if os.path.exists(dir_name):
        # Clean leftovers from a previous run via cleanbb.py.
        if 'slave' in self.kwargs:
            argv = [
                utils.find_python(), "buildscripts/cleanbb.py", '--nokill',
                dir_name
            ]
        else:
            argv = [
                utils.find_python(), "buildscripts/cleanbb.py", dir_name
            ]
        call(argv)
    utils.ensureDir(dir_name)
    argv = [
        mongod_executable, "--port", str(self.port), "--dbpath", dir_name
    ]
    # These parameters are always set for tests
    # SERVER-9137 Added httpinterface parameter to keep previous behavior
    argv += ['--setParameter', 'enableTestCommands=1', '--httpinterface']
    if self.kwargs.get('small_oplog'):
        argv += ["--master", "--oplogSize", "511"]
    # Forward any caller-supplied comma-separated server parameters.
    params = self.kwargs.get('set_parameters', None)
    if params:
        for p in params.split(','):
            argv += ['--setParameter', p]
    if self.kwargs.get('small_oplog_rs'):
        argv += ["--replSet", "foo", "--oplogSize", "511"]
    if self.slave:
        argv += ['--slave', '--source', 'localhost:' + str(srcport)]
    if self.kwargs.get('no_journal'):
        argv += ['--nojournal']
    if self.kwargs.get('no_preallocj'):
        argv += ['--nopreallocj']
    if self.kwargs.get('auth'):
        argv += ['--auth']
        # Only pass the mechanism explicitly when it differs from the default.
        authMechanism = self.kwargs.get('authMechanism', 'MONGODB-CR')
        if authMechanism != 'MONGODB-CR':
            argv += [
                '--setParameter', 'authenticationMechanisms=' + authMechanism
            ]
        self.auth = True
    if self.kwargs.get('use_ssl'):
        argv += [
            '--sslOnNormalPorts', '--sslPEMKeyFile', 'jstests/libs/server.pem',
            '--sslCAFile', 'jstests/libs/ca.pem',
            '--sslWeakCertificateValidation'
        ]
    print "running " + " ".join(argv)
    self.proc = self._start(buildlogger(argv, is_global=True))
    if not self.did_mongod_start(self.port):
        raise Exception("Failed to start mongod")
    if self.auth:
        self.setup_admin_user(self.port)
    if self.slave:
        # Poll local.sources until every source reports a truthy syncedTo.
        local = Connection(port=self.port, slave_okay=True).local
        synced = False
        while not synced:
            synced = True
            for source in local.sources.find(fields=["syncedTo"]):
                synced = synced and "syncedTo" in source and source[
                    "syncedTo"]
def ensure_test_dirs(self):
    """Create the scratch directories the smoke tests rely on."""
    for sub_path in ("/tmp/unittest/", "/data/", "/data/db/"):
        utils.ensureDir(smoke_db_prefix + sub_path)
def start(self): global mongod_port global mongod if self.proc: print >> sys.stderr, "probable bug: self.proc already set in start()" return self.ensure_test_dirs() dir_name = smoke_db_prefix + "/data/db/sconsTests/" self.port = int(mongod_port) self.slave = False if "slave" in self.kwargs: dir_name = smoke_db_prefix + "/data/db/sconsTestsSlave/" srcport = mongod_port self.port += 1 self.slave = True if os.path.exists(dir_name): if "slave" in self.kwargs: argv = [utils.find_python(), "buildscripts/cleanbb.py", "--nokill", dir_name] else: argv = [utils.find_python(), "buildscripts/cleanbb.py", dir_name] call(argv) utils.ensureDir(dir_name) argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name] # This should always be set for tests argv += ["--setParameter", "enableTestCommands=1"] if self.kwargs.get("small_oplog"): argv += ["--master", "--oplogSize", "511"] if self.kwargs.get("small_oplog_rs"): argv += ["--replSet", "foo", "--oplogSize", "511"] if self.slave: argv += ["--slave", "--source", "localhost:" + str(srcport)] if self.kwargs.get("no_journal"): argv += ["--nojournal"] if self.kwargs.get("no_preallocj"): argv += ["--nopreallocj"] if self.kwargs.get("auth"): argv += ["--auth"] authMechanism = self.kwargs.get("authMechanism", "MONGO-CR") if authMechanism != "MONGO-CR": argv.append("--setParameter=authenticationMechanisms=" + authMechanism) self.auth = True if self.kwargs.get("use_ssl"): argv += [ "--sslOnNormalPorts", "--sslPEMKeyFile", "jstests/libs/server.pem", "--sslCAFile", "jstests/libs/ca.pem", "--sslWeakCertificateValidation", ] print "running " + " ".join(argv) self.proc = self._start(buildlogger(argv, is_global=True)) if not self.did_mongod_start(self.port): raise Exception("Failed to start mongod") if self.auth: self.setup_admin_user(self.port) if self.slave: local = Connection(port=self.port, slave_okay=True).local synced = False while not synced: synced = True for source in local.sources.find(fields=["syncedTo"]): synced = synced 
and "syncedTo" in source and source["syncedTo"]
def plot_loss():
    """Train a small CheckingNN on the standard dataset and save three plots.

    Produces ``./plots/loss.pdf`` (log-scale excess loss over epochs),
    ``./plots/training.pdf`` (fit at three training stages), and
    ``./plots/kink_movement.pdf`` (kink positions -b/a over epochs).

    NOTE(review): statement order matters here — the numpy seed is set
    immediately before createRandomWeights, so any reordering would change
    the random stream and the resulting figures.
    """
    # Experiment configuration.
    n_hidden = 16
    n_parallel = 1
    lr = 1e-3
    n_steps = 201            # evaluation snapshots
    n_epochs_per_step = 100  # training epochs between snapshots
    np.random.seed(4)
    x, y = mc_training.get_standard_dataset()
    #y += 0.5
    (a, b, c, w) = CheckingNN.createRandomWeights(n_parallel, n_hidden)
    # Uniform sample weights.
    x_weights = 1. / len(x) * np.ones(len(x))
    train_setups = [TrainingSetup(x, x_weights, y) for i in range(n_parallel)]
    lrs = np.array([lr for i in range(n_parallel)])
    net = CheckingNN(initial_weights=(a, b, c, w), train_setups=train_setups, lrs=lrs)
    vars = net.create_training_vars()
    losses = []
    y_eval = []
    x_eval = []
    for step in range(n_steps):
        # Mean squared error on the training set (halved), minus 1.0
        # (presumably the infimum of the loss for this setup — TODO confirm).
        pred_diff = net.predict(x, 0) - y
        loss = np.dot(pred_diff, pred_diff) / (2 * len(x))
        losses.append(loss - 1.0)
        # Evaluate the network at the domain boundary and at each kink -b/a.
        x_eval_current = np.sort(
            np.hstack([[-3.0, 3.0], -net.b[0, 0, :] / net.a[0, 0, :]]))
        x_eval.append(x_eval_current)
        y_eval.append(net.predict(x_eval_current, 0))
        for step_epoch in range(n_epochs_per_step):
            net.train_one_epoch(vars)
    # --- Figure 1: excess loss over epochs. ---
    plt.figure('Loss', figsize=(3, 2))
    plt.semilogy(np.arange(n_steps) * n_epochs_per_step, losses, 'k')
    plt.xlabel(r'Epoch $k$')
    plt.ylabel(r"$L_D(W_k) - \inf_{k'} L_D(W_{k'})$")
    utils.ensureDir('./plots/')
    plt.tight_layout()
    plt.savefig('./plots/loss.pdf')
    # --- Figure 2: fit at selected training stages. ---
    plt.figure('NN training', figsize=(6, 4))
    linewidth = 0.5
    plt.plot(x, y, 'k.')
    plt.plot(x_eval[0], y_eval[0], 'k--', linewidth=linewidth, label='Initial')
    plt.plot(x_eval[10], y_eval[10], '#AAAAAA', linewidth=linewidth, label='1000 Epochs')
    plt.plot(x_eval[n_steps - 1], y_eval[n_steps - 1], 'k', linewidth=linewidth, label='20000 Epochs')
    # Highlight the final kink locations (endpoints excluded).
    plt.scatter(x_eval[n_steps - 1][1:-1], y_eval[n_steps - 1][1:-1], s=1)
    plt.xlabel(r'$x$')
    plt.ylabel(r'$y$')
    plt.legend()
    plt.tight_layout()
    plt.savefig('./plots/training.pdf')
    # --- Figure 3: kink trajectories over training. ---
    plt.figure('Kinks', figsize=(3, 2))
    kink_movement = np.array([x_eval[i][1:-1] for i in range(len(x_eval))])
    for i in range(kink_movement.shape[1]):
        plt.plot(np.arange(kink_movement.shape[0]) * n_epochs_per_step,
                 kink_movement[:, i], 'k', linewidth=linewidth)
    plt.xlabel(r'Epoch $k$')
    plt.ylabel(r'$-b_{i, k}/a_{i, k}$')
    plt.tight_layout()
    plt.savefig('./plots/kink_movement.pdf')
def start(self):
    """Spawn a mongod for the smoke tests and wait until it is usable.

    Variant with --httpinterface, configurable auth mechanism, and
    --sslMode/x509 cluster-auth support.  Builds the argv from
    ``self.kwargs``, launches via buildlogger/_start, verifies startup,
    optionally creates the admin user, and for slave mode waits for the
    initial sync.  Python 2 code (print statements).
    """
    global mongod_port
    global mongod
    # Guard against double-start: _start() sets self.proc.
    if self.proc:
        print >> sys.stderr, "probable bug: self.proc already set in start()"
        return
    self.ensure_test_dirs()
    dir_name = smoke_db_prefix + "/data/db/sconsTests/"
    self.port = int(mongod_port)
    self.slave = False
    if 'slave' in self.kwargs:
        # Slave gets its own dbpath and the next port number.
        dir_name = smoke_db_prefix + '/data/db/sconsTestsSlave/'
        srcport = mongod_port
        self.port += 1
        self.slave = True
    if os.path.exists(dir_name):
        # Clean leftovers from a previous run via cleanbb.py.
        if 'slave' in self.kwargs:
            argv = [utils.find_python(), "buildscripts/cleanbb.py", '--nokill', dir_name]
        else:
            argv = [utils.find_python(), "buildscripts/cleanbb.py", dir_name]
        call(argv)
    utils.ensureDir(dir_name)
    argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name]
    # These parameters are always set for tests
    # SERVER-9137 Added httpinterface parameter to keep previous behavior
    argv += ['--setParameter', 'enableTestCommands=1', '--httpinterface']
    if self.kwargs.get('small_oplog'):
        argv += ["--master", "--oplogSize", "511"]
    # Forward any caller-supplied comma-separated server parameters.
    params = self.kwargs.get('set_parameters', None)
    if params:
        for p in params.split(','):
            argv += ['--setParameter', p]
    if self.kwargs.get('small_oplog_rs'):
        argv += ["--replSet", "foo", "--oplogSize", "511"]
    if self.slave:
        argv += ['--slave', '--source', 'localhost:' + str(srcport)]
    if self.kwargs.get('no_journal'):
        argv += ['--nojournal']
    if self.kwargs.get('no_preallocj'):
        argv += ['--nopreallocj']
    if self.kwargs.get('auth'):
        argv += ['--auth']
        # Only pass the mechanism explicitly when it differs from the default.
        authMechanism = self.kwargs.get('authMechanism', 'MONGODB-CR')
        if authMechanism != 'MONGODB-CR':
            argv += ['--setParameter', 'authenticationMechanisms=' + authMechanism]
        self.auth = True
    if self.kwargs.get('use_ssl') or self.kwargs.get('use_x509'):
        argv += ['--sslMode', "sslOnly", '--sslPEMKeyFile', 'jstests/libs/server.pem',
                 '--sslCAFile', 'jstests/libs/ca.pem', '--sslWeakCertificateValidation']
    if self.kwargs.get('use_x509'):
        # x509 cluster auth implies authentication is in effect.
        argv += ['--clusterAuthMode','x509'];
        self.auth = True
    print "running " + " ".join(argv)
    self.proc = self._start(buildlogger(argv, is_global=True))
    if not self.did_mongod_start(self.port):
        raise Exception("Failed to start mongod")
    if self.auth:
        self.setup_admin_user(self.port)
    if self.slave:
        # Poll local.sources until every source reports a truthy syncedTo.
        local = Connection(port=self.port, slave_okay=True).local
        synced = False
        while not synced:
            synced = True
            for source in local.sources.find(fields=["syncedTo"]):
                synced = synced and "syncedTo" in source and source["syncedTo"]
def start(self):
    """Spawn a mongod for the smoke tests and wait until it is usable.

    Variant with valgrind/drd support, optional log file / extra server
    options, and a post-start shell query that sets the global ``_debug``
    flag from the server's buildInfo.  Python 2 code (print statements).
    """
    global mongod_port
    global mongod
    global shell_executable
    global _debug
    global valgrind
    global drd
    # Guard against double-start: _start() sets self.proc.
    if self.proc:
        print >> sys.stderr, "probable bug: self.proc already set in start()"
        return
    self.ensure_test_dirs()
    dir_name = smoke_db_prefix + "/data/db/sconsTests/"
    self.port = int(mongod_port)
    self.slave = False
    if 'slave' in self.kwargs:
        # Slave gets its own dbpath and the next port number.
        dir_name = smoke_db_prefix + '/data/db/sconsTestsSlave/'
        srcport = mongod_port
        self.port += 1
        self.slave = True
    # Old cleanbb.py invocation deliberately disabled (see string below).
    if os.path.exists(dir_name):
        pass
    """ Cleanbb is the most irritating script ever created.
    if 'slave' in self.kwargs:
        argv = [utils.find_python(), "buildscripts/cleanbb.py", '--nokill', dir_name]
    else:
        argv = [utils.find_python(), "buildscripts/cleanbb.py", dir_name]
    if quiet:
        f = open(server_log_file, "a")
        try:
            call(argv, stdout=f)
        finally:
            f.close()
    else:
        call(argv)
    """
    utils.ensureDir(dir_name)
    argv = [
        mongod_executable, "--port", str(self.port), "--dbpath", dir_name
    ]
    # This should always be set for tests
    argv += ['--setParameter', 'enableTestCommands=1']
    if valgrind or drd:
        argv += ['--setParameter', 'numCachetableBucketMutexes=32']
    if self.kwargs.get('small_oplog'):
        argv += ["--master", "--oplogSize", "511"]
    if self.kwargs.get('small_oplog_rs'):
        argv += ["--replSet", "foo", "--oplogSize", "511"]
    if self.slave:
        argv += ['--slave', '--source', 'localhost:' + str(srcport)]
    if self.kwargs.get('no_journal'):
        argv += ['--nojournal']
    if self.kwargs.get('no_preallocj'):
        argv += ['--nopreallocj']
    if self.kwargs.get('auth'):
        argv += ['--auth']
        # Only pass the mechanism explicitly when it differs from the default.
        authMechanism = self.kwargs.get('authMechanism', 'MONGODB-CR')
        if authMechanism != 'MONGODB-CR':
            argv.append('--setParameter=authenticationMechanisms=' + authMechanism)
        self.auth = True
    if len(server_log_file) > 0:
        argv += ['--logpath', server_log_file]
    if len(smoke_server_opts) > 0:
        argv += [smoke_server_opts]
    if self.kwargs.get('use_ssl'):
        argv += [
            '--sslOnNormalPorts', '--sslPEMKeyFile', 'jstests/libs/server.pem',
            '--sslCAFile', 'jstests/libs/ca.pem',
            '--sslWeakCertificateValidation'
        ]
    if not quiet:
        print "running " + " ".join(argv)
    self.proc = self._start(buildlogger(argv, is_global=True))
    if not self.did_mongod_start(self.port):
        raise Exception("Failed to start mongod")
    # Ask the running server whether it is a debug build.
    run_mongo_cmd = [shell_executable, '--port', str(self.port), '--quiet']
    if self.kwargs.get('use_ssl'):
        run_mongo_cmd += [
            '--ssl', '--sslPEMKeyFile', 'jstests/libs/server.pem',
            '--sslCAFile', 'jstests/libs/ca.pem'
        ]
    if "true" == check_output(
            run_mongo_cmd + ['--eval', 'print(db.runCommand("buildInfo").debug)']).strip():
        _debug = True
    if self.auth:
        self.setup_admin_user(self.port)
    if self.slave:
        # Poll local.sources until every source reports a truthy syncedTo.
        local = Connection(port=self.port, slave_okay=True).local
        synced = False
        while not synced:
            synced = True
            for source in local.sources.find(fields=["syncedTo"]):
                synced = synced and "syncedTo" in source and source[
                    "syncedTo"]
def start(self):
    """Spawn a mongod for the smoke tests and wait until it is usable.

    Compact-formatting twin of the valgrind/drd-aware variant above: same
    flags, log-file / extra-options handling, and post-start buildInfo
    debug-flag detection.  Python 2 code (print statements).
    """
    global mongod_port
    global mongod
    global shell_executable
    global _debug
    global valgrind
    global drd
    # Guard against double-start: _start() sets self.proc.
    if self.proc:
        print >> sys.stderr, "probable bug: self.proc already set in start()"
        return
    self.ensure_test_dirs()
    dir_name = smoke_db_prefix + "/data/db/sconsTests/"
    self.port = int(mongod_port)
    self.slave = False
    if 'slave' in self.kwargs:
        # Slave gets its own dbpath and the next port number.
        dir_name = smoke_db_prefix + '/data/db/sconsTestsSlave/'
        srcport = mongod_port
        self.port += 1
        self.slave = True
    # Old cleanbb.py invocation deliberately disabled (see string below).
    if os.path.exists(dir_name):
        pass
    """ Cleanbb is the most irritating script ever created.
    if 'slave' in self.kwargs:
        argv = [utils.find_python(), "buildscripts/cleanbb.py", '--nokill', dir_name]
    else:
        argv = [utils.find_python(), "buildscripts/cleanbb.py", dir_name]
    if quiet:
        f = open(server_log_file, "a")
        try:
            call(argv, stdout=f)
        finally:
            f.close()
    else:
        call(argv)
    """
    utils.ensureDir(dir_name)
    argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name]
    # This should always be set for tests
    argv += ['--setParameter', 'enableTestCommands=1']
    if valgrind or drd:
        argv += ['--setParameter', 'numCachetableBucketMutexes=32']
    if self.kwargs.get('small_oplog'):
        argv += ["--master", "--oplogSize", "511"]
    if self.kwargs.get('small_oplog_rs'):
        argv += ["--replSet", "foo", "--oplogSize", "511"]
    if self.slave:
        argv += ['--slave', '--source', 'localhost:' + str(srcport)]
    if self.kwargs.get('no_journal'):
        argv += ['--nojournal']
    if self.kwargs.get('no_preallocj'):
        argv += ['--nopreallocj']
    if self.kwargs.get('auth'):
        argv += ['--auth']
        # Only pass the mechanism explicitly when it differs from the default.
        authMechanism = self.kwargs.get('authMechanism', 'MONGODB-CR')
        if authMechanism != 'MONGODB-CR':
            argv.append('--setParameter=authenticationMechanisms=' + authMechanism)
        self.auth = True
    if len(server_log_file) > 0:
        argv += ['--logpath', server_log_file]
    if len(smoke_server_opts) > 0:
        argv += [smoke_server_opts]
    if self.kwargs.get('use_ssl'):
        argv += ['--sslOnNormalPorts', '--sslPEMKeyFile', 'jstests/libs/server.pem',
                 '--sslCAFile', 'jstests/libs/ca.pem',
                 '--sslWeakCertificateValidation']
    if not quiet:
        print "running " + " ".join(argv)
    self.proc = self._start(buildlogger(argv, is_global=True))
    if not self.did_mongod_start(self.port):
        raise Exception("Failed to start mongod")
    # Ask the running server whether it is a debug build.
    run_mongo_cmd = [shell_executable, '--port', str(self.port), '--quiet']
    if self.kwargs.get('use_ssl'):
        run_mongo_cmd += ['--ssl', '--sslPEMKeyFile', 'jstests/libs/server.pem',
                          '--sslCAFile', 'jstests/libs/ca.pem']
    if "true" == check_output(run_mongo_cmd + ['--eval', 'print(db.runCommand("buildInfo").debug)']).strip():
        _debug = True
    if self.auth:
        self.setup_admin_user(self.port)
    if self.slave:
        # Poll local.sources until every source reports a truthy syncedTo.
        local = Connection(port=self.port, slave_okay=True).local
        synced = False
        while not synced:
            synced = True
            for source in local.sources.find(fields=["syncedTo"]):
                synced = synced and "syncedTo" in source and source["syncedTo"]
# NOTE(review): the statements before the __main__ guard appear to be the tail
# of a level-generation routine; `jsonstr` and `outfolder` are defined earlier,
# outside this excerpt — TODO confirm their origin.  Python 2 syntax throughout
# (`print` statement, `<>` operator).
print jsonstr
# Persist the generated world description alongside the other outputs.
jsonpath = os.path.abspath(os.path.join(outfolder, 'world.txt'))
file = open(jsonpath, "w")  # NOTE(review): shadows the Python 2 builtin `file`
file.write(jsonstr)
file.close()
print 'DONE...'


if __name__ == "__main__":
    # Expect exactly two arguments: the input SVG file and the output folder.
    if len(sys.argv) <> 3:
        print "Usage: level-gen.py [input-svg-file] [output-folder]"
        exit(1)
    inputFn = os.path.abspath(sys.argv[1])
    outputFolder = os.path.abspath(sys.argv[2])
    utils.ensureDir(outputFolder)  # make sure the output folder exists
    print 'Level generator:'
    print '----------------'
    print 'Input Svg File: ' + inputFn
    print 'Output Folder: ' + outputFolder
    begin(inputFn, outputFolder)
def start(self):
    """Spawn a mongod for the smoke tests and wait until it is usable.

    Simpler variant: no test-command parameter, no SSL; supports an optional
    log file, extra server options, and quiet mode.  Python 2 code.
    """
    global mongod_port
    global mongod
    # Guard against double-start: _start() sets self.proc.
    if self.proc:
        print >> sys.stderr, "probable bug: self.proc already set in start()"
        return
    self.ensure_test_dirs()
    dir_name = smoke_db_prefix + "/data/db/sconsTests/"
    self.port = int(mongod_port)
    self.slave = False
    if 'slave' in self.kwargs:
        # Slave gets its own dbpath and the next port number.
        dir_name = smoke_db_prefix + '/data/db/sconsTestsSlave/'
        srcport = mongod_port
        self.port += 1
        self.slave = True
    # Old cleanbb.py invocation deliberately disabled (see string below).
    if os.path.exists(dir_name):
        pass
    """ Cleanbb is the most irritating script ever created.
    if 'slave' in self.kwargs:
        argv = [utils.find_python(), "buildscripts/cleanbb.py", '--nokill', dir_name]
    else:
        argv = [utils.find_python(), "buildscripts/cleanbb.py", dir_name]
    if quiet:
        f = open(server_log_file, "a")
        try:
            call(argv, stdout=f)
        finally:
            f.close()
    else:
        call(argv)
    """
    utils.ensureDir(dir_name)
    argv = [mongod_executable, "--port", str(self.port), "--dbpath", dir_name]
    if self.kwargs.get('small_oplog'):
        argv += ["--master", "--oplogSize", "511"]
    if self.kwargs.get('small_oplog_rs'):
        argv += ["--replSet", "foo", "--oplogSize", "511"]
    if self.slave:
        argv += ['--slave', '--source', 'localhost:' + str(srcport)]
    if self.kwargs.get('no_journal'):
        argv += ['--nojournal']
    if self.kwargs.get('no_preallocj'):
        argv += ['--nopreallocj']
    if self.kwargs.get('auth'):
        argv += ['--auth']
        self.auth = True
    if len(server_log_file) > 0:
        argv += ['--logpath', server_log_file]
    if len(smoke_server_opts) > 0:
        argv += [smoke_server_opts]
    if not quiet:
        print "running " + " ".join(argv)
    self.proc = self._start(buildlogger(argv, is_global=True))
    if not self.did_mongod_start(self.port):
        raise Exception("Failed to start mongod")
    if self.auth:
        self.setup_admin_user(self.port)
    if self.slave:
        # Poll local.sources until every source reports a truthy syncedTo.
        local = Connection(port=self.port, slave_okay=True).local
        synced = False
        while not synced:
            synced = True
            for source in local.sources.find(fields=["syncedTo"]):
                synced = synced and "syncedTo" in source and source["syncedTo"]
def main():
    """Train a Keras model from a TOML config.

    Reads the config path from argv (default ``./config.toml``), loads
    train/test CSV data, builds and compiles the model, sets up checkpoint /
    early-stopping / logging / LR-reduction callbacks, and runs ``fit``.
    Exits with status 1 if any required config key is missing.
    """
    # loading config file ...
    cfgPath = sys.argv[1] if len(sys.argv) > 1 else './config.toml'
    cfg = loadConfig(cfgPath)
    try:
        # ... and unpacking variables
        dictget = lambda d, *k: [d[i] for i in k]
        dataStats = cfg['data_stats']
        modelParams = cfg['model_params']
        trainCSV, testCSV = dictget(cfg['database'], 'train', 'test')
        seqLength, stepSize = dictget(cfg['model_params'], 'seqLength', 'stepSize')
        modelArch, modelDir, modelName = dictget(cfg['model_arch'], 'modelArch', 'modelDir', 'modelName')
        optimizer, lossFunc, metricFuncs = dictget(cfg['training_params'], 'optimizer', 'lossFunc', 'metricFuncs')
        lr, epochs, batchSize, patience, = dictget(cfg['training_params'], 'learningRate', 'epochs', 'batchSize', 'patience')
    except KeyError as err:
        print("\n\nERROR: not all parameters defined in config.toml : ", err)
        print("Exiting ... \n\n")
        sys.exit(1)
    print("Loading training data ...")
    xTrain, yTrain, stats = getData(trainCSV, seqLength=seqLength, stepSize=stepSize, stats=dataStats)
    print("Training Data Shape : ", xTrain.shape, "\n")
    print("Loading testing data ...")
    xTest, yTest, stats = getData(testCSV, seqLength=seqLength, stepSize=stepSize, stats=dataStats)
    print("Testing Data Shape : ", xTest.shape, "\n")
    yTrain = np.expand_dims(yTrain, -1)  # adding extra axis as model expects 2 axis in the output
    yTest = np.expand_dims(yTest, -1)
    print("Compiling Model")
    opt = getOptimizer(optimizer, lr)
    model = makeModel(modelArch, modelParams, verbose=True)
    model.compile(loss=lossFunc, optimizer=opt, metrics=metricFuncs)
    # setting up directories
    modelFolder = os.path.join(modelDir, modelName)
    weightsFolder = os.path.join(modelFolder, "weights")
    bestModelPath = os.path.join(weightsFolder, "best.hdf5")
    ensureDir(bestModelPath)
    # Keep a copy of the config next to the results for reproducibility.
    saveConfig(cfgPath, modelFolder)
    # callbacks
    monitorMetric = 'val_loss'
    # check1: snapshot every epoch (only enabled via saveAllWeights below).
    check1 = ModelCheckpoint(os.path.join(weightsFolder, modelName + "_{epoch:03d}.hdf5"),
                             monitor=monitorMetric, mode='auto')
    # check2: keep only the best model by validation loss.
    check2 = ModelCheckpoint(bestModelPath,
                             monitor=monitorMetric, save_best_only=True, mode='auto')
    check3 = EarlyStopping(monitor=monitorMetric, min_delta=0.01,
                           patience=patience, verbose=0, mode='auto')
    check4 = CSVLogger(os.path.join(modelFolder, modelName + '_trainingLog.csv'),
                       separator=',', append=True)
    check5 = ReduceLROnPlateau(monitor=monitorMetric, factor=0.1,
                               patience=patience // 3, verbose=1, mode='auto',
                               min_delta=0.001, cooldown=0, min_lr=1e-10)
    cb = [check2, check3, check4, check5]
    if cfg['training_params']['saveAllWeights']:
        cb.append(check1)
    print("Starting Training ...")
    model.fit(x=xTrain, y=yTrain, batch_size=batchSize, epochs=epochs,
              verbose=1, callbacks=cb, validation_data=(xTest, yTest),
              shuffle=True)
def extractROIFromCenterAndSize(self, roiCenterWorld, roiSizeWorld, magnification=None,
                                outputDir=None, prefix=None, oz=0, flipX=False,
                                flipY=False, removeTIFF=True, makeNifti=True):
    """Extract a rectangular ROI from the slide, tiling it if it exceeds
    MAX_TILE_SIZE, and optionally convert each tile to a NIfTI RGB volume.

    Args:
        roiCenterWorld: ROI center in world (RAS) coordinates.
        roiSizeWorld: ROI extent in world units (3-vector; z unused for size).
        magnification: source magnification; defaults to ``self.sourceLens``.
        outputDir: output directory; defaults to the slide's directory.
        prefix: output filename prefix; defaults to the slide's basename.
        oz: z translation written into the NIfTI affine.
        flipX, flipY: mirror the pixel coordinates along the given axis.
        removeTIFF: delete the intermediate TIFF after NIfTI conversion.
        makeNifti: convert extracted tiles to NIfTI via ImageUtils.
    """
    import nibabel as nib
    import utils
    if magnification is None:
        magnification = self.sourceLens
    ijk2ras = self.getAffine(magnification)
    ras2ijk = np.linalg.inv(ijk2ras)
    roiCenterPixel = nib.affines.apply_affine(ras2ijk, roiCenterWorld)
    # Per-axis pixel spacing from the affine diagonal.
    sx, sy, sz, _ = np.diag(ijk2ras)
    roiSizePixel = np.abs(roiSizeWorld / np.array((sx, sy, sz)))
    topLeft = np.round(roiCenterPixel - roiSizePixel / 2).astype(int)
    topRight = np.round(roiCenterPixel + roiSizePixel / 2).astype(int)
    topRightX = topRight[0]
    topLeftX, topLeftY, _ = topLeft
    width, height, _ = np.round(roiSizePixel).astype(int)
    if flipX:
        # BUG FIX: compute both flipped coordinates from the pre-flip values.
        # The old code reassigned topLeftX first and then flipped the *new*
        # topLeftX, so topRightX ended up as flip(flip(topRightX)).
        topLeftX, topRightX = (self.flip(topRightX, 0, magnification),
                               self.flip(topLeftX, 0, magnification))
    if flipY:
        topLeftY = self.flip(topLeftY, 1, magnification) - height
    # Integer (floor) division — `//` behaves identically to the original
    # Python 2 `/` here and stays correct under Python 3.
    numTilesX = width // MAX_TILE_SIZE + 1
    numTilesY = height // MAX_TILE_SIZE + 1
    numTiles = numTilesX * numTilesY
    tileWidth = width // numTilesX
    tileHeight = height // numTilesY
    if outputDir is None:
        outputDir = os.path.dirname(self.filepath)
    if prefix is None:
        prefix = os.path.splitext(os.path.basename(self.filepath))[0]
    for tileY in range(numTilesY):
        for tileX in range(numTilesX):
            tileTopLeftX = topLeftX + tileX * tileWidth
            tileTopLeftY = topLeftY + tileY * tileHeight
            tileTopRightX = tileTopLeftX + tileWidth - 1
            # Tile grid indices mirror the flips so filenames stay consistent.
            if flipX:
                tileColumn = numTilesX - tileX - 1
            else:
                tileColumn = tileX
            if flipY:
                tileRow = numTilesY - tileY - 1
            else:
                tileRow = tileY
            if numTiles == 1:
                outputPath = os.path.join(outputDir, prefix + '.tif')
            else:
                outputPath = os.path.join(
                    outputDir, prefix + '_tile_%d_%d.tif' % (tileRow, tileColumn))
            utils.ensureDir(outputPath)
            roiPath = self.extractROI(magnification, tileTopLeftX, tileTopLeftY,
                                      tileWidth, tileHeight, outputPath=outputPath)
            if makeNifti:
                import ImageUtils as iu
                # BUG FIX: `ijk2ras[:]` returns a numpy *view*, so the sign
                # flips and translations below mutated ijk2ras itself and
                # corrupted the affine for every subsequent tile.  Take a
                # real copy instead.
                roiAffine = np.array(ijk2ras)
                roiAffine[:3, :3] = np.abs(roiAffine[:3, :3])
                roiAffine[0, 3] = self.flip(tileTopRightX, 0, magnification) * roiAffine[0, 0]
                roiAffine[1, 3] = tileTopLeftY * roiAffine[1, 1]
                if flipX:
                    roiAffine[0, 0] *= -1
                    roiAffine[0, 3] = abs(roiAffine[0, 0]) * (tileTopLeftX + tileWidth)
                if flipY:
                    roiAffine[1, 1] *= -1
                    roiAffine[1, 3] = abs(roiAffine[1, 1]) * self.flip(
                        tileTopLeftY, 1, magnification)
                roiAffine[2, 3] = oz
                iu.histologyImageToNiftiRGB(roiPath, affine=roiAffine)
                # NOTE(review): TIFF removal is placed inside the makeNifti
                # branch — deleting the TIFF without a NIfTI conversion would
                # discard the only output; confirm against callers.
                if removeTIFF:
                    os.remove(roiPath)