def _setup_seeds(hex_str):  # pragma: main
    """Seed every global random stream from a single hex string.

    Only call this from ``main``: it mutates the global ``random`` and
    ``np.random`` states. This is part of the one-time experiment setup
    before a study. If torch ever becomes used in any of our optimizers
    then its seeding will need to come back here (TF seed init could be
    done the same way)::

        torch.manual_seed(random_seed(master_stream))
        if torch.cuda.is_available():
            torch.cuda.manual_seed(random_seed(master_stream))
    """
    # Draw a distinct seed per global stream from one master stream so the
    # streams are not correlated. A plain ``random.Random`` serves as the
    # master because, unlike numpy, it is not limited to 32-bit seeds; we
    # could go through the UUID class, but converting the hex straight to
    # a Python int is more direct.
    seed_source = pyrandom.Random(int(hex_str, 16))
    pyrandom.seed(random_seed(seed_source))
    np.random.seed(random_seed(seed_source))
def _hyperopt_suggest(self):
    """Obtain a single new trial from hyperopt's TPE and record it.

    Returns the newly created trial document after it has been inserted
    into ``self.trials`` and the trials object refreshed.
    """
    trial_ids = self.trials.new_trial_ids(1)
    assert len(trial_ids) == 1

    self.trials.refresh()
    # A fresh seed is drawn per call so TPE's sampling stays reproducible.
    suggestions = tpe.suggest(trial_ids, self.domain, self.trials, random_seed(self.random))
    assert len(suggestions) == 1

    self.trials.insert_trial_docs(suggestions)
    self.trials.refresh()

    (trial,) = suggestions  # extract singleton
    return trial
def main():
    """See README for instructions on calling launcher.

    Entry point: parses launcher args, sets up logging, the run UUID, and
    the global random seeds, then either dry-runs to a jobs file or
    executes the studies in sub-processes.
    """
    description = "Launch series of studies across functions and optimizers"
    args = cmd.parse_args(cmd.launcher_parser(description))

    logger.setLevel(logging.INFO)  # Note this is the module-wide logger
    if args[CmdArgs.verbose]:
        logger.addHandler(logging.StreamHandler())

    # Get optimizer settings, says which file to call for each optimizer
    settings = cmd.load_optimizer_settings(args[CmdArgs.optimizer_root])
    opt_file_lookup = {optimizer: wrapper_file for optimizer, (wrapper_file, _) in settings.items()}

    # Setup uuid: generate one unless the user supplied it (debug only).
    if args[CmdArgs.uuid] is None:
        args[CmdArgs.uuid] = pyuuid.uuid4().hex  # debatable if uuid1 or uuid4 is better here
    else:
        warnings.warn(
            "User UUID supplied. This is only desired for debugging. Careless use could lead to study id conflicts.",
            UserWarning,
        )
    run_uuid = pyuuid.UUID(hex=args[CmdArgs.uuid])
    # Round-trip check: the supplied hex must be in canonical form.
    assert run_uuid.hex == args[CmdArgs.uuid]
    logger.info("Supply --uuid %s to reproduce this run." % run_uuid.hex)

    # Log all the options (JSON line so runs are machine-parseable).
    print("Launcher options (JSON):\n")
    print(json.dumps({"bayesmark-launch-args": cmd.serializable_dict(args)}))
    print("\n")

    # Set the master seed (derive from the uuid we just setup), then derive
    # the numpy seed from the freshly seeded global python stream.
    pyrandom.seed(run_uuid.int)
    np.random.seed(random_seed(pyrandom))

    # Now run it, either to dry run file or executes sub-processes
    if args[CmdArgs.dry_run]:
        with absopen(args[CmdArgs.jobs_file], "w") as fp:
            dry_run(args, opt_file_lookup, run_uuid, fp)
    else:
        # Non-positive timeout means "no timeout" for the real run.
        timeout = args[CmdArgs.timeout] if args[CmdArgs.timeout] > 0 else None
        real_run(args, opt_file_lookup, run_uuid, timeout)

    logger.info("done")
def _suggest(self):
    """Helper function to `suggest` that does the work of calling
    `hyperopt` via its dumb API.
    """
    # Reserve exactly one new trial id from the trials store.
    ids = self.trials.new_trial_ids(1)
    assert len(ids) == 1
    self.trials.refresh()

    # Draw a per-call seed for TPE so suggestions are reproducible.
    tpe_seed = random_seed(self.random)
    proposed = tpe.suggest(ids, self.domain, self.trials, tpe_seed)
    assert len(proposed) == 1

    # Persist the suggestion and sync the trials object before returning.
    self.trials.insert_trial_docs(proposed)
    self.trials.refresh()
    (result,) = proposed  # extract singleton
    return result
def test_random_seed(seed):
    """Test `np_util.random_seed` draws a seed usable by numpy.

    Fixes a defect: the original test computed the seed but contained no
    assertions, so it could never fail regardless of behavior.
    """
    random = np.random.RandomState(seed)
    seed = np_util.random_seed(random)
    # Elsewhere the result is fed to np.random.seed, which only accepts
    # integers in [0, 2**32), so pin that contract here.
    assert 0 <= seed < 2 ** 32
    # The drawn seed must actually construct a new stream without error.
    np.random.RandomState(seed)