def get_federov_data(self, factors):
    """Build the factor-description data frame consumed by R's optFederov.

    For every factor name the frame records: the inclusive lower bound, the
    inclusive upper bound (range upper limit minus one), a center of zero,
    the number of levels (the raw upper limit), a rounding value of zero,
    and False flags for both the "factor" and "mix" columns.

    Args:
        factors: iterable of parameter names; each must be a key of
            ``self.parameter_ranges`` mapping to a (low, high) pair.

    Returns:
        An rpy2 ``DataFrame`` with columns ordered
        var, low, high, center, nLevels, round, factor, mix.
    """
    lows = []
    highs = []
    levels = []
    for name in factors:
        lows.append(self.parameter_ranges[name][0])
        # optFederov expects inclusive upper limits, hence the -1.
        highs.append(self.parameter_ranges[name][1] - 1)
        levels.append(self.parameter_ranges[name][1])

    count = len(factors)
    frame = DataFrame({
        "var": StrVector(factors),
        "low": IntVector(lows),
        "high": IntVector(highs),
        "center": IntVector([0] * count),
        "nLevels": IntVector(levels),
        "round": IntVector([0] * count),
        "factor": BoolVector([False] * count),
        "mix": BoolVector([False] * count),
    })
    # R data frames built from a dict have no guaranteed column order;
    # re-select the columns explicitly so downstream code sees a fixed layout.
    column_order = ["var", "low", "high", "center",
                    "nLevels", "round", "factor", "mix"]
    return frame.rx(StrVector(column_order))
def generate_valid_sample(self, sample_size):
    """Randomly sample ``sample_size`` distinct, constraint-valid points.

    Repeatedly draws random coordinates, keeps only the unique ones that
    satisfy ``self.constraint``, and records each kept point's per-parameter
    value indices. Does not spend tuning evaluations.

    Args:
        sample_size: number of distinct valid configurations to collect.

    Returns:
        The coded R data frame (via ``self.encode_data``) whose columns are
        ``self.axis_names`` and whose rows are the sampled configurations.

    Note:
        Loops until enough valid points are found; if the constrained space
        holds fewer than ``sample_size`` points this will never terminate.
    """
    search_space_dataframe = {}
    for n in self.axis_names:
        search_space_dataframe[n] = []
    search_space = {}
    evaluated = 0
    # Report progress roughly every 10% of the target. max(1, ...) fixes a
    # ZeroDivisionError the original code raised for sample_size < 10, where
    # int(sample_size / 10) evaluated to 0.
    progress_step = max(1, sample_size // 10)
    info(
        "Generating valid search space of size {0} (does not spend evaluations)"
        .format(sample_size))
    while len(search_space) < sample_size:
        candidate_point = self.getRandomCoord()
        candidate_point_key = str(candidate_point)
        evaluated += 1
        if candidate_point_key not in search_space:
            perf_params = self.coordToPerfParams(candidate_point)
            # SECURITY NOTE(review): eval() of a stored constraint expression.
            # Safe only while self.constraint is tuner-generated/trusted;
            # never feed it untrusted input.
            is_valid = eval(self.constraint, copy.copy(perf_params),
                            dict(self.input_params))
            if is_valid:
                search_space[candidate_point_key] = candidate_point
                # Store the index of each parameter value, not the value
                # itself, so the data frame is purely integer-coded.
                for n in perf_params:
                    candidate_value = self.parameter_values[n].index(
                        perf_params[n])
                    search_space_dataframe[n].append(candidate_value)
                if len(search_space) % progress_step == 0:
                    info("Valid coordinates: " + str(len(search_space)) +
                         "/" + str(sample_size))
                    info("Tested coordinates: " + str(evaluated))
        # Heartbeat for very sparse constraint spaces.
        if evaluated % 1000000 == 0:
            info("Tested coordinates: " + str(evaluated))
    info("Valid/Tested configurations: " + str(len(search_space)) + "/" +
         str(evaluated))
    for k in search_space_dataframe:
        search_space_dataframe[k] = IntVector(search_space_dataframe[k])
    search_space_dataframe_r = DataFrame(search_space_dataframe)
    # Fix the column order to match axis_names for downstream consumers.
    search_space_dataframe_r = search_space_dataframe_r.rx(
        StrVector(self.axis_names))
    info("Generated Search Space:")
    info(str(self.base.summary_default(search_space_dataframe_r)))
    coded_search_space_dataframe_r = self.encode_data(
        search_space_dataframe_r)
    return coded_search_space_dataframe_r
"sqlite:///search_space_{0}.db".format(self.seed_space_size)) for experiment in search_space_database['experiments']: search_space.append(eval(experiment["value"])) info("Starting DOPT-anova") r_search_space = {} for i in range(len(search_space[0])): r_row = [self.dim_uplimits[i] - 1, 0] for col in search_space: r_row.append(col[i]) r_search_space[initial_factors[i]] = IntVector(r_row) data = DataFrame(r_search_space) data = data.rx(StrVector(initial_factors)) self.dopt_anova(initial_factors, initial_inverse_factors, data) sys.exit() perf_cost, mean_perf_cost = self.MAXFLOAT, self.MAXFLOAT params = self.coordToPerfParams(coord) end_time = time.time() search_time = start_time - end_time speedup = float(eval_cost[0]) / float(best_perf_cost) search_time = time.time() - start_time info('----- end random search -----')