def find_important_dimensions(self, poly1, poly2):
    '''Assuming check_contained(poly1, poly2) returns True, find the halfspaces that matter.
    poly1 = root, poly2 = candidate'''
    from scipy.spatial import ConvexHull
    # Binary Space Partitioning
    gurobi_model = grb.Model()
    gurobi_model.setParam('OutputFlag', self.output_flag)
    input1 = Experiment.generate_input_region(gurobi_model, self.analysis_template, poly1, self.env_input_size)
    relevant_directions = []
    for j, template in enumerate(self.analysis_template):
        multiplication = 0
        for i in range(self.env_input_size):
            multiplication += template[i] * input1[i]
        # replace the containment constraint left over from the previous iteration, if any
        previous_constraint = gurobi_model.getConstrByName("check_contained_constraint")
        if previous_constraint is not None:
            gurobi_model.remove(previous_constraint)
            gurobi_model.update()
        gurobi_model.addConstr(multiplication <= poly2[j], name="check_contained_constraint")
        gurobi_model.update()
        x_results = self.optimise(self.analysis_template, gurobi_model, input1)
        if not np.allclose(np.array(poly1), x_results):
            # this halfspace actually cuts the root polytope: estimate how much volume it removes
            vertices = np.stack(self.pypoman_compute_polytope_vertices(self.analysis_template, np.array(x_results)))  # exact vertices (currently unused)
            samples = polytope.sample(1000, self.analysis_template, x_results)
            hull = ConvexHull(samples)
            volume = hull.volume  # estimated volume of the restricted region
            relevant_directions.append((j, volume))
    return relevant_directions
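# The volume above is estimated by sampling the restricted polytope and taking the
# convex hull of the samples, rather than by exact vertex enumeration. A minimal
# self-contained sketch of that estimation step (the unit square stands in for the
# polytope; all values are illustrative):
def estimate_volume_by_sampling_example(n_samples=1000):
    import numpy as np
    from scipy.spatial import ConvexHull

    rng = np.random.default_rng(0)
    samples = rng.uniform(0.0, 1.0, size=(n_samples, 2))  # uniform points in [0, 1]^2
    hull = ConvexHull(samples)
    # hull.volume is the area in 2D; it approaches the true area (1.0) as n_samples grows
    return hull.volume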
def find_direction_split(self, template, x, nn, pre_nn):
    from sklearn.linear_model import LinearRegression

    samples = polytope.sample(10000, template, np.array(x))
    preprocessed = pre_nn(torch.tensor(samples).float())
    samples_output = torch.softmax(nn(preprocessed), 1)
    predicted_label = samples_output.detach().numpy()[:, 0]
    # transform the probabilities to log-odds space, where the decision surface is approximately linear
    y = np.clip(predicted_label, 1e-7, 1 - 1e-7)
    inv_sig_y = np.log(y / (1 - y))
    lr = LinearRegression()
    lr.fit(samples, inv_sig_y)
    template_2d: np.ndarray = np.array([Experiment.e(3, 2), Experiment.e(3, 0) - Experiment.e(3, 1)])

    def sigmoid(z):
        ex = np.exp(z)
        return ex / (1 + ex)

    preds = sigmoid(lr.predict(samples))  # probabilities according to the linear surrogate
    plot_points_and_prediction(samples @ template_2d.T, preds)
    plot_points_and_prediction(samples @ template_2d.T, predicted_label)
    coeff = lr.coef_
    intercept = lr.intercept_
    a = sympy.symbols('x')
    b = sympy.symbols('y')
    # decision boundary of the surrogate: coeff[0]*x + coeff[1]*y + intercept = 0
    classif_line1 = Line(coeff[0].item() * a + coeff[1].item() * b + intercept)
    new_coeff = -coeff[0].item() / coeff[1].item()  # slope of the decision line
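# The split direction above comes from fitting a linear surrogate in log-odds space:
# logit(p) = log(p / (1 - p)) turns a logistic-shaped probability surface into an
# (approximately) linear one, so ordinary least squares recovers the decision
# boundary. A minimal sketch on synthetic data (the ground-truth model and all
# values are illustrative, not from the original experiment):
def logit_surrogate_example():
    import numpy as np
    from sklearn.linear_model import LinearRegression

    rng = np.random.default_rng(0)
    points = rng.uniform(-1.0, 1.0, size=(1000, 2))
    true_logits = 2.0 * points[:, 0] - 3.0 * points[:, 1]  # known model: p = sigmoid(2x - 3y)
    probs = 1.0 / (1.0 + np.exp(-true_logits))
    clipped = np.clip(probs, 1e-7, 1 - 1e-7)  # avoid log(0) at saturated probabilities
    logits = np.log(clipped / (1 - clipped))
    lr = LinearRegression().fit(points, logits)
    slope = -lr.coef_[0] / lr.coef_[1]  # slope of the recovered p = 0.5 decision line
    return slope  # close to 2/3 for the model above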
def sample_probabilities(self, template, x, nn, pre_nn):
    samples = polytope.sample(10000, template, np.array(x))
    preprocessed = pre_nn(torch.tensor(samples).float())
    samples_output = torch.softmax(nn(preprocessed), 1)
    probabilities = samples_output.detach().numpy()
    # empirical lower/upper bound of each action's probability over the sampled polytope
    min_prob = np.min(probabilities, 0)
    max_prob = np.max(probabilities, 0)
    result = [(min_prob[0], max_prob[0]), (min_prob[1], max_prob[1])]
    return result
def sample_probabilities(self, template, x, nn, pre_nn):
    samples = polytope.sample(10000, template, np.array(x))
    preprocessed = pre_nn(torch.tensor(samples).float())
    samples_output = nn(preprocessed)
    if self.use_softmax:
        samples_output = torch.softmax(samples_output, 1)
    probabilities = samples_output.detach().numpy()
    # generalised variant of the above: works for any number of actions
    min_prob = np.min(probabilities, 0)
    max_prob = np.max(probabilities, 0)
    result = list(zip(min_prob, max_prob))
    return result
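# Both variants above return, for each action, an empirical lower and upper bound on
# its probability over the polytope; the bounds are sampling-based, so they hold only
# for the drawn samples, not soundly for the whole region. A usage sketch for the
# generalised variant (all names here are illustrative):
# bounds = self.sample_probabilities(self.analysis_template, root, nn, pre_nn)
# for action, (low, high) in enumerate(bounds):
#     print(f"action {action}: probability in [{low:.3f}, {high:.3f}]")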
def sample_and_split(pre_nn, nn, template, boundaries, env_input_size, template_2d,
                     action=0, minimum_length=0.1, use_softmax=True):
    # sample the polytope, retrying if the sampler occasionally fails
    repeat = True
    samples = None
    while repeat:
        repeat = False
        try:
            samples = polytope.sample(10000, template, boundaries)
        except Exception as e:
            print(f"Warning: error during the sampling: {e}")
            repeat = True
    preprocessed = pre_nn(torch.tensor(samples).float())
    samples_output = nn(preprocessed)
    if use_softmax:
        samples_output = torch.softmax(samples_output, 1)
    predicted_label = samples_output.detach().numpy()[:, action]
    # a dimension is a valid split candidate only if it is longer than minimum_length
    at_least_one_valid_dimension = False
    dimension_lengths = []
    for i, dimension in enumerate(template):
        inverted_dimension = find_inverted_dimension(-dimension, template)
        dimension_length = boundaries[i] + boundaries[inverted_dimension]
        dimension_lengths.append(dimension_length)
        if dimension_length > minimum_length:
            at_least_one_valid_dimension = True
    if at_least_one_valid_dimension:
        chosen_dimension, decision_point = find_dimension_split3(samples, predicted_label, template, template_2d, dimension_lengths, minimum_length)
        if decision_point is not None:
            split1, split2 = split_polyhedron_milp(template, boundaries, chosen_dimension, decision_point)
            return split1, split2
    raise Exception("could not find a split that satisfies the minimum length, consider increasing the minimum_length parameter")
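# split_polyhedron_milp is project-specific; for a plain box template the split
# reduces to copying the boundary vector and tightening one direction in each half.
# A minimal sketch under the assumption that rows 2*d and 2*d + 1 of the template
# hold the upper bounds of x_d and -x_d respectively (illustrative, not the
# original MILP-based implementation):
def split_box_example(boundaries, dimension, decision_point):
    lower_half = list(boundaries)
    upper_half = list(boundaries)
    lower_half[2 * dimension] = decision_point       # x_d <= decision_point
    upper_half[2 * dimension + 1] = -decision_point  # -x_d <= -decision_point, i.e. x_d >= decision_point
    return lower_half, upper_half

# Example: splitting x0 in [5, 6] (boundaries [6, -5, 1, 1]) at 5.5 gives
# split_box_example([6, -5, 1, 1], 0, 5.5) == ([5.5, -5, 1, 1], [6, -5.5, 1, 1])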
def plot_2d_sample(self, sample_size=10000):
    root = self.generate_root_polytope(self.input_boundaries)
    pre_nn = self.get_pre_nn()
    nn = self.get_nn()
    samples = polytope.sample(sample_size, self.analysis_template, np.array(root))
    preprocessed = pre_nn(torch.tensor(samples).float())
    samples_output = nn(preprocessed)
    if self.use_softmax:
        samples_output = torch.softmax(samples_output, 1)
    predicted_label = samples_output.detach().numpy()[:, 0]
    # project the samples onto the 2D visualisation template and colour them by predicted probability
    plot_points_and_prediction(samples @ self.template_2d.T, predicted_label)
    print("plot done")
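# The plot above projects each sample with a plain matrix product. A quick check of
# that projection step (the template directions here are an assumption for
# illustration, not necessarily the experiment's template_2d):
def projection_example():
    import numpy as np
    template_2d = np.array([[0.0, 0.0, 1.0], [1.0, -1.0, 0.0]])  # rows: e_2 and e_0 - e_1
    sample = np.array([[1.0, 2.0, 3.0]])
    return sample @ template_2d.T  # [[3.0, -1.0]], i.e. (x2, x0 - x1)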
config = get_PPO_config(1234, use_gpu=0)
trainer = ppo.PPOTrainer(config=config)
trainer.restore(nn_path)
policy = trainer.get_policy()
sequential_nn = convert_ray_policy_to_sequential(policy).cpu()
layers = []
for l in sequential_nn:
    layers.append(l)
nn = torch.nn.Sequential(*layers)
horizon = 10
gateway = JavaGateway(auto_field=True)
mc = gateway.jvm.explicit.MDPModelChecker(None)
analysis_template = Experiment.box(2)
boundaries = [6, -5, 1, 1]
samples = polytope.sample(2000, analysis_template, np.array(boundaries, dtype=float))
point_probabilities = []
for i, point in enumerate(samples):
    # generate the PRISM graph for this starting point
    frontier = [(0, point)]
    root = point
    graph = networkx.DiGraph()
    widgets = [progressbar.Variable('frontier'), ", ", progressbar.Variable('max_t'), ", ", progressbar.widgets.Timer()]
    # with progressbar.ProgressBar(widgets=widgets) as bar_main:
    while len(frontier) != 0:
        t, state = heapq.heappop(frontier)
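# Experiment.box(n) is assumed to produce a box template: one +/- unit-vector pair per
# coordinate, so boundaries [6, -5, 1, 1] describe x0 in [5, 6] and x1 in [-1, 1].
# A minimal sketch of such a template under that row-ordering assumption:
def box_template_example(n):
    import numpy as np
    rows = []
    for d in range(n):
        e = np.zeros(n)
        e[d] = 1.0
        rows.append(e)   # upper bound row: x_d <= b
        rows.append(-e)  # lower bound row: -x_d <= b, i.e. x_d >= -b
    return np.array(rows)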