def run(self): """Fine-tune Squeezenet model and record patched versions.""" network = self.load_network("squeezenet") assert not isinstance(network.layers[-1], ReluLayer) # Add a normalize layer to the start to take the images to the # Squeezenet format. normalize = NormalizeLayer(means=np.array([0.485, 0.456, 0.406]), standard_deviations=np.array( [0.229, 0.224, 0.225])) network = Network([normalize] + network.layers) # Get the trainset and record it. train_inputs, train_labels = self.get_train(n_labels=9) sorted_labels = sorted(set(train_labels)) train_labels = list(map(sorted_labels.index, train_labels)) self.record_artifact(train_inputs, f"train_inputs", "pickle") self.record_artifact(sorted_labels, f"sorted_labels", "pickle") self.record_artifact(train_labels, f"train_labels", "pickle") # Add a final layer which maps it into the subset of classes # considered. final_weights = np.zeros((1000, len(sorted_labels))) final_biases = np.zeros(len(sorted_labels)) for new_label, old_label in enumerate(sorted_labels): final_weights[old_label, new_label] = 1. final_layer = FullyConnectedLayer(final_weights, final_biases) network = Network(network.layers + [final_layer]) # Record the network before patching. self.record_artifact(network, f"pre_patching", "network") which_params = int(input("Which fine-tuning params? (1 or 2): ")) assert which_params in {1, 2} n_rows = int( input("How many rows of Table 1 to generate (1, 2, 3, or 4): ")) for n_points in [100, 200, 400, 800][:n_rows]: print("~~~~", "Points:", n_points, "~~~~") key = f"{n_points}_-1" patcher = FTRepair(network, train_inputs[:n_points], train_labels[:n_points]) patcher.lr = 0.0001 patcher.momentum = 0.0 # This is just a maximum epoch timeout, it will stop once the # constraints are met. patcher.epochs = 500 if which_params == 1: patcher.batch_size = 2 else: patcher.batch_size = 16 patched = patcher.compute() self.record_artifact(patcher.timing, f"{key}/timing", "pickle") self.record_artifact( patched, f"{key}/patched", "network" if patched is not None else "pickle")
def run(self): """Repair Squeezenet model and record patched versions.""" network = self.load_network("squeezenet") assert not isinstance(network.layers[-1], ReluLayer) # Add a normalize layer to the start to take the images to the # Squeezenet format. normalize = NormalizeLayer(means=np.array([0.485, 0.456, 0.406]), standard_deviations=np.array( [0.229, 0.224, 0.225])) network = Network([normalize] + network.layers) # Get the trainset and record it. train_inputs, train_labels = self.get_train(n_labels=9) sorted_labels = sorted(set(train_labels)) train_labels = list(map(sorted_labels.index, train_labels)) self.record_artifact(train_inputs, f"train_inputs", "pickle") self.record_artifact(sorted_labels, f"sorted_labels", "pickle") self.record_artifact(train_labels, f"train_labels", "pickle") # Add a final layer which maps it into the subset of classes # considered. final_weights = np.zeros((1000, len(sorted_labels))) final_biases = np.zeros(len(sorted_labels)) for new_label, old_label in enumerate(sorted_labels): final_weights[old_label, new_label] = 1. final_layer = FullyConnectedLayer(final_weights, final_biases) network = Network(network.layers + [final_layer]) # Record the network before patching. self.record_artifact(network, f"pre_patching", "network") # All the layers we can patch. patchable = [ i for i, layer in enumerate(network.layers) if isinstance(layer, (FullyConnectedLayer, Conv2DLayer)) ] n_rows = int( input("How many rows of Table 1 to generate (1, 2, 3, or 4): ")) for n_points in [100, 200, 400, 800][:n_rows]: print("~~~~", "Points:", n_points, "~~~~") for layer in patchable: print("::::", "Layer:", layer, "::::") key = f"{n_points}_{layer}" patcher = ProvableRepair(network, layer, train_inputs[:n_points], train_labels[:n_points]) patcher.batch_size = 8 patcher.gurobi_timelimit = (n_points // 10) * 60 patcher.gurobi_crossover = 0 patched = patcher.compute() self.record_artifact(patcher.timing, f"{key}/timing", "pickle") self.record_artifact( patched, f"{key}/patched", "ddnn" if patched is not None else "pickle")
def patchable_network(cls):
    """Returns the network used in the patching section."""
    A1 = np.array([[-1., 1.], [1., 0.], [0., 1.]]).T
    b1 = np.array([-0.5, 0., 0.])
    A2 = np.array([[1., 1., 1.], [0., -1., -1.]]).T
    b2 = np.array([0., 1.])
    return Network([
        FullyConnectedLayer(A1, b1),
        ReluLayer(),
        FullyConnectedLayer(A2, b2),
    ])
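# Hedged worked example for patchable_network above: a manual forward pass
# through the same weights in plain numpy. It assumes FullyConnectedLayer
# computes x @ weights + biases; that convention is an assumption of this
# sketch, not taken from this file.
import numpy as np

W1 = np.array([[-1., 1.], [1., 0.], [0., 1.]]).T  # shape (2, 3)
b1 = np.array([-0.5, 0., 0.])
W2 = np.array([[1., 1., 1.], [0., -1., -1.]]).T   # shape (3, 2)
b2 = np.array([0., 1.])

x = np.array([1., 0.])
hidden = np.maximum(x @ W1 + b1, 0.)  # pre-activation [-1.5, 1., 0.] -> ReLU [0., 1., 0.]
output = hidden @ W2 + b2             # [1., -1.] + [0., 1.] = [1., 0.]
print(output)                         # expected: [1. 0.]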
def run(self): """Runs the corruption-patching experiment.""" network = self.load_network("mnist_relu_3_100") assert isinstance(network.layers[-1], ReluLayer) network = Network(network.layers[:-1]) self.record_artifact(network, "original", "network") n_rows = int( input("How many rows of Table 2 to generate (1, 2, 3, or 4): ")) for n_lines in [10, 25, 50, 100][:n_rows]: print(f"Running with {n_lines} lines") self.run_for(network, n_lines)
def load_network(network_name, maxify_acas=True):
    """Loads an experiment network given by @network_name.

    Currently supports models of the form:

    - acas_#_# (ACAS Xu models translated from the ReluPlex format)
    - {cifar10,mnist}_relu_#_# (fully-connected ReLU models from ERAN)
    - {cifar10,mnist}_relu_conv{small,medium,big}{_diffai,_pgd}
      (convolutional ReLU models from ERAN)

    All models should be referenced in the BUILD rule experiments:models.

    @maxify_acas controls whether the ACAS model is "cleaned" before being
    returned; cleaning removes the unnecessary ReLU layer at the end and
    inverts the outputs so the recommended action becomes the maximal score.
    """
    if "acas_" in network_name:
        _, i, j = network_name.split("_")
        network = Network.from_file(
            "experiments/models/acas_%s_%s.eran" % (i, j))
        if maxify_acas:
            # We remove the ReLU layer from the end of the model as it
            # doesn't actually change the classification (when one exists).
            assert not hasattr(network.layers[-1], "weights")
            network.layers = network.layers[:-1]
            # ACAS Xu networks use the minimal score as the class instead
            # of the more-standard maximum score; this inverts the last
            # layer so the minimal score becomes the max.
            network.layers[-1].weights *= -1.0
            network.layers[-1].biases *= -1.0
        return network
    if "squeezenet" in network_name:
        return Network.from_file(
            "external/onnx_squeezenet/squeezenet1.1.onnx")
    return Network.from_file(
        "external/%s_model/file/model.eran" % network_name)
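# Hedged usage sketch for load_network: inside an experiment's run() the loads
# look like the lines below. The names follow the schemes in the docstring;
# whether each particular model file is present in the workspace is an
# assumption of this example.
#
#     network = self.load_network("acas_2_1")          # cleaned ACAS Xu model
#     network = self.load_network("mnist_relu_3_100")  # ERAN fully-connected ReLU
#     network = self.load_network("squeezenet")        # ONNX SqueezeNet 1.1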
def read_artifact(self, key):
    """Reads an artifact from the loaded artifact store indexed by @key.

    Experiment.open() *MUST* be called before using read_artifact(...).

    This method is intended to be used only by the analyze() method (not
    run(), which should be calling record_artifact).
    """
    assert self.artifacts is not None
    try:
        artifact = next(artifact for artifact in self.artifacts
                        if artifact["key"] == key)
    except StopIteration:
        raise KeyError(key)

    def read_pb(path, pb_type):
        """Deserializes Protobuf data stored in a file.

        @path is the file path, @pb_type is the Protobuf descriptor to
        parse it as.
        """
        with open(path, "rb") as from_file:
            string_rep = from_file.read()
        serialized = pb_type()
        serialized.ParseFromString(string_rep)
        return serialized

    if artifact["type"] == "rgb_image":
        return imageio.imread(artifact["path"])
    if artifact["type"] == "np_array":
        return np.load(artifact["path"], allow_pickle=True)
    if artifact["type"] == "pickle":
        with open(artifact["path"], "rb") as from_file:
            return pickle.load(from_file)
    if artifact["type"] == "rawpath":
        return artifact["path"]
    if artifact["type"] == "csv":
        return self.read_csv(artifact["path"])
    if artifact["type"] == "network":
        return Network.deserialize(
            read_pb(artifact["path"], syrenn_pb.Network))
    if artifact["type"] == "masking_network":
        return MaskingNetwork.deserialize(
            read_pb(artifact["path"], syrenn_pb.MaskingNetwork))
    raise NotImplementedError
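# Hedged sketch of the record/read pairing: run() stores values with
# record_artifact(value, key, type) (as in the run() methods above), and
# analyze() reads them back by key through read_artifact. The analyze() body
# below is hypothetical; only the keys and their types are taken from the
# code above.
def analyze(self):
    """Example analyze() body reading artifacts recorded by run()."""
    original = self.read_artifact("pre_patching")      # type "network" -> Network
    train_inputs = self.read_artifact("train_inputs")  # type "pickle"
    timing = self.read_artifact("100_-1/timing")       # type "pickle" (FTRepair timing)
    print(type(original).__name__, type(train_inputs).__name__, timing)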
def habitability_network(cls, params=False):
    """Returns the habitability network from the overview.

    If @params=True, returns a list of the parameters of the network. This
    option is used to linearize the network around a point in .linearize(),
    which is in turn used to explicitly state the maps in LaTeX.
    """
    A1 = np.array([[-1.0, 0.25, 1], [+1.0, 0.5, 1], [0, 1, 0],
                   [0.5, 0.5, 2]]).T
    b1 = np.array([1, -1, -1, -5])
    A2 = np.array([[-2, 1.0, 1.0, 1], [1.0, 2.0, -1.0, 2]]).T
    b2 = np.array([1, 0])
    if params:
        return [A1, b1, A2, b2]
    return Network([
        FullyConnectedLayer(A1, b1),
        ReluLayer(),
        FullyConnectedLayer(A2, b2),
    ])
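# Hedged sketch of the "linearize around a point" idea mentioned in the
# docstring above. This is NOT the repo's .linearize(); it is the standard
# construction for a one-hidden-layer ReLU network, assuming the
# FullyConnectedLayer convention x @ weights + biases.
import numpy as np

def linearize_at(x0, A1, b1, A2, b2):
    """Returns (W, c) with network(x) == x @ W + c in a neighborhood of x0.

    Valid as long as no ReLU input is exactly zero at x0, so the activation
    pattern is locally constant.
    """
    mask = ((x0 @ A1 + b1) > 0).astype(float)  # ReLU activation pattern at x0
    W = (A1 * mask) @ A2                       # effective linear map near x0
    c = (b1 * mask) @ A2 + b2                  # effective bias near x0
    return W, c

# Example (hypothetical caller; "SomeExperiment" stands in for whatever class
# defines habitability_network above):
#     W, c = linearize_at(np.array([1., 2., 0.5]),
#                         *SomeExperiment.habitability_network(params=True))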