def create_search_space(
    input_shape=(10,), output_shape=(7,), num_layers=10, *args, **kwargs
):
    arch = AutoKSearchSpace(input_shape, output_shape, regression=True)
    source = prev_input = arch.input_nodes[0]

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([source], maxlen=3)

    for _ in range(num_layers):
        vnode = VariableNode()
        add_dense_to_(vnode)

        arch.connect(prev_input, vnode)

        # * Cell output
        cell_output = vnode

        cmerge = ConstantNode()
        cmerge.set_op(AddByProjecting(arch, [cell_output], activation="relu"))

        for anchor in anchor_points:
            skipco = VariableNode()
            skipco.add_op(Tensor([]))
            skipco.add_op(Connect(arch, anchor))
            arch.connect(skipco, cmerge)

        # ! for next iter
        prev_input = cmerge
        anchor_points.append(prev_input)

    return arch
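# A minimal usage sketch, not part of the original module. It assumes that
# `add_dense_to_` registers at least one candidate operation on the node it
# receives. With num_layers=1 the space contains exactly two VariableNodes
# (one dense cell and one skip connection), so a choice vector of length 2
# is enough; `set_ops` and `create_model` are the same calls used in the
# tests further below.
if __name__ == "__main__":
    space = create_search_space(input_shape=(10,), output_shape=(7,), num_layers=1)
    space.set_ops([0, 0])         # pick the first candidate op for every variable node
    model = space.create_model()  # materialize the corresponding tf.keras model
    model.summary()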
def create_conv_lstm_search_space(
    input_shape=(7, 808, 782, 1),
    output_shape=(7, 808, 782, 1),
    num_layers=10,
    *args,
    **kwargs,
):
    arch = KSearchSpace(input_shape, output_shape)
    source = prev_input = arch.input_nodes[0]

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([source], maxlen=3)

    for _ in range(num_layers):
        vnode = VariableNode()
        add_convlstm_to_(vnode)

        arch.connect(prev_input, vnode)

        # * Cell output
        cell_output = vnode

        cmerge = ConstantNode()
        cmerge.set_op(AddByProjecting(arch, [cell_output], activation="relu", axis=-2))

        for anchor in anchor_points:
            skipco = VariableNode()
            skipco.add_op(Tensor([]))
            skipco.add_op(Connect(arch, anchor))
            arch.connect(skipco, cmerge)

        # ! for next iter
        prev_input = cmerge
        anchor_points.append(prev_input)

    # Add layer to enforce consistency
    cnode = ConstantNode()
    units = output_shape[-1]
    add_convlstm_oplayer_(cnode, units)

    arch.connect(prev_input, cnode)

    return arch
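# The helpers `add_convlstm_to_` and `add_convlstm_oplayer_` are defined in
# another module. The sketch below is only an assumption of what the first
# one could look like: it fills a VariableNode with ConvLSTM2D candidates of
# different filter counts, wrapped in the generic `Operation` wrapper used in
# the tests below.
def add_convlstm_to_sketch(vnode, filter_choices=(4, 8, 16)):
    vnode.add_op(Identity())  # allow the layer to be skipped
    for filters in filter_choices:
        vnode.add_op(
            Operation(
                layer=tf.keras.layers.ConvLSTM2D(
                    filters=filters,
                    kernel_size=(3, 3),
                    padding="same",
                    return_sequences=True,
                )
            )
        )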
def build_sub_graph(self, input_, num_layers=3):
    source = prev_input = input_

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([source], maxlen=3)

    for _ in range(num_layers):
        vnode = VariableNode()
        self.add_dense_to_(vnode)

        self.ss.connect(prev_input, vnode)

        # * Cell output
        cell_output = vnode

        cmerge = ConstantNode()
        cmerge.set_op(AddByProjecting(self.ss, [cell_output], activation="relu"))

        for anchor in anchor_points:
            skipco = VariableNode()
            skipco.add_op(Zero())
            skipco.add_op(Connect(self.ss, anchor))
            self.ss.connect(skipco, cmerge)

        prev_input = cmerge  # ! for next iter
        anchor_points.append(prev_input)

    return prev_input
def build(
    self,
    input_shape,
    output_shape,
    regression=True,
    num_units=(1, 11),
    num_layers=10,
    **kwargs
):
    """
    Args:
        input_shape (tuple): Shape of the inputs (without the batch dimension).
        output_shape (tuple): Shape of the outputs (without the batch dimension).
        regression (bool, optional): Whether the model is a regressor (True) or a classifier (False). Defaults to True.
        num_units (tuple, optional): Range of the number of units, unpacked as range(start, end[, step]). Defaults to (1, 11).
        num_layers (int, optional): Maximum number of layers. Defaults to 10.

    Returns:
        AutoKSearchSpace: A search space object based on tf.keras implementations.
    """
    ss = AutoKSearchSpace(input_shape, output_shape, regression=regression)
    prev_node = ss.input_nodes[0]

    for _ in range(num_layers):
        vnode = VariableNode()
        vnode.add_op(Identity())
        for i in range(*num_units):
            vnode.add_op(Dense(i, tf.nn.relu))

        ss.connect(prev_node, vnode)
        prev_node = vnode

    return ss
def test_create_more_nodes(self):
    from deephyper.nas.space import AutoKSearchSpace
    from deephyper.nas.space.node import VariableNode
    from deephyper.nas.space.op.op1d import Dense

    struct = AutoKSearchSpace((5,), (1,), regression=True)
    vnode1 = VariableNode()
    struct.connect(struct.input_nodes[0], vnode1)

    vnode1.add_op(Dense(10))

    vnode2 = VariableNode()
    vnode2.add_op(Dense(10))
    struct.connect(vnode1, vnode2)

    struct.set_ops([0, 0])

    falias = "test_auto_keras_search_space"
    struct.draw_graphviz(f"{falias}.dot")

    model = struct.create_model()
    from tensorflow.keras.utils import plot_model

    plot_model(model, to_file=f"{falias}.png", show_shapes=True)
def build_sub_graph(self, input_, num_layers=3):
    source = prev_input = input_

    mirror = False
    is_input = False
    if type(source) is ConstantNode:
        if type(source._op) is Tensor:
            if "input_" in source._op.tensor.name:
                is_input = True
                input_name = source._op.tensor.name
                input_shape = tuple(source._op.tensor.shape[1:])
                if self.shapes_to_vnodes.get(input_shape) is None:
                    # first time this input shape is seen: record its variable nodes
                    self.shapes_to_vnodes[input_shape] = []
                else:
                    # shape already seen: mirror the nodes recorded for the first occurrence
                    mirror = True
                    memory = self.shapes_to_vnodes[input_shape][::-1]

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([source], maxlen=3)

    for layer_i in range(num_layers):
        if not mirror:
            vnode = VariableNode()
            self.add_dense_to_(vnode)
            if is_input:
                self.shapes_to_vnodes[input_shape].append(vnode)
        else:
            vnode = MirrorNode(memory.pop())

        self.ss.connect(prev_input, vnode)

        # * Cell output
        prev_node = vnode

        if layer_i == num_layers - 1:
            return prev_node

        cmerge = ConstantNode()
        cmerge.set_op(Concatenate(self.ss, [prev_node]))

        for anchor in anchor_points:
            if not mirror:
                skipco = VariableNode()
                if is_input:
                    self.shapes_to_vnodes[input_shape].append(skipco)
            else:
                skipco = MimeNode(memory.pop())

            skipco.add_op(Zero())
            skipco.add_op(Connect(self.ss, anchor))
            self.ss.connect(skipco, cmerge)

        prev_input = cmerge  # ! for next iter
        anchor_points.append(prev_input)

    return prev_input
def test_create_search_space(input_shape=(2,), output_shape=(1,), **kwargs):
    struct = AutoKSearchSpace(input_shape, output_shape, regression=True)

    vnode1 = VariableNode()
    for _ in range(1, 11):
        vnode1.add_op(Operation(layer=tf.keras.layers.Dense(10)))

    struct.connect(struct.input_nodes[0], vnode1)

    struct.set_ops([0])

    struct.create_model()
def test_mirror_node():
    vnode = VariableNode()
    vop = Dense(10)
    vnode.add_op(vop)
    vnode.add_op(Dense(20))

    mnode = MirrorNode(vnode)

    vnode.set_op(0)

    # the mirror node reuses the exact operation chosen for the mirrored node
    assert vnode.op == vop
    assert mnode.op == vop
def create_search_space(
    input_shape=(20,), output_shape=(20,), num_layers=5, *args, **kwargs
):
    vocab_size = 10000

    ss = KSearchSpace(input_shape, (*output_shape, vocab_size))
    source = ss.input_nodes[0]

    emb = VariableNode()
    add_embedding_(emb, vocab_size)
    ss.connect(source, emb)

    timestep_dropout = prev_input = ConstantNode(op=TimestepDropout(rate=0.1))
    ss.connect(emb, timestep_dropout)

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([timestep_dropout], maxlen=3)

    for _ in range(num_layers):
        vnode = VariableNode()
        add_lstm_seq_(vnode)

        ss.connect(prev_input, vnode)

        # * Cell output
        cell_output = vnode

        cmerge = ConstantNode()
        cmerge.set_op(AddByProjecting(ss, [cell_output], activation="relu"))

        for anchor in anchor_points:
            skipco = VariableNode()
            skipco.add_op(Zero())
            skipco.add_op(Connect(ss, anchor))
            ss.connect(skipco, cmerge)

        # ! for next iter
        prev_input = cmerge
        anchor_points.append(prev_input)

    # alternative output layer kept for reference:
    # out = ConstantNode(
    #     op=tf.keras.layers.TimeDistributed(
    #         tf.keras.layers.Dense(units=vocab_size, activation="softmax")
    #     )
    # )
    out = ConstantNode(op=tf.keras.layers.Dense(units=vocab_size, activation="softmax"))
    ss.connect(prev_input, out)

    return ss
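# `add_embedding_`, `add_lstm_seq_` and `TimestepDropout` come from other
# modules. The sketch below is an assumption of how the two helpers could be
# written: Embedding candidates of different widths, and LSTM candidates that
# keep the sequence dimension (return_sequences=True) so the per-timestep
# softmax above still applies.
def add_embedding_sketch(vnode, vocab_size, dim_choices=(32, 64, 128)):
    for dim in dim_choices:
        vnode.add_op(Operation(layer=tf.keras.layers.Embedding(vocab_size, dim)))


def add_lstm_seq_sketch(vnode, unit_choices=(16, 32, 64)):
    vnode.add_op(Identity())  # allow the layer to be skipped
    for units in unit_choices:
        vnode.add_op(
            Operation(layer=tf.keras.layers.LSTM(units, return_sequences=True))
        )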
def test_mime_node():
    vnode = VariableNode()
    vop = Dense(10)
    vnode.add_op(vop)
    vnode.add_op(Dense(20))

    mnode = MimeNode(vnode)
    mop = Dense(30)
    mnode.add_op(mop)
    mnode.add_op(Dense(40))

    vnode.set_op(0)

    # the mime node applies the same choice index to its own list of operations
    assert vnode.op == vop
    assert mnode.op == mop
def create_search_space(
    input_shape=(32, 32, 3),
    output_shape=(10,),
    num_filters=8,
    num_blocks=4,
    normal_cells=2,
    reduction_cells=1,
    repetitions=3,
    *args,
    **kwargs,
):
    ss = AutoKSearchSpace(input_shape, output_shape, regression=False)
    source = prev_input = ss.input_nodes[0]

    # keep track of the two most recent hidden states used as cell inputs
    hidden_states = collections.deque([source, source], maxlen=2)

    for ri in range(repetitions):
        for nci in range(normal_cells):
            # generate a normal cell
            cout = generate_cell(
                ss,
                hidden_states,
                num_blocks,
                strides=1,
                mime=ri + nci > 0,
                num_filters=num_filters,
            )
            hidden_states.append(cout)

        if ri < repetitions - 1:  # we don't want the last cell to be a reduction cell
            for rci in range(reduction_cells):
                # generate a reduction cell
                cout = generate_cell(
                    ss,
                    hidden_states,
                    num_blocks,
                    strides=2,
                    mime=ri + rci > 0,
                    num_filters=num_filters,
                )
                hidden_states.append(cout)

    # out_node = ConstantNode(op=Dense(100, activation=tf.nn.relu))
    out_dense = VariableNode()
    out_dense.add_op(Identity())
    for units in [10, 20, 50, 100, 200, 500, 1000]:
        out_dense.add_op(Dense(units, activation=tf.nn.relu))
    ss.connect(cout, out_dense)

    out_dropout = VariableNode()
    out_dropout.add_op(Identity())
    for drop_rate in [0.01, 0.02, 0.05, 0.1, 0.2, 0.5, 0.8]:
        out_dropout.add_op(Dropout(rate=drop_rate))
    ss.connect(out_dense, out_dropout)

    return ss
def build(
    self,
    input_shape,
    output_shape,
    regression=True,
    num_layers=10,
    dropout=0.0,
    **kwargs,
):
    ss = AutoKSearchSpace(input_shape, output_shape, regression=regression)
    source = prev_input = ss.input_nodes[0]

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([source], maxlen=3)

    for _ in range(num_layers):
        vnode = VariableNode()
        self.add_dense_to_(vnode)

        ss.connect(prev_input, vnode)

        # * Cell output
        cell_output = vnode

        cmerge = ConstantNode()
        cmerge.set_op(AddByProjecting(ss, [cell_output], activation="relu"))

        for anchor in anchor_points:
            skipco = VariableNode()
            skipco.add_op(Zero())
            skipco.add_op(Connect(ss, anchor))
            ss.connect(skipco, cmerge)

        prev_input = cmerge  # ! for next iter
        anchor_points.append(prev_input)

    if dropout >= 0.0:
        dropout_node = ConstantNode(op=Dropout(rate=dropout))
        ss.connect(prev_input, dropout_node)

    return ss
def test_create_one_vnode_with_wrong_output_shape(self):
    from deephyper.nas.space import KSearchSpace

    struct = KSearchSpace((5,), (1,))

    from deephyper.nas.space.node import VariableNode

    vnode = VariableNode()
    struct.connect(struct.input_nodes[0], vnode)

    from deephyper.nas.space.op.op1d import Dense

    vnode.add_op(Dense(10))

    struct.set_ops([0])

    with pytest.raises(WrongOutputShape):
        struct.create_model()
def test_create_multiple_inputs_with_one_vnode(self):
    from deephyper.nas.space import KSearchSpace
    from deephyper.nas.space.node import VariableNode, ConstantNode
    from deephyper.nas.space.op.op1d import Dense
    from deephyper.nas.space.op.merge import Concatenate

    struct = KSearchSpace([(5,), (5,)], (1,))

    merge = ConstantNode()
    merge.set_op(Concatenate(struct, struct.input_nodes))

    vnode1 = VariableNode()
    struct.connect(merge, vnode1)
    vnode1.add_op(Dense(1))

    struct.set_ops([0])
    struct.create_model()
def build(
    self,
    input_shape,
    output_shape,
    units=[128, 64, 32, 16, 8, 16, 32, 64, 128],
    num_layers=5,
    **kwargs,
):
    ss = KSearchSpace(input_shape, output_shape)
    inp = ss.input_nodes[0]

    # auto-encoder: fixed encoder-decoder layout (overrides the `units` argument)
    units = [128, 64, 32, 16, 8, 16, 32, 64, 128]

    prev_node = inp
    d = 1
    for i in range(len(units)):
        vnode = VariableNode()
        vnode.add_op(Identity())
        if d == 1 and units[i] < units[i + 1]:
            # bottleneck layer: this is the latent space of the auto-encoder
            d = -1
            for u in range(min(2, units[i]), max(2, units[i]) + 1, 2):
                vnode.add_op(Dense(u, tf.nn.relu))
            latent_space = vnode
        else:
            for u in range(
                min(units[i], units[i + d]), max(units[i], units[i + d]) + 1, 2
            ):
                vnode.add_op(Dense(u, tf.nn.relu))

        ss.connect(prev_node, vnode)
        prev_node = vnode

    out2 = ConstantNode(op=Dense(output_shape[0][0], name="output_0"))
    ss.connect(prev_node, out2)

    # regressor branch starts from the latent space
    prev_node = latent_space
    # prev_node = inp
    for _ in range(num_layers):
        vnode = VariableNode()
        for i in range(16, 129, 16):
            vnode.add_op(Dense(i, tf.nn.relu))

        ss.connect(prev_node, vnode)
        prev_node = vnode

    out1 = ConstantNode(op=Dense(output_shape[1][0], name="output_1"))
    ss.connect(prev_node, out1)

    return ss
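# A minimal usage sketch, assuming `SpaceFactory` is the (hypothetical) name of
# the class defining this `build` method: the space has one input and two
# outputs, the reconstruction ("output_0") and the regression target
# ("output_1"), so `output_shape` is a list of two shape tuples.
def build_autoencoder_space_sketch():
    factory = SpaceFactory()  # hypothetical class name
    return factory.build(input_shape=(128,), output_shape=[(128,), (1,)])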
def test_create_one_vnode(self):
    from deephyper.nas.space import KSearchSpace

    struct = KSearchSpace((5,), (1,))

    from deephyper.nas.space.node import VariableNode

    vnode = VariableNode()
    struct.connect(struct.input_nodes[0], vnode)

    from deephyper.nas.space.op.op1d import Dense

    vnode.add_op(Dense(1))

    struct.set_ops([0])

    falias = "test_keras_search_space"
    struct.draw_graphviz(f"{falias}.dot")

    model = struct.create_model()
    from tensorflow.keras.utils import plot_model

    plot_model(model, to_file=f"{falias}.png", show_shapes=True)
def gen_vnode(self) -> VariableNode:
    vnode = VariableNode()
    for i in range(1, 11):
        vnode.add_op(Dense(i, tf.nn.relu))
    return vnode
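# Sketch of how gen_vnode might be combined with the search space held by the
# same factory (assuming a `self.ss` attribute as in build_sub_graph above):
# stack a few generated variable nodes in sequence.
def stack_vnodes_sketch(factory, prev_node, depth=3):
    for _ in range(depth):
        vnode = factory.gen_vnode()
        factory.ss.connect(prev_node, vnode)
        prev_node = vnode
    return prev_node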