def __init__(self, input_shape, output_shape, batch_size=None, *args, **kwargs):
    super().__init__(seed=kwargs.get("seed", None))

    if type(input_shape) is tuple:
        # we have only one input tensor here
        op = Tensor(
            keras.layers.Input(input_shape, name="input_0", batch_size=batch_size)
        )
        self.input_nodes = [ConstantNode(op=op, name="Input_0")]
    elif type(input_shape) is list and all(
        map(lambda x: type(x) is tuple, input_shape)
    ):
        # we have a list of input tensors here
        self.input_nodes = list()
        for i in range(len(input_shape)):
            # pick the per-input batch size without overwriting the list itself
            batch_size_i = batch_size[i] if type(batch_size) is list else None
            op = Tensor(
                keras.layers.Input(
                    input_shape[i], name=f"input_{i}", batch_size=batch_size_i
                )
            )
            inode = ConstantNode(op=op, name=f"Input_{i}")
            self.input_nodes.append(inode)
    else:
        raise InputShapeOfWrongType(input_shape)

    for node in self.input_nodes:
        self.graph.add_node(node)

    self.output_shape = output_shape
    self.output_node = None

    self._model = None
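# Usage sketch for the constructor above (assuming it belongs to the KSearchSpace
# class used elsewhere in this file; shapes and batch sizes are illustrative).
# A single tuple creates one input node, while a list of tuples creates one
# ConstantNode per input tensor, optionally with a per-input batch size.
space_single = KSearchSpace(input_shape=(10,), output_shape=(7,))
space_multi = KSearchSpace(
    input_shape=[(5,), (5,)],
    output_shape=(1,),
    batch_size=[32, 32],  # one entry per input; omit for a dynamic batch dimension
)
assert len(space_multi.input_nodes) == 2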
def build_sub_graph(self, input_, num_layers=3):
    source = prev_input = input_

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([source], maxlen=3)

    for _ in range(num_layers):
        vnode = VariableNode()
        self.add_dense_to_(vnode)

        self.ss.connect(prev_input, vnode)

        # * Cell output
        cell_output = vnode

        cmerge = ConstantNode()
        cmerge.set_op(AddByProjecting(self.ss, [cell_output], activation="relu"))

        for anchor in anchor_points:
            skipco = VariableNode()
            skipco.add_op(Zero())
            skipco.add_op(Connect(self.ss, anchor))
            self.ss.connect(skipco, cmerge)

        prev_input = cmerge

        # ! for next iter
        anchor_points.append(prev_input)

    return prev_input
def create_search_space(
    input_shape=(10,), output_shape=(7,), num_layers=10, *args, **kwargs
):
    arch = AutoKSearchSpace(input_shape, output_shape, regression=True)
    source = prev_input = arch.input_nodes[0]

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([source], maxlen=3)

    for _ in range(num_layers):
        vnode = VariableNode()
        add_dense_to_(vnode)

        arch.connect(prev_input, vnode)

        # * Cell output
        cell_output = vnode

        cmerge = ConstantNode()
        cmerge.set_op(AddByProjecting(arch, [cell_output], activation="relu"))

        for anchor in anchor_points:
            skipco = VariableNode()
            skipco.add_op(Tensor([]))
            skipco.add_op(Connect(arch, anchor))
            arch.connect(skipco, cmerge)

        # ! for next iter
        prev_input = cmerge
        anchor_points.append(prev_input)

    return arch
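# Minimal end-to-end sketch for create_search_space, following the
# set_ops/create_model pattern used in the test further below. One operation
# index is chosen per variable node (all zeros here); `num_nodes` is assumed to
# report how many choices the space expects in your version of the API.
space = create_search_space(input_shape=(10,), output_shape=(7,), num_layers=3)
space.set_ops([0] * space.num_nodes)
model = space.create_model()
model.summary()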
def create_conv_lstm_search_space(
    input_shape=(7, 808, 782, 1),
    output_shape=(7, 808, 782, 1),
    num_layers=10,
    *args,
    **kwargs,
):
    arch = KSearchSpace(input_shape, output_shape)
    source = prev_input = arch.input_nodes[0]

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([source], maxlen=3)

    for _ in range(num_layers):
        vnode = VariableNode()
        add_convlstm_to_(vnode)

        arch.connect(prev_input, vnode)

        # * Cell output
        cell_output = vnode

        cmerge = ConstantNode()
        cmerge.set_op(AddByProjecting(arch, [cell_output], activation="relu", axis=-2))

        for anchor in anchor_points:
            skipco = VariableNode()
            skipco.add_op(Tensor([]))
            skipco.add_op(Connect(arch, anchor))
            arch.connect(skipco, cmerge)

        # ! for next iter
        prev_input = cmerge
        anchor_points.append(prev_input)

    # Add layer to enforce consistency
    cnode = ConstantNode()
    units = output_shape[-1]
    add_convlstm_oplayer_(cnode, units)
    arch.connect(prev_input, cnode)

    return arch
def build_sub_graph(self, input_, num_layers=3):
    source = prev_input = input_
    mirror = False
    is_input = False

    if type(source) is ConstantNode:
        if type(source._op) is Tensor:
            if "input_" in source._op.tensor.name:
                is_input = True
                input_name = source._op.tensor.name
                input_shape = tuple(source._op.tensor.shape[1:])
                if self.shapes_to_vnodes.get(input_shape) is None:
                    self.shapes_to_vnodes[input_shape] = []
                else:
                    # a sub-graph with the same input shape was already built:
                    # mirror its variable nodes instead of creating new ones
                    mirror = True
                    memory = self.shapes_to_vnodes[input_shape][::-1]

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([source], maxlen=3)

    for layer_i in range(num_layers):
        if not mirror:
            vnode = VariableNode()
            self.add_dense_to_(vnode)
            if is_input:
                self.shapes_to_vnodes[input_shape].append(vnode)
        else:
            vnode = MirrorNode(memory.pop())

        self.ss.connect(prev_input, vnode)

        # * Cell output
        prev_node = vnode

        if layer_i == num_layers - 1:
            return prev_node

        cmerge = ConstantNode()
        cmerge.set_op(Concatenate(self.ss, [prev_node]))

        for anchor in anchor_points:
            if not mirror:
                skipco = VariableNode()
                if is_input:
                    self.shapes_to_vnodes[input_shape].append(skipco)
            else:
                skipco = MimeNode(memory.pop())

            skipco.add_op(Zero())
            skipco.add_op(Connect(self.ss, anchor))
            self.ss.connect(skipco, cmerge)

        prev_input = cmerge

        # ! for next iter
        anchor_points.append(prev_input)

    return prev_input
def build(
    self,
    input_shape,
    output_shape,
    units=[128, 64, 32, 16, 8, 16, 32, 64, 128],
    num_layers=5,
    **kwargs,
):
    ss = KSearchSpace(input_shape, output_shape)
    inp = ss.input_nodes[0]

    # auto-encoder: `units` goes down to the bottleneck and back up;
    # d tracks the direction (1 while encoding, -1 once the bottleneck is passed)
    prev_node = inp
    d = 1
    for i in range(len(units)):
        vnode = VariableNode()
        vnode.add_op(Identity())
        if d == 1 and units[i] < units[i + 1]:
            # bottleneck layer: allow any even width from 2 up to units[i]
            d = -1
            for u in range(min(2, units[i]), max(2, units[i]) + 1, 2):
                vnode.add_op(Dense(u, tf.nn.relu))
            latent_space = vnode
        else:
            # allow any even width between this layer's size and its neighbor in direction d
            for u in range(
                min(units[i], units[i + d]), max(units[i], units[i + d]) + 1, 2
            ):
                vnode.add_op(Dense(u, tf.nn.relu))
        ss.connect(prev_node, vnode)
        prev_node = vnode

    out2 = ConstantNode(op=Dense(output_shape[0][0], name="output_0"))
    ss.connect(prev_node, out2)

    # regressor branching off the latent space
    prev_node = latent_space
    for _ in range(num_layers):
        vnode = VariableNode()
        for i in range(16, 129, 16):
            vnode.add_op(Dense(i, tf.nn.relu))
        ss.connect(prev_node, vnode)
        prev_node = vnode

    out1 = ConstantNode(op=Dense(output_shape[1][0], name="output_1"))
    ss.connect(prev_node, out1)

    return ss
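# Usage sketch for the auto-encoder + regressor space above (the factory class
# name `AEFactory` is hypothetical; any SpaceFactory-style class defining this
# `build` method would do). `output_shape` is expected to be a list of two
# tuples: output_0 reconstructs the input and output_1 is the regression target.
factory = AEFactory()
ss = factory.build(input_shape=(100,), output_shape=[(100,), (1,)], num_layers=5)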
def build(
    self,
    input_shape,
    output_shape,
    regression=True,
    num_layers=10,
    dropout=0.0,
    **kwargs,
):
    ss = AutoKSearchSpace(input_shape, output_shape, regression=regression)
    source = prev_input = ss.input_nodes[0]

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([source], maxlen=3)

    for _ in range(num_layers):
        vnode = VariableNode()
        self.add_dense_to_(vnode)

        ss.connect(prev_input, vnode)

        # * Cell output
        cell_output = vnode

        cmerge = ConstantNode()
        cmerge.set_op(AddByProjecting(ss, [cell_output], activation="relu"))

        for anchor in anchor_points:
            skipco = VariableNode()
            skipco.add_op(Zero())
            skipco.add_op(Connect(ss, anchor))
            ss.connect(skipco, cmerge)

        prev_input = cmerge

        # ! for next iter
        anchor_points.append(prev_input)

    # only append a Dropout layer when a positive rate is requested
    if dropout > 0.0:
        dropout_node = ConstantNode(op=Dropout(rate=dropout))
        ss.connect(prev_input, dropout_node)

    return ss
def generate_cell(ss, hidden_states, num_blocks=5, strides=1, mime=False):
    anchor_points = [h for h in hidden_states]
    boutputs = []

    for _ in range(num_blocks):
        # forward the cell's strides to each block instead of hard-coding 1
        bout = generate_block(ss, anchor_points, strides=strides, mime=mime)
        anchor_points.append(bout)
        boutputs.append(bout)

    concat = ConstantNode(op=Concatenate(ss, boutputs, not_connected=True))
    return concat
def test_create_multiple_inputs_with_one_vnode(self):
    from deephyper.nas.space import KSearchSpace
    from deephyper.nas.space.node import VariableNode, ConstantNode
    from deephyper.nas.space.op.op1d import Dense
    from deephyper.nas.space.op.merge import Concatenate

    struct = KSearchSpace([(5,), (5,)], (1,))

    merge = ConstantNode()
    merge.set_op(Concatenate(struct, struct.input_nodes))

    vnode1 = VariableNode()
    struct.connect(merge, vnode1)
    vnode1.add_op(Dense(1))

    struct.set_ops([0])
    struct.create_model()
def create_structure(input_shape=(2,), output_shape=(1,), *args, **kwargs):
    struct = AutoKSearchSpace(input_shape, output_shape, regression=False)

    n1 = VariableNode('N')
    add_conv_op_(n1)
    struct.connect(struct.input_nodes[0], n1)

    n2 = VariableNode('N')
    add_activation_op_(n2)
    struct.connect(n1, n2)

    n3 = VariableNode('N')
    add_pooling_op_(n3)
    struct.connect(n2, n3)

    n4 = VariableNode('N')
    add_conv_op_(n4)
    struct.connect(n3, n4)

    n5 = VariableNode('N')
    add_activation_op_(n5)
    struct.connect(n4, n5)

    n6 = VariableNode('N')
    add_pooling_op_(n6)
    struct.connect(n5, n6)

    n7 = ConstantNode(op=Flatten(), name='N')
    struct.connect(n6, n7)

    n8 = VariableNode('N')
    add_dense_op_(n8)
    struct.connect(n7, n8)

    n9 = VariableNode('N')
    add_activation_op_(n9)
    struct.connect(n8, n9)

    n10 = VariableNode('N')
    add_dropout_op_(n10)
    struct.connect(n9, n10)

    n11 = VariableNode('N')
    add_dense_op_(n11)
    struct.connect(n10, n11)

    n12 = VariableNode('N')
    add_activation_op_(n12)
    struct.connect(n11, n12)

    n13 = VariableNode('N')
    add_dropout_op_(n13)
    struct.connect(n12, n13)

    return struct
def create_structure(
    input_shape=[(1,), (942,), (5270,), (2048,)],
    output_shape=(1,),
    num_cells=2,
    *args,
    **kwargs,
):
    struct = AutoKSearchSpace(input_shape, output_shape, regression=True)
    input_nodes = struct.input_nodes
    output_submodels = [input_nodes[0]]

    for i in range(1, 4):
        vnode1 = VariableNode('N1')
        add_mlp_op_(vnode1)
        struct.connect(input_nodes[i], vnode1)

        vnode2 = VariableNode('N2')
        add_mlp_op_(vnode2)
        struct.connect(vnode1, vnode2)

        vnode3 = VariableNode('N3')
        add_mlp_op_(vnode3)
        struct.connect(vnode2, vnode3)

        output_submodels.append(vnode3)

    merge1 = ConstantNode(name='Merge', op=Concatenate(struct, output_submodels))

    vnode4 = VariableNode('N4')
    add_mlp_op_(vnode4)
    struct.connect(merge1, vnode4)

    prev = vnode4
    for i in range(num_cells):
        vnode = VariableNode(f'N{i+1}')
        add_mlp_op_(vnode)
        struct.connect(prev, vnode)

        merge = ConstantNode(name='Merge', op=AddByPadding(struct, [vnode, prev]))
        prev = merge

    return struct
def generate_block(ss, anchor_points, strides=1, mime=False, first=False, num_filters=8):
    # generate block
    n1 = generate_conv_node(
        strides=strides, mime=mime, first=first, num_filters=num_filters
    )
    n2 = generate_conv_node(strides=strides, mime=mime, num_filters=num_filters)
    add = ConstantNode(op=AddByPadding(ss, [n1, n2], activation=None))

    if first:
        # the first block of a cell takes its input directly from the last hidden state
        source = anchor_points[-1]
        ss.connect(source, n1)

    # skip-connection nodes: skipco1 feeds n1 (only when this is not the first block),
    # skipco2 feeds n2; with mime=True they mimic the corresponding nodes of the
    # reference cell so that all cells of the same type share their decisions
    if mime:
        if strides > 1:
            if not first:
                src_node = next(cycle_reduction_nodes)
                skipco1 = MimeNode(src_node, name="SkipCo1")
            src_node = next(cycle_reduction_nodes)
            skipco2 = MimeNode(src_node, name="SkipCo2")
        else:
            if not first:
                src_node = next(cycle_normal_nodes)
                skipco1 = MimeNode(src_node, name="SkipCo1")
            src_node = next(cycle_normal_nodes)
            skipco2 = MimeNode(src_node, name="SkipCo2")
    else:
        if not first:
            skipco1 = VariableNode(name="SkipCo1")
        skipco2 = VariableNode(name="SkipCo2")

        if strides > 1:
            if not first:
                reduction_nodes.append(skipco1)
            reduction_nodes.append(skipco2)
        else:
            if not first:
                normal_nodes.append(skipco1)
            normal_nodes.append(skipco2)

    for anchor in anchor_points:
        if not first:
            skipco1.add_op(Connect(ss, anchor))
            ss.connect(skipco1, n1)
        skipco2.add_op(Connect(ss, anchor))
        ss.connect(skipco2, n2)

    return add
def build(self, input_shape, output_shape, regression=True, **kwargs):
    ss = AutoKSearchSpace(input_shape, output_shape, regression=regression)

    if type(input_shape) is list:
        vnodes = []
        for i in range(len(input_shape)):
            vn = self.gen_vnode()
            vnodes.append(vn)
            ss.connect(ss.input_nodes[i], vn)

        cn = ConstantNode()
        cn.set_op(Concatenate(ss, vnodes))

        vn = self.gen_vnode()
        ss.connect(cn, vn)
    else:
        vnode1 = self.gen_vnode()
        ss.connect(ss.input_nodes[0], vnode1)

    return ss
def build(
    self,
    input_shape,
    output_shape,
    regression=True,
    num_layers=10,
    **kwargs,
):
    self.ss = AutoKSearchSpace(input_shape, output_shape, regression=regression)
    self.shapes_to_vnodes = {}

    # one sub-graph per input, then a final sub-graph on top of their concatenation
    sub_graphs_outputs = []
    for input_ in self.ss.input_nodes:
        output_sub_graph = self.build_sub_graph(input_, num_layers=num_layers)
        sub_graphs_outputs.append(output_sub_graph)

    cmerge = ConstantNode()
    cmerge.set_op(Concatenate(self.ss, sub_graphs_outputs))

    output_sub_graph = self.build_sub_graph(cmerge, num_layers=num_layers)

    return self.ss
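# Usage sketch for this factory (the class name `MirroredMLPFactory` is
# hypothetical; any SpaceFactory-style class defining `build` and the
# `build_sub_graph` above would do). Inputs that share a shape reuse the
# decisions of the first sub-graph built for that shape through
# MirrorNode/MimeNode, so duplicated inputs do not enlarge the search space.
factory = MirroredMLPFactory()
ss = factory.build(input_shape=[(10,), (10,), (4,)], output_shape=(1,))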
def generate_cell(ss, hidden_states, num_blocks=5, strides=1, mime=False, num_filters=8):
    anchor_points = [h for h in hidden_states]
    boutputs = []

    for i in range(num_blocks):
        # forward the cell's strides to each block instead of hard-coding 1
        bout = generate_block(
            ss,
            anchor_points,
            strides=strides,
            mime=mime,
            first=(i == 0),
            num_filters=num_filters,
        )
        anchor_points.append(bout)
        boutputs.append(bout)

    concat = ConstantNode(op=Concatenate(ss, boutputs))
    return concat
def create_search_space(
    input_shape=(20,), output_shape=(20,), num_layers=5, *args, **kwargs
):
    vocab_size = 10000
    ss = KSearchSpace(input_shape, (*output_shape, vocab_size))
    source = ss.input_nodes[0]

    emb = VariableNode()
    add_embedding_(emb, vocab_size)
    ss.connect(source, emb)

    timestep_dropout = prev_input = ConstantNode(op=TimestepDropout(rate=0.1))
    ss.connect(emb, timestep_dropout)

    # look over skip connections within a range of the 3 previous nodes
    anchor_points = collections.deque([timestep_dropout], maxlen=3)

    for _ in range(num_layers):
        vnode = VariableNode()
        add_lstm_seq_(vnode)

        ss.connect(prev_input, vnode)

        # * Cell output
        cell_output = vnode

        cmerge = ConstantNode()
        cmerge.set_op(AddByProjecting(ss, [cell_output], activation="relu"))

        for anchor in anchor_points:
            skipco = VariableNode()
            skipco.add_op(Zero())
            skipco.add_op(Connect(ss, anchor))
            ss.connect(skipco, cmerge)

        # ! for next iter
        prev_input = cmerge
        anchor_points.append(prev_input)

    # project each timestep to a distribution over the vocabulary
    # (wrapping the Dense layer in tf.keras.layers.TimeDistributed is an equivalent alternative)
    out = ConstantNode(op=tf.keras.layers.Dense(units=vocab_size, activation="softmax"))
    ss.connect(prev_input, out)

    return ss
def create_structure(input_shape=(2,), output_shape=(1,), *args, **kwargs):
    struct = AutoKSearchSpace(input_shape, output_shape, regression=False)

    n1 = ConstantNode(op=Conv1D(filter_size=20, num_filters=128), name='N')
    struct.connect(struct.input_nodes[0], n1)

    n2 = ConstantNode(op=Activation(activation='relu'), name='N')
    struct.connect(n1, n2)

    n3 = ConstantNode(op=MaxPooling1D(pool_size=1, padding='same'), name='N')
    struct.connect(n2, n3)

    n4 = ConstantNode(op=Conv1D(filter_size=10, num_filters=128), name='N')
    struct.connect(n3, n4)

    n5 = ConstantNode(op=Activation(activation='relu'), name='N')
    struct.connect(n4, n5)

    n6 = ConstantNode(op=MaxPooling1D(pool_size=10, padding='same'), name='N')
    struct.connect(n5, n6)

    n7 = ConstantNode(op=Flatten(), name='N')
    struct.connect(n6, n7)

    n8 = ConstantNode(op=Dense(units=200), name='N')
    struct.connect(n7, n8)

    n9 = ConstantNode(op=Activation(activation='relu'), name='N')
    struct.connect(n8, n9)

    n10 = ConstantNode(op=Dropout(rate=0.1), name='N')
    struct.connect(n9, n10)

    n11 = ConstantNode(op=Dense(units=20), name='N')
    struct.connect(n10, n11)

    n12 = ConstantNode(op=Activation(activation='relu'), name='N')
    struct.connect(n11, n12)

    n13 = ConstantNode(op=Dropout(rate=0.1), name='N')
    struct.connect(n12, n13)

    return struct
def create_structure(
    input_shape=[(1,), (942,), (5270,), (2048,)],
    output_shape=(1,),
    num_cells=2,
    *args,
    **kwargs,
):
    struct = AutoKSearchSpace(input_shape, output_shape, regression=True)
    input_nodes = struct.input_nodes
    output_submodels = [input_nodes[0]]

    for i in range(1, 4):
        cnode1 = ConstantNode(name='N', op=Dense(1000, tf.nn.relu))
        struct.connect(input_nodes[i], cnode1)

        cnode2 = ConstantNode(name='N', op=Dense(1000, tf.nn.relu))
        struct.connect(cnode1, cnode2)

        vnode1 = VariableNode(name='N3')
        add_mlp_op_(vnode1)
        struct.connect(cnode2, vnode1)

        output_submodels.append(vnode1)

    merge1 = ConstantNode(name='Merge')
    merge1.set_op(Concatenate(struct, output_submodels))

    cnode4 = ConstantNode(name='N', op=Dense(1000, tf.nn.relu))
    struct.connect(merge1, cnode4)

    prev = cnode4
    for i in range(num_cells):
        cnode = ConstantNode(name='N', op=Dense(1000, tf.nn.relu))
        struct.connect(prev, cnode)

        merge = ConstantNode(name='Merge')
        merge.set_op(AddByPadding(struct, [cnode, prev]))

        prev = merge

    return struct