def test_merge():
    """Merge's build should register the 'merge_type' hyperparameter."""
    shape_a = (32,)
    shape_b = (4, 8)
    merge_block = block_module.Merge()
    # Round-trip the state to verify get_state/set_state are consistent.
    merge_block.set_state(merge_block.get_state())
    hp = kerastuner.HyperParameters()
    built_inputs = [
        ak.Input(shape=shape_a).build(),
        ak.Input(shape=shape_b).build(),
    ]
    merge_block.build(hp, built_inputs)
    assert common.name_in_hps('merge_type', hp)
def assemble(inputs, outputs, dataset, seed=None):
    """Assemble the HyperBlocks based on the dataset and input output nodes.

    # Arguments
        inputs: A list of InputNode. The input nodes of the AutoModel.
        outputs: A list of HyperHead. The heads of the AutoModel.
        dataset: tf.data.Dataset. The training dataset.
        seed: Int. Random seed.

    # Returns
        A GraphHyperModel wired from the input nodes through the assembled
        blocks to the heads, with the collected hyperparameters attached.
    """
    inputs = nest.flatten(inputs)
    outputs = nest.flatten(outputs)
    assemblers = []
    # Select one assembler per input node based on its modality. An input
    # node matching none of these types gets no assembler, which would
    # desynchronize the zip() pairings below.
    for input_node in inputs:
        if isinstance(input_node, node.TextInput):
            assemblers.append(TextAssembler())
        if isinstance(input_node, node.ImageInput):
            assemblers.append(ImageAssembler(seed=seed))
        if isinstance(input_node, node.StructuredDataInput):
            assemblers.append(
                StructuredDataAssembler(column_names=input_node.column_names,
                                        seed=seed))
        if isinstance(input_node, node.TimeSeriesInput):
            assemblers.append(TimeSeriesAssembler())

    # Iterate over the dataset to fit the assemblers.
    hps = []
    for x, _ in dataset:
        for temp_x, assembler in zip(x, assemblers):
            assembler.update(temp_x)
            # NOTE(review): collected once per batch per assembler here;
            # if assembler.hps is constant across batches this duplicates
            # entries — confirm intended accumulation level.
            hps += assembler.hps

    # Assemble the model with assemblers.
    middle_nodes = []
    for input_node, assembler in zip(inputs, assemblers):
        middle_nodes.append(assembler.assemble(input_node))

    # Merge the middle nodes.
    if len(middle_nodes) > 1:
        output_node = block.Merge()(middle_nodes)
    else:
        output_node = middle_nodes[0]

    # Apply each head to the merged node to produce the graph's outputs.
    outputs = nest.flatten(
        [output_blocks(output_node) for output_blocks in outputs])
    hm = graph.GraphHyperModel(inputs, outputs)
    hm.set_hps(hps)
    return hm
def assemble(inputs, outputs, dataset):
    """Assemble the HyperBlocks based on the dataset and input output nodes.

    # Arguments
        inputs: A list of InputNode. The input nodes of the AutoModel.
        outputs: A list of HyperHead. The heads of the AutoModel.
        dataset: tf.data.Dataset. The training dataset.

    # Returns
        A list of HyperNode. The output nodes of the AutoModel.
    """
    inputs = nest.flatten(inputs)
    outputs = nest.flatten(outputs)

    # One assembler per input node, chosen by the node's modality.
    assemblers = []
    for in_node in inputs:
        if isinstance(in_node, node.TextInput):
            assemblers.append(TextAssembler())
        if isinstance(in_node, node.ImageInput):
            assemblers.append(ImageAssembler())
        if isinstance(in_node, node.StructuredInput):
            assemblers.append(StructuredDataAssembler())
        if isinstance(in_node, node.TimeSeriesInput):
            assemblers.append(TimeSeriesAssembler())

    # Fit every assembler on its slice of each batch of the dataset.
    for batch_x, _ in dataset:
        for feature, fitter in zip(batch_x, assemblers):
            fitter.update(feature)

    # Let each assembler turn its input node into a middle node.
    middle_nodes = [fitter.assemble(in_node)
                    for in_node, fitter in zip(inputs, assemblers)]

    # A single branch passes straight through; multiple branches are merged.
    output_node = (block.Merge()(middle_nodes)
                   if len(middle_nodes) > 1
                   else middle_nodes[0])

    # Attach each head to the merged node to form the output nodes.
    return nest.flatten([head(output_node) for head in outputs])