Example 1: save a Graph with override_hps to disk and load it back.
def test_graph_save_load(tmp_path):
    input1 = ak.Input()
    input2 = ak.Input()
    output1 = ak.DenseBlock()(input1)
    output2 = ak.ConvBlock()(input2)
    output = ak.Merge()([output1, output2])
    output1 = ak.RegressionHead()(output)
    output2 = ak.ClassificationHead()(output)

    graph = graph_module.Graph(
        inputs=[input1, input2],
        outputs=[output1, output2],
        override_hps=[
            hp_module.Choice("dense_block_1/num_layers", [6], default=6)
        ],
    )
    path = os.path.join(tmp_path, "graph")
    graph.save(path)
    graph = graph_module.load_graph(path)

    assert len(graph.inputs) == 2
    assert len(graph.outputs) == 2
    assert isinstance(graph.inputs[0].out_blocks[0], ak.DenseBlock)
    assert isinstance(graph.inputs[1].out_blocks[0], ak.ConvBlock)
    assert isinstance(graph.override_hps[0], hp_module.Choice)
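These snippets omit their module-level imports. A plausible preamble for the test examples is sketched below; the exact import paths are assumptions and vary across AutoKeras and KerasTuner releases. The library-internal snippets (the _assemble and _build_graph methods) rely on their own module imports, which are likewise omitted.

# Assumed import paths for the test snippets; adjust to the installed
# AutoKeras / KerasTuner versions.
import os

import kerastuner
import pytest
import tensorflow as tf

import autokeras as ak
from autokeras import graph as graph_module
from autokeras.tuners import greedy, task_specific
from kerastuner.engine import hyperparameters as hp_module
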
Example 2: a Graph can be initialized even when one output head is not passed to it.
def test_graph_can_init_with_one_missing_output():
    input_node = ak.ImageInput()
    output_node = ak.ConvBlock()(input_node)
    output_node = ak.RegressionHead()(output_node)
    ak.ClassificationHead()(output_node)

    graph_module.Graph(input_node, output_node)
Example 3: an _assemble method that picks a block for each input type and merges the middle nodes.
    def _assemble(self):
        """Assemble the Blocks based on the input output nodes."""
        inputs = nest.flatten(self.inputs)
        outputs = nest.flatten(self.outputs)

        middle_nodes = []
        for input_node in inputs:
            if isinstance(input_node, input_module.TextInput):
                middle_nodes.append(hypermodels.TextBlock()(input_node))
            if isinstance(input_node, input_module.ImageInput):
                middle_nodes.append(hypermodels.ImageBlock()(input_node))
            if isinstance(input_node, input_module.StructuredDataInput):
                middle_nodes.append(
                    hypermodels.StructuredDataBlock()(input_node))
            if isinstance(input_node, input_module.TimeSeriesInput):
                middle_nodes.append(hypermodels.TimeSeriesBlock()(input_node))

        # Merge the middle nodes.
        if len(middle_nodes) > 1:
            output_node = hypermodels.Merge()(middle_nodes)
        else:
            output_node = middle_nodes[0]

        outputs = nest.flatten(
            [output_blocks(output_node) for output_blocks in outputs])
        return graph_module.Graph(inputs=inputs, outputs=outputs)
Example 4: check the second set of predefined image-classifier hyperparameters against a graph.
def test_image_classifier_tuner1():
    tf.keras.backend.clear_session()
    input_node = ak.ImageInput(shape=(32, 32, 3))
    output_node = ak.ImageBlock()(input_node)
    output_node = ak.ClassificationHead(loss='categorical_crossentropy',
                                        output_shape=(10, ))(output_node)
    graph = graph_module.Graph(input_node, output_node)
    check_initial_hp(task_specific.IMAGE_CLASSIFIER[1], graph)
Example 5: CategoricalToNumerical on an ImageInput raises a TypeError when the graph is compiled.
def test_cat_to_num_with_img_input_error():
    input_node = ak.ImageInput()
    output_node = ak.CategoricalToNumerical()(input_node)

    with pytest.raises(TypeError) as info:
        graph_module.Graph(input_node, outputs=output_node).compile()

    assert "CategoricalToNumerical can only be used" in str(info.value)
Example 6: check the first set of predefined text-classifier hyperparameters against a graph.
def test_text_classifier_tuner0():
    tf.keras.backend.clear_session()
    input_node = ak.TextInput(shape=(1, ))
    output_node = ak.TextBlock()(input_node)
    output_node = ak.ClassificationHead(loss='categorical_crossentropy',
                                        output_shape=(10, ))(output_node)
    graph = graph_module.Graph(input_node, output_node)
    check_initial_hp(task_specific.TEXT_CLASSIFIER[0], graph)
Example 7: build a graph with the optimizer hyperparameter set to adadelta.
def test_graph_compile_with_adadelta():
    input_node = ak.ImageInput(shape=(32, 32, 3))
    output_node = ak.ConvBlock()(input_node)
    output_node = ak.RegressionHead(output_shape=(1, ))(output_node)

    graph = graph_module.Graph(input_node, output_node)
    hp = kerastuner.HyperParameters()
    hp.values = {"optimizer": "adadelta"}
    graph.build(hp)
Example 8: a _build_graph helper that distinguishes the functional API from the input/output API.
    def _build_graph(self):
        # Using functional API.
        if all([isinstance(output, node_module.Node) for output in self.outputs]):
            graph = graph_module.Graph(inputs=self.inputs, outputs=self.outputs)
        # Using input/output API.
        elif all([isinstance(output, head_module.Head) for output in self.outputs]):
            graph = self._assemble()
            self.outputs = graph.outputs

        return graph
Example 9: build a minimal graph and verify the Keras model's input and output shapes.
def test_graph_basics():
    input_node = ak.Input(shape=(30, ))
    output_node = input_node
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.RegressionHead(output_shape=(1, ))(output_node)

    model = graph_module.Graph(inputs=input_node, outputs=output_node).build(
        kerastuner.HyperParameters())
    assert model.input_shape == (None, 30)
    assert model.output_shape == (None, 1)
Example 10: omitting a required input raises a ValueError.
def test_input_missing():
    input_node1 = ak.Input()
    input_node2 = ak.Input()
    output_node1 = ak.DenseBlock()(input_node1)
    output_node2 = ak.DenseBlock()(input_node2)
    output_node = ak.Merge()([output_node1, output_node2])
    output_node = ak.RegressionHead()(output_node)

    with pytest.raises(ValueError) as info:
        graph_module.Graph(inputs=input_node1, outputs=output_node)
    assert "A required input is missing for HyperModel" in str(info.value)
Example 11: a cycle in the network raises a ValueError.
def test_hyper_graph_cycle():
    input_node1 = ak.Input()
    input_node2 = ak.Input()
    output_node1 = ak.DenseBlock()(input_node1)
    output_node2 = ak.DenseBlock()(input_node2)
    output_node = ak.Merge()([output_node1, output_node2])
    head = ak.RegressionHead()
    output_node = head(output_node)
    head.outputs = output_node1

    with pytest.raises(ValueError) as info:
        graph_module.Graph(inputs=[input_node1, input_node2], outputs=output_node)
    assert "The network has a cycle." in str(info.value)
Example 12: disconnected inputs and outputs raise a ValueError.
def test_input_output_disconnect():
    input_node1 = ak.Input()
    output_node = input_node1
    _ = ak.DenseBlock()(output_node)

    input_node = ak.Input()
    output_node = input_node
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.RegressionHead()(output_node)

    with pytest.raises(ValueError) as info:
        graph_module.Graph(inputs=input_node1, outputs=output_node)
    assert "Inputs and outputs not connected." in str(info.value)
Example 13: merge two inputs and check the resulting model's shapes.
def test_merge():
    input_node1 = ak.Input(shape=(30,))
    input_node2 = ak.Input(shape=(40,))
    output_node1 = ak.DenseBlock()(input_node1)
    output_node2 = ak.DenseBlock()(input_node2)
    output_node = ak.Merge()([output_node1, output_node2])
    output_node = ak.RegressionHead(output_shape=(1,))(output_node)

    model = graph_module.Graph(
        inputs=[input_node1, input_node2], outputs=output_node
    ).build(kerastuner.HyperParameters())
    assert model.input_shape == [(None, 30), (None, 40)]
    assert model.output_shape == (None, 1)
Example 14: an _assemble variant that asks each input node for its own block.
    def _assemble(self):
        """Assemble the Blocks based on the input output nodes."""
        inputs = nest.flatten(self.inputs)
        outputs = nest.flatten(self.outputs)

        middle_nodes = [input_node.get_block()(input_node) for input_node in inputs]

        # Merge the middle nodes.
        if len(middle_nodes) > 1:
            output_node = blocks.Merge()(middle_nodes)
        else:
            output_node = middle_nodes[0]

        outputs = nest.flatten(
            [output_blocks(output_node) for output_blocks in outputs]
        )
        return graph_module.Graph(inputs=inputs, outputs=outputs)
Example 15: a _build_graph variant that clears the Keras session around assembly so block names restart from 1.
    def _build_graph(self):
        # Using functional API.
        if all(
            [isinstance(output, node_module.Node) for output in self.outputs]):
            graph = graph_module.Graph(inputs=self.inputs,
                                       outputs=self.outputs)
        # Using input/output API.
        elif all(
            [isinstance(output, head_module.Head) for output in self.outputs]):
            # Clear the session to reset get_uid(), so block names in a new
            # AutoModel start counting from 1 again. When multiple AutoModels
            # are initialized through the Task API, the predefined hp values
            # in the task-specific tuners would not match the block names
            # unless each AutoModel counts from 1.
            keras.backend.clear_session()
            graph = self._assemble()
            self.outputs = graph.outputs
            keras.backend.clear_session()

        return graph
Example 16: seed a GreedyOracle with task-specific initial hyperparameters and populate its search space.
def test_image_classifier_oracle():
    tf.keras.backend.clear_session()
    input_node = ak.ImageInput(shape=(32, 32, 3))
    output_node = ak.ImageBlock()(input_node)
    output_node = ak.ClassificationHead(loss='categorical_crossentropy',
                                        output_shape=(10, ))(output_node)
    graph = graph_module.Graph(input_node, output_node)
    oracle = greedy.GreedyOracle(hypermodel=graph,
                                 initial_hps=task_specific.IMAGE_CLASSIFIER,
                                 objective='val_loss')
    oracle._populate_space('0')
    hp = oracle.get_space()
    hp.values = task_specific.IMAGE_CLASSIFIER[0]
    assert len(
        set(task_specific.IMAGE_CLASSIFIER[0].keys()) -
        set(oracle.get_space().values.keys())) == 0
    oracle._populate_space('1')
    assert len(
        set(task_specific.IMAGE_CLASSIFIER[1].keys()) -
        set(oracle.get_space().values.keys())) == 0
Example 17: save and load a Graph without override_hps.
def test_graph_save_load(tmp_path):
    input1 = ak.Input()
    input2 = ak.Input()
    output1 = ak.DenseBlock()(input1)
    output2 = ak.ConvBlock()(input2)
    output = ak.Merge()([output1, output2])
    output1 = ak.RegressionHead()(output)
    output2 = ak.ClassificationHead()(output)

    graph = graph_module.Graph(
        inputs=[input1, input2],
        outputs=[output1, output2],
    )
    path = os.path.join(tmp_path, "graph")
    graph.save(path)
    graph = graph_module.load_graph(path)

    assert len(graph.inputs) == 2
    assert len(graph.outputs) == 2
    assert isinstance(graph.inputs[0].out_blocks[0], ak.DenseBlock)
    assert isinstance(graph.inputs[1].out_blocks[0], ak.ConvBlock)
Example 18: save and load a Graph that uses a custom loss and a custom metric.
def test_save_custom_metrics_loss(tmp_path):
    def custom_metric(y_pred, y_true):
        return 1

    def custom_loss(y_pred, y_true):
        return y_pred - y_true

    head = ak.ClassificationHead(
        loss=custom_loss, metrics=["accuracy", custom_metric]
    )
    input_node = ak.Input()
    output_node = head(input_node)
    graph = graph_module.Graph(input_node, output_node)
    path = os.path.join(tmp_path, "graph")
    graph.save(path)
    new_graph = graph_module.load_graph(
        path,
        custom_objects={"custom_metric": custom_metric, "custom_loss": custom_loss},
    )
    assert new_graph.blocks[0].metrics[1](0, 0) == 1
    assert new_graph.blocks[0].loss(3, 2) == 1
Example 19: round-trip a Graph through get_config and from_config.
def test_graph_save_load(tmp_path):
    input1 = ak.Input()
    input2 = ak.Input()
    output1 = ak.DenseBlock()(input1)
    output2 = ak.ConvBlock()(input2)
    output = ak.Merge()([output1, output2])
    output1 = ak.RegressionHead()(output)
    output2 = ak.ClassificationHead()(output)

    graph = graph_module.Graph(
        inputs=[input1, input2],
        outputs=[output1, output2],
        override_hps=[hp_module.Choice('dense_block_1/num_layers', [6], default=6)])
    config = graph.get_config()
    graph = graph_module.Graph.from_config(config)

    assert len(graph.inputs) == 2
    assert len(graph.outputs) == 2
    assert isinstance(graph.inputs[0].out_blocks[0], ak.DenseBlock)
    assert isinstance(graph.inputs[1].out_blocks[0], ak.ConvBlock)
    assert isinstance(graph.override_hps[0], hp_module.Choice)
Example 20: override_hps pins dense_block_1/num_layers to a single value during build.
def test_set_hp():
    input_node = ak.Input((32,))
    output_node = input_node
    output_node = ak.DenseBlock()(output_node)
    head = ak.RegressionHead()
    head.output_shape = (1,)
    output_node = head(output_node)

    graph = graph_module.Graph(
        inputs=input_node,
        outputs=output_node,
        override_hps=[hp_module.Choice('dense_block_1/num_layers', [6], default=6)])
    hp = kerastuner.HyperParameters()
    graph.build(hp)

    for single_hp in hp.space:
        if single_hp.name == 'dense_block_1/num_layers':
            assert len(single_hp.values) == 1
            assert single_hp.values[0] == 6
            return
    assert False