Example #1
import tensorflow as tf

# TabNetCombiner and build_inputs are assumed to be imported from Ludwig's
# combiner module and input-feature utilities (TensorFlow-based API).


def test_tabnet_combiner(encoder_outputs):
    # discard the fixture's outputs and build minimal ones for this test
    encoder_outputs = {}
    encoder_outputs['feature_1'] = {
        'encoder_output': tf.random.normal([128, 1], dtype=tf.float32)
    }
    encoder_outputs['feature_2'] = {
        'encoder_output': tf.random.normal([128, 1], dtype=tf.float32)
    }

    input_features_def = [{
        'name': 'feature_1',
        'type': 'numerical'
    }, {
        'name': 'feature_2',
        'type': 'numerical'
    }]

    # set up the combiner under test
    combiner = TabNetCombiner(build_inputs(input_features_def),
                              size=2,
                              output_size=2,
                              num_steps=3,
                              num_total_blocks=4,
                              num_shared_blocks=2,
                              dropout=0.1)

    # run the combiner over the encoder outputs
    results = combiner(encoder_outputs)

    # required keys are present
    assert 'combiner_output' in results
    assert 'attention_masks' in results
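
The encoder_outputs argument is a pytest fixture defined elsewhere in the test module and is immediately replaced inside the test body. A minimal sketch of what such a fixture could look like, where the feature names and tensor shapes are assumptions rather than Ludwig's actual fixture:

import pytest
import tensorflow as tf


@pytest.fixture
def encoder_outputs():
    # hypothetical fixture: two numerical features, batch size 128
    return {
        name: {'encoder_output': tf.random.normal([128, 1], dtype=tf.float32)}
        for name in ('feature_1', 'feature_2')
    }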
Example #2
def test_tied_micro_level(input_feature_options):
    # build_inputs is assumed to come from Ludwig's input-feature utilities;
    # input_feature_options is a parametrized pytest fixture (see sketch below)

    # build the input feature configs
    input_feature_configs = []

    input_feature_configs.append({
        "name": "input_feature_1",
        "type": input_feature_options.feature_type
    })
    if input_feature_options.feature_options is not None:
        input_feature_configs[0].update(input_feature_options.feature_options)

    input_feature_configs.append({
        "name": "input_feature_2",
        "type": input_feature_options.feature_type
    })
    if input_feature_options.feature_options is not None:
        input_feature_configs[1].update(input_feature_options.feature_options)

    # add tied option to the second feature
    if input_feature_options.tie_features:
        input_feature_configs[1]["tied"] = "input_feature_1"

    input_features = build_inputs(input_feature_configs)

    if input_feature_options.tie_features:
        # tied features should share the same encoder object
        assert (input_features["input_feature_1"].encoder_obj
                is input_features["input_feature_2"].encoder_obj)
    else:
        # without tying, each feature gets its own encoder
        assert (input_features["input_feature_1"].encoder_obj
                is not input_features["input_feature_2"].encoder_obj)