Example #1
def test_ideal_2(self):
    # Ideal case: two unweighted 2-tuples on a 5-term linguistic scale.
    tf.compat.v1.disable_eager_execution()
    linguistic_scale_size = 5
    first_tuple = Model2Tuple(term_index=3,
                              alpha=0.2,
                              linguistic_scale_size=linguistic_scale_size,
                              weight=None)
    second_tuple = Model2Tuple(term_index=2,
                               alpha=0,
                               linguistic_scale_size=linguistic_scale_size,
                               weight=None)
    aggregate_and_check(first_tuple, second_tuple)
Example #2
def test_ideal_negative_weights_1(self):
    # Explicitly weighted 2-tuples (0.5 each); the second has a negative alpha.
    tf.compat.v1.disable_eager_execution()
    linguistic_scale_size = 5
    first_tuple = Model2Tuple(term_index=4,
                              alpha=0.2,
                              linguistic_scale_size=linguistic_scale_size,
                              weight=0.5)
    second_tuple = Model2Tuple(term_index=3,
                               alpha=-0.1,
                               linguistic_scale_size=linguistic_scale_size,
                               weight=0.5)
    aggregate_and_check(first_tuple, second_tuple)
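In both examples, aggregate_and_check presumably aggregates the pair of 2-tuples through the TPR encoding and compares the result against a plain symbolic aggregation. Below is a minimal sketch of that reference aggregation, assuming the standard Herrera and Martinez 2-tuple model; weighted_average_2tuple is a hypothetical helper written for illustration, not part of the code under test.

def weighted_average_2tuple(tuples, weights=None):
    """Aggregate (term_index, alpha) pairs defined on one linguistic scale.

    Each 2-tuple corresponds to the real value beta = term_index + alpha;
    the weighted mean of the betas is translated back into a 2-tuple.
    """
    if weights is None:
        weights = [1.0 / len(tuples)] * len(tuples)
    beta = sum(w * (index + alpha)
               for w, (index, alpha) in zip(weights, tuples))
    nearest_term = round(beta)                # index of the closest linguistic term
    return nearest_term, beta - nearest_term  # symbolic translation in [-0.5, 0.5)

# With the values from Example #1, (3, 0.2) and (2, 0.0) aggregated with
# equal weights give beta = 2.6, i.e. the 2-tuple (3, -0.4).
print(weighted_average_2tuple([(3, 0.2), (2, 0.0)]))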
def main():
    print('Converting 2-tuple to TPR')
    tf.compat.v1.disable_eager_execution()
    linguistic_scale_size = 5
    first_tuple = Model2Tuple(term_index=4,
                              alpha=0.2,
                              linguistic_scale_size=linguistic_scale_size)
    second_tuple = Model2Tuple(term_index=2,
                               alpha=-0.1,
                               linguistic_scale_size=linguistic_scale_size)

    aggregate_and_check(first_tuple, second_tuple)
    print('Converting 2-tuple to TPR and back works with no information loss!')

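    # Encode the first 2-tuple into its tensor product representation (TPR)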
    mta_result_encoded, _ = encode_model_2_tuple(first_tuple)