def create_encoded_random_tree_scipy(depth):
    """Build the encoded representation of a random tree using sparse shifts.

    Repeatedly joins the two base fillers under the left/right roles,
    level by level, via sparse shift matrices.

    Args:
        depth: maximum tree depth passed to ``generate_shapes``.

    Returns:
        Flat NumPy vector holding the encoded structure after the last level.
    """
    fillers, roles, _ = input_data()
    shapes = generate_shapes(max_tree_depth=depth,
                             role_shape=roles[0].shape,
                             filler_shape=fillers[0].shape)
    # Length of a single filler vector at the top level of the shape spec.
    filler_len = shapes[0][1]

    left_acc = fillers[0]
    right_acc = fillers[1]
    for level in range(1, len(shapes)):
        # Sparse shift operators for the left/right roles at this level.
        shift_left = shift_matrix(roles[0], filler_len, level,
                                  name=None, mode='sparse')
        shift_right = shift_matrix(roles[1], filler_len, level,
                                   name=None, mode='sparse')
        joined = shift_left.dot(left_acc) + shift_right.dot(right_acc)
        # Prepend an empty filler slot, then reuse the joint on both sides
        # for the next level (after level 1 both accumulators are identical).
        left_acc = np.append(np.zeros((filler_len, )), joined)
        right_acc = left_acc
    return left_acc
def encode_model_2_tuple(model_2_tuple: Model2Tuple, encoder=None) -> tuple:
    """Encode a 2-tuple linguistic value as a tensor-product representation.

    Args:
        model_2_tuple: the 2-tuple to encode; if its ``weight`` is not None,
            a third role (r_w) is added and the weight filler is joined too.
        encoder: optional pre-built joiner network; built on demand when None.

    Returns:
        A ``(encoded, encoder)`` pair: the encoded structure and the joiner
        network, returned so callers can reuse it across invocations.
        (Note: the original annotation said ``np.array``, but the function
        has always returned this 2-tuple.)
    """
    has_weights = model_2_tuple.weight is not None
    if has_weights:
        roles = np.array([
            [1, 0, 0],  # r_i
            [0, 1, 0],  # r_alpha
            [0, 0, 1],  # r_w
        ])
    else:
        roles = np.array([
            [1, 0],  # r_i
            [0, 1],  # r_alpha
        ])

    filler_index, filler_alpha, filler_weight = FillerFactory.from_model_2_tuple(
        model_2_tuple)

    MAX_TREE_DEPTH = 2
    SINGLE_ROLE_SHAPE = roles[0].shape
    SINGLE_FILLER_SHAPE = filler_index.shape

    fillers_shapes = generate_shapes(max_tree_depth=MAX_TREE_DEPTH,
                                     role_shape=SINGLE_ROLE_SHAPE,
                                     filler_shape=SINGLE_FILLER_SHAPE)

    # Build the joiner lazily so repeated calls can share one network.
    if encoder is None:
        encoder = build_tree_joiner_network(roles=roles,
                                            fillers_shapes=fillers_shapes)

    if has_weights:
        fillers = np.array([filler_index, filler_alpha, filler_weight])
        subtrees = (filler_index, filler_alpha, filler_weight)
    else:
        fillers = np.array([filler_index, filler_alpha])
        subtrees = (filler_index, filler_alpha)

    model_2_tuple_encoded = elementary_join(
        joiner_network=encoder,
        input_structure_max_shape=fillers_shapes,
        basic_roles=roles,
        basic_fillers=fillers,
        subtrees=subtrees)
    return model_2_tuple_encoded, encoder
def sum_numbers(a, b, max_depth, roles, dual_roles, fillers, number_sum_blocks):
    """Sum two numbers via their tree encodings and a Keras sum network.

    Args:
        a, b: the numbers to add.
        max_depth: maximum tree depth used for the encodings.
        roles, dual_roles: role vectors for encoding and decoding.
        fillers: filler vectors.
        number_sum_blocks: number of summation blocks in the sum network.

    Returns:
        ``(c, d)``: decoded decremented and incremented results produced by
        the sum network.
    """
    shapes = generate_shapes(max_tree_depth=max_depth,
                             role_shape=roles[0].shape,
                             filler_shape=fillers[0].shape)
    joiner = build_tree_joiner_network(roles=roles, fillers_shapes=shapes)

    encoded_a = encode(a, max_depth, roles, fillers, joiner)
    encoded_b = encode(b, max_depth, roles, fillers, joiner)

    sum_network = build_sum_network(roles, fillers, dual_roles, max_depth,
                                    number_sum_blocks=number_sum_blocks)

    # Add batch and channel axes expected by the Keras network.
    encoded_a = encoded_a.reshape((1, *encoded_a.shape, 1))
    encoded_b = encoded_b.reshape((1, *encoded_b.shape, 1))

    decremented, incremented = sum_network.predict_on_batch(
        [encoded_a, encoded_b])

    c = decode(decremented, max_depth, dual_roles, fillers)
    d = decode(incremented, max_depth, dual_roles, fillers)
    return c, d
def number_to_tree(target_number, max_tree_depth, fillers, roles,
                   joiner_network=None):
    """Encode a non-negative integer as a tree structure.

    Args:
        target_number: the number to encode; 0 maps to an empty structure.
        max_tree_depth: maximum depth for the generated shapes.
        fillers, roles: basic filler and role vectors.
        joiner_network: optional pre-built joiner; built here when falsy.

    Returns:
        The encoded tree representation of ``target_number``.
    """
    shapes = generate_shapes(max_tree_depth=max_tree_depth,
                             role_shape=roles[0].shape,
                             filler_shape=fillers[0].shape)

    # Zero is just the empty placeholder structure.
    if target_number == 0:
        return prepare_input(None, shapes)

    if not joiner_network:
        joiner_network = build_tree_joiner_network(roles=roles,
                                                   fillers_shapes=shapes)

    # Build "one" by joining the base filler under the right slot
    # (the left subtree is empty at this step).
    one = elementary_join(joiner_network=joiner_network,
                          input_structure_max_shape=shapes,
                          basic_roles=roles,
                          basic_fillers=fillers,
                          subtrees=(None, fillers[0]))

    # n is built incrementally: 2 = (one+one), 3 = ((one+one)+one), ...
    number = one
    for _ in range(target_number - 1):
        number = elementary_join(joiner_network=joiner_network,
                                 input_structure_max_shape=shapes,
                                 basic_roles=roles,
                                 basic_fillers=fillers,
                                 subtrees=(number, one))
    return number
def main():
    # Demo driver: builds a small tree step by step with a Keras joiner
    # network, shifting fillers under left/right roles across four use cases.
    # Runs under TF1-style graph mode.
    tf.compat.v1.disable_eager_execution()
    """
    First use case for the structure that should be shifted left
    Starting from:
    root
    We want to get:
    root
    |
    A (left-child-of-root)
    """
    # Three one-hot-ish fillers (A, B, C) and two roles (r_0 = left, r_1 = right).
    fillers_case_1 = np.array([
        [8, 0, 0],  # A
        [0, 15, 0],  # B
        [0, 0, 10],  # C
    ])
    roles_case_1 = np.array([
        [10, 0],  # r_0
        [0, 5],  # r_1
    ])
    MAX_TREE_DEPTH = 2
    SINGLE_ROLE_SHAPE = roles_case_1[0].shape
    SINGLE_FILLER_SHAPE = fillers_case_1[0].shape

    fillers_shapes = generate_shapes(max_tree_depth=MAX_TREE_DEPTH,
                                     role_shape=SINGLE_ROLE_SHAPE,
                                     filler_shape=SINGLE_FILLER_SHAPE)

    keras_joiner = build_tree_joiner_network(roles=roles_case_1,
                                             fillers_shapes=fillers_shapes)

    # Place filler A at the top level of the left subtree; right stays empty.
    left_subtree_placeholder = generate_input_placeholder(fillers_shapes)
    left_subtree_placeholder[0] = fillers_case_1[0].reshape(
        1, *SINGLE_FILLER_SHAPE)
    right_subtree_placeholder = generate_input_placeholder(fillers_shapes)

    fillers_joined = keras_joiner.predict_on_batch(
        [*left_subtree_placeholder, *right_subtree_placeholder])
    # Drop the leading batch axis.
    fillers_joined = fillers_joined.reshape((*fillers_joined.shape[1:], ))
    print('calculated cons (A _x_ r_0)')
    """
    Second use case for the structure that should be shifted left
    (continued from the first case)
    Starting from:
    root
    |
    A (left-child-of-root)
    We want to get:
    root
    |
    |
    A (left-child-of-left-child-of-root)
    """
    # Split the flat joined vector back into per-level tensors.
    tensor_repr_A_x_r_0 = extract_per_level_tensor_representation_after_shift(
        fillers_joined,
        max_tree_depth=MAX_TREE_DEPTH,
        role_shape=SINGLE_ROLE_SHAPE,
        filler_shape=SINGLE_FILLER_SHAPE)
    print('split per layer')
    # Pad/reshape the per-level tensors so they can be fed to the joiner again.
    prepared_for_shift = reshape_to_satisfy_max_depth_after_shift(
        tensor_repr_A_x_r_0, MAX_TREE_DEPTH, SINGLE_ROLE_SHAPE,
        SINGLE_FILLER_SHAPE)
    print('reshaped for second shift')
    fillers_joined_second_case = keras_joiner.predict_on_batch(
        [*prepared_for_shift, *right_subtree_placeholder])
    print('calculated cons (A _x_ r_0 _x_ r_0)')
    fillers_joined_second_case = fillers_joined_second_case.reshape(
        (*fillers_joined_second_case.shape[1:], ))
    tensor_repr_A_x_r_0_x_r_0 = extract_per_level_tensor_representation_after_shift(
        fillers_joined_second_case,
        max_tree_depth=MAX_TREE_DEPTH,
        role_shape=SINGLE_ROLE_SHAPE,
        filler_shape=SINGLE_FILLER_SHAPE)
    print('split per layer after second case')
    """
    Third use case for the structure that should be shifted left
    (continued from the second case)
    Starting from:
    root
    |
    |
    A (left-child-of-left-child-of-root)
    We want to get:
    root
    |                                       \\
    |                                       B (right-child-of-root)
    A (left-child-of-left-child-of-root)
    """
    prepared_for_shift_A_x_r_0 = reshape_to_satisfy_max_depth_after_shift(
        tensor_repr_A_x_r_0, MAX_TREE_DEPTH, SINGLE_ROLE_SHAPE,
        SINGLE_FILLER_SHAPE)
    # Fresh right placeholder carrying filler B at the top level.
    right_subtree_placeholder = generate_input_placeholder(fillers_shapes)
    right_subtree_placeholder[0] = fillers_case_1[1].reshape(
        1, *SINGLE_FILLER_SHAPE)
    fillers_joined_third_case = keras_joiner.predict_on_batch(
        [*prepared_for_shift_A_x_r_0, *right_subtree_placeholder])
    fillers_joined_third_case = fillers_joined_third_case.reshape(
        (*fillers_joined_third_case.shape[1:], ))
    tensor_repr_A_x_r_0_x_r_0_B_x_r_1 = extract_per_level_tensor_representation_after_shift(
        fillers_joined_third_case,
        max_tree_depth=MAX_TREE_DEPTH,
        role_shape=SINGLE_ROLE_SHAPE,
        filler_shape=SINGLE_FILLER_SHAPE)
    print('split per layer after third case')
    """
    Fourth use case for the structure that should be shifted left
    (continued from the third case)
    Starting from:
    root
    |                                       \\
    |                                       B (right-child-of-root)
    A (left-child-of-left-child-of-root)
    We want to get:
    root
    |                                       \\
    |                                       \\
    B (right-child-of-root)
    A (left-child-of-left-child-of-root)
    C (right-child-of-left-child-of-root)
    """
    # First join C under r_1 with an empty left subtree...
    left_subtree_placeholder = generate_input_placeholder(fillers_shapes)
    right_subtree_placeholder = generate_input_placeholder(fillers_shapes)
    right_subtree_placeholder[0] = fillers_case_1[2].reshape(
        1, *SINGLE_FILLER_SHAPE)
    fillers_joined_fourth_case_simple_c = keras_joiner.predict_on_batch(
        [*left_subtree_placeholder, *right_subtree_placeholder])
    fillers_joined_fourth_case_simple_c = fillers_joined_fourth_case_simple_c.reshape(
        (*fillers_joined_fourth_case_simple_c.shape[1:], ))
    tensor_repr_C_x_r_1 = extract_per_level_tensor_representation_after_shift(
        fillers_joined_fourth_case_simple_c,
        max_tree_depth=MAX_TREE_DEPTH,
        role_shape=SINGLE_ROLE_SHAPE,
        filler_shape=SINGLE_FILLER_SHAPE)
    # ...then shift that (C x r_1) structure one more level under r_0
    # against an empty right subtree.
    right_subtree_placeholder = generate_input_placeholder(fillers_shapes)
    prepared_for_shift_C_x_r_1 = reshape_to_satisfy_max_depth_after_shift(
        tensor_repr_C_x_r_1, MAX_TREE_DEPTH, SINGLE_ROLE_SHAPE,
        SINGLE_FILLER_SHAPE)
    fillers_joined_fourth_case_complex_c = keras_joiner.predict_on_batch([
        *prepared_for_shift_C_x_r_1,
        *right_subtree_placeholder,
    ])
    fillers_joined_fourth_case_complex_c = fillers_joined_fourth_case_complex_c.reshape(
        (*fillers_joined_fourth_case_complex_c.shape[1:], ))
    tensor_repr_C_x_r_1_x_r_0 = extract_per_level_tensor_representation_after_shift(
        fillers_joined_fourth_case_complex_c,
        max_tree_depth=MAX_TREE_DEPTH,
        role_shape=SINGLE_ROLE_SHAPE,
        filler_shape=SINGLE_FILLER_SHAPE)
    # Superimpose (C x r_1 x r_0) onto (A x r_0 x r_0 + B x r_1) to get the
    # full tree representation.
    tree_representation = sum_tensors(tensor_repr_C_x_r_1_x_r_0,
                                      tensor_repr_A_x_r_0_x_r_0_B_x_r_1)
    print('calculated tree representation')
    # Note the deeper target: MAX_TREE_DEPTH + 1 makes room for one more shift.
    prepared_for_shift_tree_representation = reshape_to_satisfy_max_depth_after_shift(
        tree_representation, MAX_TREE_DEPTH + 1, SINGLE_ROLE_SHAPE,
        SINGLE_FILLER_SHAPE)
    return prepared_for_shift_tree_representation