def p_builder(p_input):
    # CANDO: change activation
    p_layer1 = keras.layers.Dense(dim_l1, use_bias=use_bias, activation='relu',
                                  kernel_regularizer='l2', bias_regularizer='l2',
                                  name='p_layer1')(p_input)
    p_adj = layers.Bilinear(0, use_bias=use_bias,
                            kernel_regularizer='l2', bias_regularizer='l2',
                            name='p_adj')([p_layer1, p_layer1])
    p_v = keras.layers.Dense(dim_data, use_bias=use_bias,
                             kernel_regularizer='l2', bias_regularizer='l2',
                             name='p_v')(p_layer1)
    return ([p_adj, p_v], ('SigmoidBernoulliScaledAdjacency', 'SigmoidBernoulli'))
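# Hedged illustration (not from the original source): a plain-NumPy sketch of
# the pairwise bilinear scoring that `layers.Bilinear` is assumed to perform on
# the layer-1 activations above, i.e. scores of the form z @ W @ z.T, which a
# codec such as 'SigmoidBernoulliScaledAdjacency' presumably maps to edge
# probabilities through a sigmoid. `z` and `W` below are made-up stand-ins.
import numpy as np

rng = np.random.default_rng(0)
z = rng.normal(size=(5, 16))                # 5 nodes, 16-dimensional activations
W = rng.normal(size=(16, 16))               # stand-in bilinear kernel
scores = z @ W @ z.T                        # (5, 5) pairwise bilinear scores
adj_probs = 1.0 / (1.0 + np.exp(-scores))   # sigmoid -> edge probabilities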
def p_builder(p_input):
    # Get slices of the embeddings for each prediction
    p_input_adj = layers.InnerSlice(adj_embedding_slice)(p_input)
    p_input_v = layers.InnerSlice(v_embedding_slice)(p_input)

    # Build layer1 for adj and features, if requested, and have
    # it shared between adjacency and features if requested.
    if share_l1:
        # Shared intermediate layer.
        assert with_l1, "Can't share l1 if l1 is not requested"
        p_layer1 = keras.layers.Dense(dim_l1, use_bias=use_bias, activation='relu',
                                      kernel_regularizer='l2', bias_regularizer='l2',
                                      name='p_layer1')
        p_penultimate_adj = p_layer1(p_input_adj)
        p_penultimate_v = p_layer1(p_input_v)
    else:
        if with_l1:
            # Unshared intermediate layer.
            p_penultimate_adj = keras.layers.Dense(
                dim_l1, use_bias=use_bias, activation='relu',
                kernel_regularizer='l2', bias_regularizer='l2',
                name='p_layer1_adj')(p_input_adj)
            p_penultimate_v = keras.layers.Dense(
                dim_l1, use_bias=use_bias, activation='relu',
                kernel_regularizer='l2', bias_regularizer='l2',
                name='p_layer1_v')(p_input_v)
        else:
            # No intermediate layer.
            p_penultimate_adj = p_input_adj
            p_penultimate_v = p_input_v

    # Prepare kwargs for the Bilinear adj decoder, then build it.
    adj_kwargs = {}
    if adj_kernel is not None:
        adj_kwargs['fixed_kernel'] = adj_kernel
    else:
        adj_kwargs['kernel_regularizer'] = 'l2'
    p_adj = layers.Bilinear(
        0, use_bias=use_bias, name='p_adj', bias_regularizer='l2',
        **adj_kwargs)([p_penultimate_adj, p_penultimate_adj])

    # Finally build the feature decoder according to the requested codec.
    if feature_codec in ['SigmoidBernoulli', 'SoftmaxMultinomial']:
        p_v = keras.layers.Dense(dim_data, use_bias=use_bias,
                                 kernel_regularizer='l2', bias_regularizer='l2',
                                 name='p_v')(p_penultimate_v)
    else:
        assert feature_codec == 'OrthogonalGaussian'
        p_v_μ_flat = keras.layers.Dense(
            dim_data, use_bias=use_bias,
            kernel_regularizer='l2', bias_regularizer='l2',
            name='p_v_mu_flat')(p_penultimate_v)
        p_v_logS_flat = keras.layers.Dense(
            dim_data, use_bias=use_bias,
            kernel_regularizer='l2', bias_regularizer='l2',
            name='p_v_logS_flat')(p_penultimate_v)
        p_v = keras.layers.Concatenate(name='p_v_mulogS_flat')(
            [p_v_μ_flat, p_v_logS_flat])

    return ([p_adj, p_v], ('SigmoidBernoulliScaledAdjacency', feature_codec))
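# Hedged usage sketch (not from the original source): one plausible way to wrap
# the builder above in a standalone Keras decoder model, assuming the enclosing
# scope defines the free variables the builder relies on (dim_l1, dim_data,
# use_bias, feature_codec, ...). `dim_xi` is a hypothetical name for the
# dimension of the embedding fed to the decoder, introduced only for this
# illustration.
def build_p_model(dim_xi):
    p_input = keras.layers.Input(shape=(dim_xi,), name='p_input')
    p_outputs, p_codecs = p_builder(p_input)
    p_model = keras.models.Model(inputs=p_input, outputs=p_outputs, name='p_model')
    return p_model, p_codecs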