def _predict_scores_fixed(self, X, **kwargs):
    """Predict latent utility scores for a fixed ranking size.

    Builds a temporary Keras model that applies the shared
    ``self.scoring_model`` to every object slice of the input and
    concatenates the per-object outputs, then runs prediction on ``X``.

    Parameters
    ----------
    X : numpy array, shape (n_instances, n_objects, n_features)
        Feature representation of the objects.

    Returns
    -------
    numpy array, shape (n_instances, n_objects)
        Predicted scores for each object in every instance.
    """
    _, n_objects, n_features = X.shape
    input_tensor = Input(shape=(n_objects, n_features))
    # One lambda per object position slices out that object's features.
    object_slices = [
        create_input_lambda(pos)(input_tensor) for pos in range(n_objects)
    ]
    merged_scores = concatenate(
        [self.scoring_model(obj) for obj in object_slices])
    scorer = Model(inputs=input_tensor, outputs=merged_scores)
    return scorer.predict(X)
def construct_model(self):
    """Construct the ListNet architecture.

    Weight sharing guarantees that we have a latent utility model for
    any given object: every object position is pushed through the same
    ``self.hidden_layers`` and the same ``self.output_node``.

    Returns
    -------
    tensor
        Concatenation of the per-object score outputs.
    """
    # One stream per object position, all fed from the shared input.
    object_streams = [
        create_input_lambda(pos)(self.input_layer)
        for pos in range(self.n_top)
    ]
    # Apply each shared hidden layer to every stream in turn.
    for layer in self.hidden_layers:
        object_streams = [layer(stream) for stream in object_streams]
    per_object_scores = [self.output_node(stream)
                         for stream in object_streams]
    return concatenate(per_object_scores)
def _predict_scores_fixed(self, X, **kwargs):
    """ Predict the scores for a fixed ranking size.

    Applies the set representation to ``X``, then builds a joint model
    that routes every object slice through the shared
    ``self.joint_layers`` and ``self.scorer`` before concatenating the
    per-object scores.

    Parameters
    ----------
    X : numpy array float (n_instances, n_objects, n_features)

    Returns
    -------
    scores : numpy array float (n_instances, n_objects)
    """
    # NOTE(review): kwargs is passed as a positional dict, not expanded
    # with ** — presumably get_set_representation expects a dict; verify.
    X = self.get_set_representation(X, kwargs)
    n_instances, n_objects, n_features = X.shape
    # BUG FIX: the original implicit string concatenation produced
    # "objects {}features {}" with no separator between the fields.
    self.logger.info(
        "After applying the set representations instances {} objects {} "
        "features {}".format(n_instances, n_objects, n_features))
    input_layer_joint = Input(shape=(n_objects, n_features),
                              name="input_joint_model")
    # One lambda per object position slices out that object's features.
    inputs = [
        create_input_lambda(i)(input_layer_joint) for i in range(n_objects)
    ]
    scores = []
    for i in range(n_objects):
        joint = inputs[i]
        # Shared joint layers: same weights applied to every object.
        for layer in self.joint_layers:
            joint = layer(joint)
        scores.append(self.scorer(joint))
    scores = concatenate(scores, name="final_scores")
    joint_model = Model(inputs=input_layer_joint, outputs=scores)
    predicted_scores = joint_model.predict(X)
    self.logger.info("Done predicting scores")
    return predicted_scores
def _create_model(self, shape):
    """Build and cache a set-representation model for a given input shape.

    The model applies the shared ``self.set_mapping_layers`` to every
    object slice and averages the resulting per-object embeddings into a
    single feature representation; the result is stored in
    ``self.cached_models`` keyed by the number of objects.

    Parameters
    ----------
    shape : TensorShape
        Input shape ``(batch, n_objects, n_features)``; ``.value`` access
        implies the TF1 ``Dimension`` API — TODO confirm before upgrading.
    """
    n_objects, n_features = shape[1].value, shape[2].value
    # Only a consistency warning — the mismatch is logged, not fatal.
    if hasattr(self, 'n_features') and self.n_features != n_features:
        self.logger.error("Number of features is not consistent.")
    input_layer = Input(shape=(n_objects, n_features))
    inputs = [
        create_input_lambda(i)(input_layer) for i in range(n_objects)
    ]
    # Connect input tensors with set mapping layer.
    # FIX: the original stored (index, tensor) pairs and immediately
    # stripped the unused index back off; store the tensors directly.
    set_mappings = []
    for i in range(n_objects):
        curr = inputs[i]
        for layer in self.set_mapping_layers:
            curr = layer(curr)
        set_mappings.append(curr)
    # Permutation-invariant pooling over the per-object embeddings.
    feature_repr = average(set_mappings)
    self.cached_models[n_objects] = Model(inputs=input_layer,
                                          outputs=feature_repr)
def join_input_layers(self, input_layer, *layers, n_layers, n_objects):
    """ Accepts input tensors and an arbitrary number of feature
    tensors and concatenates them into a joint layer. The input layers
    need to be given separately, because they need to be iterated over.

    Parameters
    ----------
    input_layer : input tensor (n_objects, n_features)
    layers : tensors
        A number of tensors representing feature representations
    n_layers : int
        Number of joint hidden layers
    n_objects : int
        Number of objects
    """
    self.logger.debug("Joining set representation and joint layers")
    # Slice out every object position from the shared input tensor.
    per_object_inputs = [
        create_input_lambda(pos)(input_layer) for pos in range(n_objects)
    ]
    score_tensors = []
    for pos in range(n_objects):
        # With at least one joint layer, fuse the object's own features
        # with the extra feature tensors; otherwise use them as-is.
        if n_layers >= 1:
            current = concatenate([per_object_inputs[pos], *layers])
        else:
            current = per_object_inputs[pos]
        for joint_layer in self.joint_layers:
            current = joint_layer(current)
        score_tensors.append(self.scorer(current))
    scores = concatenate(score_tensors, name="final_scores")
    self.logger.debug("Done")
    return scores