# Example no. 1
def conv_knn(embeddings: Matrix, convs: Matrix, dataset: Dataset,
             num_trees: int, num_nns: int, save_loc: str,
             cache_index: bool) -> None:
    """Dump, per convolutional filter, its nearest-neighbor words as JSON.

    For every (sequence, filter) pair, scores each vocabulary embedding by
    dot product against the filter vector, keeps the ``num_nns`` highest
    scoring indices, decodes them to words, and records the filter's L2
    norm.  The result is written to ``knns.json`` under ``save_loc``.

    NOTE(review): ``num_trees`` and ``cache_index`` are unused here but kept
    for interface compatibility with callers.
    Assumes ``convs`` is (seq, emb_dim, filter) and ``embeddings`` is
    (vocab, emb_dim) -- TODO confirm with caller.
    """
    # (seq, filter, emb_dim) @ (emb_dim, vocab) -> (seq, filter, vocab)
    scores = np.dot(convs.transpose(0, 2, 1), embeddings.transpose(1, 0))
    # Ascending argsort: the best matches sit at the tail of the last axis.
    ranked = np.argsort(scores, axis=-1)

    def describe_filter(seq_idx, filt_idx):
        # Top-scoring word ids for this filter, plus the filter's magnitude.
        vec = convs[seq_idx, :, filt_idx]
        top = ranked[seq_idx, filt_idx][-num_nns:]
        return {
            'words': dataset.decode(top, keep_first=True),
            'norm': float(np.linalg.norm(vec)),
        }

    # Outer list is per-filter, inner list is per-sequence (as in original).
    knns = []
    for filt_idx in range(convs.shape[2]):
        knns.append([describe_filter(seq_idx, filt_idx)
                     for seq_idx in range(convs.shape[0])])
    utils.save_json(knns, save_loc, 'knns.json')
 def feed_forward(self,x):
     """Propagate input ``x`` through the network, layer by layer.

     Caches each layer's pre-bias activation in ``self.activations``
     (input first, final output last) and returns the final output.

     NOTE(review): relies on the project's custom ``Matrix`` class
     (``.transpose()``, ``.matrix`` list-of-lists storage, ``*`` as
     matrix multiply) and on ``self.sigmoid`` / ``self.theta`` -- their
     exact semantics are defined elsewhere in this file.
     """
     # Column-vector form of the raw input; presumably x is a flat row.
     output_data = Matrix(x).transpose()
     self.activations =[]
     for i in range(len(self.theta)):
         # Record this layer's input before the bias term is prepended.
         self.activations.append(output_data)
         # Copy so the bias prepend below does not mutate the cached value.
         input_data = Matrix(output_data.matrix.copy()).transpose()
         for j in range(len(input_data.matrix)):
             # Prepend the bias unit (constant 1) to each row.
             input_data.matrix[j] = [1] + input_data.matrix[j]
         # Weighted sum through theta[i], squashed by the sigmoid.
         output_data = self.sigmoid(self.theta[i] * input_data.transpose())
     # Final network output is also cached as the last activation.
     self.activations.append(output_data)
     return output_data
# Example no. 3
def dimensionality_reduction(embeddings: Matrix, convs: Matrix,
                             save_loc: str) -> None:
    """Plot embeddings and convolution filters in 2-D PCA space.

    Fits a 2-component PCA on ``embeddings``, projects both the embeddings
    and the (flattened) convolution filters into that space, and saves a
    scatter plot to ``dimensionality_reduction.png`` under ``save_loc``.

    NOTE(review): assumes ``convs`` is (seq, emb_dim, filter) so that after
    the transpose/reshape each row has ``emb_dim`` features matching the
    PCA's input dimensionality -- TODO confirm with caller.
    """
    # (seq, filter, emb_dim): put the feature axis last before flattening.
    convs = convs.transpose(0, 2, 1)
    reduction = PCA(n_components=2)
    # Fit on embeddings only; filters are projected into the same basis.
    reduction.fit(embeddings)
    embs = reduction.transform(embeddings)
    conv_embs = reduction.transform(convs.reshape(-1, convs.shape[-1]))
    plt.plot(embs[:, 0], embs[:, 1], '.', label='Embeddings')
    plt.plot(conv_embs[:, 0], conv_embs[:, 1], '.', label='Convolutions')
    plt.legend()
    # Fix: the correct term is "principal component", not "principle".
    plt.xlabel('First principal component')
    plt.ylabel('Second principal component')
    plt.savefig(os.path.join(save_loc, 'dimensionality_reduction.png'))
    plt.close()