Esempio n. 1
0
 def _reconstruct_matrix(self, shifts=None, force=True):
     """Return the matrix rebuilt from the cached SVD factors U, S, V.

     Uses the cached reconstruction when one exists and force is false;
     otherwise recomputes it with divisi2.reconstruct. A truthy `shifts`
     is forwarded to the reconstruction.
     """
     if self._matrix_reconstructed and not force:
         return self._matrix_reconstructed
     extra = {'shifts': shifts} if shifts else {}
     self._matrix_reconstructed = divisi2.reconstruct(
         self._U, self._S, self._V, **extra)
     return self._matrix_reconstructed
Esempio n. 2
0
 def _reconstruct_matrix(self, shifts=None, force=True):
     """Lazily build (and cache) the reconstruction of the stored SVD.

     The cached value in self._matrix_reconstructed is reused unless it
     is missing/falsy or `force` is set.
     """
     needs_rebuild = force or not self._matrix_reconstructed
     if needs_rebuild:
         if shifts:
             rebuilt = divisi2.reconstruct(self._U, self._S, self._V,
                                           shifts=shifts)
         else:
             rebuilt = divisi2.reconstruct(self._U, self._S, self._V)
         self._matrix_reconstructed = rebuilt
     return self._matrix_reconstructed
Esempio n. 3
0
def test_cnet_blend():
    """Verify blend_svd agrees with the SVD of an explicitly blended matrix.

    Builds IsA and AtLocation slices of the English ConceptNet matrix,
    reconstructs both the reference blend SVD and the optimized blend_svd
    result, and spot-checks randomly sampled entries for agreement.
    """
    from divisi2.blending import blend, blend_svd
    import random

    cnet = divisi2.network.conceptnet_matrix('en')
    isa = divisi2.network.filter_by_relation(cnet, 'IsA').squish().normalize_all()
    atloc = divisi2.network.filter_by_relation(cnet, 'AtLocation').squish().normalize_all()

    Uref, Sref, Vref = blend([isa, atloc]).svd(k=3)
    U, S, V = blend_svd([isa, atloc], k=3)
    reference = divisi2.reconstruct(Uref, Sref, Vref)
    optimized = divisi2.reconstruct(U, S, V)

    # Spot-check a random 50x50 sampling instead of comparing every entry.
    for row in random.sample(reference.row_labels, 50):
        for col in random.sample(reference.col_labels, 50):
            assert np.allclose(reference.entry_named(row, col),
                               optimized.entry_named(row, col))
Esempio n. 4
0
 def __init__(self, matrix, dimensionality):
   """Build prediction and similarity operators from a truncated SVD.

   Decomposes `matrix` to rank `dimensionality`, then exposes:
   - self.predict: rank-k reconstruction for (concept, feature) prediction
   - self.concept_sim / self.feature_sim: normalized activation operators
     over the concept and feature spaces respectively.
   """
   U, weights, V = matrix.svd(k=dimensionality)
   self.predict = divisi2.reconstruct(U, weights, V)
   self.concept_sim = divisi2.reconstruct_activation(
       U, weights, post_normalize=True)
   self.feature_sim = divisi2.reconstruct_activation(
       V, weights, post_normalize=True)
Esempio n. 5
0
def learn_iter(mat):
    print "Starting learning process..."
    for i in range(NUM_LEARN_ITER):
        user_mat, axis_weights, movie_mat = learn(mat)
        # Reconstruct the learning matrix here.
        for j in range(NUM_USERS):
            for k in range(NUM_MOVIES):
                mat[j, k] = divisi2.dot(user_mat[i,:], movie_mat[j,:])
    print "Learning process complete."
    start_time = time.time()
    predictions = divisi2.reconstruct(user_mat, axis_weights, movie_mat)
    print "Matrix reconstruction (elapsed time: %f s)." % (time.time() - start_time)    
    return predictions
	def __init__(self, matrix_path=data_path+'feature_matrix_zh.smat'):
		"""Load AnalogySpace matrices and build similarity, prediction,
		and spreading-activation operators (all SVDs truncated at k=100).
		"""
		# AnalogySpace: normalized feature matrix -> truncated SVD.
		loaded = divisi2.load(matrix_path)
		self.A = loaded.normalize_all()
		self.concept_axes, axis_weights, self.feature_axes = \
				self.A.svd(k=100)
		self.sim = divisi2.reconstruct_similarity(
				self.concept_axes, axis_weights, post_normalize=False)
		self.predict = divisi2.reconstruct(
				self.concept_axes, axis_weights, self.feature_axes)
		# Fast spreading activation over the association matrix.
		assoc_raw = divisi2.load(data_path+'assoc_matrix_zh.smat')
		self.assoc = assoc_raw.normalize_all()
		U, S, _ = self.assoc.svd(k=100)
		self.spread = divisi2.reconstruct_activation(U, S)