Beispiel #1
0
def mode_dot(tensor, matrix, mode):
    """Compute the n-mode product of `tensor` with `matrix` along `mode`.

    The tensor is unfolded along `mode`, left-multiplied by `matrix`, and
    folded back into a tensor whose `mode`-th dimension equals the row
    count of `matrix`.
    """
    tensor_shape = tensor.get_shape().as_list()
    matrix_shape = matrix.get_shape().as_list()

    # The matrix's second dimension must match the contracted tensor dimension.
    if matrix_shape[1] != tensor_shape[mode]:
        raise ValueError("Shape error. {0}(matrix's 2nd dimension) is not as same as {1} (dimension of the tensor)".format(matrix_shape[1], tensor_shape[mode]))

    # Result keeps every dimension except `mode`, which becomes matrix's rows.
    new_shape = list(tensor_shape)
    new_shape[mode] = matrix_shape[0]

    unfolded = TNSR.unfold(tensor, mode)
    return TNSR.fold(tf.matmul(matrix, unfolded), mode, new_shape)
Beispiel #2
0
def mode_dot(tensor, matrix, mode):
    """Contract `matrix` with `tensor` along dimension `mode` (n-mode product)."""
    t_shape = tensor.get_shape().as_list()
    m_shape = matrix.get_shape().as_list()

    # Dimension check: matrix columns vs. the tensor dimension being contracted.
    if m_shape[1] != t_shape[mode]:
        raise ValueError("Shape error. {0}(matrix's 2nd dimension) is not as same as {1} (dimension of the tensor)".format(m_shape[1], t_shape[mode]))

    # Output shape: identical to the input except along `mode`.
    result_shape = t_shape[:]
    result_shape[mode] = m_shape[0]

    product = tf.matmul(matrix, TNSR.unfold(tensor, mode))
    return TNSR.fold(product, mode, result_shape)
Beispiel #3
0
def ttrl(x, ranks, n_outputs):
	"""Tensor-train regression layer (TTRL).

	Builds the regression weight tensor from a tensor-train (TT)
	factorization via TNSR.tt_to_tensor. Two variants:

	- `n_outputs` is a list: the output modes are appended to the input
	  modes and the result is reshaped to `[-1] + n_outputs`.
	- `n_outputs` is a scalar: the last TT core carries the output
	  dimension and the shared `regression` helper applies the weights.

	Args:
		x: input tensor; trailing (non-batch) dimensions are the input modes.
		ranks: TT ranks. NOTE(review): indexing differs per branch — the
			list branch reads ranks[i]/ranks[i+1], the scalar branch
			ranks[i-1]/ranks[i]; confirm both callers pass matching lengths.
		n_outputs: output dimension(s), list or scalar.

	Returns:
		The layer output tensor (reshaped per branch as described above).
	"""
	weight_initializer = tf.contrib.layers.xavier_initializer()

	if(type(n_outputs) == type([])):
		# Example shapes from the original author:
		#n_outputs 	[2,2,2,2,2,2]
		#ranks     	[1,1,2,2,3,4,3,2,2,1,1]
		#x 		 	[50,14,14,32]
		#input_shape[14,14,32,2,2,2,2,2,2]
		suffix = np.prod(n_outputs)
		input_shape = x.get_shape().as_list()[1:]+n_outputs
		bias = tf.get_variable("bias_{}".format(np.prod(n_outputs)), shape=(1, np.prod(n_outputs)))

		cores = []

		# One TT core per mode transition; core i couples ranks[i] and ranks[i+1].
		for i in range(len(input_shape)-1):
			cores.append(tf.get_variable("core_{0}_output_{1}".format(i,suffix), 
				shape = (ranks[i], input_shape[i], ranks[i+1]), 
				initializer = weight_initializer))

		# Last core; its name reuses the final loop value of `i`.
		cores.append(tf.get_variable("core_{0}_last_output_{1}".format(i,suffix), 
			shape = (ranks[-2], input_shape[-1], ranks[-1]), 
			initializer = weight_initializer))

		#for c in cores:
		#	print(c.get_shape().as_list())

		regression_weights = TNSR.tt_to_tensor(cores)
		# NOTE(review): `input_shape[:3]` hard-codes 3 input modes (matches the
		# [14,14,32] example above) — confirm before using with other inputs.
		w_minus1 = tf.reshape(regression_weights, [np.prod(input_shape[:3]), np.prod(n_outputs)])
		x_0 = tf.reshape(x, [-1, np.prod(x.get_shape().as_list()[1:])])
		return tf.reshape(tf.add(tf.matmul(x_0, w_minus1) ,bias), [-1]+n_outputs)

	else:
		suffix = n_outputs
		input_shape = x.get_shape().as_list()[1:]
		#bias = tf.get_variable("bias", shape=(1, n_outputs))
		bias = tf.get_variable("bias_{}".format(np.prod(n_outputs)), shape=(1, np.prod(n_outputs)))

		cores = []

		# Interior cores: core i couples ranks[i-1], input mode i-1, ranks[i].
		for i in range(1, len(ranks)-1):
			cores.append(tf.get_variable("core_{0}_output_{1}".format(i,suffix), 
				shape = (ranks[i-1], input_shape[i-1], ranks[i]), 
				initializer = weight_initializer))

		# Final core carries the output dimension; name reuses the loop's last `i`.
		cores.append(tf.get_variable("core_{0}_last_output_{1}".format(i,suffix), 
			shape=(ranks[-2],n_outputs,ranks[-1]),
			initializer = weight_initializer))

		regression_weights = TNSR.tt_to_tensor(cores)
		return regression(x, regression_weights, input_shape, bias, n_outputs)
Beispiel #4
0
def cprl(x, rank, n_outputs):
	"""CP-decomposed tensor regression layer.

	Builds `rank` rank-1 weight components (one factor vector per input
	mode plus one output-mode factor), assembles them into the full
	regression weight tensor via TNSR.cp_to_tensor, and applies the
	shared `regression` helper.

	Args:
		x: input tensor; trailing (non-batch) dimensions are the input modes.
		rank: number of rank-1 components in the CP decomposition.
		n_outputs: output dimension(s); also used in variable-name suffixes.

	Returns:
		The result of `regression(...)` — the layer's output tensor.
	"""
	weight_initializer = tf.contrib.layers.xavier_initializer()
	input_shape = x.get_shape().as_list()[1:]

	bias = tf.get_variable("bias_{}".format(np.prod(n_outputs)), shape=(1, np.prod(n_outputs)))

	rank1_tnsrs = []

	# (Removed a leftover debug `print(rank)` statement.)
	for i in range(rank):
		rank1_tnsr = []

		# One factor vector per input mode.
		for j in range(len(input_shape)):
			rank1_tnsr.append(tf.get_variable("rank1_tnsr_{0}_{1}_{2}".format(i,j,np.prod(n_outputs)), 
				shape = (input_shape[j]), 
				initializer = weight_initializer))

		# Final factor covers the output mode.
		rank1_tnsr.append(tf.get_variable("rank1_tnsr_{0}_output_{1}".format(i,np.prod(n_outputs)), 
			shape = (n_outputs), 
			initializer = weight_initializer))

		rank1_tnsrs.append(rank1_tnsr)

	regression_weights = TNSR.cp_to_tensor(rank1_tnsrs)

	return regression(x, regression_weights, input_shape, bias, n_outputs)
Beispiel #5
0
def cprl(x, rank, n_outputs):
	"""CP tensor regression layer: weights as a sum of `rank` rank-1 tensors."""
	initializer = tf.contrib.layers.xavier_initializer()
	input_shape = x.get_shape().as_list()[1:]
	out_size = np.prod(n_outputs)

	bias = tf.get_variable("bias_{}".format(out_size), shape=(1, out_size))

	components = []
	for comp in range(rank):
		# One factor vector per input mode...
		factors = [
			tf.get_variable("rank1_tnsr_{0}_{1}_{2}".format(comp, mode, out_size),
				shape=(input_shape[mode]),
				initializer=initializer)
			for mode in range(len(input_shape))
		]
		# ...plus one factor for the output mode.
		factors.append(tf.get_variable("rank1_tnsr_{0}_output_{1}".format(comp, out_size),
			shape=(n_outputs),
			initializer=initializer))
		components.append(factors)

	regression_weights = TNSR.cp_to_tensor(components)
	return regression(x, regression_weights, input_shape, bias, n_outputs)
Beispiel #6
0
def trl(x, ranks, n_outputs):
	"""Tucker regression layer: Tucker-factorized regression weights + bias."""
	init = tf.contrib.layers.xavier_initializer()
	input_shape = x.get_shape().as_list()[1:]

	# Tucker core plus one factor matrix per input mode.
	core = tf.get_variable("core_last", shape=ranks, initializer=init)
	factors = []
	for i, e in enumerate(input_shape):
		factors.append(tf.get_variable("basic_factor_{0}_{1}".format(i, e),
			shape=(input_shape[i], ranks[i]),
			initializer=init))

	bias = tf.get_variable("bias_trl", shape=(1, n_outputs))

	# Output-mode factor appended last.
	factors.append(tf.get_variable("factor_{}".format(len(ranks) - 1),
		shape=(n_outputs, ranks[-1]),
		initializer=init))

	regression_weights = TNSR.tucker_to_tensor(core, factors)

	flat_x = tf.reshape(x, [-1, np.prod(input_shape)])
	flat_w = tf.reshape(regression_weights, [np.prod(input_shape), n_outputs])
	return tf.add(tf.matmul(flat_x, flat_w), bias)
Beispiel #7
0
def trl(x, ranks, n_outputs):
    """Tucker regression layer.

    Parameterizes the regression weights by a Tucker decomposition: a core
    of shape `ranks`, one factor matrix per input mode, and a final factor
    for the output mode. The assembled weights are applied as a flat
    matrix multiply plus bias.

    Args:
        x: input tensor; trailing (non-batch) dimensions are the input modes.
        ranks: Tucker ranks — one per input mode plus one output-mode rank.
        n_outputs: scalar number of outputs.

    Returns:
        Tensor of shape (batch, n_outputs): x_flat @ W_flat + bias.
    """
    weight_initializer = tf.contrib.layers.xavier_initializer()
    input_shape = x.get_shape().as_list()[1:]

    # (Removed a dead `core, factors = None, None` pre-assignment — both
    # names were unconditionally rebound immediately below.)
    core = tf.get_variable("core_last",
                           shape=ranks,
                           initializer=weight_initializer)
    factors = [
        tf.get_variable("basic_factor_{0}_{1}".format(i, e),
                        shape=(input_shape[i], ranks[i]),
                        initializer=weight_initializer)
        for (i, e) in enumerate(input_shape)
    ]

    bias = tf.get_variable("bias_trl", shape=(1, n_outputs))

    # Output-mode factor appended after the per-input-mode factors.
    factors.append(
        tf.get_variable("factor_{}".format(len(ranks) - 1),
                        shape=(n_outputs, ranks[-1]),
                        initializer=weight_initializer))

    regression_weights = TNSR.tucker_to_tensor(core, factors)

    x_0 = tf.reshape(x, [-1, np.prod(input_shape)])

    w_minus1 = tf.reshape(regression_weights,
                          [np.prod(input_shape), n_outputs])
    return tf.add(tf.matmul(x_0, w_minus1), bias)
Beispiel #8
0
    def restore_2(self):
        """Reconstruct the full tensor from the TT cores in `self.cores`.

        Starts by contracting the first two cores, then repeatedly
        contracts each subsequent core's mode-0 unfolding into the running
        result along a growing mode index, and finally reshapes to
        `self.tensor_size`.

        Returns:
            numpy array of shape `self.tensor_size`.
        """
        # (Removed leftover debug prints of the intermediate shape/index.)
        md = 2
        t = TNSR.np_mode_dot(self.cores[0],
                             np.transpose(TNSR.np_unfold(self.cores[1], 0)),
                             md).reshape(
                                 list(self.cores[0].shape)[:-1] +
                                 list(self.cores[1].shape)[1:])
        for i in range(1, len(self.tensor_size) - 1):
            # Contract the next core along the next (growing) mode.
            md = md + 1
            t = TNSR.np_mode_dot(
                t, np.transpose(TNSR.np_unfold(self.cores[i + 1], 0)),
                md).reshape(
                    list(t.shape)[:-1] + list(self.cores[i + 1].shape)[1:])

        return t.reshape(self.tensor_size)
Beispiel #9
0
def ttrl(x, ranks, n_outputs):
	"""Tensor-train regression layer: TT-factorized regression weights."""
	init = tf.contrib.layers.xavier_initializer()

	suffix = n_outputs
	input_shape = x.get_shape().as_list()[1:]
	out_size = np.prod(n_outputs)
	bias = tf.get_variable("bias_{}".format(out_size), shape=(1, out_size))

	cores = []
	# Interior cores: core k couples ranks[k-1], input mode k-1 and ranks[k].
	for k in range(1, len(ranks) - 1):
		cores.append(tf.get_variable("core_{0}_output_{1}".format(k, suffix),
			shape=(ranks[k - 1], input_shape[k - 1], ranks[k]),
			initializer=init))

	# The last core carries the output dimension (name reuses the loop's final index).
	cores.append(tf.get_variable("core_{0}_last_output_{1}".format(k, suffix),
		shape=(ranks[-2], n_outputs, ranks[-1]),
		initializer=init))

	regression_weights = TNSR.tt_to_tensor(cores)
	return regression(x, regression_weights, input_shape, bias, n_outputs)
Beispiel #10
0
def ttrl(x, ranks, n_outputs):
    """Tensor-train regression layer with a scalar output mode."""
    initializer = tf.contrib.layers.xavier_initializer()

    suffix = n_outputs
    input_shape = x.get_shape().as_list()[1:]
    flat_outputs = np.prod(n_outputs)
    bias = tf.get_variable("bias_{}".format(flat_outputs),
                           shape=(1, flat_outputs))

    cores = []
    # Interior cores: core k links rank k-1, input mode k-1 and rank k.
    for k in range(1, len(ranks) - 1):
        core = tf.get_variable("core_{0}_output_{1}".format(k, suffix),
                               shape=(ranks[k - 1], input_shape[k - 1], ranks[k]),
                               initializer=initializer)
        cores.append(core)

    # Final core holds the output dimension; its name reuses the last loop index.
    last_core = tf.get_variable("core_{0}_last_output_{1}".format(k, suffix),
                                shape=(ranks[-2], n_outputs, ranks[-1]),
                                initializer=initializer)
    cores.append(last_core)

    regression_weights = TNSR.tt_to_tensor(cores)
    return regression(x, regression_weights, input_shape, bias, n_outputs)
Beispiel #11
0
def trl(x, ranks, n_outputs):
	"""Tucker regression layer.

	The regression weight tensor is parameterized by a Tucker
	decomposition: a core of shape `ranks`, one factor matrix per input
	mode, and a final factor matrix for the output mode. The weights are
	assembled with TNSR.tucker_to_tensor and applied as a flat matrix
	multiply plus bias.

	Original author's example shapes: ranks = [1,2,2,1].

	Args:
		x: input tensor; trailing (non-batch) dimensions are the input modes.
		ranks: Tucker ranks — one per input mode plus one output-mode rank.
		n_outputs: scalar output dimension (the list-valued variant was
			commented out and has been removed, along with leftover debug
			prints of the core/factor shapes and a dead `w_minus1 = None`).

	Returns:
		Tensor of shape (batch, n_outputs): x_flat @ W_flat + bias.
	"""
	weight_initializer = tf.contrib.layers.xavier_initializer()
	input_shape = x.get_shape().as_list()[1:]

	core = tf.get_variable("core_last", shape=ranks, initializer=weight_initializer)
	factors = [tf.get_variable("basic_factor_{0}_{1}".format(i, e),
				shape=(input_shape[i], ranks[i]),
				initializer=weight_initializer)
				for (i, e) in enumerate(input_shape)]

	bias = tf.get_variable("bias_{}".format(np.prod(n_outputs)), shape=(1, np.prod(n_outputs)))

	# Append the last (N+1)-th factor matrix, covering the output mode.
	factors.append(tf.get_variable("factor_{}".format(len(ranks) - 1),
			shape=(n_outputs, ranks[-1]),
			initializer=weight_initializer))

	regression_weights = TNSR.tucker_to_tensor(core, factors)

	x_0 = tf.reshape(x, [-1, np.prod(input_shape)])
	w_minus1 = tf.reshape(regression_weights, [np.prod(input_shape), n_outputs])
	return tf.add(tf.matmul(x_0, w_minus1), bias)