Example #1
0
 def __init__(self,layers=2,num_hidden=200,lr=1e-3,num_epochs=10,provider=0,out=1):
     """Build a TF-1.x graph-mode MLP for MSD genre classification.

     Args:
         layers: total number of hidden layers (>= 1).
         num_hidden: units per hidden layer.
         lr: learning rate (stored on self; presumably consumed by a
             training method defined elsewhere in the class -- confirm).
         num_epochs: number of training epochs (stored on self).
         provider: 0 selects the MSD10 data providers, any other value
             selects MSD25.
         out: stored on self; semantics not visible in this chunk --
             TODO confirm with the rest of the class.

     NOTE(review): relies on a module-level `rng` and on
     MSD10GenreDataProvider / MSD25GenreDataProvider being importable;
     `tf.placeholder` means this requires TensorFlow 1.x (or v1 compat).
     """
     # Choose the dataset family; both branches use batch_size=50 and the
     # shared module-level rng for reproducible shuffling.
     if provider == 0:
         self.train_data = MSD10GenreDataProvider('train', batch_size=50, rng=rng)
         self.valid_data = MSD10GenreDataProvider('valid', batch_size=50, rng=rng)
     else:
         self.train_data = MSD25GenreDataProvider('train', batch_size=50, rng=rng)
         self.valid_data = MSD25GenreDataProvider('valid', batch_size=50, rng=rng)
     # Graph feed points: feature vectors and one-hot targets, with a
     # variable (None) batch dimension.
     self.inputs = tf.placeholder(tf.float32, [None, self.train_data.inputs.shape[1]], 'inputs')
     self.targets = tf.placeholder(tf.float32, [None, self.train_data.num_classes], 'targets')
     self.layers=layers
     self.num_hidden = num_hidden
     self.num_epochs = num_epochs
     self.lr = lr
     self.out = out
     if provider == 0:
         self.MSD = 'MSD10 '
     else:
         self.MSD = 'MSD25 '
     # Human-readable run label used for plots/logging.
     self.title = self.MSD + 'N = ' + str(self.num_hidden) + ', L = ' + str(self.layers) +', LR = ' + str(self.lr)
     # First hidden layer maps raw features -> num_hidden units (ReLU by
     # default inside fully_connected_layer).
     with tf.name_scope('fc-layer-1'):
         hidden_1 = self.fully_connected_layer(self.inputs,self.train_data.inputs.shape[1],self.num_hidden)
     if layers>1:
         # Stack the remaining layers-1 hidden layers, then a linear
         # (tf.identity) output layer producing class logits.
         with tf.name_scope('fc-h'):
             hiddens = self.hidden_layers(hidden_1,self.num_hidden,n=layers-1,nonlinearity=tf.nn.relu)
         with tf.name_scope('output-layer'):
             self.outputs = self.fully_connected_layer(hiddens, self.num_hidden, self.train_data.num_classes, tf.identity)
     else:
         # Single-hidden-layer network: logits come straight off hidden_1.
         with tf.name_scope('output-layer'):
             self.outputs = self.fully_connected_layer(hidden_1, self.num_hidden, self.train_data.num_classes, tf.identity)
def getCurriculumLearningDataProviders(cross_entropies,
                                       curriculum_step,
                                       repetitions,
                                       seed=seed,
                                       rng=rng,
                                       repeat_school_class=True,
                                       shuffle_cur_curriculum=False,
                                       reverse_order=False,
                                       enable_auto_level_incr=True):
    """Build the (train, valid) provider pair for curriculum learning.

    The training provider orders/levels examples according to
    `cross_entropies`; the validation provider is the plain MSD10 one.
    Uses the module-level `batch_size` for both providers.

    NOTE: `seed` is currently unused -- the caller-supplied `rng` is
    passed straight through to the validation provider.
    """
    curriculum_options = dict(
        which_set='train',
        batch_size=batch_size,
        curriculum_step=curriculum_step,
        repetitions=repetitions,
        repeat_school_class=repeat_school_class,
        shuffle_cur_curriculum=shuffle_cur_curriculum,
        reverse_order=reverse_order,
        enable_auto_level_incr=enable_auto_level_incr,
    )
    train_data = MSD10Genre_CurriculumLearning(cross_entropies, **curriculum_options)

    valid_data = MSD10GenreDataProvider('valid', batch_size=batch_size, rng=rng)

    return train_data, valid_data
Example #3
0
    def loadAndGetDataProviders(self, dataset_filename, logits_filename):
        """Create and store the teacher-student data providers.

        Training data comes from the teacher provider (dataset plus the
        teacher's saved logits); both providers share this object's
        batch_size and rng.

        NOTE(review): the validation provider is built from the 'test'
        split -- presumably deliberate (final evaluation), but confirm.
        """
        teacher_provider = MSD10Genre_Teacher_DataProvider(
            dataset_filename=dataset_filename,
            logits_filename=logits_filename,
            batch_size=self.batch_size,
            rng=self.rng,
        )
        self.train_data = teacher_provider
        self.valid_data = MSD10GenreDataProvider(
            'test', batch_size=self.batch_size, rng=self.rng)

        return self.train_data, self.valid_data
Example #4
0
    object = []
    f = open(filename + '.pckl', 'rb')
    object = pickle.load(f)
    f.close()
    return object


def save_to_file(filename, object):
    """Pickle `object` to '<filename>.pckl'.

    Args:
        filename: path prefix; '.pckl' is appended automatically.
        object: any picklable value. (Parameter name kept for
            backward compatibility, though it shadows the builtin.)
    """
    # `with` closes the file even if pickling raises; the original
    # open/close pair leaked the handle on error.
    with open(filename + '.pckl', 'wb') as f:
        pickle.dump(object, f)


# Module-level MSD10 providers for this example's training run.
# NOTE(review): no rng is passed here (unlike the class-based setup), so
# shuffling uses the provider's default rng -- confirm if reproducibility
# across runs is required.
train_data = MSD10GenreDataProvider('train', batch_size=50)
valid_data = MSD10GenreDataProvider('valid', batch_size=50)


def fully_connected_layer(inputs,
                          input_dim,
                          output_dim,
                          nonlinearity=tf.nn.relu):
    """Affine layer: nonlinearity(inputs @ weights + biases).

    Weights are initialized with a truncated normal whose stddev follows
    a Glorot-style 2/sqrt(fan_in + fan_out) scale; biases start at zero.

    Returns:
        (outputs, weights): the activated tensor and the weight Variable
        (exposed so callers can regularize or save it).
    """
    # BUG FIX: tf.Variable's second positional parameter is `trainable`,
    # not `name` -- the original passed 'weights'/'biases' there, which
    # merely evaluated truthy and left the variables unnamed. Use the
    # `name=` keyword so the graph variables are actually labeled.
    weights = tf.Variable(
        tf.truncated_normal([input_dim, output_dim],
                            stddev=2. / (input_dim + output_dim)**0.5),
        name='weights')
    biases = tf.Variable(tf.zeros([output_dim]), name='biases')
    outputs = nonlinearity(tf.matmul(inputs, weights) + biases)
    return outputs, weights
Example #5
0
    # Persist `object` to '<filename>.pckl'; `with` guarantees the file
    # handle is closed even if pickling raises (the original open/close
    # pair leaked the handle on error).
    with open(filename + '.pckl', 'wb') as f:
        pickle.dump(object, f)

# Restore the pickled best-model parameters (four hidden-layer weight
# matrices and their bias vectors) saved by an earlier training run.
hidden1_bestmodel = load_from_file('data/hidden1_bestmodel')
hidden2_bestmodel = load_from_file('data/hidden2_bestmodel')
hidden3_bestmodel = load_from_file('data/hidden3_bestmodel')
hidden4_bestmodel = load_from_file('data/hidden4_bestmodel')
biases_1_bestmodel= load_from_file('data/biases_1_bestmodel')
biases_2_bestmodel= load_from_file('data/biases_2_bestmodel')
biases_3_bestmodel= load_from_file('data/biases_3_bestmodel')
biases_4_bestmodel= load_from_file('data/biases_4_bestmodel')

# Batch size 1: the frozen network below (see conv_layer_maxpooling's
# hard-coded reshape) is evaluated one example at a time.
batch_size_variable = 1

train_data = MSD10GenreDataProvider('train', batch_size = batch_size_variable)
valid_data = MSD10GenreDataProvider('valid', batch_size = batch_size_variable)

def fully_connected_layer(inputs, input_dim, output_dim, weights, biases, nonlinearity=tf.nn.relu):
    """Fully connected layer with frozen (pre-trained) parameters.

    The supplied `weights`/`biases` arrays are wrapped as tf constants,
    so the layer is not trainable -- used to evaluate a restored model.

    NOTE: `input_dim` and `output_dim` are unused; kept for signature
    parity with the trainable variant of this function.

    Returns:
        (activations, weight constant).
    """
    frozen_w = tf.constant(weights)
    frozen_b = tf.constant(biases)
    activations = nonlinearity(tf.matmul(inputs, frozen_w) + frozen_b)
    return activations, frozen_w

def conv_layer_maxpooling(inputs, image_height, image_width, in_channels, out_channels, kernel_height, kernel_width,
                          weights, biases, nonlinearity=tf.nn.relu):
    weights = tf.constant(weights)
    biases = tf.constant(biases)
    inputs_1 = tf.reshape(inputs, [batch_size_variable, image_height, image_width, in_channels])
    strides = [1, 1, 1, 1]
    padding = "VALID"