Example #1
    def __init__(self, model_param, dimensions):

        Model_lop.__init__(self, model_param, dimensions)
        self.rnns = model_param['n_hidden']
        self.film_dim = model_param['film_dim']

        return
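All of these constructors follow the same pattern: the shared Model_lop base class consumes the model_param and dimensions dictionaries, and each subclass then copies its own hyperparameters out of model_param. The sketch below shows that pattern end to end; the Model_lop stand-in, the subclass name and all key values are illustrative assumptions inferred from the attributes used in the examples (piano_dim, orch_dim, temporal_order), not taken from the project source.

# Minimal self-contained sketch -- this Model_lop stand-in and the subclass
# name are assumptions for illustration, not the project's real classes.
class Model_lop(object):
    def __init__(self, model_param, dimensions):
        # Assumed behaviour: the base class exposes the problem dimensions,
        # which later examples read back as self.piano_dim, self.orch_dim
        # and self.temporal_order.
        self.piano_dim = dimensions['piano_dim']
        self.orch_dim = dimensions['orch_dim']
        self.temporal_order = dimensions['temporal_order']

class Film_rnn_like(Model_lop):  # hypothetical subclass mirroring Example #1
    def __init__(self, model_param, dimensions):
        Model_lop.__init__(self, model_param, dimensions)
        self.rnns = model_param['n_hidden']
        self.film_dim = model_param['film_dim']

model = Film_rnn_like({'n_hidden': [512, 512], 'film_dim': 128},
                      {'piano_dim': 93, 'orch_dim': 128, 'temporal_order': 8})
print(model.rnns, model.film_dim, model.orch_dim)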
Example #2
 def __init__(self, model_param, dimensions):
     Model_lop.__init__(self, model_param, dimensions)
     # Hidden layers architecture
     self.n_hs = model_param['n_hidden']
     self.static_bias = compute_static_bias_initialization(model_param['activation_ratio'])
     
     return
Example #3
    def __init__(self, model_param, dimensions):

        Model_lop.__init__(self, model_param, dimensions)
        # Hidden layers architecture
        self.n_hs = model_param['n_hidden'] + [int(self.orch_dim)]

        return
Example #4
 def __init__(self, model_param, dimensions):
     Model_lop.__init__(self, model_param, dimensions)
     # Hidden layers architecture
     self.n_hidden = model_param['n_hidden']
     self.n_visible = self.orch_dim
     self.n_condition = self.orch_dim * (self.temporal_order - 1) + self.piano_dim
     self.Gibbs_steps = model_param["Gibbs_steps"]
     return
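In this example the conditioning vector stacks the temporal_order - 1 previous orchestra frames with the current piano frame, hence n_condition = orch_dim * (temporal_order - 1) + piano_dim. With illustrative values that are not taken from the source (orch_dim = 128, temporal_order = 8, piano_dim = 93), this gives 128 * 7 + 93 = 989 conditioning units, while n_visible stays at orch_dim = 128.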
Example #5
    def __init__(self, model_param, dimensions):

        Model_lop.__init__(self, model_param, dimensions)

        # Hidden layers architecture
        self.n_hs = model_param['n_hidden']
        self.n_hs_piano = model_param['n_hidden_piano']

        return
Example #6
	def __init__(self, model_param, dimensions):
		Model_lop.__init__(self, model_param, dimensions)
		# Hidden layers architecture
		self.layers = model_param['n_hidden']
		# Number of different orderings when sampling
		self.num_ordering = model_param['num_ordering']
		# Is it a keras model ?
		self.keras = True
		return
Example #7
    def __init__(self, model_param, dimensions):

        Model_lop.__init__(self, model_param, dimensions)

        # Stack conv
        self.filters = model_param["num_filter_piano"]
        self.kernels = model_param["kernel_size_piano"]

        return
Example #8
    def __init__(self, model_param, dimensions):

        Model_lop.__init__(self, model_param, dimensions)

        # Hidden layers architecture
        self.n_hs = model_param['n_hidden']
        self.num_filter = model_param['num_filter']
        self.filter_size = model_param['filter_size']

        return
Example #9
    def __init__(self, model_param, dimensions):

        Model_lop.__init__(self, model_param, dimensions)

        # Hidden layers architecture
        self.layers = [self.piano_dim] + list(model_param['n_hidden'])
        # Is it a keras model ?
        self.keras = False

        return
Example #10
    def __init__(self, model_param, dimensions):

        Model_lop.__init__(self, model_param, dimensions)

        # Hidden layers architecture
        self.MLP_piano_emb = model_param['MLP_piano_emb']
        self.GRU_orch_emb = model_param['GRU_orch_emb']
        self.last_MLP = model_param['last_MLP']

        return
Example #11
	def __init__(self, model_param, dimensions):

		Model_lop.__init__(self, model_param, dimensions)

		# Hidden layers architecture
		self.layers = model_param['n_hidden']
		# Is it a keras model ?
		self.keras = True

		return
Example #12
 def __init__(self, model_param, dimensions):
     Model_lop.__init__(self, model_param, dimensions)
     # Architecture
     self.layers = model_param['n_hidden']
     self.recurrent_layers = model_param['n_hidden']
     # Is it a keras model ?
     self.keras = True
     # Will be computed later
     self.context_embedding_size = None
     return
Example #13
 def __init__(self, model_param, dimensions):
     Model_lop.__init__(self, model_param, dimensions)
     # Hidden layers architecture
     self.layers = model_param['n_hidden']
     # Number of different orderings when sampling
     self.num_ordering = model_param['num_ordering']
     # Is it a keras model ?
     self.keras = True
     # Will be computed later
     self.context_embedding_size = None
     return
Example #14
 def __init__(self, model_param, dimensions):
     Model_lop.__init__(self, model_param, dimensions)
     # Architecture
     self.mlp_piano_present = model_param['mlp_piano_present']
     self.recurrent_layers = model_param['recurrent_layers']
     self.mlp_orch_present = model_param['mlp_orch_present']
     self.mlp_last_pred = model_param['mlp_last_pred']
     # Is it a keras model ?
     self.keras = True
     # Will be computed later
     self.context_embedding_size = None
     return
Example #15
 def __init__(self, model_param, dimensions):
     Model_lop.__init__(self, model_param, dimensions)
     self.num_filter_piano = model_param["num_filter_piano"]
     self.kernel_size_piano = model_param["kernel_size_piano"]
     self.num_filter_orch = model_param["num_filter_orch"]
     self.kernel_size_orch = model_param["kernel_size_orch"]
     self.embeddings_size = model_param["embeddings_size"]
     # The last recurrent layer outputs a vector of dimension embeddings_size
     self.gru_orch = list(model_param["gru_orch"])
     self.gru_orch.append(self.embeddings_size)
     self.mlp_pred = model_param["mlp_pred"]
     return
Example #16
    def __init__(self, model_param, dimensions):

        Model_lop.__init__(self, model_param, dimensions)

        self.num_filter_piano = model_param["num_filter_piano"]
        self.kernel_size_piano = model_param["kernel_size_piano"]
        self.mlp_piano = model_param["mlp_piano"]
        self.mlp_pred = model_param["mlp_pred"]
        self.gru_orch = model_param["gru_orch"]

        # Is it a keras model ?
        self.keras = True

        return
Example #17
    def __init__(self, model_param, dimensions):
        Model_lop.__init__(self, model_param, dimensions)
        # Hidden layers architecture
        self.n_h = model_param['n_hidden']
        self.n_v = self.orch_dim
        self.n_c = self.orch_dim * (self.temporal_order-1)
        self.n_l = self.piano_dim
        self.n_f = model_param["n_factor"]

        self.n_fv = model_param["n_factor"]
        self.n_fh = model_param["n_factor"]

        self.Gibbs_steps = model_param["Gibbs_steps"]
        return
Example #18
 def __init__(self, model_param, dimensions):
     Model_lop.__init__(self, model_param, dimensions)
     # Architecture
     self.mlp_piano_present = model_param['mlp_piano_present']
     self.recurrent_layers = model_param['recurrent_layers']
     self.mlp_orch_present = model_param['mlp_orch_present']
     self.mlp_last_pred = model_param['mlp_last_pred']
     # Is it a keras model ?
     self.keras = True
     # Will be computed later
     self.context_embedding_size = None
     # Static bias
     self.static_bias = compute_static_bias_initialization(
         model_param['activation_ratio'])
     return
Example #19
	def get_hp_space():
		super_space = Model_lop.get_hp_space()

		space = {'n_hidden': list_log_hopt(500, 2000, 10, 1, 2, "n_hidden")}

		space.update(super_space)
		return space
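These get_hp_space methods return a hyperopt search space built from project helpers (list_log_hopt, quniform_int, hopt_wrapper.qloguniform_int) that wrap hyperopt's primitives. Below is a self-contained sketch of how such a space is typically driven with hyperopt's fmin; the inline space is a stand-in guessing at what list_log_hopt(500, 2000, 10, 1, 2, ...) expands to (based on the explicit hp.choice pattern used in later examples), and the objective is a placeholder rather than the project's training loop.

from math import log
from hyperopt import fmin, tpe, hp, Trials

# Stand-in space: choose a 1- or 2-layer architecture, each layer width drawn
# q-log-uniformly between roughly 500 and 2000, rounded to multiples of 10.
space = {
    'n_hidden': hp.choice('n_hidden', [
        [hp.qloguniform('n_hidden_1_0', log(500), log(2000), 10)],
        [hp.qloguniform('n_hidden_2_%d' % i, log(500), log(2000), 10) for i in range(2)],
    ]),
}

def objective(params):
    # Placeholder: the real project would build a model from params and
    # return its validation loss.
    return float(sum(params['n_hidden']))

trials = Trials()
best = fmin(fn=objective, space=space, algo=tpe.suggest,
            max_evals=20, trials=trials)
print(best)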
Example #20
    def get_hp_space():
        super_space = Model_lop.get_hp_space()

        space = {}

        space.update(super_space)
        return space
Example #21
    def get_hp_space():
        super_space = Model_lop.get_hp_space()

        space = {
            'num_filter_piano':
            list_hopt_fixedSized([(20, 30, 1), (10, 20, 1)],
                                 'num_filter_piano'),
            'kernel_size_piano':
            list_hopt_fixedSized([(12, 24, 1), (12, 24, 1)],
                                 "kernel_size_piano"),
            'num_filter_orch':
            list_hopt_fixedSized([(30, 50, 1), (10, 20, 1)],
                                 'num_filter_orch'),
            'kernel_size_orch':
            list_hopt_fixedSized([(12, 24, 1), (12, 24, 1)],
                                 "kernel_size_orch"),
            'embeddings_size':
            qloguniform_int("embeddings_size", log(500), log(2000), 10),
            'mlp_pred':
            list_log_hopt(500, 2000, 10, 1, 3, "mlp_pred"),
            'gru_orch':
            list_log_hopt(500, 2000, 10, 0, 2, "gru_orch"),
        }
        space.update(super_space)
        return space
Example #22
    def get_hp_space():
        super_space = Model_lop.get_hp_space()

        space = {
            'n_hidden':
            hp.choice('n_hidden', [
                [
                    hopt_wrapper.qloguniform_int('n_hidden_1_' + str(i),
                                                 log(100), log(5000), 10)
                    for i in range(1)
                ],
                [
                    hopt_wrapper.qloguniform_int('n_hidden_2_' + str(i),
                                                 log(100), log(5000), 10)
                    for i in range(2)
                ],
                [
                    hopt_wrapper.qloguniform_int('n_hidden_3_' + str(i),
                                                 log(100), log(5000), 10)
                    for i in range(3)
                ],
            ]),
        }

        space.update(super_space)
        return space
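The hp.choice construction above lets the optimizer pick both the depth (one to three hidden layers) and each layer's width within a single hyperparameter. To inspect concrete draws from such a space, hyperopt's stochastic sampler can be applied directly; the snippet below uses a reduced stand-in space with the same structure, with plain hp.qloguniform in place of the project's hopt_wrapper helper.

from math import log
from hyperopt import hp
from hyperopt.pyll.stochastic import sample

# Same structure as the 'n_hidden' space above: depth 1-3, widths ~100-5000,
# with a distinct label per branch, mirroring the example above.
space = hp.choice('n_hidden', [
    [hp.qloguniform('n_hidden_%d_%d' % (depth, i), log(100), log(5000), 10)
     for i in range(depth)]
    for depth in (1, 2, 3)
])

for _ in range(3):
    print(sample(space))  # one draw: 1, 2 or 3 layer widths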
Example #23
 def get_hp_space():
     super_space = Model_lop.get_hp_space()
     space = {
         'n_hidden': list_log_hopt(500, 2000, 10, 1, 2, "n_hidden"),
         'num_ordering': quniform_int('num_ordering', 5, 5, 1)
     }
     space.update(super_space)
     return space
Example #24
    def get_hp_space():
        super_space = Model_lop.get_hp_space()

        space = {
            'n_hidden': hopt_wrapper.qloguniform_int('n_hidden', log(100), log(5000), 10),
        }

        space.update(super_space)
        return space
Example #25
    def get_hp_space():
        super_space = Model_lop.get_hp_space()

        space = {
            'filter_0': quniform_int('filter_0', 20, 50, 1),
            'kernel_0': quniform_int('kernel_0', 8, 16, 1),
            'filter_1': quniform_int('filter_1', 20, 50, 1),
            'kernel_1': quniform_int('kernel_1', 8, 16, 1),
        }

        space.update(super_space)
        return space
Example #26
    def get_hp_space():
        super_space = Model_lop.get_hp_space()

        space = {
            'num_filter_piano': quniform_int('num_filter_piano', 20, 50, 1),
            'kernel_size_piano': quniform_int('kernel_size_piano', 8, 16, 1),
            'mlp_piano': list_log_hopt(500, 2000, 10, 1, 3, "mlp_piano"),
            'mlp_pred': list_log_hopt(500, 2000, 10, 1, 3, "mlp_pred"),
            'gru_orch': list_log_hopt(500, 2000, 10, 1, 3, "gru_orch"),
        }
        space.update(super_space)
        return space
Example #27
    def get_hp_space():
        super_space = Model_lop.get_hp_space()
        space = {
            # Each depth branch gets its own label prefix so every hyperparameter label stays unique
            'n_hidden_embedding': hp.choice('n_hidden_embedding', [
                [hopt_wrapper.qloguniform_int('n_hidden_embedding_1_' + str(i), log(1500), log(3000), 10) for i in range(1)],
                [hopt_wrapper.qloguniform_int('n_hidden_embedding_2_' + str(i), log(1500), log(3000), 10) for i in range(2)],
                [hopt_wrapper.qloguniform_int('n_hidden_embedding_3_' + str(i), log(1500), log(3000), 10) for i in range(3)],
            ]),
            'n_hidden_NADE': hp.choice('n_hidden_NADE', [
                [hopt_wrapper.qloguniform_int('n_hidden_NADE_1_' + str(i), log(1500), log(3000), 10) for i in range(1)],
                [hopt_wrapper.qloguniform_int('n_hidden_NADE_2_' + str(i), log(1500), log(3000), 10) for i in range(2)],
                [hopt_wrapper.qloguniform_int('n_hidden_NADE_3_' + str(i), log(1500), log(3000), 10) for i in range(3)],
            ]),
            'num_ordering': quniform_int('num_ordering', 5, 10, 1)
        }
        space.update(super_space)
        return space
Example #28
 def get_hp_space():
     space = Model_lop.get_hp_space()
     return space
Example #29
    def __init__(self, model_param, dimensions):

        Model_lop.__init__(self, model_param, dimensions)

        return
Example #30
 def __init__(self, model_param, dimensions):
     Model_lop.__init__(self, model_param, dimensions)
     # Hidden layers architecture
     self.n_hidden = model_param['n_hidden']
     self.Gibbs_steps = model_param["Gibbs_steps"]
     return