示例#1
0
def runDNN(arg):
	"""Build a DNN (optionally with dropout) from a model configuration
	and load its pretrained weights.

	Parameters
	----------
	arg : dict or str
		Either an already-loaded model-configuration dict, or a value
		accepted by ``load_model(arg, 'DNN')`` (presumably a config file
		path -- confirm against ``load_model``).

	Exits the process with status 2 if the configuration lacks the keys
	needed to locate the pretrained network.
	"""
	if isinstance(arg, dict):
		model_config = arg
	else:
		model_config = load_model(arg, 'DNN')

	dnn_config = load_dnn_spec(model_config['nnet_spec'])
	data_spec = load_data_spec(model_config['data_spec'], model_config['batch_size'])

	# Seeded RNGs so a run is reproducible for a given 'random_seed'.
	numpy_rng = numpy.random.RandomState(model_config['random_seed'])
	theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))

	activationFn = parse_activation(dnn_config['activation'])

	# Make sure the working directory exists before anything is written.
	createDir(model_config['wdir'])

	batch_size = model_config['batch_size']
	n_ins = model_config['n_ins']
	n_outs = model_config['n_outs']

	max_col_norm = dnn_config['max_col_norm']
	l1_reg = dnn_config['l1_reg']
	l2_reg = dnn_config['l2_reg']
	adv_activation = dnn_config['adv_activation']
	hidden_layers_sizes = dnn_config['hidden_layers']
	do_dropout = dnn_config['do_dropout']
	logger.info('Building the model')

	if do_dropout:
		dropout_factor = dnn_config['dropout_factor']
		input_dropout_factor = dnn_config['input_dropout_factor']

		dnn = DNN_Dropout(numpy_rng=numpy_rng, theano_rng=theano_rng, n_ins=n_ins,
			  hidden_layers_sizes=hidden_layers_sizes, n_outs=n_outs,
			  activation=activationFn, dropout_factor=dropout_factor,
			  input_dropout_factor=input_dropout_factor, adv_activation=adv_activation,
			  max_col_norm=max_col_norm, l1_reg=l1_reg, l2_reg=l2_reg)
	else:
		dnn = DNN(numpy_rng=numpy_rng, theano_rng=theano_rng, n_ins=n_ins,
			  hidden_layers_sizes=hidden_layers_sizes, n_outs=n_outs,
			  activation=activationFn, adv_activation=adv_activation,
			  max_col_norm=max_col_norm, l1_reg=l1_reg, l2_reg=l2_reg)

	logger.info("Loading Pretrained network weights")
	try:
		# Pretrained weights are mandatory: a missing key aborts the run.
		ptr_file = model_config['input_file']
		pretrained_layers = dnn_config['pretrained_layers']
		dnn.load(filename=ptr_file, max_layer_num=pretrained_layers, withfinal=True)
	except KeyError as e:
		# 'except ... as' (PEP 3110) works on Python 2.6+ and 3.x, unlike
		# the original 'except KeyError, e' which is Python-2-only.
		logger.critical("KeyMissing:" + str(e))
		logger.error("Pretrained network Missing in configFile")
		sys.exit(2)
示例#2
0
    def __init__(self, model_config):
        """Build the DNN (with or without dropout) described by *model_config*
        and load its pretrained weights.

        Parameters
        ----------
        model_config : dict
            Model configuration; must provide "nnet_spec", "n_ins", "n_outs"
            and "input_file" entries.
        """
        super(DNNPredictor, self).__init__(model_config, "DNN")
        mlp_config = load_dnn_spec(self.model_config["nnet_spec"])
        activationFn = parse_activation(mlp_config["activation"])
        n_ins = model_config["n_ins"]
        n_outs = model_config["n_outs"]
        max_col_norm = mlp_config["max_col_norm"]
        l1_reg = mlp_config["l1_reg"]
        l2_reg = mlp_config["l2_reg"]
        adv_activation = mlp_config["adv_activation"]
        hidden_layers_sizes = mlp_config["hidden_layers"]
        do_dropout = mlp_config["do_dropout"]

        if do_dropout:
            # BUG FIX: these two values were read from an undefined name
            # `dnn_config` (guaranteed NameError whenever do_dropout is true);
            # the network-spec dict in this method is `mlp_config`.
            dropout_factor = mlp_config["dropout_factor"]
            input_dropout_factor = mlp_config["input_dropout_factor"]
            self.model = DNN_Dropout(
                numpy_rng=self.numpy_rng,
                theano_rng=self.theano_rng,
                n_ins=n_ins,
                hidden_layers_sizes=hidden_layers_sizes,
                n_outs=n_outs,
                activation=activationFn,
                dropout_factor=dropout_factor,
                input_dropout_factor=input_dropout_factor,
                adv_activation=adv_activation,
                max_col_norm=max_col_norm,
                l1_reg=l1_reg,
                l2_reg=l2_reg,
            )
        else:
            self.model = DNN(
                numpy_rng=self.numpy_rng,
                theano_rng=self.theano_rng,
                n_ins=n_ins,
                hidden_layers_sizes=hidden_layers_sizes,
                n_outs=n_outs,
                activation=activationFn,
                adv_activation=adv_activation,
                max_col_norm=max_col_norm,
                l1_reg=l1_reg,
                l2_reg=l2_reg,
            )

        # Populate the freshly-built model with pretrained weights.
        self.__load_model__(self.model_config["input_file"], mlp_config["pretrained_layers"])
示例#3
0
    def __init__(self, model_config):
        """Build the DNN (with or without dropout) described by *model_config*
        and load its pretrained weights.

        Parameters
        ----------
        model_config : dict
            Model configuration; must provide 'nnet_spec', 'n_ins', 'n_outs'
            and 'input_file' entries.
        """
        super(DNNPredictor, self).__init__(model_config, 'DNN')
        mlp_config = load_dnn_spec(self.model_config['nnet_spec'])
        activationFn = parse_activation(mlp_config['activation'])
        n_ins = model_config['n_ins']
        n_outs = model_config['n_outs']
        max_col_norm = mlp_config['max_col_norm']
        l1_reg = mlp_config['l1_reg']
        l2_reg = mlp_config['l2_reg']
        adv_activation = mlp_config['adv_activation']
        hidden_layers_sizes = mlp_config['hidden_layers']
        do_dropout = mlp_config['do_dropout']

        if do_dropout:
            # BUG FIX: these two values were read from an undefined name
            # `dnn_config` (guaranteed NameError whenever do_dropout is true);
            # the network-spec dict in this method is `mlp_config`.
            dropout_factor = mlp_config['dropout_factor']
            input_dropout_factor = mlp_config['input_dropout_factor']
            self.model = DNN_Dropout(numpy_rng=self.numpy_rng,
                                     theano_rng=self.theano_rng,
                                     n_ins=n_ins,
                                     hidden_layers_sizes=hidden_layers_sizes,
                                     n_outs=n_outs,
                                     activation=activationFn,
                                     dropout_factor=dropout_factor,
                                     input_dropout_factor=input_dropout_factor,
                                     adv_activation=adv_activation,
                                     max_col_norm=max_col_norm,
                                     l1_reg=l1_reg,
                                     l2_reg=l2_reg)
        else:
            self.model = DNN(numpy_rng=self.numpy_rng,
                             theano_rng=self.theano_rng,
                             n_ins=n_ins,
                             hidden_layers_sizes=hidden_layers_sizes,
                             n_outs=n_outs,
                             activation=activationFn,
                             adv_activation=adv_activation,
                             max_col_norm=max_col_norm,
                             l1_reg=l1_reg,
                             l2_reg=l2_reg)

        # Populate the freshly-built model with pretrained weights.
        self.__load_model__(self.model_config['input_file'],
                            mlp_config['pretrained_layers'])