def runDNN(arg):
    """Build a DNN (optionally with dropout) from a model configuration
    and load its pretrained weights.

    Parameters
    ----------
    arg : dict or str
        Either an already-loaded model-configuration dict, or a value
        accepted by ``load_model(arg, 'DNN')`` (presumably a config file
        path -- confirm against callers).

    Side effects
    ------------
    Creates the working directory ``model_config['wdir']`` and calls
    ``sys.exit(2)`` if the pretrained-network keys are missing from the
    configuration.
    """
    # Accept either a ready config dict or anything load_model can resolve.
    if isinstance(arg, dict):
        model_config = arg
    else:
        model_config = load_model(arg, 'DNN')

    dnn_config = load_dnn_spec(model_config['nnet_spec'])
    data_spec = load_data_spec(model_config['data_spec'],
                               model_config['batch_size'])

    # Seeded RNGs so a given random_seed reproduces the same run.
    numpy_rng = numpy.random.RandomState(model_config['random_seed'])
    theano_rng = RandomStreams(numpy_rng.randint(2 ** 30))

    activationFn = parse_activation(dnn_config['activation'])

    # Working directory for model artifacts.
    createDir(model_config['wdir'])

    batch_size = model_config['batch_size']
    n_ins = model_config['n_ins']
    n_outs = model_config['n_outs']

    max_col_norm = dnn_config['max_col_norm']
    l1_reg = dnn_config['l1_reg']
    l2_reg = dnn_config['l2_reg']
    adv_activation = dnn_config['adv_activation']
    hidden_layers_sizes = dnn_config['hidden_layers']
    do_dropout = dnn_config['do_dropout']

    logger.info('Building the model')
    if do_dropout:
        # Dropout variant needs the two extra dropout hyperparameters.
        dropout_factor = dnn_config['dropout_factor']
        input_dropout_factor = dnn_config['input_dropout_factor']
        dnn = DNN_Dropout(numpy_rng=numpy_rng, theano_rng=theano_rng,
                          n_ins=n_ins,
                          hidden_layers_sizes=hidden_layers_sizes,
                          n_outs=n_outs, activation=activationFn,
                          dropout_factor=dropout_factor,
                          input_dropout_factor=input_dropout_factor,
                          adv_activation=adv_activation,
                          max_col_norm=max_col_norm,
                          l1_reg=l1_reg, l2_reg=l2_reg)
    else:
        dnn = DNN(numpy_rng=numpy_rng, theano_rng=theano_rng,
                  n_ins=n_ins,
                  hidden_layers_sizes=hidden_layers_sizes,
                  n_outs=n_outs, activation=activationFn,
                  adv_activation=adv_activation,
                  max_col_norm=max_col_norm,
                  l1_reg=l1_reg, l2_reg=l2_reg)

    logger.info("Loading Pretrained network weights")
    try:
        # Both keys below are required; a missing one aborts the run.
        ptr_file = model_config['input_file']
        pretrained_layers = dnn_config['pretrained_layers']
        dnn.load(filename=ptr_file, max_layer_num=pretrained_layers,
                 withfinal=True)
    except KeyError as e:  # `as` form is valid on Python 2.6+ and 3
        logger.critical("KeyMissing:" + str(e))
        logger.error("Pretrained network Missing in configFile")
        sys.exit(2)