# Validate that every mandatory command-line argument was supplied
# before doing any real work; bail out with an error otherwise.
for arg in required_arguments:
    if arg not in arguments:
        print("Error: the argument %s has to be specified" % arg)
        exit(1)

# mandatory arguments
train_data_spec = arguments['train_data']
valid_data_spec = arguments['valid_data']
extra_nnet_spec = arguments['extra_nnet_spec']
conv_nnet_spec = arguments['conv_nnet_spec']
nnet_spec = arguments['nnet_spec']
wdir = arguments['wdir']

# parse network configuration from arguments, and initialize data reading
cfg = NetworkConfig()
cfg.model_type = 'CNNV'
# NOTE(review): a literal '10:' is prepended to nnet_spec here — presumably a
# fixed leading dimension expected by parse_config_cnn; confirm against
# NetworkConfig.parse_config_cnn before changing.
cfg.parse_config_cnn(arguments, '10:' + nnet_spec, conv_nnet_spec)
cfg.parse_config_extra(arguments, extra_nnet_spec)
cfg.init_data_reading(train_data_spec, valid_data_spec)

# parse pre-training options:
# pre-training file and layer number (how many layers are set to the
# pre-training parameters); both must be given for pre-training to apply
ptr_layer_number = 0
ptr_file = ''
if 'ptr_file' in arguments and 'ptr_layer_number' in arguments:
    ptr_file = arguments['ptr_file']
    ptr_layer_number = int(arguments['ptr_layer_number'])

# check working dir to see whether it's resuming training: the presence of
# both temp files means a previous run was interrupted mid-training
resume_training = False
if os.path.exists(wdir + '/nnet.tmp') and os.path.exists(wdir + '/training_state.tmp'):
    resume_training = True
    cfg.lrate = _file2lrate(wdir + '/training_state.tmp')
    log('> ... found nnet.tmp and training_state.tmp, now resume training from epoch ' + str(cfg.lrate.epoch))