# NOTE: load_data, get_config, DeepWeatherModel, and RMSProp are
# project-local imports, elided in this excerpt.
def test():
    train_iterator, valid_iterator, test_iterator = load_data()
    test_config = get_config('test')
    print('configuration:', test_config)

    # Build the model.
    param = {
        'errors': None,
        'name': "Moving-MNIST-Model-Convolutional-test-unconditional",
        'problem_type': "regression"
    }
    model = DeepWeatherModel(param, test_config)

    # Build the optimizer.
    param = {
        'id': '1',
        'learning_rate': test_config['learning_rate'],
        'momentum': 0.9,
        'decay_rate': 0.9,
        'clip_threshold': None,
        'max_epoch': test_config['max_epoch'],
        'start_epoch': 0,
        'max_epochs_no_best': 200,
        'decay_step': 200,
        'autosave_mode': ['interval', 'best'],
        'save_path': test_config['save_path'],
        'save_interval': 30
    }
    optimizer = RMSProp(model, train_iterator, valid_iterator, test_iterator, param)

    # Evaluate the model with the configured patch size.
    optimizer.observe(test_config['patch_size'])
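# test() above reads 'learning_rate', 'max_epoch', 'save_path', and
# 'patch_size' from the config dict. As a rough sketch of the minimum shape
# get_config must return for this code path (the stub name and all values
# here are placeholders, not the project's real defaults):
def get_config_stub(mode='test'):
    return {
        'learning_rate': 1e-3,          # placeholder value
        'max_epoch': 200,               # placeholder value
        'save_path': './save/' + mode,  # placeholder path
        'patch_size': 4,                # placeholder spatial patch size
    }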
# This excerpt starts mid-script: seq_length, save_path, and the three data
# iterators are defined in the elided code above it.
param = {
    'errors': None,
    'last_n': seq_length,
    'name': "UCF101-VideoModel-Flow-LSTM-RMS",
    'problem_type': "classification"
}
model = VideoModel(param)
model.print_stat()

############################# optimizer
param = {
    'id': '1',
    'learning_rate': 0.001,
    'decay_rate': 0.9,
    'clip_threshold': None,
    'verbose': False,
    'max_epoch': 200,
    'start_epoch': 0,
    'valid_epoch': 20,
    'max_epochs_no_best': 200,
    'display_freq': 150,
    'valid_freq': None,
    'save_freq': None,
    'autosave_mode': ['interval', 'best'],
    'save_path': save_path,
    'save_interval': 20
}
optimizer = RMSProp(model, train_iterator, valid_iterator, test_iterator, param)
optimizer.train()
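# For reference, a minimal NumPy sketch of the RMSProp update rule that an
# optimizer like the one above presumably implements (decay_rate matches the
# 0.9 used in these configs; the epsilon and this project's exact handling of
# momentum and clip_threshold are assumptions, not its actual code):
import numpy as np

def rmsprop_step(w, grad, cache, learning_rate=0.001, decay_rate=0.9, eps=1e-8):
    """One RMSProp update: scale the step by a running RMS of past gradients."""
    cache = decay_rate * cache + (1.0 - decay_rate) * grad ** 2
    w = w - learning_rate * grad / (np.sqrt(cache) + eps)
    return w, cache

# Usage: keep one `cache` array per parameter, carried across iterations:
#   w, cache = rmsprop_step(w, grad, cache)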
import os
import random
from multiprocessing import Pool

import numpy as np
import sparnn.utils
# FactorWeatherModel, RMSProp, get_config, load_data1, gen_train, gen_valid,
# gen_test, and my_log are project-local imports, elided in this excerpt.


def train(src='radar'):
    # Seed Python/NumPy and obtain sparnn's NumPy/Theano random generators.
    random.seed(1000)
    np.random.seed(1000)
    rng = sparnn.utils.quick_npy_rng()
    theano_rng = sparnn.utils.quick_theano_rng()

    config = get_config('train', src=src)
    valid_config = get_config('valid', src=src)
    test_config = get_config('test', src=src)

    # Init the model; the minibatch size is replaced with a tensor value.
    param = {
        'errors': None,
        'name': "FactorWeather-Convolutional-cloud",
        'problem_type': "regression"
    }
    model = FactorWeatherModel(param, config)

    if not os.path.exists(config['model_path']):
        os.makedirs(config['model_path'])

    my_log.info(' Start load data ...')
    # Load the five training shards plus the validation and test sets in parallel.
    pool = Pool(processes=7)
    train_p0 = pool.apply_async(func=gen_train, args=(config, 'train', 5, 0))
    train_p1 = pool.apply_async(func=gen_train, args=(config, 'train', 5, 1))
    train_p2 = pool.apply_async(func=gen_train, args=(config, 'train', 5, 2))
    train_p3 = pool.apply_async(func=gen_train, args=(config, 'train', 5, 3))
    train_p4 = pool.apply_async(func=gen_train, args=(config, 'train', 5, 4))
    valid_p = pool.apply_async(func=gen_valid, args=(valid_config, 'train'))
    test_p = pool.apply_async(func=gen_test, args=(test_config, 'train'))
    pool.close()
    pool.join()

    train_iterator0 = train_p0.get()
    train_iterator1 = train_p1.get()
    train_iterator2 = train_p2.get()
    train_iterator3 = train_p3.get()
    train_iterator4 = train_p4.get()
    # Merge the five shards into a single training iterator.
    train_iterator = load_data1(config, train_iterator0, train_iterator1,
                                train_iterator2, train_iterator3,
                                train_iterator4, 'train')
    valid_iterator = valid_p.get()
    test_iterator = test_p.get()
    my_log.info(' Load all data done!')
    my_log.info(train_iterator0.data['input_raw_data'].shape)
    my_log.info(train_iterator1.data['input_raw_data'].shape)
    my_log.info(train_iterator2.data['input_raw_data'].shape)
    my_log.info(train_iterator3.data['input_raw_data'].shape)
    my_log.info(train_iterator4.data['input_raw_data'].shape)
    my_log.info(train_iterator.data['input_raw_data'].shape)
    my_log.info(valid_iterator.data['input_raw_data'].shape)
    my_log.info(test_iterator.data['input_raw_data'].shape)

    # Build the optimizer.
    param = {
        'id': '1',
        'learning_rate': config['learning_rate'],
        'momentum': 0.9,
        'decay_rate': 0.9,
        'clip_threshold': None,
        'max_epoch': config['max_epoch'],
        'start_epoch': 0,
        'max_epochs_no_best': 200,
        'decay_step': 200,
        'autosave_mode': ['interval', 'best', 'proceed'],
        'save_path': config['model_path'],
        'save_interval': 30,
        'model_name': 'model-proceed-' + config['name'] + '.pkl'
    }
    try:
        print('begin optimizer...')
        optimizer = RMSProp(model, train_iterator, valid_iterator, test_iterator, param)
        print('optimizer object finished, begin train')
        optimizer.train(config, config['model_path'])
    except Exception as e:
        my_log.info('error1: ' + str(e))
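# Hypothetical entry point (not part of the original excerpt), assuming this
# script is run directly; train() defaults to the 'radar' source:
if __name__ == '__main__':
    train(src='radar')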