def event_handler(event):
    """Paddle v2 training callback: log batch progress and, at the end of
    each pass, checkpoint parameters and run an evaluation pass.

    Fix: the original used Python-2-only ``print`` statements; they are now
    parenthesized single-argument calls, valid on both Python 2 and 3 and
    consistent with the other handlers in this file.

    Relies on module-level names being defined before training starts:
    `paddle`, `sys`, `gzip`, `parameters`, `trainer`, `data_provider`,
    `feeding`.
    """
    if isinstance(event, paddle.event.EndIteration):
        # Full status line every 100 batches; a dot otherwise.
        if event.batch_id % 100 == 0:
            print("\nPass %d, Batch %d, Cost %f, %s" % (
                event.pass_id, event.batch_id, event.cost, event.metrics))
        else:
            sys.stdout.write('.')
            sys.stdout.flush()
    if isinstance(event, paddle.event.EndPass):
        # save parameters
        with gzip.open('params_pass_%d.tar.gz' % event.pass_id, 'w') as f:
            parameters.to_tar(f)
        # NOTE(review): evaluation reads 'data/train' — looks like it should
        # be a held-out test set; confirm against the data layout.
        reader_test = data_provider.data_reader('data/train', 10000)
        result = trainer.test(
            reader=paddle.batch(reader=reader_test, batch_size=16),
            feeding=feeding)
        print("\nTest with Pass %d, %s" % (event.pass_id, result.metrics))
def event_handler(event):
    """Training callback: report the batch cost periodically and, when a
    pass completes, checkpoint the parameters and evaluate on the test file.

    Uses module-level `paddle`, `sys`, `gzip`, `parameters`, `trainer`,
    and `data_provider`.
    """
    end_of_iteration = isinstance(event, paddle.event.EndIteration)
    end_of_pass = isinstance(event, paddle.event.EndPass)
    if end_of_iteration:
        # A dot per batch; a full status line every 50 batches.
        if event.batch_id % 50:
            sys.stdout.write('.')
            sys.stdout.flush()
        else:
            print("\n pass %d, Batch: %d cost: %f" % (event.pass_id, event.batch_id, event.cost))
    if end_of_pass:
        feeding = {'x': 0, 'y': 1}
        # Checkpoint the current parameters for this pass.
        archive_name = 'output/params_pass_%d.tar.gz' % event.pass_id
        with gzip.open(archive_name, 'w') as tar_file:
            parameters.to_tar(tar_file)
        # Evaluate on the held-out file and report the test cost.
        test_path = 'data/test.data'
        test_batches = paddle.batch(data_provider.data_reader(test_path), batch_size=16)
        test_result = trainer.test(reader=test_batches, feeding=feeding)
        print("\nTest with Pass %d, cost: %s" % (event.pass_id, test_result.cost))
def event_handler(event):
    """Paddle v2 training callback for the CIFAR run: log batch progress
    and, after each pass, checkpoint parameters and evaluate on the CIFAR
    test batch.

    Fix: the original used Python-2-only ``print`` statements; they are now
    parenthesized single-argument calls, valid on both Python 2 and 3 and
    consistent with the other handlers in this file.

    Relies on module-level `paddle`, `sys`, `gzip`, `parameters`,
    `trainer`, and `data_provider`.
    """
    if isinstance(event, paddle.event.EndIteration):
        # Full status line every 100 batches; a dot otherwise.
        if event.batch_id % 100 == 0:
            print("\n pass %d, Batch: %d cost: %f, %s" % (
                event.pass_id, event.batch_id, event.cost, event.metrics))
        else:
            sys.stdout.write('.')
            sys.stdout.flush()
    if isinstance(event, paddle.event.EndPass):
        # save parameters
        feeding = {'image': 0, 'label': 1}
        with gzip.open('output/params_pass_%d.tar.gz' % event.pass_id, 'w') as f:
            parameters.to_tar(f)
        # NOTE(review): hard-coded absolute path to a local CIFAR-10 test
        # batch — should be made configurable.
        filepath = '/Users/vic/Dev/DeepLearning/Paddle/VGG-CIFAR/Images/cifar-10-batches-py/test_batch'
        result = trainer.test(
            reader=paddle.batch(data_provider.data_reader(filepath, 0),
                                batch_size=128),
            feeding=feeding)
        print("\nTest with Pass %d, %s" % (event.pass_id, result.metrics))
def event_handler(event):
    """Paddle v2 training callback: log batch progress and, after each
    pass, save a single parameter archive and evaluate the model.

    Fix: the original used Python-2-only ``print`` statements; they are now
    parenthesized single-argument calls, valid on both Python 2 and 3 and
    consistent with the other handlers in this file.

    Relies on module-level `paddle`, `sys`, `gzip`, `parameters`,
    `trainer`, and `data_provider`.
    """
    if isinstance(event, paddle.event.EndIteration):
        # Full status line every 100 batches; a dot otherwise.
        if event.batch_id % 100 == 0:
            print("\nPass: %d Batch: %d [Cost: %f ][%s]\n" % (event.pass_id, event.batch_id, event.cost, event.metrics))
        else:
            sys.stdout.write('.')
            sys.stdout.flush()
    if isinstance(event, paddle.event.EndPass):
        # save parameters — note this overwrites the same archive each pass.
        with gzip.open('output/params.tar.gz', 'w') as f:
            parameters.to_tar(f)
        # test
        feeding = {'image': 0, 'label': 1}
        # NOTE(review): filepath is empty — the test data path was never
        # filled in; this will pass "" to data_reader. Confirm and set it.
        filepath = ""
        result = trainer.test(
            reader=paddle.batch(reader=data_provider.data_reader(filepath, 0),
                                batch_size=128),
            feeding=feeding)
        print("\nTest Result: [Cost: %f] [%s] " % (result.cost, result.metrics))
# NOTE(review): collapsed top-level training script. It begins mid-expression
# ("act=paddle.activation.Softmax())" is the tail of a layer definition whose
# head is not visible here), so the code is left byte-identical rather than
# reformatted. What the visible code does: builds a 10-class classification
# cost over `output_layer`, creates parameters, configures a Momentum
# optimizer (L2 regularization, 'discexp' LR schedule), and trains with an
# SGD trainer for 100 passes at batch size 16 from a hard-coded absolute
# CIFAR-10 path. The Python-2-only "print parameters.keys()" and the
# machine-specific path should be revisited — TODO confirm with the owner.
act=paddle.activation.Softmax()) label = paddle.layer.data(name='label', type=paddle.data_type.integer_value(10)) cost = paddle.layer.classification_cost(input=output_layer, label=label) parameters = paddle.parameters.create(cost) print parameters.keys() optimizer = paddle.optimizer.Momentum( momentum=0.9, regularization=paddle.optimizer.L2Regularization(rate=0.0002 * 128), learning_rate=0.1 / 128.0, learning_rate_decay_a=0.1, learning_rate_decay_b=50000 * 100, learning_rate_schedule='discexp') trainer = paddle.trainer.SGD(cost=cost, parameters=parameters, update_equation=optimizer) feeding = {'image': 0, 'label': 1} file_path = '/Users/vic/Dev/DeepLearning/Paddle/VGG-CIFAR/Images/cifar-10-batches-py/data_batch_1' reader = data_provider.data_reader(file_path, 0) trainer.train(num_passes=100, reader=paddle.batch(reader, batch_size=16), event_handler=event_handler, feeding=feeding)
# NOTE(review): collapsed top-level training script. It begins mid-call
# ("parameters=parameters, update_equation=momentum_optimizer)" is the tail
# of a trainer construction whose head is not visible here), so the code is
# left byte-identical rather than reformatted. What the visible code does:
# defines an event_handler that logs every 100 batches, checkpoints
# parameters per pass, and evaluates on 'data/train' (presumably meant to be
# a test split — verify); then trains for 50 passes at batch size 16 from
# 'data/train'. The Python-2-only print statements inside event_handler
# should be parenthesized for py3 — TODO confirm target interpreter.
parameters=parameters, update_equation=momentum_optimizer) feeding = {'image': 0, 'label': 1} def event_handler(event): if isinstance(event, paddle.event.EndIteration): if event.batch_id % 100 == 0: print "\nPass %d, Batch %d, Cost %f, %s" % ( event.pass_id, event.batch_id, event.cost, event.metrics) else: sys.stdout.write('.') sys.stdout.flush() if isinstance(event, paddle.event.EndPass): # save parameters with gzip.open('params_pass_%d.tar.gz' % event.pass_id, 'w') as f: parameters.to_tar(f) reader_test = data_provider.data_reader('data/train', 10000) result = trainer.test(reader=paddle.batch(reader=reader_test, batch_size=16), feeding=feeding) print "\nTest with Pass %d, %s" % (event.pass_id, result.metrics) reader = data_provider.data_reader('data/train', 50000) trainer.train(reader=paddle.batch(reader=reader, batch_size=16), num_passes=50, event_handler=event_handler, feeding=feeding)
def train(x_, model_path, is_predict=False):
    """Train the sequence regression model, or run inference with a saved one.

    Fix: removed the redundant ``feeding = {'x': 0, 'y': 1}`` reassignment
    inside the event handler — it shadowed the identical outer ``feeding``
    with the same value, so behavior is unchanged. Original Chinese comments
    translated to English.

    Args:
        x_: input passed to paddle.infer when is_predict is True
            (unused during training).
        model_path: path of a gzipped parameter tar, read only when
            is_predict is True.
        is_predict: when True, skip training and return the inference result.

    Returns:
        The paddle.infer result when is_predict is True; otherwise None
        (training runs for its side effects: checkpoints under output/).
    """
    paddle.init(use_gpu=False, trainer_count=1)
    # Sequence window length fed to the network.
    TIME_STEP = 10
    x = paddle.layer.data(
        name='x', type=paddle.data_type.dense_vector_sequence(TIME_STEP))
    # Model output layer, shared by the training and inference paths.
    output = network(x)
    if not is_predict:
        label = paddle.layer.data(name='y',
                                  type=paddle.data_type.dense_vector(dim=1))
        # Mean-squared-error regression loss.
        loss = paddle.layer.mse_cost(input=output, label=label)
        parameters = paddle.parameters.create(loss)
        # Adam optimizer with L2 weight decay.
        optimizer = paddle.optimizer.Adam(
            learning_rate=1e-3,
            regularization=paddle.optimizer.L2Regularization(rate=8e-4))
        trainer = paddle.trainer.SGD(cost=loss, parameters=parameters,
                                     update_equation=optimizer)
        feeding = {'x': 0, 'y': 1}

        def event_handler(event):
            # Progress logging plus per-pass checkpoint and evaluation.
            if isinstance(event, paddle.event.EndIteration):
                if event.batch_id % 50 == 0:
                    print("\n pass %d, Batch: %d cost: %f" % (event.pass_id, event.batch_id, event.cost))
                else:
                    sys.stdout.write('.')
                    sys.stdout.flush()
            if isinstance(event, paddle.event.EndPass):
                # save parameters
                with gzip.open('output/params_pass_%d.tar.gz' % event.pass_id,
                               'w') as f:
                    parameters.to_tar(f)
                # Evaluate on the test data after each pass.
                filepath = 'data/test.data'
                test_reader = data_provider.data_reader(filepath)
                result = trainer.test(
                    reader=paddle.batch(test_reader, batch_size=16),
                    feeding=feeding)
                print("\nTest with Pass %d, cost: %s" % (event.pass_id, result.cost))

        train_file_path = 'data/train.data'
        reader = data_provider.data_reader(train_file_path)
        trainer.train(paddle.batch(reader=reader, batch_size=128),
                      num_passes=200,
                      event_handler=event_handler,
                      feeding=feeding)
    else:
        # Prediction path: load saved parameters from the tar archive.
        with gzip.open(model_path, 'r') as openFile:
            parameters = paddle.parameters.Parameters.from_tar(openFile)
        # Run inference with the restored parameters.
        result = paddle.infer(input=x_,
                              parameters=parameters,
                              output_layer=output,
                              feeding={'x': 0})
        return result