Example #1

def finalize(self):
    # Tear down MPI only if this instance was actually running distributed.
    if self._distributed:
        Communicator.finalize()
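
A note on the pattern above: in CNTK's distributed API, Communicator.finalize() shuts down MPI and should run exactly once per process before exit. Below is a minimal sketch of the guard these examples rely on, assuming the CNTK v2 cntk.train.distributed module; the run_with_mpi_cleanup name and the train_fn callable are illustrative, not part of the original code.

    from cntk.train.distributed import Communicator

    def run_with_mpi_cleanup(train_fn):
        # Run a training callable and guarantee MPI shutdown even on error;
        # a rank that exits without finalize() can leave the MPI job hanging.
        try:
            train_fn()
        finally:
            Communicator.finalize()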
Example #2

    if args['epoch_size'] is not None:
        epoch_size = args['epoch_size']

    mean_data = os.path.join(data_path, 'CIFAR-10_mean.xml')
    train_data = os.path.join(data_path, 'train_map.txt')
    test_data = os.path.join(data_path, 'test_map.txt')

    num_quantization_bits = args['quantized_bits']
    epochs = args['epochs']
    warm_up = args['distributed_after']
    network_name = args['network']
    scale_up = bool(args['scale_up'])

    # Log the distributed training configuration before starting
    print("Start training: quantize_bit = {}, epochs = {}, distributed_after = {}".format(num_quantization_bits, epochs, warm_up))

    try:
        resnet_cifar10(train_data, test_data, mean_data,
                       network_name,
                       epoch_size,
                       num_quantization_bits,
                       block_size=args['block_samples'],
                       warm_up=args['distributed_after'],
                       max_epochs=epochs,
                       scale_up=scale_up,
                       log_to_file=args['logdir'],
                       profiling=args['profile'])
    finally:
        # MPI finalize must be called before the process exits
        Communicator.finalize()
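
Both examples execute on every MPI rank; scripts like this are normally launched with mpiexec (for example, mpiexec -n 4 python <script>.py, where the script name is whatever this file is saved as). Because each rank runs the whole block, per-rank work such as logging is usually gated on the communicator. A small sketch, assuming the static helpers on cntk.train.distributed.Communicator:

    from cntk.train.distributed import Communicator

    # Every rank trains, but only rank 0 prints the summary; finalize() is
    # still required on every rank before the process exits.
    if Communicator.rank() == 0:
        print('Finished training on {} worker(s)'.format(Communicator.num_workers()))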