Example No. 1
from caffe2.proto import caffe2_pb2
from caffe2.python import utils
from caffe2.python.core import BlobReference


def ReallyCreate(inputs, outputs, name='', device_option=None,
                 args=None, **kwargs):
  # Note: `operator_type` is expected to be bound by an enclosing scope,
  # e.g. a CreateOperator(operator_type) factory that returns this helper.
  operator = caffe2_pb2.OperatorDef()
  operator.type = operator_type
  operator.name = name
  if type(inputs) is str or type(inputs) is BlobReference:
    inputs = [inputs]
  elif type(inputs) is not list:
    raise ValueError("Unknown input format: %s." % str(inputs))
  if type(outputs) is str or type(outputs) is BlobReference:
    outputs = [outputs]
  elif type(outputs) is not list:
    raise ValueError("Unknown output format: %s of type %s."
                     % (str(outputs), type(outputs)))
  operator.inputs.extend([str(i) for i in inputs])
  operator.outputs.extend([str(o) for o in outputs])
  if device_option:
    operator.device_option.CopyFrom(device_option)
  # The random seed is defined in the device option, so we need to take
  # special care of it here.
  if 'random_seed' in kwargs:
    operator.device_option.random_seed = kwargs['random_seed']
    del kwargs['random_seed']
  # Add given arguments that do not need parsing
  if args:
    operator.args.extend(args)
  # Add all other arguments
  for key, value in kwargs.items():
    operator.args.add().CopyFrom(utils.MakeArgument(key, value))
  return operator
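
For context, a minimal usage sketch of the same pattern follows, assuming Caffe2 is installed. It builds by hand roughly what a call like ReallyCreate(['X'], ['Y'], name='relu1', order='NCHW') would produce when operator_type is bound to 'Relu' by the enclosing factory. The blob names are illustrative, and the sketch uses the singular input/output/arg field names of the released caffe2.proto (as in the other examples on this page), whereas the snippet above targets an older proto with plural names.

from caffe2.proto import caffe2_pb2
from caffe2.python import utils

# Roughly what ReallyCreate(['X'], ['Y'], name='relu1', order='NCHW')
# would assemble with operator_type == 'Relu'.
op = caffe2_pb2.OperatorDef()
op.type = 'Relu'
op.name = 'relu1'
op.input.extend(['X'])
op.output.extend(['Y'])
op.arg.add().CopyFrom(utils.MakeArgument('order', 'NCHW'))
print(op)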
Example No. 2
import time

from caffe2.python import utils, workspace


def Benchmark(model_gen, arg):
    model, input_size = model_gen(arg.order)

    # In order to be able to run everything without feeding more stuff, let's
    # add the data and label blobs to the parameter initialization net as well.
    if arg.order == "NCHW":
        input_shape = [arg.batch_size, 3, input_size, input_size]
    else:
        input_shape = [arg.batch_size, input_size, input_size, 3]
    model.param_init_net.GaussianFill([],
                                      "data",
                                      shape=input_shape,
                                      mean=0.0,
                                      std=1.0)
    model.param_init_net.UniformIntFill([],
                                        "label",
                                        shape=[
                                            arg.batch_size,
                                        ],
                                        min=0,
                                        max=999)

    # Note: even when we are running things on CPU, adding a few engine-related
    # arguments will not hurt, since the CPU operator registry will simply
    # ignore these options and take the default path.
    for op in model.net.Proto().op:
        if op.type == 'Conv' or op.type == 'ConvFp16':
            op.engine = 'CUDNN'
            #op.arg.add().CopyFrom(utils.MakeArgument('ws_nbytes_limit', arg.cudnn_limit))
            op.arg.add().CopyFrom(utils.MakeArgument('exhaustive_search', 1))
            op.arg.add().CopyFrom(
                utils.MakeArgument('shared_ws_name', 'cudnn_workspace'))
        elif op.type in [
                'MaxPool', 'MaxPoolFp16', 'AveragePool', 'AveragePoolFp16',
                'Relu', 'ReluFp16', 'Softmax', 'SoftmaxFp16'
        ]:
            op.engine = 'CUDNN'
    if arg.forward_only:
        print(arg.model, ': running forward only.')
    else:
        print(arg.model, ': running forward-backward.')
        model.AddGradientOperators()
        if arg.order == 'NHWC':
            print(
                '==WARNING==\n'
                'NHWC order with CuDNN may not be supported yet, so I might\n'
                'exit suddenly.')

    if not arg.cpu:
        model.param_init_net.RunAllOnGPU()
        model.net.RunAllOnGPU()

    workspace.RunNetOnce(model.param_init_net)
    workspace.CreateNet(model.net)
    for i in range(arg.warmup_iterations):
        workspace.RunNet(model.net.Proto().name)

    start = time.time()
    for i in range(arg.iterations):
        workspace.RunNet(model.net.Proto().name)
    print('Spent: ', (time.time() - start) / arg.iterations)
    if arg.layer_wise_benchmark:
        print('Layer-wise benchmark.')
        workspace.BenchmarkNet(model.net.Proto().name, 1, arg.iterations, True)
    # Writes out the pbtxt for benchmarks on e.g. Android
    with open("{0}_init_batch_{1}.pbtxt".format(arg.model, arg.batch_size),
              "w") as fid:
        fid.write(str(model.param_init_net.Proto()))
    with open("{0}.pbtxt".format(arg.model, arg.batch_size), "w") as fid:
        fid.write(str(model.net.Proto()))
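
A sketch of how Benchmark might be driven from the command line is shown below. The flag names mirror exactly the attributes the function reads above; TinyModel is a hypothetical stand-in for a real model generator (the function expects the signature order -> (model, input_size)), and with such a bare model you would pass --forward_only, since there is no loss to differentiate.

import argparse

from caffe2.python import model_helper, workspace


def TinyModel(order):
    # Hypothetical stand-in generator: a bare ModelHelper plus a nominal
    # input size, just enough to satisfy the (model, input_size) contract.
    return model_helper.ModelHelper(name="tiny"), 224


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Caffe2 convnet benchmark sketch")
    parser.add_argument("--model", default="TinyModel")
    parser.add_argument("--batch_size", type=int, default=32)
    parser.add_argument("--order", default="NCHW")
    parser.add_argument("--cpu", action="store_true")
    parser.add_argument("--forward_only", action="store_true")
    parser.add_argument("--layer_wise_benchmark", action="store_true")
    parser.add_argument("--warmup_iterations", type=int, default=10)
    parser.add_argument("--iterations", type=int, default=100)
    args = parser.parse_args()

    workspace.GlobalInit(["caffe2", "--caffe2_log_level=0"])
    Benchmark(TinyModel, args)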
Example No. 3
from caffe2.python import utils


def AddArgument(op, key, value):
    """Makes an argument based on the value type."""
    op.arg.extend([utils.MakeArgument(key, value)])
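
A short usage sketch follows, assuming the standard caffe2_pb2.OperatorDef proto; the operator type and the argument names and values are purely illustrative.

from caffe2.proto import caffe2_pb2
from caffe2.python import utils

op = caffe2_pb2.OperatorDef()
op.type = "Conv"
# utils.MakeArgument picks the Argument field (i, f, s, ints, ...) based on
# the Python type of the value.
AddArgument(op, "kernel", 3)       # int          -> Argument.i
AddArgument(op, "order", "NCHW")   # str          -> Argument.s
AddArgument(op, "pads", [1, 1])    # list of ints -> Argument.ints
print(op)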