Example #1
def BinaryData_Disparity_Single(net, **kwargs):
    '''
    @brief Setup network inputs for disparity
    @returns A two-element list of single-blob network INPUT and LABEL
    '''

    samples = []

    if kwargs['rendertype'] == 'CLEAN' or kwargs['rendertype'] == 'BOTH':
        samples += (Sample((Entry('cleanImageL', 0),
                            Entry('cleanImageR', 0),
                            Entry('dispL', 0))), )

    if kwargs['rendertype'] == 'FINAL' or kwargs['rendertype'] == 'BOTH':
        samples += (Sample((Entry('finalImageL', 0),
                            Entry('finalImageR', 0),
                            Entry('dispL', 0))), )

    nout = NumberOfEntries(samples)
    if 'output_index' in kwargs:
        nout += 1
        del kwargs['output_index']

    return Layers.BinaryData(
        net,
        nout=nout,
        include=(Proto.NetStateRule(phase=kwargs['phase']), ),
        data_param=DataParams(samples, **kwargs))
Example #2
def BinaryData_SceneFlow_Single_Reduced(net, **kwargs):
    '''
    @brief Setup network inputs for scene flow
    @returns A list of single-blob network INPUT and LABEL
    '''
    samples = []

    if kwargs['rendertype'] == 'CLEAN' or kwargs['rendertype'] == 'BOTH':
        samples += (Sample((Entry('cleanImageL', 0),
                            Entry('cleanImageR', 0),
                            Entry('cleanImageL', +1),
                            Entry('cleanImageR', +1),
                            Entry('forwardFlowL', 0),
                            Entry('dispL', 0),
                            Entry('forwardDispChangeL', 0))), )

    if kwargs['rendertype'] == 'FINAL' or kwargs['rendertype'] == 'BOTH':
        samples += (Sample((Entry('finalImageL', 0),
                            Entry('finalImageR', 0),
                            Entry('finalImageL', +1),
                            Entry('finalImageR', +1),
                            Entry('forwardFlowL', 0),
                            Entry('dispL', 0),
                            Entry('forwardDispChangeL', 0))), )

    nout = NumberOfEntries(samples)
    if 'output_index' in kwargs:
        nout += 1
        del kwargs['output_index']

    blobs = Layers.BinaryData(
        net,
        nout=nout,
        include=(Proto.NetStateRule(phase=kwargs['phase']), ),
        data_param=DataParams(samples, **kwargs))

    if kwargs['return_struct']:
        return _dataStructFromSceneFlow2(blobs)
    else:
        return blobs
Example #3
def dropout(pytorch_layer):
    # Convert a PyTorch dropout module into a Caffe Dropout layer.
    layer = pb2.LayerParameter()
    layer.type = "Dropout"
    layer.dropout_param.dropout_ratio = float(pytorch_layer.p)
    # Exclude the layer from the TEST phase so dropout only runs during training.
    train_only = pb2.NetStateRule()
    train_only.phase = pb2.TEST
    layer.exclude.extend([train_only])
    return layer
Example #4
def _get_include(phase):
    inc = caffe_pb2.NetStateRule()
    if phase == 'train':
        inc.phase = caffe_pb2.TRAIN
    elif phase == 'test':
        inc.phase = caffe_pb2.TEST
    else:
        raise ValueError("Unknown phase {}".format(phase))
    return inc
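A minimal usage sketch for the helper above, assuming `caffe_pb2` has been imported from `caffe.proto`; the layer name and type are illustrative only:

# Hedged sketch: attach the rule returned by _get_include to a layer's
# repeated `include` field so the layer is only instantiated in that phase.
data_layer = caffe_pb2.LayerParameter()
data_layer.name = 'data'   # illustrative name
data_layer.type = 'Data'
data_layer.include.extend([_get_include('train')])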
Example #5
def dropout(torch_layer):
    # Only run dropout during training: exclude the layer from the TEST phase.
    layer = pb2.LayerParameter()
    layer.type = "Dropout"
    layer.dropout_param.dropout_ratio = torch_layer["p"]
    # assert torch_layer["v2"], "Only handle nn.Dropout v2"
    train_only = pb2.NetStateRule()
    train_only.phase = pb2.TEST
    layer.exclude.extend([train_only])
    return layer
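A hedged usage sketch for the converter above (assuming `pb2` is `caffe_pb2`); the blob and layer names are placeholders and the wiring is not part of the original function:

# Illustrative wiring only; 'fc1' / 'drop1' are placeholder names.
converted = dropout({'p': 0.5})
converted.name = 'drop1'
converted.bottom.append('fc1')
converted.top.append('fc1')   # Caffe dropout is commonly applied in-place
net = pb2.NetParameter()
net.layer.extend([converted])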
Example #6
def add_accuracy_layer(net_msg, bottom):
    accuracy_layer = net_msg.layer.add()
    accuracy_layer.name = 'accuracy'
    accuracy_layer.type = 'Accuracy'
    accuracy_layer.bottom.append(bottom)
    accuracy_layer.bottom.append('label')
    accuracy_layer.top.append('accuracy')
    # Restrict the accuracy layer to the TEST phase.
    include_param = caffe_pb2.NetStateRule()
    include_param.phase = caffe_pb2.TEST
    accuracy_layer.include.extend([include_param])
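A hedged usage sketch: build a `NetParameter`, append the accuracy layer, and serialize it to prototxt with `text_format`; the bottom blob name and output path are placeholders:

# Illustrative only; 'fc8' and the output path are placeholders.
from caffe.proto import caffe_pb2
from google.protobuf import text_format

net_msg = caffe_pb2.NetParameter()
add_accuracy_layer(net_msg, 'fc8')
with open('train_val.prototxt', 'w') as f:
    f.write(text_format.MessageToString(net_msg))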
Example #7
def BinaryData_OpticalFlow(net, **kwargs):
    '''
    @brief Setup network inputs for optical flow
    @returns A list of single-blob network INPUT and LABEL
    '''
    samples = []

    if kwargs['rendertype'] == 'CLEAN' or kwargs['rendertype'] == 'BOTH':
        samples += (Sample((Entry('cleanImageL', 0),
                            Entry('cleanImageL', +1),
                            Entry('forwardFlowL', 0))),
                    Sample((Entry('cleanImageL', 0),
                            Entry('cleanImageL', -1),
                            Entry('backwardFlowL', 0))),
                    Sample((Entry('cleanImageR', 0),
                            Entry('cleanImageR', +1),
                            Entry('forwardFlowR', 0))),
                    Sample((Entry('cleanImageR', 0),
                            Entry('cleanImageR', -1),
                            Entry('backwardFlowR', 0))))

    if kwargs['rendertype'] == 'FINAL' or kwargs['rendertype'] == 'BOTH':
        samples += (Sample((Entry('finalImageL', 0),
                            Entry('finalImageL', +1),
                            Entry('forwardFlowL', 0))),
                    Sample((Entry('finalImageL', 0),
                            Entry('finalImageL', -1),
                            Entry('backwardFlowL', 0))),
                    Sample((Entry('finalImageR', 0),
                            Entry('finalImageR', +1),
                            Entry('forwardFlowR', 0))),
                    Sample((Entry('finalImageR', 0),
                            Entry('finalImageR', -1),
                            Entry('backwardFlowR', 0))))

    nout = NumberOfEntries(samples)
    if 'output_index' in kwargs:
        nout += 1
        del kwargs['output_index']

    return Layers.BinaryData(
        net,
        nout=nout,
        include=(Proto.NetStateRule(phase=kwargs['phase']), ),
        data_param=DataParams(samples, **kwargs))
Example #8
def inputConvert(layer, net, sol):
    # add layer to caffe net
    alayer = net.layer.add()

    # layer checkpoint setting
    alayer.name = layer["name"]  # name
    alayer.type = "Data"  # type
    alayer.top.extend(["data"])  # top # need to be considered????????????
    alayer.top.extend(["label"])  # top # need to be considered????????????

    # include{ phase }
    if "option" in layer["attributes"]:
        temp = caffe_pb2.NetStateRule()
        if layer["attributes"]["option"].lower() == "datasets":
            temp.phase = 0
        elif layer["attributes"]["option"].lower() == "test":
            temp.phase = 1
        alayer.include.extend([temp])

    # data_param{}
    #    source
    if "train_data" in layer["attributes"]:
        alayer.data_param.source = layer["attributes"]["train_data"]
    #    batch_size
    if "batch_size" in layer["attributes"]:
        alayer.data_param.batch_size = layer["attributes"]["batch_size"]

    # solver checkpoint setting
    # solver_mode
    if "mode" in layer["attributes"]:
        if layer["attributes"]["mode"].lower() == "cpu":
            sol.solver_mode = 0
        elif layer["attributes"]["mode"].lower() == "gpu":
            sol.solver_mode = 1
    else:
        sol.solver_mode = 1
    # max_iter
    if "iteration" in layer["attributes"]:
        sol.max_iter = layer["attributes"]["iteration"]

    # Default DB backend (0 = LEVELDB).
    alayer.data_param.backend = 0
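A hedged usage sketch for `inputConvert` with a hypothetical `layer` dict; only the keys actually read by the function above are supplied, and their values are placeholders:

# Hypothetical input dict; values are placeholders.
from caffe.proto import caffe_pb2

layer = {
    "name": "data",
    "attributes": {
        "option": "test",                   # mapped to the TEST phase above
        "train_data": "path/to/train_lmdb",
        "batch_size": 64,
        "mode": "gpu",
        "iteration": 10000,
    },
}
net = caffe_pb2.NetParameter()
sol = caffe_pb2.SolverParameter()
inputConvert(layer, net, sol)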
Example #9
def PhilData_OpticalFlow(net, **kwargs):
  '''
  @brief Setup PhilDataLayer for optical flow
  @returns A list of layer output blobs
  '''
  data_param = {'source'     : kwargs['source'],
                'backend'    : Params.Data.LMDB,
                'batch_size' : kwargs['batch_size'],
                'encoding'   : (Proto.DataParameter.UINT8,
                                Proto.DataParameter.UINT8,
                                Proto.DataParameter.UINT16FLOW,
                                Proto.DataParameter.BOOL1),
                'slice_point': (3, 6, 8),
                'verbose'    : kwargs['verbose'],
                'rand_permute'       : kwargs['rand_permute'],
                'rand_permute_seed'  : kwargs['rand_permute_seed']}

  if 'preselection_file' in kwargs:
    data_param['preselection_file'] = kwargs['preselection_file']
  if 'preselection_label' in kwargs:
    data_param['preselection_label'] = kwargs['preselection_label']

  ## Always returns (img_from, img_to, flow, occlusion)
  return Layers.PhilData(net, nout=4,
                         include=(Proto.NetStateRule(phase=kwargs['phase']),),
                         data_param=data_param)
Example #10
    def to_pbuf_message(
            self,  # pylint: disable=R0912, R0915
            layerindex,
            preceeding_layer,
            net_input):
        r"""
        Create a protobuf specification of this layer.

        It automatically wires together preceding and following layers,
        if ``tops`` or ``bottoms`` are not set. This does not work with
        multiple inputs or outputs.

        :param layerindex: int >= 0.
          The index of this layer. It is used to generate the layer name.

        :param preceeding_layer: :class:`barrista.design.LayerSpecification`.
          The preceeding layer to create the wiring with.

        :param net_input: string.
          The name of the network input (used for the first layer input).
        """
        assert layerindex >= 0
        assert self.type is not None
        if layerindex != 0:
            assert preceeding_layer is not None
        # Prepare the arguments.
        kwargs = dict()
        # Bottom.
        pbbottom = []
        omit_bottom = False
        if self.bottoms is None:
            if preceeding_layer is None:
                if net_input is not None and len(net_input) > 0:
                    pbbottom = [net_input[0]]
                else:
                    omit_bottom = True
            else:
                if preceeding_layer.tops is None:
                    if preceeding_layer.name is None:
                        pbbottom = ['_layer_{0}'.format(layerindex - 1)]
                    else:
                        pbbottom = [preceeding_layer.name]
                else:
                    pbbottom = [preceeding_layer.tops[0]]
        else:
            preplist = self.bottoms[:]
            mapidx = 0
            for btidx, btname in enumerate(preplist):
                if btname is None:
                    if preceeding_layer.tops is not None:
                        preplist[btidx] = preceeding_layer.tops[mapidx]
                    else:
                        preplist[btidx] = '_layer_{0}'.format(layerindex - 1)
                    mapidx += 1
            pbbottom = preplist
        if not omit_bottom:
            kwargs['bottom'] = pbbottom
        # Top.
        pbtop = []
        if self.tops is None:
            if self.name is None:
                pbtop = ['_layer_{0}'.format(layerindex)]
            else:
                pbtop = [self.name]
        else:
            pbtop = self.tops
        kwargs['top'] = pbtop
        # Name.
        pbname = self.name
        if pbname is None:
            pbname = '_layer_{0}'.format(layerindex)
        kwargs['name'] = pbname
        if self.phase is not None:
            kwargs['phase'] = self.phase
        # include.
        include_kwargs = dict()
        if self.include_phase is not None:
            include_kwargs['phase'] = self.include_phase
        if self.include_stages is not None and len(self.include_stages) > 0:
            include_kwargs['stage'] = self.include_stages
        if self.include_min_level is not None:
            include_kwargs['min_level'] = self.include_min_level
        if self.include_max_level is not None:
            include_kwargs['max_level'] = self.include_max_level
        if len(include_kwargs) > 0:
            kwargs['include'] = [_caffe_pb2.NetStateRule(**include_kwargs)]
        kwargs['type'] = self.type
        if self.params is not None:
            kwargs['param'] = self.params
        if _HAS_PROPAGATE_DOWN:
            kwargs['propagate_down'] = self.propagate_down
        if self.loss_param is not None:
            kwargs['loss_param'] = self.loss_param
        kwargs['loss_weight'] = self.loss_weights
        for add_pname in self._additional_parameters:
            kwargs[add_pname] = getattr(self, add_pname)
        layerMessage = _caffe_pb2.LayerParameter(**kwargs)
        assert layerMessage.IsInitialized()
        return layerMessage
Example #11
# Create network file
net = caffe.proto.caffe_pb2.NetParameter()

output_dir = os.path.join(config['output_dir'], config['name'])

# Insert data layers
train_layer = net.layer.add()
train_layer.name = "input"
for i in range(config['num_inputs']):
    train_layer.top.append("im{}".format(i+1))
train_layer.top.append("label")
    
train_layer.type = "Python"
train_layer.python_param.module = "input_image_tuple"
train_layer.python_param.layer = "InputImageTuple"
include_param = caffe_pb2.NetStateRule()
include_param.phase = caffe_pb2.TRAIN
train_layer.include.extend([include_param])

channels = 1
if config['color']:
    channels = 3

python_params = {'keys_file': config['train_keys'],
                 'label_file': config['train_labels'],
                 'base_dir': config['data_dir'],
                 'channel_split': config['channel_split'],
                 'direct': True,
                 'shuffle': True,
                 'saliency_dir': os.path.join(output_dir, 'vis'),
                 'data_shape': [channels, 227, 227],