Esempio n. 1
0
def make_data_proto(prms, cPrms, finePrms=None):
    """Build the data-layer ProtoDef for the experiment.

    Starts from the python/non-python base data prototxt, optionally renames
    the label top, appends label-slicing layers when multiple labels share a
    blob, and finally adapts the window files.
    """
    netsDir = prms.paths.baseNetsDr
    if cPrms.nwPrms.isPythonLayer:
        protoName = 'data_layers_python.prototxt'
    else:
        protoName = 'data_layers.prototxt'
    dataDef = mpu.ProtoDef(osp.join(netsDir, protoName))

    # A non-siamese fine-tuning net disables slicing; a single 'nrml' label
    # also needs no slicing.
    fineSiamFlag = finePrms is not None and not finePrms.isSiamese
    singleNrml = len(prms.labelNames) == 1 and prms.labelNames[0] == 'nrml'
    sliceFlag = not (singleNrml or fineSiamFlag)

    if len(prms.labelNames) == 1 or fineSiamFlag:
        labelTop = '"%s_label"' % prms.labelNames[0]
        topKey = mpu.make_key('top', ['top'])
        for phase in ('TRAIN', 'TEST'):
            dataDef.set_layer_property('window_data', topKey, labelTop,
                                       phase=phase)
            if not sliceFlag:
                dataDef.set_layer_property('window_data', 'top', '"data"',
                                           phase=phase)
    if sliceFlag:
        # Append the label-slicing layers for the multi-label case.
        slicePath = osp.join(netsDir, '%s_layers.prototxt' % prms.labelNameStr)
        dataDef = _merge_defs([dataDef, mpu.ProtoDef(slicePath)])

    # Point the data layers at the experiment's window files.
    _adapt_data_proto(dataDef, prms, cPrms, finePrms=finePrms)
    return dataDef
Esempio n. 2
0
def make_data_layers_proto(dPrms, nPrms, **kwargs):
    """Build the data-layer net definition and write the label-info file.

    Args:
        dPrms: data parameters (paths, label params, 'isAlign' flag).
        nPrms: network parameters (batch size, crop/image size, jitter, ...).
        resumeIter (kwarg): iteration to resume from; defaults to 0.
    Returns:
        mpu.ProtoDef with the 'window_data' python layer configured for
        TRAIN/TEST and the label-splitting layers merged in.
    """
    baseFile = dPrms.paths.baseProto % nPrms.dataNetDefProto
    netDef = mpu.ProtoDef(baseFile)
    # Iteration from which the net is resumed (0 means a fresh start);
    # kwargs.get replaces the py2-only dict.has_key idiom.
    resumeIter = kwargs.get('resumeIter', 0)
    # The label-info file does not depend on the phase -- write it once,
    # and close the handle deterministically instead of leaking it.
    lbInfo = dPrms['lbPrms']
    lbDict = copy.deepcopy(lbInfo.lb)
    lbDict['lbSz'] = lbInfo.get_lbsz()
    lbDict['statsFile'] = dPrms.paths.exp.other.poseStats
    lbFile = dPrms.paths.exp.other.lbInfo % lbInfo.get_lbstr()
    with open(lbFile, 'w') as fid:
        pickle.dump({'lbInfo': lbDict}, fid)
    #Modify the python layer parameters
    # TRAIN uses the configured batch size; TEST uses a fixed batch of 50.
    batchSz = [nPrms.batchSize, 50]
    meanFile = get_mean_file(nPrms.meanFile)
    for s, b in zip(['TRAIN', 'TEST'], batchSz):
        #The group files
        if s == 'TEST':
            grpListFile = dPrms.paths.exp.other.grpList % 'val'
        else:
            grpListFile = dPrms.paths.exp.other.grpList % 'train'
        # Root folder of (optionally aligned) cropped images.
        if dPrms['isAlign']:
            imFolder = osp.join(cfg.pths.folderProc, 'imCrop', 'imSz256-align')
        else:
            imFolder = osp.join(cfg.pths.folderProc, 'imCrop', 'imSz256')
        #The python parameters
        prmStr = ou.make_python_param_str({
            'batch_size': b,
            'im_root_folder': imFolder,
            'grplist_file': grpListFile,
            'lbinfo_file': lbFile,
            'crop_size': nPrms.crpSz,
            'im_size': nPrms.ipImSz,
            'jitter_amt': nPrms.maxJitter,
            'random_roll_max': nPrms.maxRollJitter,
            'resume_iter': resumeIter,
            'mean_file': meanFile,
            'ncpu': nPrms.ncpu,
            'is_single_grp': nPrms.readSingleGrp
        })
        netDef.set_layer_property('window_data', ['python_param', 'param_str'],
                                  '"%s"' % prmStr,
                                  phase=s)
        # Rename the label top to match the label type.
        lbName = '"%s_label"' % lbInfo.lb['type']
        top2 = mpu.make_key('top', ['top'])
        netDef.set_layer_property('window_data', top2, lbName, phase=s)
    # Merge the layers that split the pair data according to the labels.
    baseFile = dPrms.paths.baseProto % '%s_layers'
    baseFile = baseFile % lbInfo.lb['type']
    splitDef = mpu.ProtoDef(baseFile)
    return _merge_defs([netDef, splitDef])
Esempio n. 3
0
def make_data_layers_proto(dPrms, nPrms, **kwargs):
    """Build the data-layer ProtoDef with python-layer params per phase.

    Args:
        dPrms: data parameters (base proto path template).
        nPrms: network parameters (batch size, crop scale, image size, ...).
        resumeIter (kwarg): iteration to resume from; defaults to 0.
    Returns:
        mpu.ProtoDef with the 'window_data' python layer configured for
        both TRAIN and TEST phases.
    """
    baseFile = dPrms.paths.baseProto % nPrms.dataNetDefProto
    netDef = mpu.ProtoDef(baseFile)
    # Iteration from which the net is resumed; kwargs.get replaces the
    # py2-only dict.has_key idiom.
    # NOTE(review): resumeIter is never forwarded to the layer's param_str
    # below -- confirm whether it should be included.
    resumeIter = kwargs.get('resumeIter', 0)
    #Modify the python layer parameters
    # TRAIN uses the configured batch size; TEST uses a fixed batch of 50.
    batchSz = [nPrms.batchSize, 50]
    meanFile = sev2.get_mean_file(nPrms.meanFile)
    for s, b in zip(['TRAIN', 'TEST'], batchSz):
        #The group files
        prmStr = ou.make_python_param_str({
            'batch_size': b,
            'split': s.lower(),
            'crop_scale': nPrms.cropScale,
            'im_size': nPrms.ipImSz,
            'jitter': nPrms.maxJitter,
            'mean_file': meanFile
        })
        netDef.set_layer_property('window_data', ['python_param', 'param_str'],
                                  '"%s"' % prmStr,
                                  phase=s)
    return netDef
Esempio n. 4
0
def make_loss_layers_proto(dPrms, nPrms, lastTop, **kwargs):
    """Create the loss layers and wire their bottoms to `lastTop`."""
    netDef = mpu.ProtoDef(dPrms.paths.baseProto % nPrms.lossNetDefProto)
    proto = nPrms.lossNetDefProto
    if proto in ['pascal_pose_loss_log_l1_layers']:
        fcNames = ['az_reg_fc', 'el_reg_fc', 'az_cls_fc', 'el_cls_fc']
        # Attach every fc layer to the last top of the base net.
        for name in fcNames:
            netDef.set_layer_property(name, 'bottom', '"%s"' % lastTop)
    elif proto in ['pascal_pose_loss_classify_layers']:
        fcNames = ['azimuth_fc', 'elevation_fc']
        # Classification: size each fc to its bin count, then hook it up.
        for name, nBins in zip(fcNames, [dPrms.nAzBins, dPrms.nElBins]):
            netDef.set_layer_property(name,
                                      ['inner_product_param', 'num_output'],
                                      nBins)
            netDef.set_layer_property(name, 'bottom', '"%s"' % lastTop)
    else:
        raise Exception('%s not found' % proto)
    # Optionally scale learning rates of the output layers (bias gets 2x).
    if nPrms.opLrMult is not None:
        for name in fcNames:
            netDef.set_layer_property(name, ['param', 'lr_mult'],
                                      '%f' % nPrms.opLrMult)
            netDef.set_layer_property(name, ['param_$dup$', 'lr_mult'],
                                      '%f' % (2 * nPrms.opLrMult))
    return netDef
Esempio n. 5
0
def get_liberty_ptch_proto(exp):
    """Write a prototxt for testing patch matching on the Liberty dataset."""
    libPrms = rlp.get_prms()
    netDef = mpu.ProtoDef(exp.files_['netdef'])
    # Strip the surrounding quotes, then point the layer at liberty data.
    pStr = netDef.get_layer_property('window_data', 'param_str')[1:-1]
    pStr = modify_params(pStr, 'source', libPrms.paths.wFile)
    pStr = modify_params(pStr, 'root_folder', libPrms.paths.jpgDir)
    pStr = modify_params(pStr, 'batch_size', 100)
    quoted = '"%s"' % pStr
    propKey = ['python_param', 'param_str']
    netDef.set_layer_property('window_data', propKey, quoted, phase='TEST')
    netDef.set_layer_property('window_data', propKey, quoted)
    # Drop the pose branch when present and relabel the data top.
    if 'pose_loss' in netDef.get_all_layernames():
        for layer in ('pose_loss', 'pose_fc', 'slice_label'):
            netDef.del_layer(layer)
        netDef.set_layer_property('window_data',
                                  'top',
                                  '"%s"' % 'ptch_label',
                                  phase='TEST',
                                  propNum=1)
        netDef.set_layer_property('window_data',
                                  'top',
                                  '"%s"' % 'ptch_label',
                                  propNum=1)
    defFile = 'test-files/ptch_liberty_test.prototxt'
    netDef.write(defFile)
    return defFile
Esempio n. 6
0
def get_street_pose_proto(exp, protoType='mx90'):
    """Write a test prototxt for street-view pose evaluation.

    Args:
        exp: experiment object whose 'netdef' file is used as the base net.
        protoType: 'mx90' or 'all' -- selects the window file and the
            number of test iterations.
    Returns:
        (defFile, numIter): path of the written prototxt and the number of
        test batches to run.
    Raises:
        Exception: if protoType is not recognized (previously this fell
        through and crashed later with a NameError on wFile).
    """
    if protoType == 'mx90':
        wFile = 'test-files/test_pose_euler_mx90_geo-dc-v2_spDist100_imSz256.txt'
        numIter = 100
    elif protoType == 'all':
        wFile = 'test-files/test_pose_euler_spDist100_geodc-v2_100K.txt'
        numIter = 1000
    else:
        # Fail fast with a clear message, matching get_street_ptch_proto.
        raise Exception('%s not recognized' % protoType)
    netDef = mpu.ProtoDef(exp.files_['netdef'])
    # Strip the surrounding quotes before editing the python param string.
    paramStr = netDef.get_layer_property('window_data', 'param_str')[1:-1]
    paramStr = modify_params(paramStr, 'source', wFile)
    paramStr = modify_params(paramStr, 'batch_size', 100)
    netDef.set_layer_property('window_data', ['python_param', 'param_str'],
                              '"%s"' % paramStr,
                              phase='TEST')
    netDef.set_layer_property('window_data', ['python_param', 'param_str'],
                              '"%s"' % paramStr)
    # If the ptch branch is present, remove it and relabel the data top.
    lNames = netDef.get_all_layernames()
    if 'ptch_loss' in lNames:
        netDef.del_layer('ptch_loss')
        netDef.del_layer('ptch_fc')
        netDef.del_layer('slice_label')
        netDef.del_layer('accuracy')
        netDef.set_layer_property('window_data',
                                  'top',
                                  '"%s"' % 'pose_label',
                                  phase='TEST',
                                  propNum=1)
        netDef.set_layer_property('window_data',
                                  'top',
                                  '"%s"' % 'pose_label',
                                  propNum=1)
    defFile = 'test-files/pose_street_test.prototxt'
    netDef.write(defFile)
    return defFile, numIter
Esempio n. 7
0
def make_def_proto(nw,
                   isSiamese=True,
                   baseFileStr='split_im.prototxt',
                   getStreamTopNames=False):
    '''Build a net definition from the layer spec `nw`.

    If isSiamese, layers are duplicated into two streams until the first
    Concat layer is reached; from there on a single main stream is used.

    Args:
        nw: iterable of (layerType, layerParamDict) tuples.
        isSiamese: start in siamese (two-stream) mode.
        baseFileStr: base prototxt to start from.
        getStreamTopNames: also return the top layer name of each stream.
    Returns:
        protoDef, or (protoDef, top1Name, top2Name) when getStreamTopNames.
    '''
    baseFile = os.path.join(baseFileStr)
    protoDef = mpu.ProtoDef(baseFile)

    #if baseFileStr in ['split_im.prototxt', 'normal.prototxt']:
    lastTop = 'data'

    siameseFlag = isSiamese
    stream1, stream2 = [], []
    mainStream = []

    nameGen = mpu.LayerNameGenerator()
    for l in nw:
        lType, lParam = l
        lName = nameGen.next_name(lType)
        # Layers that should not be copied while finetuning need to be
        # named differently.  ('in' replaces the py2-only dict.has_key.)
        if 'nameDiff' in lParam:
            lName = lName + '-%s' % lParam['nameDiff']
        if lType == 'Concat':
            # The Concat layer merges the two siamese streams.
            siameseFlag = False
            if 'bottom2' not in lParam:
                lParam['bottom2'] = lastTop + '_p'

        if siameseFlag:
            lDef, lsDef = mpu.get_siamese_layerdef_for_proto(
                lType, lName, lastTop, **lParam)
            stream1.append(lDef)
            stream2.append(lsDef)
        else:
            lDef = mpu.get_layerdef_for_proto(lType, lName, lastTop, **lParam)
            mainStream.append(lDef)

        # A layer may share its bottom with the next layer, in which case
        # lastTop is left unchanged.
        if 'shareBottomWithNext' in lParam:
            assert lParam['shareBottomWithNext']
        else:
            lastTop = lName

    #Add layers: both siamese streams first, then the main stream.
    mainStream = stream1 + stream2 + mainStream
    for l in mainStream:
        protoDef.add_layer(l['name'][1:-1], l)

    if getStreamTopNames:
        if isSiamese:
            top1Name = stream1[-1]['name'][1:-1]
            top2Name = stream2[-1]['name'][1:-1]
        else:
            top1Name, top2Name = None, None
        return protoDef, top1Name, top2Name
    else:
        return protoDef
Esempio n. 8
0
def make_base_layers_proto(dPrms, nPrms, **kwargs):
    """Load the base net definition, optionally resizing an fc layer."""
    netDef = mpu.ProtoDef(dPrms.paths.baseProto % nPrms.baseNetDefProto)
    fcSz = nPrms.fcSz
    if fcSz is not None:
        # Override the output size of the configured fc layer (TRAIN phase).
        netDef.set_layer_property(nPrms.fcName,
                                  ['inner_product_param', 'num_output'],
                                  '%d' % fcSz,
                                  phase='TRAIN')
    return netDef
Esempio n. 9
0
def get_net_def(dPrms, nwPrms):
    '''Return the ProtoDef for the base net, or None when no proto is set.

    dPrms : data parameters (unused here; kept for interface uniformity)
    nwPrms: parameters that define the net
    '''
    if nwPrms.baseNetDefProto is None:
        return None
    return mpu.ProtoDef(nwPrms.baseNetDefProto)
Esempio n. 10
0
def make_loss_layers_proto(dPrms, nPrms, **kwargs):
    """Load the loss layers, sizing the label fc to the label length."""
    protoFile = dPrms.paths.baseProto % nPrms.lossNetDefProto
    netDef = mpu.ProtoDef(protoFile)
    # The fc producing the prediction is named after the label type.
    fcName = '%s_fc' % dPrms.lbPrms.lb['type']
    netDef.set_layer_property(fcName,
                              ['inner_product_param', 'num_output'],
                              '%d' % dPrms.lbPrms.get_lbsz(),
                              phase='TRAIN')
    return netDef
Esempio n. 11
0
def make_loss_layers_proto(dPrms, nPrms, lastTop, **kwargs):
    """Load loss layers, hook sfn_fc to `lastTop`, optionally scale lrs."""
    netDef = mpu.ProtoDef(dPrms.paths.baseProto % nPrms.lossNetDefProto)
    for name in ('sfn_fc',):
        netDef.set_layer_property(name, 'bottom', '"%s"' % lastTop)
        if nPrms.opLrMult is None:
            continue
        # Weight lr multiplier; the bias conventionally gets twice the rate.
        netDef.set_layer_property(name, ['param', 'lr_mult'],
                                  '%f' % nPrms.opLrMult)
        netDef.set_layer_property(name, ['param_$dup$', 'lr_mult'],
                                  '%f' % (2 * nPrms.opLrMult))
    return netDef
Esempio n. 12
0
def vis_generic_window_data(protoDef,
                            numLabels,
                            layerName='window_data',
                            phase='TEST',
                            maxVis=100):
    '''Interactively visualize image pairs emitted by a window-data layer.

    protoDef : an mpu.ProtoDef instance or a path to a prototxt file.
    numLabels: the number of labels shown per sample.
    layerName: the name of the generic_window_data layer.
    phase    : 'TRAIN' or 'TEST' -- the caffe phase for the net.
    maxVis   : maximum number of batches to visualize.
    '''
    #Just write the data part of the file.
    if not isinstance(protoDef, mpu.ProtoDef):
        protoDef = mpu.ProtoDef(protoDef)
    protoDef.del_all_layers_above(layerName)
    # Random suffix avoids clobbering concurrent runs' temp prototxts.
    randInt = np.random.randint(1e+10)
    outProto = os.path.join(TMP_DATA_DIR, 'gn_window_%d.prototxt' % randInt)
    protoDef.write(outProto)
    #Extract the name of the data and the label blobs.
    dataName = protoDef.get_layer_property(layerName, 'top', propNum=0)[1:-1]
    labelName = protoDef.get_layer_property(layerName, 'top', propNum=1)[1:-1]
    crpSize = int(protoDef.get_layer_property(layerName, ['crop_size']))
    mnFile = protoDef.get_layer_property(layerName, ['mean_file'])[1:-1]
    # Center-crop the mean image to the network's crop size.
    mnDat = mpio.read_mean(mnFile)
    ch, nr, nc = mnDat.shape
    xMn = int((nr - crpSize) / 2)
    mnDat = mnDat[:, xMn:xMn + crpSize, xMn:xMn + crpSize]
    print mnDat.shape

    #Create a network
    if phase == 'TRAIN':
        net = caffe.Net(outProto, caffe.TRAIN)
    else:
        net = caffe.Net(outProto, caffe.TEST)

    # Format template: one '%.2f' slot per label, filled per sample below.
    lblStr = ''.join('lb-%d: %s, ' % (i, '%.2f') for i in range(numLabels))
    figDt = plt.figure()
    plt.ion()
    for i in range(maxVis):
        allDat = net.forward([dataName, labelName])
        # Add the mean back so images are displayable.
        imData = allDat[dataName] + mnDat
        lblDat = allDat[labelName]
        batchSz = imData.shape[0]
        for b in range(batchSz):
            #Plot network data.
            # Each sample stacks two 3-channel images along the channel
            # axis; channels are reordered [2,1,0] (BGR -> RGB) for display.
            im1 = imData[b, 0:3].transpose((1, 2, 0))
            im2 = imData[b, 3:6].transpose((1, 2, 0))
            im1 = im1[:, :, [2, 1, 0]]
            im2 = im2[:, :, [2, 1, 0]]
            lb = lblDat[b].squeeze()
            lbStr = lblStr % tuple(lb)
            plot_pairs(im1, im2, figDt, lbStr)
            # Block until a keypress before the next pair (python 2).
            raw_input()
Esempio n. 13
0
def save_alexnet_levels():
    """Truncate reference CaffeNet at successive layers and save each net."""
    levels = ['conv1', 'conv2', 'conv3', 'conv4', 'conv5', 'fc6']
    modelDir = '/data1/pulkitag/caffe_models/bvlc_reference'
    defFile = osp.join(modelDir, 'caffenet_deploy.prototxt')
    modelFile = osp.join(modelDir, 'bvlc_reference_caffenet.caffemodel')
    outDefTmpl = osp.join(modelDir, 'alexnet_levels',
                          'caffenet_deploy_%s.prototxt')
    outModelTmpl = osp.join(modelDir, 'alexnet_levels',
                            'bvlc_reference_caffenet_%s.caffemodel')
    for level in levels:
        print(level)
        # Keep only the layers up to (and including) this level.
        trimmed = mpu.ProtoDef(defFile=defFile)
        trimmed.del_all_layers_above(level)
        trimmed.write(outDefTmpl % level)
        net = caffe.Net((outDefTmpl % level), modelFile, caffe.TEST)
        net.save(outModelTmpl % level)
Esempio n. 14
0
def make_data_layers_proto(dPrms, nPrms, **kwargs):
    """Build the data-layer ProtoDef and persist the label metadata.

    Args:
        dPrms: data parameters (window/image paths, label keys).
        nPrms: network parameters (batch size, crop/image size, jitter, ...).
        resumeIter (kwarg): iteration to resume from; defaults to 0.
    Returns:
        mpu.ProtoDef with the 'window_data' python layer configured for
        both TRAIN and TEST phases.
    """
    baseFile = dPrms.paths.baseProto % nPrms.dataNetDefProto
    netDef = mpu.ProtoDef(baseFile)
    # Iteration from which the net is resumed (0 means a fresh start);
    # kwargs.get replaces the py2-only dict.has_key idiom.
    resumeIter = kwargs.get('resumeIter', 0)
    #Modify the python layer parameters
    # TRAIN uses the configured batch size; TEST uses a fixed batch of 50.
    batchSz = [nPrms.batchSize, 50]
    meanFile = sev2.get_mean_file(nPrms.meanFile)
    for s, b in zip(['TRAIN', 'TEST'], batchSz):
        #The group files
        prmStr = ou.make_python_param_str({
            'batch_size': b,
            'window_file': dPrms.paths.window[s.lower()],
            'im_root_folder': dPrms.paths.data.imFolder,
            'lb_info_file': dPrms.paths.exp.lbInfo,
            'crop_size': nPrms.crpSz,
            'im_size': nPrms.ipImSz,
            'jitter_amt': nPrms.maxJitter,
            'resume_iter': resumeIter,
            'mean_file': meanFile,
            'ncpu': nPrms.ncpu
        })
        netDef.set_layer_property('window_data', ['python_param', 'param_str'],
                                  '"%s"' % prmStr,
                                  phase=s)
    # Assemble the label metadata: explicit keys from dPrms plus whatever
    # the precomputed label stats provide; write it where the data layer
    # expects it.  'with' closes the handles the original code leaked.
    lbKeys = ['angleFormat', 'anglePreProc', 'azBins', 'elBins']
    lb = edict()
    for lk in lbKeys:
        lb[lk] = dPrms[lk]
    with open(dPrms.paths.exp.labelStats, 'r') as fid:
        lbInfo = pickle.load(fid)
    for lk in lbInfo.keys():
        lb[lk] = lbInfo[lk]
    with open(dPrms.paths.exp.lbInfo, 'w') as fid:
        pickle.dump(lb, fid)
    return netDef
Esempio n. 15
0
def get_street_ptch_proto(exp, protoType='vegas'):
    """Write a prototxt for patch-match testing on street data.

    Returns (defFile, numIter): the written prototxt path and the number
    of test batches to run for the chosen protoType.
    """
    # (window file, number of test iterations) for each supported type.
    configs = {
        'vegas': ('test-files/vegas_ptch_test.txt', 1000),
        'gt5': ('test-files/ptch_test_euler-gt5.txt', 90),
        'mxRot90':
        ('test-files/test_ptch_mxRot90_equal-pos-neg_geo-dc-v2_spDist100_imSz256.txt',
         100),
        'newCity': ('test-files/test_ptch_newcities.txt', 100),
        'allRot':
        ('test-files/test_ptch_equal-pos-neg_geo-dc-v2_spDist100_imSz256.txt',
         100),
    }
    if protoType not in configs:
        raise Exception('%s not recognized' % protoType)
    wFile, numIter = configs[protoType]
    netDef = mpu.ProtoDef(exp.files_['netdef'])
    # Strip the surrounding quotes, then retarget source and batch size.
    pStr = netDef.get_layer_property('window_data', 'param_str')[1:-1]
    pStr = modify_params(pStr, 'source', wFile)
    pStr = modify_params(pStr, 'batch_size', 100)
    quoted = '"%s"' % pStr
    propKey = ['python_param', 'param_str']
    netDef.set_layer_property('window_data', propKey, quoted, phase='TEST')
    netDef.set_layer_property('window_data', propKey, quoted)
    # Remove the pose branch when present and relabel the data top.
    if 'pose_loss' in netDef.get_all_layernames():
        for lName in ('pose_loss', 'pose_fc', 'slice_label'):
            netDef.del_layer(lName)
        netDef.set_layer_property('window_data',
                                  'top',
                                  '"%s"' % 'ptch_label',
                                  phase='TEST',
                                  propNum=1)
        netDef.set_layer_property('window_data',
                                  'top',
                                  '"%s"' % 'ptch_label',
                                  propNum=1)
    defFile = 'test-files/ptch_street_test.prototxt'
    netDef.write(defFile)
    return defFile, numIter
Esempio n. 16
0
    def to_caffe(self, ipLayers=None, layerOrder=None):
        '''Convert this model into a caffe ProtoDef.

        Caffe does not support DAGs but MatConvNet does; layerOrder allows
        some matconvnet nets to be expressed as caffe nets by moving the
        order of layers so caffe can read the generated prototxt file.

        Args:
            ipLayers: optional input-layer defs to prepend (default: none).
            layerOrder: layer names to emit first, in this order.
        Returns:
            mpu.ProtoDef containing all converted layers.
        '''
        # None-sentinels replace the mutable-default-argument anti-pattern;
        # callers passing lists are unaffected.
        ipLayers = [] if ipLayers is None else ipLayers
        layerOrder = [] if layerOrder is None else layerOrder
        pDef = mpu.ProtoDef()
        caffeLayers = co.OrderedDict()
        for lNum in range(len(self.dat_['net']['layers']['name'])):
            cl = self.make_caffe_layer(lNum)
            caffeLayers[cl['name'][1:-1]] = cl
        #Add input layers if needed
        for ipl in ipLayers:
            pDef.add_layer(ipl['name'][1:-1], ipl)
        #Add the ordered layers first
        for l in layerOrder:
            pDef.add_layer(l, caffeLayers[l])
            del caffeLayers[l]
        # .items() (not the py2-only .iteritems()) adds the remaining
        # layers in their original insertion order.
        for key, cl in caffeLayers.items():
            pDef.add_layer(key, cl)
        return pDef
def setup_experiment(prms, cPrms):
    """Configure the caffe experiment for siamese pose training.

    prms : experiment parameters (pose type, loss type, paths, bin count).
    cPrms: caffe parameters (concat layer, solver, convConcat/isMySimple).
    Returns the configured caffe experiment object.
    """
    #The size of the labels
    if prms['pose'] == 'euler':
        rotSz = 3
        trnSz = 3
    elif prms['pose'] == 'sigMotion':
        rotSz = 1
        trnSz = 2
    elif prms['pose'] == 'rotOnly':
        rotSz = 3
        trnSz = 0
    elif prms['pose'] == 'slowness':
        # NOTE(review): rotSz/trnSz stay undefined for 'slowness'; branches
        # below that read them would raise NameError -- presumably 'slowness'
        # is only used with lossType 'contrastive'. Confirm with callers.
        pass
    else:
        raise Exception('Unrecognized %s pose type' % prms['pose'])

    #The base file to start with
    baseFileStr = 'kitti_siamese_window_%s' % cPrms['concatLayer']
    if prms['lossType'] == 'classify':
        baseStr = '_cls-trn%d-rot%d' % (trnSz, rotSz)
        if cPrms['convConcat']:
            baseStr = baseStr + '_concat_conv'
        if cPrms['isMySimple']:
            baseStr = baseStr + '_mysimple'
    elif prms['lossType'] in ['contrastive']:
        baseStr = '_%s' % prms['lossType']
    else:
        baseStr = ''
    # NOTE(review): baseFilePath is not defined in this function; presumably
    # a module-level constant -- verify it is in scope.
    baseFile = os.path.join(baseFilePath, baseFileStr + baseStr + '.prototxt')
    print baseFile

    protoDef = mpu.ProtoDef(baseFile)
    solDef = cPrms['solver']

    caffeExp = get_experiment_object(prms, cPrms)
    caffeExp.init_from_external(solDef, protoDef)

    #Get the source file for the train and test layers
    caffeExp.set_layer_property('window_data',
                                ['generic_window_data_param', 'source'],
                                '"%s"' % prms['paths']['windowFile']['train'],
                                phase='TRAIN')
    caffeExp.set_layer_property('window_data',
                                ['generic_window_data_param', 'source'],
                                '"%s"' % prms['paths']['windowFile']['test'],
                                phase='TEST')

    #Set the root folder
    caffeExp.set_layer_property('window_data',
                                ['generic_window_data_param', 'root_folder'],
                                '"%s"' % prms['paths']['imRootDir'],
                                phase='TRAIN')
    caffeExp.set_layer_property('window_data',
                                ['generic_window_data_param', 'root_folder'],
                                '"%s"' % prms['paths']['imRootDir'],
                                phase='TEST')

    # Enable random cropping in both phases when requested.
    if prms['randomCrop']:
        caffeExp.set_layer_property(
            'window_data', ['generic_window_data_param', 'random_crop'],
            'true',
            phase='TRAIN')
        caffeExp.set_layer_property(
            'window_data', ['generic_window_data_param', 'random_crop'],
            'true',
            phase='TEST')

    if prms['lossType'] == 'classify':
        # One classification fc per translation/rotation dimension, each
        # sized to the configured number of bins.
        for t in range(trnSz):
            caffeExp.set_layer_property('translation_fc_%d' % (t + 1),
                                        ['inner_product_param', 'num_output'],
                                        prms['binCount'],
                                        phase='TRAIN')
        for r in range(rotSz):
            caffeExp.set_layer_property('rotation_fc_%d' % (r + 1),
                                        ['inner_product_param', 'num_output'],
                                        prms['binCount'],
                                        phase='TRAIN')
    elif prms['lossType'] == 'contrastive':
        caffeExp.set_layer_property('loss',
                                    ['contrastive_loss_param', 'margin'],
                                    cPrms['contrastiveMargin'])
    else:
        #Regression loss basically
        #Set the size of the rotation and translation layers
        caffeExp.set_layer_property('translation_fc',
                                    ['inner_product_param', 'num_output'],
                                    trnSz,
                                    phase='TRAIN')
        caffeExp.set_layer_property('rotation_fc',
                                    ['inner_product_param', 'num_output'],
                                    rotSz,
                                    phase='TRAIN')

    if prms['lossType'] in ['contrastive']:
        # Contrastive loss uses the label blob whole -- no slicing needed.
        pass
    else:
        #Decide the slice point for the label
        #The slice point is decided by the translation labels.
        if trnSz == 0:
            slcPt = 1
        else:
            slcPt = trnSz
        caffeExp.set_layer_property('slice_label',
                                    ['slice_param', 'slice_point'], slcPt)
    return caffeExp
Esempio n. 18
0
def make_base_layers_proto(dPrms, nPrms, **kwargs):
    """Load and return the base network definition as a ProtoDef."""
    return mpu.ProtoDef(dPrms.paths.baseProto % nPrms.baseNetDefProto)
Esempio n. 19
0
def make_net_proto(prms, cPrms, finePrms=None):
    """Build the main network ProtoDef, optionally adapted for fine-tuning.

    prms    : experiment parameters (siamese flag, base-net paths).
    cPrms   : caffe/network parameters (net name, concat layer, layer sizes).
    finePrms: fine-tuning parameters; when given and non-siamese, a
              single-stream net file is used and common_fc is renamed.
    Returns the assembled mpu.ProtoDef.
    """
    baseFilePath = prms.paths.baseNetsDr
    # The net is siamese unless fine-tuning explicitly asks otherwise.
    isSiamese = False
    if prms.isSiamese:
        isSiamese = True
    if finePrms is not None and not finePrms.isSiamese:
        isSiamese = False
    if isSiamese:
        netFileStr = '%s_window_siamese_%s.prototxt'
    else:
        netFileStr = '%s_window_%s.prototxt'

    netFile = netFileStr % (cPrms.nwPrms.netName, cPrms.nwPrms.concatLayer)
    netFile = osp.join(baseFilePath, netFile)
    netDef = mpu.ProtoDef(netFile)

    if cPrms.nwPrms.extraFc is not None:
        #Changethe name of the existing common_fc to common_fc_prev
        netDef.rename_layer('common_fc', 'common_fc_prev')
        netDef.set_layer_property('common_fc_prev', 'top',
                                  '"%s"' % 'common_fc_prev')
        #Rename the params
        netDef.set_layer_property('common_fc_prev', ['param', 'name'],
                                  '"%s"' % 'common_fc_prev_w')
        # NOTE(review): propNum=[1, 0] presumably addresses the second
        # 'param' entry (the bias) -- confirm against mpu's API.
        netDef.set_layer_property('common_fc_prev', ['param', 'name'],
                                  '"%s"' % 'common_fc_prev_b',
                                  propNum=[1, 0])
        # Re-point the relu that followed the old common_fc.
        netDef.rename_layer('relu_common', 'relu_common_prev')
        netDef.set_layer_property('relu_common_prev', 'top',
                                  '"%s"' % 'common_fc_prev')
        netDef.set_layer_property('relu_common_prev', 'bottom',
                                  '"%s"' % 'common_fc_prev')
        #Add the new layer: a fresh common_fc (+ relu) on top of the old one.
        eName = 'common_fc'
        lastTop = 'common_fc_prev'
        fcLayer = mpu.get_layerdef_for_proto(
            'InnerProduct', eName, lastTop, **{
                'top': eName,
                'num_output': cPrms.nwPrms.extraFc
            })
        reLayer = mpu.get_layerdef_for_proto('ReLU', 'relu_common', eName,
                                             **{'top': eName})
        netDef.add_layer(eName, fcLayer)
        netDef.add_layer('relu_common', reLayer)

    # Optional overrides for layer output sizes; siamese twins ('_p') are
    # kept in sync when the experiment itself is siamese.
    if cPrms.nwPrms.numFc5 is not None:
        netDef.set_layer_property('fc5', ['inner_product_param', 'num_output'],
                                  '%d' % cPrms.nwPrms.numFc5)
        if prms.isSiamese:
            netDef.set_layer_property('fc5_p',
                                      ['inner_product_param', 'num_output'],
                                      '%d' % cPrms.nwPrms.numFc5)

    if cPrms.nwPrms.numConv4 is not None:
        netDef.set_layer_property('conv4', ['convolution_param', 'num_output'],
                                  '%d' % cPrms.nwPrms.numConv4)
        if prms.isSiamese:
            netDef.set_layer_property('conv4_p',
                                      ['convolution_param', 'num_output'],
                                      '%d' % cPrms.nwPrms.numConv4)

    if cPrms.nwPrms.numCommonFc is not None:
        netDef.set_layer_property('common_fc',
                                  ['inner_product_param', 'num_output'],
                                  '%d' % cPrms.nwPrms.numCommonFc)

    # Optional dropout on common_fc, applied in the TRAIN phase only.
    if cPrms.nwPrms.concatDrop:
        dropLayer = mpu.get_layerdef_for_proto(
            'Dropout', 'drop-%s' % 'common_fc', 'common_fc', **{
                'top': 'common_fc',
                'dropout_ratio': 0.5
            })
        netDef.add_layer('drop-%s' % 'common_fc', dropLayer, 'TRAIN')

    # Fine-tuning gets its own common_fc name so weights are not copied.
    if finePrms is not None:
        netDef.rename_layer('common_fc', 'common_fc_fine')

    return netDef
def setup_experiment(prms, cPrms, odoTune=False):
    '''Configure the caffe experiment for cities siamese pose training.

    prms   : experiment parameters (pose type, loss type, paths, bin count).
    cPrms  : caffe parameters (concat layer, solver, convConcat/isMySimple).
    odoTune: if finetuning for odometry is required; selects the finetune
             base prototxt and appends '-ft' to the fc layer names.
    Returns the configured caffe experiment object.
    '''
    #The size of the labels
    if prms['pose'] == 'euler':
        rotSz = 3
        trnSz = 3
    else:
        raise Exception('Unrecognized %s pose type' % prms['pose'])

    # Only the classification loss is supported by this setup.
    assert prms['lossType'] == 'classify'
    #The base file to start with
    baseFileStr = 'cities_siamese_window_%s' % cPrms['concatLayer']
    baseFilePath = '/work4/pulkitag-code/pkgs/caffe-v2-2/modelFiles/cities/base_files'
    baseStr = ''
    if cPrms['convConcat']:
        baseStr = baseStr + '_concat_conv'
    if cPrms['isMySimple']:
        baseStr = baseStr + '_mysimple'
    if odoTune:
        baseFile = os.path.join(baseFilePath,
                                baseFileStr + baseStr + '_finetune.prototxt')
    else:
        baseFile = os.path.join(baseFilePath,
                                baseFileStr + baseStr + '.prototxt')
    print baseFile

    protoDef = mpu.ProtoDef(baseFile)
    solDef = cPrms['solver']

    caffeExp = get_experiment_object(prms, cPrms)
    caffeExp.init_from_external(solDef, protoDef)

    #Get the source file for the train and test layers
    caffeExp.set_layer_property('window_data',
                                ['generic_window_data_param', 'source'],
                                '"%s"' % prms['paths']['windowFile']['train'],
                                phase='TRAIN')
    caffeExp.set_layer_property('window_data',
                                ['generic_window_data_param', 'source'],
                                '"%s"' % prms['paths']['windowFile']['test'],
                                phase='TEST')

    #Set the root folder
    caffeExp.set_layer_property('window_data',
                                ['generic_window_data_param', 'root_folder'],
                                '"%s"' % prms['paths']['imRootDir'],
                                phase='TRAIN')
    caffeExp.set_layer_property('window_data',
                                ['generic_window_data_param', 'root_folder'],
                                '"%s"' % prms['paths']['imRootDir'],
                                phase='TEST')

    # Enable random cropping in both phases when requested.
    if prms['randomCrop']:
        caffeExp.set_layer_property(
            'window_data', ['generic_window_data_param', 'random_crop'],
            'true',
            phase='TRAIN')
        caffeExp.set_layer_property(
            'window_data', ['generic_window_data_param', 'random_crop'],
            'true',
            phase='TEST')

    # Finetune nets name their fc layers with a '-ft' suffix.
    if odoTune:
        addStr = '-ft'
    else:
        addStr = ''

    #Set the size of the translation and rotation fc layers
    for t in range(trnSz):
        caffeExp.set_layer_property(('translation_fc_%d' % (t + 1)) + addStr,
                                    ['inner_product_param', 'num_output'],
                                    prms['binCount'],
                                    phase='TRAIN')
    for r in range(rotSz):
        caffeExp.set_layer_property(('rotation_fc_%d' % (r + 1)) + addStr,
                                    ['inner_product_param', 'num_output'],
                                    prms['binCount'],
                                    phase='TRAIN')

    #Decide the slice point for the label
    #The slice point is decided by the translation labels.
    caffeExp.set_layer_property('slice_label', ['slice_param', 'slice_point'],
                                trnSz)
    return caffeExp
Esempio n. 21
0
def make_loss_proto(prms, cPrms):
    """Build the loss-layer ProtoDef for the labels requested in prms.

    Reads per-label prototxt templates from prms.paths.baseNetsDr, sets the
    fc output sizes, loss weights and ignore labels for each active label
    ('nrml', 'ptch', 'pose'), and merges them into a single ProtoDef.

    Args:
        prms: experiment parameters; must provide paths.baseNetsDr,
            labelNames, labels, isSiamese and (for multiLossProto)
            labelNameStr / isMultiLabel.
        cPrms: config parameters; cPrms.nwPrms supplies lossWeight
            (scalar or per-label list), multiLossProto, poseStreamNum,
            ptchStreamNum.

    Returns:
        mpu.ProtoDef containing all loss layers.

    Raises:
        Exception: if a pose label has an unrecognized loss type.
    """
    baseFilePath = prms.paths.baseNetsDr
    lbDefs = []
    # A scalar loss weight is broadcast to every label.
    if not isinstance(cPrms.nwPrms.lossWeight, list):
        lossWeight = [cPrms.nwPrms.lossWeight] * len(prms.labels)
    else:
        lossWeight = cPrms.nwPrms.lossWeight

    # Special multi-loss prototxt: a single file already containing the
    # pose and ptch streams; configure it and return early.
    if cPrms.nwPrms.multiLossProto is not None:
        assert (prms.isMultiLabel)
        fName = '%s_%s_loss_layers.prototxt' % (prms.labelNameStr,
                                                cPrms.nwPrms.multiLossProto)
        fName = osp.join(baseFilePath, fName)
        lbDef = mpu.ProtoDef(fName)
        # Modify pose parameters
        poseLb = prms.labels[prms.labelNames.index('pose')]
        poseIdx = prms.labelNames.index('pose')
        lbDef.set_layer_property('pose_fc',
                                 ['inner_product_param', 'num_output'],
                                 '%d' % poseLb.lbSz_)
        lbDef.set_layer_property('pose_stream_fc',
                                 ['inner_product_param', 'num_output'],
                                 '%d' % cPrms.nwPrms.poseStreamNum)
        lbDef.set_layer_property('pose_loss', 'loss_weight',
                                 '%f' % lossWeight[poseIdx])
        # Modify ptch parameters
        ptchLb = prms.labels[prms.labelNames.index('ptch')]
        ptchIdx = prms.labelNames.index('ptch')
        lbDef.set_layer_property('ptch_fc',
                                 ['inner_product_param', 'num_output'],
                                 '%d' % ptchLb.lbSz_)
        lbDef.set_layer_property('ptch_stream_fc',
                                 ['inner_product_param', 'num_output'],
                                 '%d' % cPrms.nwPrms.ptchStreamNum)
        lbDef.set_layer_property('ptch_loss', 'loss_weight',
                                 '%f' % lossWeight[ptchIdx])
        return lbDef

    if prms.isSiamese and 'nrml' in prms.labelNames:
        # Siamese normals: duplicate the nrml loss def, renaming the fc
        # layers so the two streams do not collide.
        defFile = osp.join(baseFilePath, 'nrml_loss_layers.prototxt')
        nrmlDef1 = mpu.ProtoDef(defFile)
        nrmlDef2 = mpu.ProtoDef(defFile)
        # Structure the two defs
        nrmlDef1.set_layer_property('nrml_fc', 'name', '"nrml_1_fc"')
        nrmlDef1.set_layer_property('nrml_1_fc', 'top', '"nrml_1_fc"')
        nrmlDef2.set_layer_property('nrml_fc', 'name', '"nrml_2_fc"')
        nrmlDef2.set_layer_property('nrml_2_fc', 'top', '"nrml_2_fc"')
        # Merge the two defs.
        # BUGFIX: _merge_defs takes a single list of defs (see the other
        # call sites); the old code passed two positional arguments.
        lbDef = _merge_defs([nrmlDef1, nrmlDef2])
        lbDefs.append(lbDef)
    elif 'nrml' in prms.labelNames:
        idx = prms.labelNames.index('nrml')
        lbInfo = prms.labels[idx]
        if not (lbInfo.loss_ == 'classify'):
            # Regression-style nrml loss
            defFile = osp.join(baseFilePath, 'nrml_loss_layers.prototxt')
            lbDef = mpu.ProtoDef(defFile)
            lbDef.set_layer_property('nrml_loss', 'loss_weight',
                                     '%f' % lossWeight[idx])
        else:
            # Classification nrml loss: two fc/loss/accuracy streams, with
            # numBins_ used both as num_output and as the ignore_label id.
            defFile = osp.join(baseFilePath,
                               'nrml_loss_classify_layers.prototxt')
            lbDef = mpu.ProtoDef(defFile)
            lbDef.set_layer_property('nrml_fc_1',
                                     ['inner_product_param', 'num_output'],
                                     '%d' % lbInfo.numBins_)
            lbDef.set_layer_property('nrml_fc_2',
                                     ['inner_product_param', 'num_output'],
                                     '%d' % lbInfo.numBins_)
            lbDef.set_layer_property('nrml_loss_1', 'loss_weight',
                                     '%f' % lossWeight[idx])
            lbDef.set_layer_property('nrml_loss_2', 'loss_weight',
                                     '%f' % lossWeight[idx])
            lbDef.set_layer_property('nrml_loss_1',
                                     ['loss_param', 'ignore_label'],
                                     '%d' % lbInfo.numBins_)
            lbDef.set_layer_property('nrml_loss_2',
                                     ['loss_param', 'ignore_label'],
                                     '%d' % lbInfo.numBins_)
            lbDef.set_layer_property('nrml_accuracy_1',
                                     ['accuracy_param', 'ignore_label'],
                                     '%d' % lbInfo.numBins_)
            lbDef.set_layer_property('nrml_accuracy_2',
                                     ['accuracy_param', 'ignore_label'],
                                     '%d' % lbInfo.numBins_)
        lbDefs.append(lbDef)
    if 'ptch' in prms.labelNames:
        idx = prms.labelNames.index('ptch')
        defFile = osp.join(baseFilePath, 'ptch_loss_layers.prototxt')
        lbDef = mpu.ProtoDef(defFile)
        lbDef.set_layer_property('ptch_loss', 'loss_weight',
                                 '%f' % lossWeight[idx])
        lbDefs.append(lbDef)
    if 'pose' in prms.labelNames:
        idx = prms.labelNames.index('pose')
        lbInfo = prms.labels[idx]
        if lbInfo.loss_ in ['l2', 'l1', 'logl1']:
            # Pick the template matching the regression loss flavor.
            if lbInfo.loss_ in ['l2']:
                defFile = osp.join(baseFilePath, 'pose_loss_layers.prototxt')
            elif lbInfo.loss_ in ['l1']:
                defFile = osp.join(baseFilePath,
                                   'pose_loss_l1_layers.prototxt')
            else:
                defFile = osp.join(baseFilePath,
                                   'pose_loss_log_l1_layers.prototxt')
            lbDef = mpu.ProtoDef(defFile)
            lbDef.set_layer_property('pose_fc',
                                     ['inner_product_param', 'num_output'],
                                     '%d' % lbInfo.lbSz_)
            if lbInfo.loss_ in ['l2']:
                lbDef.set_layer_property('pose_loss', 'loss_weight',
                                         '%f' % lossWeight[idx])
            else:
                # l1/logl1 templates do not support a loss_weight override.
                print('FOR L1 LOSS, LOSS WEIGHT DOESNT WORK')
        elif lbInfo.loss_ in ['classify']:
            # Classification over yaw/pitch bins; numBins_ doubles as the
            # ignore_label id (the bin past the last valid one).
            defFile = osp.join(baseFilePath,
                               'pose_loss_classify_layers.prototxt')
            lbDef = mpu.ProtoDef(defFile)
            fcNames = ['yaw', 'pitch']
            for fc in fcNames:
                lbDef.set_layer_property('%s_fc' % fc,
                                         ['inner_product_param', 'num_output'],
                                         '%d' % lbInfo.numBins_)
                lbDef.set_layer_property('%s_loss' % fc, 'loss_weight',
                                         '%f' % lossWeight[idx])
                lbDef.set_layer_property('%s_loss' % fc,
                                         ['loss_param', 'ignore_label'],
                                         '%d' % lbInfo.numBins_)
                lbDef.set_layer_property('%s_accuracy' % fc,
                                         ['accuracy_param', 'ignore_label'],
                                         '%d' % lbInfo.numBins_)
        else:
            raise Exception('Loss Type %s not recognized' % lbInfo.loss_)
        lbDefs.append(lbDef)
    lbDef = _merge_defs(lbDefs)
    # Replace the EuclideanLoss with EuclideanLossWithIgnore so that
    # examples without a valid label can be masked out.
    l2Layers = lbDef.get_layernames_from_type('EuclideanLoss')
    for ll in l2Layers:
        lbDef.set_layer_property(ll, 'type', '"EuclideanLossWithIgnore"')
    return lbDef