Esempio n. 1
0
 def _refineProcessedData(self):
     """
     Normalize the processed per-vertex weighting data and push it to
     dicts for easier calling.

     Reads self._l_processed (one raw {jointIdx: weight} mapping per
     vert, possibly with string keys/values) and rebuilds it with int
     keys and float weights normalized to sum to 1.0, then populates:

     self._d_vertToWeighting  -- {vertIdx: {jointIdx: weight, ...}}
     self._d_jointToWeighting -- {jointIdx: {vertIdx: weight, ...}}

     :raises ValueError: if self.l_jointsToUse is empty
     """
     if not self.l_jointsToUse:
         raise ValueError("No joints to use found")

     # ...normalize data
     _l_cleanData = []
     for i, _bfr_raw in enumerate(self._l_processed):  # ...for each vert
         # Coerce raw keys/values to numeric types, keeping the key
         # order explicitly paired with the value order handed to the
         # normalizer (the original relied on two dicts happening to
         # iterate in the same order).
         _l_keys = []
         _bfr_toNormalize = []
         for k, value in _bfr_raw.items():
             _l_keys.append(int(k))
             _bfr_toNormalize.append(float(value))

         # ...normalize so each vert's weights sum to 1.0
         _bfr_normalized = cgmMath.normSumList(_bfr_toNormalize, 1.0)
         if not cgmMath.isFloatEquivalent(1.0, sum(_bfr_normalized)):
             self.log_info("vert {0} not normalized".format(i))

         _d_normalized = {}
         for k, v in zip(_l_keys, _bfr_normalized):
             _d_normalized[k] = v
         _l_cleanData.append(_d_normalized)

     self._l_processed = _l_cleanData  # ...initial push data

     # Seed one empty bucket per joint, then invert the vert-indexed
     # data into the joint-indexed dict.
     for i in range(len(self.l_jointsToUse)):
         self._d_jointToWeighting[i] = {}

     for i, d_pair in enumerate(self._l_processed):
         self._d_vertToWeighting[i] = d_pair
         for j_idx, v in d_pair.items():
             self._d_jointToWeighting[j_idx][i] = v
Esempio n. 2
0
 def _refineProcessedData(self):
     """
     Normalize the processed per-vertex weighting data and push it to
     dicts for easier calling.

     Reads self._l_processed (one raw {jointIdx: weight} mapping per
     vert, possibly with string keys/values) and rebuilds it with int
     keys and float weights normalized to sum to 1.0, then populates:

     self._d_vertToWeighting  -- {vertIdx: {jointIdx: weight, ...}}
     self._d_jointToWeighting -- {jointIdx: {vertIdx: weight, ...}}

     :raises ValueError: if self.l_jointsToUse is empty
     """
     if not self.l_jointsToUse:
         raise ValueError("No joints to use found")

     # ...normalize data
     _l_cleanData = []
     for i, _bfr_raw in enumerate(self._l_processed):  # ...for each vert
         # Coerce raw keys/values to numeric types, keeping the key
         # order explicitly paired with the value order handed to the
         # normalizer (the original relied on two dicts happening to
         # iterate in the same order).
         _l_keys = []
         _bfr_toNormalize = []
         for k, value in _bfr_raw.items():
             _l_keys.append(int(k))
             _bfr_toNormalize.append(float(value))

         # ...normalize so each vert's weights sum to 1.0
         _bfr_normalized = cgmMath.normSumList(_bfr_toNormalize, 1.0)
         if not cgmMath.isFloatEquivalent(1.0, sum(_bfr_normalized)):
             self.log_info("vert {0} not normalized".format(i))

         _d_normalized = {}
         for k, v in zip(_l_keys, _bfr_normalized):
             _d_normalized[k] = v
         _l_cleanData.append(_d_normalized)

     self._l_processed = _l_cleanData  # ...initial push data

     # Seed one empty bucket per joint, then invert the vert-indexed
     # data into the joint-indexed dict.
     for i in range(len(self.l_jointsToUse)):
         self._d_jointToWeighting[i] = {}

     for i, d_pair in enumerate(self._l_processed):
         self._d_vertToWeighting[i] = d_pair
         for j_idx, v in d_pair.items():
             self._d_jointToWeighting[j_idx][i] = v
Esempio n. 3
0
        def _fnc_processData(self):
            """
            Sort out the components.

            Validates the target mesh, normalizes the raw per-vertex
            weight data from the source influences, optionally remaps
            joint indices by name (nameMatch), and -- when source/target
            point counts differ or remap is forced -- retargets weights
            via a closest-point search.

            Populates self._l_processed with one {jointIdx: weight}
            dict per target vertex.
            """
            # ...check if our vtx counts match...
            self.log_toDo("Remap dictionary argument")
            self.log_toDo("Non matching mesh types")
            self.mData.d_target = data.validateMeshArg(self.mData.d_target['mesh'])#...update

            _int_sourceCnt = int(self.mData.d_source['pointCount'])
            _int_targetCnt = int(self.mData.d_target['pointCount'])
            _type_source = self.mData.d_source['meshType']
            _type_target = self.mData.d_target['meshType']
            _target = self.mData.d_target['mesh']
            _component = self.mData.d_target['component']
            self.log_infoDict(self.mData.d_target,'target dict...')

            if not _type_source == _type_target:
                return self._FailBreak_("Haven't implemented non matching mesh types | source: {0} | target: {1}".format(_type_source,_type_target))

            # ...generate a processed list: one {jointIdx: weight} dict
            # per source vert, weights normalized to sum to 1.0
            _raw_componentWeights = self.mData.d_sourceInfluences['componentWeights']

            _l_cleanData = []
            for i in range(_int_sourceCnt):#...for each vert
                _bfr_raw = _raw_componentWeights[str(i)]

                # Coerce raw keys/values to numeric types, keeping the
                # key order explicitly paired with the value order
                # handed to the normalizer.
                _l_keys = []
                _bfr_toNormalize = []
                for k, value in _bfr_raw.items():
                    _l_keys.append(int(k))
                    _bfr_toNormalize.append(float(value))

                _bfr_normalized = cgmMath.normSumList(_bfr_toNormalize, 1.0)
                if not cgmMath.isFloatEquivalent(1.0, sum(_bfr_normalized)):
                    self.log_info("vert {0} not normalized".format(i))

                _d_normalized = {}
                for k, v in zip(_l_keys, _bfr_normalized):
                    _d_normalized[k] = v
                _l_cleanData.append(_d_normalized)
            self._l_processed = _l_cleanData#...initial push data

            # ...nameMatch: rewire joint indices so config-order indices
            # map to the joints-to-use order ----------------------------
            if self._b_nameMatch:
                self.log_info("nameMatch attempt...")
                _l_configInfluenceList = self.l_configInfluenceList
                _l_jointsToUseBaseNames = [names.getBaseName(n) for n in self.l_jointsToUse]

                for n in _l_jointsToUseBaseNames:#...see if all our names are there
                    if not n in _l_configInfluenceList:
                        self.log_warning("nameMatch... joint '{0}' from joints to use list not in config list".format(n))

                _d_rewire = {}
                for i,n in enumerate(_l_configInfluenceList):
                    # NOTE(review): .index raises ValueError if a config
                    # name is missing from the joints-to-use base names;
                    # the check above only warns for the reverse
                    # direction -- confirm intended.
                    _d_rewire[i] = _l_jointsToUseBaseNames.index(n)

                self.log_infoDict(_d_rewire,"Rewire...")
                for i,d in enumerate(self._l_processed):
                    # Snapshot so swaps read pre-rewire values.
                    _d_dup = copy.copy(d)
                    for r1,r2 in _d_rewire.items():#...{1:2, 2:1}
                        if r1 in _d_dup:
                            if r2 in _d_dup:
                                # ...both indices weighted: swap them
                                d[r1] = _d_dup[r2]
                                d[r2] = _d_dup[r1]
                            else:
                                d[r2] = d.pop(r1)

            # ...closest-point remap when counts differ (or forced) -----
            if int(_int_sourceCnt) != int(_int_targetCnt) or self._b_forceClosestComponent:
                try:
                    self.log_warning("Non matching component counts. Using closestTo method to remap")
                    _l_closestRetarget = []
                    #...generate a posList of the source data
                    l_source_pos = []
                    _d_pos = self.mData.d_source['d_vertPositions']
                    for i in range(_int_sourceCnt):
                        l_source_pos.append([float(v) for v in _d_pos[str(i)]])#...turn our strings to values

                    self.progressBar_start(stepMaxValue=_int_targetCnt,
                                           statusMessage='Calculating....',
                                           interruptableState=False)

                    for i in range(_int_targetCnt):
                        _str_vert = "{0}.{1}[{2}]".format(_target,_component,i)
                        self.progressBar_iter(status = "Finding closest to '{0}'".format(_str_vert))

                        _pos = distance.returnWorldSpacePosition(_str_vert)#...get position
                        _closestPos = distance.returnClosestPoint(_pos, l_source_pos)#....get closest
                        _closestIdx = l_source_pos.index(_closestPos)
                        # _l_cleanData shares dict objects with
                        # self._l_processed, so any nameMatch rewire
                        # above is already reflected here.
                        _l_closestRetarget.append(_l_cleanData[_closestIdx])
                    self.progressBar_end()

                    self._l_processed = _l_closestRetarget#...push it back
                    self._b_smooth = True

                    # Smooth lightly when up-sampling, heavily when
                    # down-sampling.
                    if _int_targetCnt >= _int_sourceCnt:
                        self._f_smoothWeightsValue = .00005
                    else:
                        self._f_smoothWeightsValue = .5

                    self.log_info("closestTo remap complete...")
                except Exception as error:
                    raise Exception("closestTo remap failure | {0}".format(error))
Esempio n. 4
0
        def _fnc_processData(self):
            """
            Sort out the components.

            Validates the target mesh, normalizes the raw per-vertex
            weight data from the source influences, optionally remaps
            joint indices by name (nameMatch), and -- when source/target
            point counts differ or remap is forced -- retargets weights
            via a closest-point search.

            Populates self._l_processed with one {jointIdx: weight}
            dict per target vertex.
            """
            # ...check if our vtx counts match...
            self.log_toDo("Remap dictionary argument")
            self.log_toDo("Non matching mesh types")
            self.mData.d_target = data.validateMeshArg(self.mData.d_target['mesh'])#...update

            _int_sourceCnt = int(self.mData.d_source['pointCount'])
            _int_targetCnt = int(self.mData.d_target['pointCount'])
            _type_source = self.mData.d_source['meshType']
            _type_target = self.mData.d_target['meshType']
            _target = self.mData.d_target['mesh']
            _component = self.mData.d_target['component']
            self.log_infoDict(self.mData.d_target,'target dict...')

            if not _type_source == _type_target:
                return self._FailBreak_("Haven't implemented non matching mesh types | source: {0} | target: {1}".format(_type_source,_type_target))

            # ...generate a processed list: one {jointIdx: weight} dict
            # per source vert, weights normalized to sum to 1.0
            _raw_componentWeights = self.mData.d_sourceInfluences['componentWeights']

            _l_cleanData = []
            for i in range(_int_sourceCnt):#...for each vert
                _bfr_raw = _raw_componentWeights[str(i)]

                # Coerce raw keys/values to numeric types, keeping the
                # key order explicitly paired with the value order
                # handed to the normalizer.
                _l_keys = []
                _bfr_toNormalize = []
                for k, value in _bfr_raw.items():
                    _l_keys.append(int(k))
                    _bfr_toNormalize.append(float(value))

                _bfr_normalized = cgmMath.normSumList(_bfr_toNormalize, 1.0)
                if not cgmMath.isFloatEquivalent(1.0, sum(_bfr_normalized)):
                    self.log_info("vert {0} not normalized".format(i))

                _d_normalized = {}
                for k, v in zip(_l_keys, _bfr_normalized):
                    _d_normalized[k] = v
                _l_cleanData.append(_d_normalized)
            self._l_processed = _l_cleanData#...initial push data

            # ...nameMatch: rewire joint indices so config-order indices
            # map to the joints-to-use order ----------------------------
            if self._b_nameMatch:
                self.log_info("nameMatch attempt...")
                _l_configInfluenceList = self.l_configInfluenceList
                _l_jointsToUseBaseNames = [names.getBaseName(n) for n in self.l_jointsToUse]

                for n in _l_jointsToUseBaseNames:#...see if all our names are there
                    if not n in _l_configInfluenceList:
                        self.log_warning("nameMatch... joint '{0}' from joints to use list not in config list".format(n))

                _d_rewire = {}
                for i,n in enumerate(_l_configInfluenceList):
                    # NOTE(review): .index raises ValueError if a config
                    # name is missing from the joints-to-use base names;
                    # the check above only warns for the reverse
                    # direction -- confirm intended.
                    _d_rewire[i] = _l_jointsToUseBaseNames.index(n)

                self.log_infoDict(_d_rewire,"Rewire...")
                for i,d in enumerate(self._l_processed):
                    # Snapshot so swaps read pre-rewire values.
                    _d_dup = copy.copy(d)
                    for r1,r2 in _d_rewire.items():#...{1:2, 2:1}
                        if r1 in _d_dup:
                            if r2 in _d_dup:
                                # ...both indices weighted: swap them
                                d[r1] = _d_dup[r2]
                                d[r2] = _d_dup[r1]
                            else:
                                d[r2] = d.pop(r1)

            # ...closest-point remap when counts differ (or forced) -----
            if int(_int_sourceCnt) != int(_int_targetCnt) or self._b_forceClosestComponent:
                try:
                    self.log_warning("Non matching component counts. Using closestTo method to remap")
                    _l_closestRetarget = []
                    #...generate a posList of the source data
                    l_source_pos = []
                    _d_pos = self.mData.d_source['d_vertPositions']
                    for i in range(_int_sourceCnt):
                        l_source_pos.append([float(v) for v in _d_pos[str(i)]])#...turn our strings to values

                    self.progressBar_start(stepMaxValue=_int_targetCnt,
                                           statusMessage='Calculating....',
                                           interruptableState=False)

                    for i in range(_int_targetCnt):
                        _str_vert = "{0}.{1}[{2}]".format(_target,_component,i)
                        self.progressBar_iter(status = "Finding closest to '{0}'".format(_str_vert))

                        _pos = distance.returnWorldSpacePosition(_str_vert)#...get position
                        _closestPos = distance.returnClosestPoint(_pos, l_source_pos)#....get closest
                        _closestIdx = l_source_pos.index(_closestPos)
                        # _l_cleanData shares dict objects with
                        # self._l_processed, so any nameMatch rewire
                        # above is already reflected here.
                        _l_closestRetarget.append(_l_cleanData[_closestIdx])
                    self.progressBar_end()

                    self._l_processed = _l_closestRetarget#...push it back
                    self._b_smooth = True

                    # Smooth lightly when up-sampling, heavily when
                    # down-sampling.
                    if _int_targetCnt >= _int_sourceCnt:
                        self._f_smoothWeightsValue = .00005
                    else:
                        self._f_smoothWeightsValue = .5

                    self.log_info("closestTo remap complete...")
                except Exception as error:
                    raise Exception("closestTo remap failure | {0}".format(error))