def curveInfo(curve, baseName='curveInfo'): """ >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> DESCRIPTION: Creates a curve lenght measuring node ARGUMENTS: polyFace(string) - face of a poly RETURNS: length(float) >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> """ _str_func = 'curveInfo' if VALID.is_shape(curve): l_shapes = [curve] else: l_shapes = mc.listRelatives(curve, s=True, fullPath=True) if len(l_shapes) > 1: raise ValueError, cgmGeneral.logString_msg( __str_func, "Must have one shape. Found {0} | {1}".format( len(l_shapes), l_shapes)) infoNode = create(baseName, 'curveInfo') ATTR.connect((l_shapes[0] + '.worldSpace'), (infoNode + '.inputCurve')) return infoNode
def define(self):
    """Zero the transform, lock/hide standard channels, and wire sx/sz to sy
    so the block scales uniformly through a single 'blockScale' alias."""
    node = self.mNode

    # Reset translation and rotation to origin
    for channel in ('translate', 'rotate'):
        ATTR.set(node, channel, [0, 0, 0])

    ATTR.set_standardFlags(self.mNode,
                           attrs=['translate', 'rotate', 'sx', 'sz'])

    # Side scale channels follow sy, giving uniform scale from one attribute
    ATTR.connect("{0}.sy".format(node), "{0}.sx".format(node))
    ATTR.connect("{0}.sy".format(node), "{0}.sz".format(node))

    ATTR.set_alias(node, 'sy', 'blockScale')
def copy_constraint(sourceConstraint=None, targetObj=None, constraintType=None, maintainOffset=True):
    """
    Copy the settings of an existing constraint onto another object.

    :parameters:
        sourceConstraint(str): constraint node to copy from
        targetObj(str): object to constrain; defaults to the source's driven object
        constraintType(str): type of constraint to build; defaults to the source's type
        maintainOffset(bool): passed through to the constraint call

    :returns
        list of constraints(list)
    """
    _str_func = 'copy_constraint'
    log.debug("|{0}| >> constraint: {1} ".format(_str_func, sourceConstraint))

    # Gather the source constraint's data (type, targets, driven, attr weights/drivers)
    d_source = get_datDict(sourceConstraint)
    _type = d_source['type']

    if constraintType is None:
        if targetObj is None:
            raise ValueError, "|{0}| >> Must have targetObject or constraintType ".format(
                _str_func)
        else:
            log.info(
                "|{0}| >> No constraintType passed. Using source's: '{1}' ".
                format(_str_func, _type))
            constraintType = _type

    # Resolve the builder function for this constraint type
    _call = _d_type_to_call.get(constraintType, False)
    if not _call:
        raise ValueError, "|{0}| >> {1} not a known type of constraint. node: {2}".format(
            _str_func, _type, sourceConstraint)

    if targetObj is None:
        # Fall back to the object the source constraint drives
        targetObj = d_source['driven']
        log.info(
            "|{0}| >> No target object passed. Using source's: '{1}' ".format(
                _str_func, targetObj))

    cgmGEN.func_snapShot(vars())

    result = _call(d_source['targets'],
                   targetObj,
                   maintainOffset=maintainOffset)

    # Mirror per-target weight attrs: reconnect drivers where the source had
    # them, otherwise copy the static weight values
    d_result = get_datDict(result[0])
    for i, a in enumerate(d_result['attrs']):
        if d_source['attrDrivers'][i]:
            ATTR.connect("{0}".format(d_source['attrDrivers'][i]),
                         "{0}.{1}".format(result[0], d_result['attrs'][i]))
        else:
            # NOTE(review): weights appear to be keyed by source attr name -- confirm get_datDict's schema
            ATTR.set(result[0], d_result['attrs'][i],
                     d_source['attrWeights'][d_source['attrs'][i]])

    return result
def create_distanceMeasure(start=None, end=None, baseName='measure'):
    """
    Create a distanceDimension setup measuring between two objects/points.

    :parameters:
        :start(str): Our start obj
        :end(str): End obj
        :baseName(str): base name used for the created nodes

    :returns
        {shape,dag,loc_start,loc_end,start,end}
    """
    try:
        _str_func = 'create_distanceMeasure'

        #Create ====================================================================================
        # Try to use an existing world-position plug on each end; if none,
        # drop a locator at the object's position and use the locator's plug
        plug_start = POS.get_positionPlug(start)
        plug_end = POS.get_positionPlug(end)
        _res = {'start': start, 'end': end}

        if not plug_start:
            pos_start = POS.get(start)
            loc_start = mc.spaceLocator(name="{0}_{1}_start_loc".format(
                NAMES.get_base(start), baseName))[0]
            POS.set(loc_start, pos_start)
            plug_start = POS.get_positionPlug(loc_start)
            _res['loc_start'] = loc_start
        if not plug_end:
            pos_end = POS.get(end)
            loc_end = mc.spaceLocator(name="{0}_{1}_end_loc".format(
                NAMES.get_base(end), baseName))[0]
            POS.set(loc_end, pos_end)
            plug_end = POS.get_positionPlug(loc_end)
            _res['loc_end'] = loc_end

        # Build and name the distance shape and its transform
        mDistShape = r9Meta.MetaClass(mc.createNode('distanceDimShape'))
        mDistShape.rename("{0}_distShape".format(baseName))

        mDistTrans = r9Meta.MetaClass(VALID.getTransform(mDistShape.mNode))
        mDistTrans.rename("{0}_dist".format(baseName))

        _res['dag'] = mDistTrans.mNode
        _res['shape'] = mDistShape.mNode

        # Message-link the shape to the transform and wire the measure points
        ATTR.set_message(_res['dag'], 'distShape', _res['shape'], simple=True)
        ATTR.connect(plug_start, "{0}.startPoint".format(_res['shape']))
        ATTR.connect(plug_end, "{0}.endPoint".format(_res['shape']))

        return _res
    except Exception, err:
        cgmGen.cgmExceptCB(Exception, err)
def optimize(nodeTypes='multiplyDivide'): _str_func = 'optimize' log.debug("|{0}| >> ".format(_str_func) + '-' * 80) _nodeTypes = VALID.listArg(nodeTypes) d_modeToNodes = {} d_modeToPlugs = {} l_oldNodes = [] for t in _nodeTypes: if t in ['plusMinusAverage']: raise ValueError, "Don't handle type: {0}".format(t) nodes = mc.ls(type=t) l_oldNodes.extend(nodes) for n in nodes: _mode = ATTR.get(n, 'operation') _operator = ATTR.get_enumValueString(n, 'operation') #d_operator_to_NodeType[t][_mode] if not d_modeToNodes.get(_mode): d_modeToNodes[_mode] = [] d_modeToNodes[_mode].append(n) d_plugs = {} d_plugValues = {} for i, inPlug in enumerate(d_node_to_input[t]['in']): d_plugs[i] = ATTR.get_children(n, inPlug) or [] for p in d_plugs[i]: c = ATTR.get_driver(n, p, False, skipConversionNodes=True) if c: d_plugValues[p] = c else: d_plugValues[p] = ATTR.get(n, p) l_outs = ATTR.get_children(n, d_node_to_input[t]['out']) or [] for p in l_outs: d_plugValues[p] = ATTR.get_driven(n, p, False, skipConversionNodes=True) #pprint.pprint(d_modeToNodes) #pprint.pprint(d_plugs) #print l_outs #print cgmGeneral._str_subLine #pprint.pprint(d_plugValues) for i in range(len(l_outs)): _out = d_plugValues[l_outs[i]] if _out: d_set = {'out': _out, 'in': []} log.debug("|{0}| >> Output found on: {1} ".format( _str_func, _out)) _keys = d_plugs.keys() _keys.sort() for k in _keys: d_set['in'].append(d_plugValues[d_plugs[k][i]]) #d_set['in'].append(d_plugs[k][i]) #pprint.pprint(d_set) if not d_modeToPlugs.get(_mode): d_modeToPlugs[_mode] = [] d_modeToPlugs[_mode].append(d_set) # if VALID.stringArg() l_inPlugs = ['input1', 'input2'] l_outplugs = [u'output'] l_new = [] _cnt = 0 for operator, d_sets in d_modeToPlugs.iteritems(): if operator == 1: for nodeSet in d_sets: newNode = mc.createNode('multDoubleLinear') newNode = mc.rename(newNode, 'optimize_{0}_mdNode'.format(_cnt)) _cnt += 1 l_new.append(newNode) _ins = d_set['in'] _outs = d_set['out'] for iii, inPlug in enumerate(_ins): if 
mc.objExists(inPlug): ATTR.connect(inPlug, "{0}.{1}".format(newNode, l_inPlugs[iii])) else: ATTR.set(newNode, l_inPlugs[iii], inPlug) for out in _outs: ATTR.connect("{0}.output".format(newNode), out) #pprint.pprint(d_setsSorted) print len(d_sets) #print len(d_setsSorted) """ l_inPlugs = {0: [u'input1X', u'input1Y', u'input1Z'], 1: [u'input2X', u'input2Y', u'input2Z']} l_outplugs = [u'outputX', u'outputY', u'outputZ'] for operator,d_sets in d_modeToPlugs.iteritems(): d_setsSorted = LISTS. get_chunks(d_sets,3) for nodeSet in d_setsSorted: newNode = mc.createNode('multiplyDivide') newNode = mc.rename(newNode,'optimize_{0}_mdNode'.format(_cnt)) _cnt+=1 l_new.append(newNode) ATTR.set(newNode,'operation',operator) for i,d_set in enumerate(nodeSet): _ins = d_set['in'] _outs = d_set['out'] for iii,inPlug in enumerate(_ins): if mc.objExists(inPlug): ATTR.connect(inPlug, "{0}.{1}".format(newNode, l_inPlugs[iii][i])) else: ATTR.set(newNode,l_inPlugs[iii][i], inPlug) for out in _outs: ATTR.connect("{0}.{1}".format(newNode, l_outplugs[i]), out) #pprint.pprint(d_setsSorted) print len(d_sets) print len(d_setsSorted) """ mc.delete(l_oldNodes) return len(l_new)
def create_closest_point_node(source=None, targetSurface=None, singleReturn=False): """ Create a closest point on surface node and wire it :parameters: source(str/vector) -- source point or object targetSurface -- surface to check transform, nurbsSurface, curve, mesh supported singleReturn - only return single return if we have :returns node(list) """ try: _str_func = 'create_closest_point_node' _transform = False if VALID.vectorArg(source) is not False: _transform = mc.spaceLocator(n='closest_point_source_loc')[0] POS.set(_transform, source) elif mc.objExists(source): if SEARCH.is_transform(source): _transform = source elif VALID.is_component(source): _transform = mc.spaceLocator( n='{0}_loc'.format(NAMES.get_base(source)))[0] POS.set(_transform, POS.get(source)) else: _transform = SEARCH.get_transform(source) if not _transform: raise ValueError, "Must have a transform" if SEARCH.is_shape(targetSurface): l_shapes = [targetSurface] else: l_shapes = mc.listRelatives(targetSurface, s=True, fullPath=True) if not l_shapes: raise ValueError, "Must have shapes to check." _nodes = [] _locs = [] _types = [] _shapes = [] for s in l_shapes: _type = VALID.get_mayaType(s) if _type not in ['mesh', 'nurbsSurface', 'nurbsCurve']: log.error( "|{0}| >> Unsupported target surface type. 
Skipping: {1} |{2} " .format(_str_func, s, _type)) continue _loc = mc.spaceLocator()[0] _res_loc = mc.rename( _loc, '{0}_to_{1}_result_loc'.format(NAMES.get_base(source), NAMES.get_base(s))) _locs.append(_res_loc) _types.append(_type) _shapes.append(s) if _type == 'mesh': _node = mc.createNode('closestPointOnMesh') _node = mc.rename( _node, "{0}_to_{1}_closePntMeshNode".format( NAMES.get_base(source), NAMES.get_base(s))) ATTR.connect((_transform + '.translate'), (_node + '.inPosition')) ATTR.connect((s + '.worldMesh'), (_node + '.inMesh')) ATTR.connect((s + '.worldMatrix'), (_node + '.inputMatrix')) _pos = ATTR.get(_node, 'position') ATTR.connect((_node + '.position'), (_res_loc + '.translate')) _nodes.append(_node) elif _type == 'nurbsSurface': closestPointNode = mc.createNode('closestPointOnSurface') closestPointNode = mc.rename( closestPointNode, "{0}_to_{1}_closePntSurfNode".format( NAMES.get_base(source), NAMES.get_base(s))) mc.connectAttr((_transform + '.translate'), (closestPointNode + '.inPosition')) #attributes.doSetAttr(closestPointNode,'inPositionX',_point[0]) #attributes.doSetAttr(closestPointNode,'inPositionY',_point[1]) #attributes.doSetAttr(closestPointNode,'inPositionZ',_point[2]) ATTR.connect((s + '.worldSpace'), (closestPointNode + '.inputSurface')) ATTR.connect((closestPointNode + '.position'), (_res_loc + '.translate')) _nodes.append(closestPointNode) elif _type == 'nurbsCurve': _node = mc.createNode('nearestPointOnCurve') _node = mc.rename( _node, "{0}_to_{1}_nearPntCurveNode".format( NAMES.get_base(source), NAMES.get_base(s))) p = [] distances = [] mc.connectAttr((_transform + '.translate'), (_node + '.inPosition')) mc.connectAttr((s + '.worldSpace'), (_node + '.inputCurve')) ATTR.connect((_node + '.position'), (_res_loc + '.translate')) _nodes.append(_node) if not singleReturn: return _locs, _nodes, _shapes, _types _l_distances = [] pos_base = POS.get(_transform) for i, n in enumerate(_nodes): p2 = POS.get(_locs[i]) 
_l_distances.append(get_distance_between_points(pos_base, p2)) if not _l_distances: raise ValueError, "No distance value found" closest = min(_l_distances) _idx = _l_distances.index(closest) for i, n in enumerate(_nodes): if i != _idx: mc.delete(n, _locs[i]) return _locs[_idx], _nodes[_idx], _shapes[_idx], _types[_idx] except Exception, err: cgmGen.cgmExceptCB(Exception, err)
def get_closest_point(source=None, targetSurface=None, loc=False): """ Get the closest point on a target surface/curve/mesh to a given point or object. Evaluates to all sub shapes to get closest point for multi shape targets. :parameters: source(str/vector) -- source point or object targetSurface -- surface to check transform, nurbsSurface, curve, mesh supported loc -- whether to loc point found :returns position, distance, shape (list) """ _str_func = 'get_closest_point' _point = False if VALID.vectorArg(source) is not False: _point = source elif mc.objExists(source): _point = POS.get(source) if not _point: raise ValueError, "Must have point of reference" _loc = mc.spaceLocator(n='get_closest_point_loc')[0] POS.set(_loc, _point) if SEARCH.is_shape(targetSurface): _shapes = [targetSurface] elif VALID.is_component(targetSurface): _shapes = mc.listRelatives(VALID.get_component(targetSurface)[1], s=True, fullPath=True) else: _shapes = mc.listRelatives(targetSurface, s=True, fullPath=True) if not _shapes: log.error("|{0}| >> No shapes found. Skipping: {1}".format( _str_func, targetSurface)) mc.delete(_loc) return False _l_res_positions = [] _l_res_shapes = [] _l_res_distances = [] for s in _shapes: _type = VALID.get_mayaType(s) if _type not in ['mesh', 'nurbsSurface', 'nurbsCurve']: log.error( "|{0}| >> Unsupported target surface type. 
Skipping: {1} |{2} | {3}" .format(_str_func, s, _type)) _l_res_positions.append(False) continue if _type == 'mesh': _node = mc.createNode('closestPointOnMesh') ATTR.connect((_loc + '.translate'), (_node + '.inPosition')) ATTR.connect((s + '.worldMesh'), (_node + '.inMesh')) ATTR.connect((s + '.worldMatrix'), (_node + '.inputMatrix')) _pos = ATTR.get(_node, 'position') _tmpLoc = mc.spaceLocator(n='tmp')[0] ATTR.connect((_node + '.position'), (_tmpLoc + '.translate')) _l_res_positions.append(POS.get(_tmpLoc)) mc.delete(_node) mc.delete(_tmpLoc) elif _type == 'nurbsSurface': closestPointNode = mc.createNode('closestPointOnSurface') ATTR.set(closestPointNode, 'inPositionX', _point[0]) ATTR.set(closestPointNode, 'inPositionY', _point[1]) ATTR.set(closestPointNode, 'inPositionZ', _point[2]) ATTR.connect((s + '.worldSpace'), (closestPointNode + '.inputSurface')) _l_res_positions.append(ATTR.get(closestPointNode, 'position')) mc.delete(closestPointNode) elif _type == 'nurbsCurve': _node = mc.createNode('nearestPointOnCurve') p = [] distances = [] mc.connectAttr((_loc + '.translate'), (_node + '.inPosition')) mc.connectAttr((s + '.worldSpace'), (_node + '.inputCurve')) p = [ mc.getAttr(_node + '.positionX'), mc.getAttr(_node + '.positionY'), mc.getAttr(_node + '.positionZ') ] _l_res_positions.append(p) mc.delete(_node) mc.delete(_loc) if not _l_res_positions: raise ValueError, "No positions found" for p in _l_res_positions: if p: _l_res_distances.append(get_distance_between_points(_point, p)) else: _l_res_distances.append('no') closest = min(_l_res_distances) _idx = _l_res_distances.index(closest) _pos = _l_res_positions[_idx] if not _pos: return False #raise ValueError,"Failed to find point" if loc: _loc = mc.spaceLocator(n='get_closest_point_loc')[0] POS.set(_loc, _pos) return _pos, _l_res_distances[_idx], _shapes[_idx]
def get_closest_point_data_from_mesh(mesh=None, targetObj=None, targetPoint=None): """ >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> DESCRIPTION: Returns pertinent info of the closest point of a mesh to a target object - position, normal, parameterU,parameterV,closestFaceIndex,closestVertexIndex ARGUMENTS: targetObj(string) mesh(string) RETURNS: closestPointInfo(dict) >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> """ _str_func = 'get_closest_point_data_from_mesh' _point = False if targetObj is not None: _point = POS.get(targetObj) elif targetPoint: _point = targetPoint if not _point: raise ValueError, "Must have point of reference" _loc = mc.spaceLocator()[0] POS.set(_loc, _point) _shape = False if SEARCH.is_shape(mesh): if VALID.get_mayaType(mesh) == 'mesh': _shape = mesh else: raise ValueError, "Must be a mesh shape" else: _shape = SEARCH.get_nonintermediateShape(mesh) _shapes = mc.listRelatives(mesh, s=True, fullPath=True) """_meshes = [] for s in _shapes: if VALID.get_mayaType(s) == 'mesh': _meshes.append(s) if len(_meshes) > 1: _shape = _meshes[0]""" if not _shape: log.error("|{0}| >> Shapes...".format(_str_func)) for s in _shapes: print "{0} : {1}".format(s, VALID.get_mayaType(s)) raise ValueError, "Must have a mesh shape by now" """ make the closest point node """ _node = mc.createNode('closestPointOnMesh') """ to account for target objects in heirarchies """ ATTR.connect((targetObj + '.translate'), (_node + '.inPosition')) ATTR.connect((_shape + '.worldMesh'), (_node + '.inMesh')) ATTR.connect((_shape + '.matrix'), (_node + '.inputMatrix')) _u = mc.getAttr(_node + '.parameterU') _v = mc.getAttr(_node + '.parameterV') #_norm = get_normalized_uv(_shape, _u,_v) _res = {} _res['shape'] = _shape _res['position'] = ATTR.get(_node, 'position') _res['normal'] = ATTR.get(_node, 'normal') _res['parameterU'] = _u _res['parameterV'] = _v #_res['normalizedU'] = _norm[0] #_res['normalizedV'] = _norm[1] _res['closestFaceIndex'] 
= mc.getAttr(_node + '.closestFaceIndex') _res['closestVertexIndex'] = mc.getAttr(_node + '.closestVertexIndex') mc.delete([_node, _loc]) return _res
def plug_insertNewValues(driven = None, drivers = [], replace = False, mode = 'multiply'):
    """
    Given an attribute, add in new values to it. If it has a plug, use that.

    Builds a chain of multDoubleLinear nodes multiplying the drivers together
    (optionally including the attribute's existing driver) and wires the final
    product into the driven attribute.

    :parameters:
        driven: attribute arg to drive (validated via cgmMeta.validateAttrArg)
        drivers(list): attribute args whose values multiply in
        replace(bool): when False, the existing driver is kept as the first factor
        mode(str): only 'multiply' is supported
    """
    try:
        _str_func = 'plug_insertNewValues'
        log.debug("|{0}| >> ".format(_str_func)+ '-'*80)

        if mode not in ['multiply']:
            raise ValueError,"Mode not supported: {0}".format(mode)

        d_driven = cgmMeta.validateAttrArg(driven)
        mPlug = d_driven['mPlug']
        ml_drivers = []
        # Capture the current driver before we break the connection below
        mPreDriver = mPlug.getDriver(asMeta=True)
        log.debug("|{0}| >> Pre Driver: {1}".format(_str_func,mPreDriver))

        for d in drivers:
            d_driver = cgmMeta.validateAttrArg(d)
            if d_driver:
                ml_drivers.append(d_driver['mPlug'])
                log.debug("|{0}| >> Driver: {1}".format(_str_func,d_driver['mPlug']))
            else:
                log.debug("|{0}| >> Failed to validate: {1}".format(_str_func,d))

        if not ml_drivers:
            raise ValueError, "No drivers validated"

        if not replace:
            # Keep the pre-existing driver as the first factor in the chain
            ml_drivers.insert(0,mPreDriver[0])

        if len(ml_drivers) < 2:
            raise ValueError,"Must have more than two drivers. Found: {0}".format(ml_drivers)

        ATTR.break_connection(mPlug.p_combinedName)

        # Chain multDoubleLinear nodes: (((d0*d1)*d2)*...) -> driven plug
        lastNode = None
        for i,mDriver in enumerate(ml_drivers[:-1]):
            if not lastNode:
                lastNode = mc.createNode('multDoubleLinear')
                mDriver.doConnectOut(lastNode + '.input1')
                ml_drivers[i+1].doConnectOut(lastNode + '.input2')
            else:
                newNode = mc.createNode('multDoubleLinear')
                ATTR.connect(lastNode+'.output',newNode + '.input1')
                ml_drivers[i+1].doConnectOut(newNode + '.input2')
                lastNode=newNode

        ATTR.connect(lastNode+'.output',mPlug.p_combinedName)

    except Exception,err:
        #pprint.pprint(vars())
        cgmGEN.cgmExceptCB(Exception,err,msg=vars())
        raise Exception,err
def segment_handles(self, ml_handles=None, ml_handleParents=None, mIKBaseControl=None, mRoot=None, str_ikBase=None, upMode='asdf'):
    """
    Set up blended forward/back aim constraints on a list of segment handles
    so each handle can follow its parent (root follow) or aim along the chain
    (aim follow), blended by a 'followRoot' attribute added per handle.

    :parameters:
        ml_handles(list): handle mObjs to set up (required)
        ml_handleParents(list): per-handle parent mObjs (required)
        mIKBaseControl: base IK control (used by the 'hips' setup)
        mRoot: root mObj used as the first handle's root target
        str_ikBase(str): ikBase mode; read from the block when None
        upMode(str): 'matrix' uses a decomposeMatrix up vector; any other
            value falls through to objectRotation up
    """
    try:
        _str_func = 'segment_handles'
        log_start(_str_func)

        mBlock = self.mBlock
        mRigNull = self.mRigNull
        _offset = self.v_offset
        _jointOrientation = self.d_orientation['str']

        if not ml_handles:
            raise ValueError, "{0} | ml_handles required".format(_str_func)
        if not ml_handleParents:
            raise ValueError, "{0} | ml_handleParents required".format(
                _str_func)

        # Ribbon IK drivers are the preferred aim targets; fall back to the
        # handle parents when none are registered
        ml_ribbonIkHandles = mRigNull.msgList_get('ribbonIKDrivers')
        if not ml_ribbonIkHandles:
            ml_ribbonIkHandles = ml_handleParents
            #raise ValueError,"No ribbon IKDriversFound"

        if str_ikBase == None:
            str_ikBase = mBlock.getEnumValueString('ikBase')

        # Orientation vectors for the aim constraints
        _aim = self.d_orientation['vectorAim']
        _aimNeg = self.d_orientation['vectorAimNeg']
        _up = self.d_orientation['vectorUp']
        _out = self.d_orientation['vectorOut']

        if str_ikBase == 'hips':
            log.debug("|{0}| >> hips setup...".format(_str_func))
            if len(ml_handles) == 1:
                # Single handle: simple single-blend aim to the ribbon drivers
                mHipHandle = ml_handles[0]
                RIGCONSTRAINT.build_aimSequence(
                    ml_handles,
                    ml_ribbonIkHandles,
                    [mIKBaseControl],  #ml_handleParents,
                    mode='singleBlend',
                    upMode='objectRotation')
            else:
                if str_ikBase == 'hips':
                    # First handle rides the IK base; second (hip) handle rides
                    # the root, point-constrained to the IK base
                    log.debug("|{0}| >> hips handles...".format(_str_func))
                    ml_handles[0].masterGroup.p_parent = mIKBaseControl
                    mHipHandle = ml_handles[1]
                    mHipHandle.masterGroup.p_parent = mRoot
                    mc.pointConstraint(mIKBaseControl.mNode,
                                       mHipHandle.masterGroup.mNode,
                                       maintainOffset=True)
                    # NOTE(review): passes the single handle, not a list --
                    # confirm build_aimSequence accepts that
                    RIGCONSTRAINT.build_aimSequence(
                        ml_handles[1],
                        ml_ribbonIkHandles,
                        [mIKBaseControl],  #ml_handleParents,
                        mode='singleBlend',
                        upParent=self.d_orientation['vectorOut'],
                        upMode='objectRotation')
                """
                RIGCONSTRAINT.build_aimSequence(ml_handles[-1],
                                                ml_ribbonIkHandles,
                                                #[mRigNull.controlIK.mNode],#ml_handleParents,
                                                mode = 'singleBlend',
                                                upMode = 'objectRotation')"""

                # Remaining handles: forward/back aimers blended per handle
                for i, mHandle in enumerate(ml_handles):
                    if mHandle in ml_handles[:2]:  # + [ml_handles[-1]]:
                        continue
                    mHandle.masterGroup.parent = ml_handleParents[i]
                    s_rootTarget = False
                    s_targetForward = False
                    s_targetBack = False
                    mMasterGroup = mHandle.masterGroup
                    b_first = False
                    # NOTE(review): first/last checks below are kept from the
                    # original; the first two handles were skipped above, so
                    # the ml_handles[0] branch appears unreachable here
                    if mHandle == ml_handles[0]:
                        log.debug("|{0}| >> First handle: {1}".format(
                            _str_func, mHandle))
                        if len(ml_handles) <= 2:
                            s_targetForward = ml_handleParents[-1].mNode
                        else:
                            s_targetForward = ml_handles[i + 1].getMessage(
                                'masterGroup')[0]
                        s_rootTarget = mRoot.mNode
                        b_first = True
                    elif mHandle == ml_handles[-1]:
                        log.debug("|{0}| >> Last handle: {1}".format(
                            _str_func, mHandle))
                        s_rootTarget = ml_handleParents[i].mNode
                        s_targetBack = ml_handles[i - 1].getMessage(
                            'masterGroup')[0]
                    else:
                        log.debug("|{0}| >> Reg handle: {1}".format(
                            _str_func, mHandle))
                        s_targetForward = ml_handles[i + 1].getMessage(
                            'masterGroup')[0]
                        s_targetBack = ml_handles[i - 1].getMessage(
                            'masterGroup')[0]

                    #Decompose matrix for parent...
                    if upMode == 'matrix':
                        mUpDecomp = cgmMeta.cgmNode(nodeType='decomposeMatrix')
                        mUpDecomp.doStore('cgmName', ml_handleParents[i])
                        mUpDecomp.addAttr('cgmType',
                                          'aimMatrix',
                                          attrType='string',
                                          lock=True)
                        mUpDecomp.doName()
                        ATTR.connect(
                            "%s.worldMatrix" % (ml_handleParents[i].mNode),
                            "%s.%s" % (mUpDecomp.mNode, 'inputMatrix'))
                        # worldUpVector is driven live by the decomposed
                        # rotate, so it starts zeroed here
                        _d_up = {
                            'aimVector': _aim,
                            'upVector': _out,
                            'worldUpObject': ml_handleParents[i].mNode,
                            'worldUpType': 'vector',
                            'worldUpVector': [0, 0, 0]
                        }
                    else:
                        _d_up = {
                            'aimVector': _aim,
                            'upVector': _out,
                            'worldUpObject': ml_handleParents[i].mNode,
                            'worldUpType': 'objectRotation',
                            'worldUpVector': [1, 0, 0]
                        }

                    # Forward aimer: aims at the next handle's master group
                    if s_targetForward:
                        mAimForward = mHandle.doCreateAt()
                        mAimForward.parent = mMasterGroup
                        mAimForward.doStore('cgmTypeModifier', 'forward')
                        mAimForward.doStore('cgmType', 'aimer')
                        mAimForward.doName()

                        _const = mc.aimConstraint(s_targetForward,
                                                  mAimForward.mNode,
                                                  maintainOffset=True,
                                                  **_d_up)
                        s_targetForward = mAimForward.mNode
                        if upMode == 'matrix':
                            ATTR.connect(
                                "%s.%s" % (mUpDecomp.mNode, "outputRotate"),
                                "%s.%s" % (_const[0], "upVector"))
                    else:
                        s_targetForward = ml_handleParents[i].mNode

                    # Back aimer: aims at the previous handle's master group
                    if s_targetBack:
                        mAimBack = mHandle.doCreateAt()
                        mAimBack.parent = mMasterGroup
                        mAimBack.doStore('cgmTypeModifier', 'back')
                        mAimBack.doStore('cgmType', 'aimer')
                        mAimBack.doName()

                        _d_up['aimVector'] = _aimNeg

                        _const = mc.aimConstraint(s_targetBack,
                                                  mAimBack.mNode,
                                                  maintainOffset=True,
                                                  **_d_up)
                        s_targetBack = mAimBack.mNode
                        if upMode == 'matrix':
                            ATTR.connect(
                                "%s.%s" % (mUpDecomp.mNode, "outputRotate"),
                                "%s.%s" % (_const[0], "upVector"))
                    else:
                        s_targetBack = s_rootTarget  #ml_handleParents[i].mNode

                    #pprint.pprint([s_targetForward,s_targetBack])
                    # Blend group orient-constrained between the two aimers
                    mAimGroup = mHandle.doGroup(True,
                                                asMeta=True,
                                                typeModifier='aim')

                    mHandle.parent = False

                    if b_first:
                        const = mc.orientConstraint(
                            [s_targetBack, s_targetForward],
                            mAimGroup.mNode,
                            maintainOffset=True)[0]
                    else:
                        const = mc.orientConstraint(
                            [s_targetForward, s_targetBack],
                            mAimGroup.mNode,
                            maintainOffset=True)[0]

                    # followRoot attr drives the two constraint weights inversely
                    d_blendReturn = NODEFACTORY.createSingleBlendNetwork(
                        [mHandle.mNode, 'followRoot'],
                        [mHandle.mNode, 'resultRootFollow'],
                        [mHandle.mNode, 'resultAimFollow'],
                        keyable=True)
                    targetWeights = mc.orientConstraint(const,
                                                        q=True,
                                                        weightAliasList=True,
                                                        maintainOffset=True)

                    #Connect
                    d_blendReturn['d_result1']['mi_plug'].doConnectOut(
                        '%s.%s' % (const, targetWeights[0]))
                    d_blendReturn['d_result2']['mi_plug'].doConnectOut(
                        '%s.%s' % (const, targetWeights[1]))
                    d_blendReturn['d_result1']['mi_plug'].p_hidden = True
                    d_blendReturn['d_result2']['mi_plug'].p_hidden = True

                    mHandle.parent = mAimGroup  #...parent back
        else:
            # Non-hips: every handle gets the forward/back aimer treatment,
            # always using a decomposeMatrix-driven up vector
            log.debug("|{0}| >> reg handles...".format(_str_func))
            for i, mHandle in enumerate(ml_handles):
                mHandle.masterGroup.parent = ml_handleParents[i]
                s_rootTarget = False
                s_targetForward = False
                s_targetBack = False
                mMasterGroup = mHandle.masterGroup
                b_first = False
                if mHandle == ml_handles[0]:
                    log.debug("|{0}| >> First handle: {1}".format(
                        _str_func, mHandle))
                    if len(ml_handles) <= 2:
                        s_targetForward = ml_handleParents[-1].mNode
                    else:
                        s_targetForward = ml_handles[i + 1].getMessage(
                            'masterGroup')[0]
                    s_rootTarget = mRoot.mNode
                    b_first = True
                elif mHandle == ml_handles[-1]:
                    log.debug("|{0}| >> Last handle: {1}".format(
                        _str_func, mHandle))
                    s_rootTarget = ml_handleParents[i].mNode
                    s_targetBack = ml_handles[i - 1].getMessage(
                        'masterGroup')[0]
                else:
                    log.debug("|{0}| >> Reg handle: {1}".format(
                        _str_func, mHandle))
                    s_targetForward = ml_handles[i + 1].getMessage(
                        'masterGroup')[0]
                    s_targetBack = ml_handles[i - 1].getMessage(
                        'masterGroup')[0]

                #Decompose matrix for parent...
                mUpDecomp = cgmMeta.cgmNode(nodeType='decomposeMatrix')
                mUpDecomp.doStore('cgmName', ml_handleParents[i])
                mUpDecomp.addAttr('cgmType',
                                  'aimMatrix',
                                  attrType='string',
                                  lock=True)
                mUpDecomp.doName()
                ATTR.connect(
                    "%s.worldMatrix" % (ml_handleParents[i].mNode),
                    "%s.%s" % (mUpDecomp.mNode, 'inputMatrix'))

                if s_targetForward:
                    mAimForward = mHandle.doCreateAt()
                    mAimForward.parent = mMasterGroup
                    mAimForward.doStore('cgmTypeModifier', 'forward')
                    mAimForward.doStore('cgmType', 'aimer')
                    mAimForward.doName()

                    _const = mc.aimConstraint(
                        s_targetForward,
                        mAimForward.mNode,
                        maintainOffset=True,  #skip = 'z',
                        aimVector=_aim,
                        upVector=_out,
                        worldUpObject=ml_handleParents[i].mNode,
                        worldUpType='vector',
                        worldUpVector=[0, 0, 0])
                    s_targetForward = mAimForward.mNode
                    # Drive the up vector live from the parent's rotation
                    ATTR.connect(
                        "%s.%s" % (mUpDecomp.mNode, "outputRotate"),
                        "%s.%s" % (_const[0], "upVector"))
                else:
                    s_targetForward = ml_handleParents[i].mNode

                if s_targetBack:
                    mAimBack = mHandle.doCreateAt()
                    mAimBack.parent = mMasterGroup
                    mAimBack.doStore('cgmTypeModifier', 'back')
                    mAimBack.doStore('cgmType', 'aimer')
                    mAimBack.doName()

                    _const = mc.aimConstraint(
                        s_targetBack,
                        mAimBack.mNode,
                        maintainOffset=True,  #skip = 'z',
                        aimVector=_aimNeg,
                        upVector=_out,
                        worldUpObject=ml_handleParents[i].mNode,
                        worldUpType='vector',
                        worldUpVector=[0, 0, 0])
                    s_targetBack = mAimBack.mNode
                    ATTR.connect(
                        "%s.%s" % (mUpDecomp.mNode, "outputRotate"),
                        "%s.%s" % (_const[0], "upVector"))
                else:
                    s_targetBack = s_rootTarget  #ml_handleParents[i].mNode

                #pprint.pprint([s_targetForward,s_targetBack])
                mAimGroup = mHandle.doGroup(True,
                                            asMeta=True,
                                            typeModifier='aim')

                mHandle.parent = False

                if b_first:
                    const = mc.orientConstraint(
                        [s_targetBack, s_targetForward],
                        mAimGroup.mNode,
                        maintainOffset=True)[0]
                else:
                    const = mc.orientConstraint(
                        [s_targetForward, s_targetBack],
                        mAimGroup.mNode,
                        maintainOffset=True)[0]

                d_blendReturn = NODEFACTORY.createSingleBlendNetwork(
                    [mHandle.mNode, 'followRoot'],
                    [mHandle.mNode, 'resultRootFollow'],
                    [mHandle.mNode, 'resultAimFollow'],
                    keyable=True)
                targetWeights = mc.orientConstraint(const,
                                                    q=True,
                                                    weightAliasList=True,
                                                    maintainOffset=True)

                #Connect
                d_blendReturn['d_result1']['mi_plug'].doConnectOut(
                    '%s.%s' % (const, targetWeights[0]))
                d_blendReturn['d_result2']['mi_plug'].doConnectOut(
                    '%s.%s' % (const, targetWeights[1]))
                d_blendReturn['d_result1']['mi_plug'].p_hidden = True
                d_blendReturn['d_result2']['mi_plug'].p_hidden = True

                mHandle.parent = mAimGroup  #...parent back

        # Default followRoot: end handles fully follow root; mids blend 50/50
        for mHandle in ml_handles:
            if mHandle in [ml_handles[0], ml_handles[-1]]:
                mHandle.followRoot = 1
                ATTR.set_default(mHandle.mNode, 'followRoot', 1.0)
            else:
                mHandle.followRoot = .5
                ATTR.set_default(mHandle.mNode, 'followRoot', .5)

    except Exception, err:
        cgmGEN.cgmExceptCB(Exception, err, localDat=vars())
def build_aimSequence(l_driven = None, l_targets = None, l_parents = None, l_upTargets = None, msgLink_masterGroup = 'masterGroup', aim = [0,0,1], up = [0,1,0], mode = 'sequence',#sequence,singleBlend upMode = 'objRotation',#objRotation,decomposeMatrix upParent = [0,1,0], rootTargetEnd = None, rootTargetStart=None,#specify root targets by index and mObj mRoot = None,#need for sequence interpType = None, maintainOffset = False): """ This kind of setup is for setting up a blended constraint so that obj2 in an obj1/obj2/obj3 sequence can aim forward or back as can obj3. :parameters: l_jointChain1 - First set of objects :returns: :raises: Exception | if reached """ _str_func = 'build_aimSequence' ml_driven = cgmMeta.validateObjListArg(l_driven,'cgmObject') ml_targets = cgmMeta.validateObjListArg(l_targets,'cgmObject',noneValid=True) ml_parents = cgmMeta.validateObjListArg(l_parents,'cgmObject',noneValid=True) ml_upTargets = cgmMeta.validateObjListArg(l_upTargets,'cgmObject',noneValid=True) if not ml_upTargets: ml_upTargets = ml_parents axis_aim = VALID.simpleAxis(aim) axis_aimNeg = axis_aim.inverse axis_up = VALID.simpleAxis(up) v_aim = axis_aim.p_vector#aimVector v_aimNeg = axis_aimNeg.p_vector#aimVectorNegative v_up = axis_up.p_vector #upVector #cgmGEN.func_snapShot(vars()) if mode == 'singleBlend': if len(ml_targets) != 2: cgmGEN.func_snapShot(vars()) return log.error("|{0}| >> Single blend mode must have 2 targets.".format(_str_func)) if len(ml_driven) != 1: cgmGEN.func_snapShot(vars()) return log.error("|{0}| >> Single blend mode must have 1 driven obj.".format(_str_func)) if not ml_parents: cgmGEN.func_snapShot(vars()) return log.error("|{0}| >> Single blend mode must have handleParents.".format(_str_func)) if len(ml_parents) != 1: cgmGEN.func_snapShot(vars()) return log.error("|{0}| >> Single blend mode must have 1 handleParent.".format(_str_func)) mDriven = ml_driven[0] if not mDriven.getMessage(msgLink_masterGroup): log.debug("|{0}| >> No master group, 
creating...".format(_str_func)) raise ValueError, log.error("|{0}| >> Add the create masterGroup setup, Josh".format(_str_func)) mMasterGroup = mDriven.getMessage(msgLink_masterGroup,asMeta=True)[0] s_rootTarget = False s_targetForward = ml_targets[-1].mNode s_targetBack = ml_targets[0].mNode i = 0 mMasterGroup.p_parent = ml_parents[i] mUpDecomp = None if upMode == 'decomposeMatrix': #Decompose matrix for parent... mUpDecomp = cgmMeta.cgmNode(nodeType = 'decomposeMatrix') mUpDecomp.rename("{0}_aimMatrix".format(ml_parents[i].p_nameBase)) #mUpDecomp.doStore('cgmName',ml_parents[i]) #mUpDecomp.addAttr('cgmType','aimMatrix',attrType='string',lock=True) #mUpDecomp.doName() ATTR.connect("{0}.worldMatrix".format(ml_parents[i].mNode),"{0}.{1}".format(mUpDecomp.mNode,'inputMatrix')) d_worldUp = {'worldUpObject' : ml_parents[i].mNode, 'worldUpType' : 'vector', 'worldUpVector': [0,0,0]} elif upMode == 'objectRotation': d_worldUp = {'worldUpObject' : ml_parents[i].mNode, 'worldUpType' : 'objectRotation', 'worldUpVector': upParent} else: raise ValueError, log.error("|{0}| >> Unknown upMode: {1}".format(_str_func,upMode)) if s_targetForward: mAimForward = mDriven.doCreateAt() mAimForward.parent = mMasterGroup mAimForward.doStore('cgmTypeModifier','forward') mAimForward.doStore('cgmType','aimer') mAimForward.doName() _const=mc.aimConstraint(s_targetForward, mAimForward.mNode, maintainOffset = True, #skip = 'z', aimVector = v_aim, upVector = v_up, **d_worldUp) s_targetForward = mAimForward.mNode if mUpDecomp: ATTR.connect("%s.%s"%(mUpDecomp.mNode,"outputRotate"),"%s.%s"%(_const[0],"upVector")) else: s_targetForward = ml_parents[i].mNode if s_targetBack: mAimBack = mDriven.doCreateAt() mAimBack.parent = mMasterGroup mAimBack.doStore('cgmTypeModifier','back') mAimBack.doStore('cgmType','aimer') mAimBack.doName() _const = mc.aimConstraint(s_targetBack, mAimBack.mNode, maintainOffset = True, #skip = 'z', aimVector = v_aimNeg, upVector = v_up, **d_worldUp) s_targetBack = 
mAimBack.mNode if mUpDecomp: ATTR.connect("%s.%s"%(mUpDecomp.mNode,"outputRotate"),"%s.%s"%(_const[0],"upVector")) else: s_targetBack = s_rootTarget #ml_parents[i].mNode pprint.pprint([s_targetForward,s_targetBack]) mAimGroup = mDriven.doGroup(True,asMeta=True,typeModifier = 'aim') mDriven.parent = False const = mc.orientConstraint([s_targetForward, s_targetBack], mAimGroup.mNode, maintainOffset = True)[0] d_blendReturn = NODEFACTORY.createSingleBlendNetwork([mDriven.mNode,'followRoot'], [mDriven.mNode,'resultRootFollow'], [mDriven.mNode,'resultAimFollow'], keyable=True) targetWeights = mc.orientConstraint(const,q=True, weightAliasList=True,maintainOffset=True) #Connect d_blendReturn['d_result1']['mi_plug'].doConnectOut('%s.%s' % (const,targetWeights[0])) d_blendReturn['d_result2']['mi_plug'].doConnectOut('%s.%s' % (const,targetWeights[1])) d_blendReturn['d_result1']['mi_plug'].p_hidden = True d_blendReturn['d_result2']['mi_plug'].p_hidden = True mDriven.parent = mAimGroup#...parent back mDriven.followRoot = .5 return True elif mode == 'sequence': """ if len(ml_targets) != 2: cgmGEN.func_snapShot(vars()) return log.error("|{0}| >> Single blend mode must have 2 targets.".format(_str_func)) if len(ml_driven) != 1: cgmGEN.func_snapShot(vars()) return log.error("|{0}| >> Single blend mode must have 1 driven obj.".format(_str_func)) if not ml_parents: cgmGEN.func_snapShot(vars()) return log.error("|{0}| >> Single blend mode must have handleParents.".format(_str_func)) if len(ml_parents) != 1: cgmGEN.func_snapShot(vars()) return log.error("|{0}| >> Single blend mode must have 1 handleParent.".format(_str_func)) """ for i,mDriven in enumerate(ml_driven): log.debug("|{0}| >> on: {1} | {2}".format(_str_func,i,mDriven)) mUpDecomp = False if not mDriven.getMessage(msgLink_masterGroup): log.debug("|{0}| >> No master group, creating...".format(_str_func)) raise ValueError, log.error("|{0}| >> Add the create masterGroup setup, Josh".format(_str_func)) mDriven.masterGroup.parent 
= ml_parents[i] if upMode == 'decomposeMatrix': #Decompose matrix for parent... mUpDecomp = cgmMeta.cgmNode(nodeType = 'decomposeMatrix') mUpDecomp.rename("{0}_aimMatrix".format(ml_parents[i].p_nameBase)) #mUpDecomp.doStore('cgmName',ml_parents[i]) #mUpDecomp.addAttr('cgmType','aimMatrix',attrType='string',lock=True) #mUpDecomp.doName() ATTR.connect("{0}.worldMatrix".format(ml_upTargets[i].mNode),"{0}.{1}".format(mUpDecomp.mNode,'inputMatrix')) d_worldUp = {'worldUpObject' : ml_upTargets[i].mNode, 'worldUpType' : 'vector', 'worldUpVector': [0,0,0]} elif upMode == 'objectRotation': d_worldUp = {'worldUpObject' : ml_upTargets[i].mNode, 'worldUpType' : 'objectRotation', 'worldUpVector': upParent} else: raise ValueError, log.error("|{0}| >> Unknown upMode: {1}".format(_str_func,upMode)) s_rootTarget = False s_targetForward = False s_targetBack = False mMasterGroup = mDriven.masterGroup b_first = False if mDriven == ml_driven[0]: log.debug("|{0}| >> First handle: {1}".format(_str_func,mDriven)) if len(ml_driven) <=2: s_targetForward = ml_parents[-1].mNode else: s_targetForward = ml_driven[i+1].getMessage('masterGroup')[0] if rootTargetStart: s_rootTarget = rootTargetStart.mNode else: s_rootTarget = mRoot.mNode b_first = True elif mDriven == ml_driven[-1]: log.debug("|{0}| >> Last handle: {1}".format(_str_func,mDriven)) if rootTargetEnd: s_rootTarget = rootTargetEnd.mNode else: s_rootTarget = ml_parents[i].mNode s_targetBack = ml_driven[i-1].getMessage('masterGroup')[0] else: log.debug("|{0}| >> Reg handle: {1}".format(_str_func,mDriven)) s_targetForward = ml_driven[i+1].getMessage('masterGroup')[0] s_targetBack = ml_driven[i-1].getMessage('masterGroup')[0] #Decompose matrix for parent... 
""" mUpDecomp = cgmMeta.cgmNode(nodeType = 'decomposeMatrix') mUpDecomp.doStore('cgmName',ml_parents[i]) mUpDecomp.addAttr('cgmType','aimMatrix',attrType='string',lock=True) mUpDecomp.doName() ATTR.connect("%s.worldMatrix"%(ml_parents[i].mNode),"%s.%s"%(mUpDecomp.mNode,'inputMatrix')) """ if s_targetForward: mAimForward = mDriven.doCreateAt() mAimForward.parent = mMasterGroup mAimForward.doStore('cgmTypeModifier','forward') mAimForward.doStore('cgmType','aimer') mAimForward.doName() _const=mc.aimConstraint(s_targetForward, mAimForward.mNode, maintainOffset = True, #skip = 'z', aimVector = v_aim, upVector = v_up,**d_worldUp) s_targetForward = mAimForward.mNode if mUpDecomp: ATTR.connect("%s.%s"%(mUpDecomp.mNode,"outputRotate"),"%s.%s"%(_const[0],"upVector")) elif s_rootTarget: s_targetForward = s_rootTarget else: s_targetForward = ml_parents[i].mNode if s_targetBack: mAimBack = mDriven.doCreateAt() mAimBack.parent = mMasterGroup mAimBack.doStore('cgmTypeModifier','back') mAimBack.doStore('cgmType','aimer') mAimBack.doName() _const = mc.aimConstraint(s_targetBack, mAimBack.mNode, maintainOffset = True, #skip = 'z', aimVector = v_aimNeg, upVector = v_up, **d_worldUp) s_targetBack = mAimBack.mNode if mUpDecomp: ATTR.connect("%s.%s"%(mUpDecomp.mNode,"outputRotate"),"%s.%s"%(_const[0],"upVector")) else: s_targetBack = s_rootTarget #ml_parents[i].mNode #pprint.pprint([s_targetForward,s_targetBack]) mAimGroup = mDriven.doGroup(True,asMeta=True,typeModifier = 'aim') mDriven.parent = False log.debug("|{0}| >> obj: {1} | {2}".format(_str_func,i,mDriven)) log.debug("|{0}| >> forward: {1}".format(_str_func,s_targetForward)) log.debug("|{0}| >> back: {1}".format(_str_func,s_targetBack)) log.debug(cgmGEN._str_subLine) if b_first: const = mc.orientConstraint([s_targetBack, s_targetForward], mAimGroup.mNode, maintainOffset = True)[0] else: const = mc.orientConstraint([s_targetForward, s_targetBack], mAimGroup.mNode, maintainOffset = True)[0] d_blendReturn = 
NODEFACTORY.createSingleBlendNetwork([mDriven.mNode,'followRoot'], [mDriven.mNode,'resultRootFollow'], [mDriven.mNode,'resultAimFollow'], keyable=True) targetWeights = mc.orientConstraint(const,q=True, weightAliasList=True,maintainOffset=True) #Connect d_blendReturn['d_result1']['mi_plug'].doConnectOut('%s.%s' % (const,targetWeights[0])) d_blendReturn['d_result2']['mi_plug'].doConnectOut('%s.%s' % (const,targetWeights[1])) d_blendReturn['d_result1']['mi_plug'].p_hidden = True d_blendReturn['d_result2']['mi_plug'].p_hidden = True mDriven.parent = mAimGroup#...parent back if interpType: ATTR.set(const,'interpType',interpType) #if mDriven in [ml_driven[0],ml_driven[-1]]: # mDriven.followRoot = 1 #else: mDriven.followRoot = .5 return True raise ValueError,"Not done..." return for i,mObj in enumerate(ml_driven): return mObj.masterGroup.parent = ml_parents[i] s_rootTarget = False s_targetForward = False s_targetBack = False mMasterGroup = mObj.masterGroup b_first = False if mObj == ml_driven[0]: log.debug("|{0}| >> First handle: {1}".format(_str_func,mObj)) if len(ml_driven) <=2: s_targetForward = ml_parents[-1].mNode else: s_targetForward = ml_driven[i+1].getMessage('masterGroup')[0] s_rootTarget = mRoot.mNode b_first = True elif mObj == ml_driven[-1]: log.debug("|{0}| >> Last handle: {1}".format(_str_func,mObj)) s_rootTarget = ml_parents[i].mNode s_targetBack = ml_driven[i-1].getMessage('masterGroup')[0] else: log.debug("|{0}| >> Reg handle: {1}".format(_str_func,mObj)) s_targetForward = ml_driven[i+1].getMessage('masterGroup')[0] s_targetBack = ml_driven[i-1].getMessage('masterGroup')[0] #Decompose matrix for parent... 
mUpDecomp = cgmMeta.cgmNode(nodeType = 'decomposeMatrix') mUpDecomp.doStore('cgmName',ml_parents[i]) mUpDecomp.addAttr('cgmType','aimMatrix',attrType='string',lock=True) mUpDecomp.doName() ATTR.connect("%s.worldMatrix"%(ml_parents[i].mNode),"%s.%s"%(mUpDecomp.mNode,'inputMatrix')) if s_targetForward: mAimForward = mObj.doCreateAt() mAimForward.parent = mMasterGroup mAimForward.doStore('cgmTypeModifier','forward') mAimForward.doStore('cgmType','aimer') mAimForward.doName() _const=mc.aimConstraint(s_targetForward, mAimForward.mNode, maintainOffset = True, #skip = 'z', aimVector = [0,0,1], upVector = [1,0,0], worldUpObject = ml_parents[i].mNode, worldUpType = 'vector', worldUpVector = [0,0,0]) s_targetForward = mAimForward.mNode ATTR.connect("%s.%s"%(mUpDecomp.mNode,"outputRotate"),"%s.%s"%(_const[0],"upVector")) else: s_targetForward = ml_parents[i].mNode if s_targetBack: mAimBack = mObj.doCreateAt() mAimBack.parent = mMasterGroup mAimBack.doStore('cgmTypeModifier','back') mAimBack.doStore('cgmType','aimer') mAimBack.doName() _const = mc.aimConstraint(s_targetBack, mAimBack.mNode, maintainOffset = True, #skip = 'z', aimVector = [0,0,-1], upVector = [1,0,0], worldUpObject = ml_parents[i].mNode, worldUpType = 'vector', worldUpVector = [0,0,0]) s_targetBack = mAimBack.mNode ATTR.connect("%s.%s"%(mUpDecomp.mNode,"outputRotate"),"%s.%s"%(_const[0],"upVector")) else: s_targetBack = s_rootTarget #ml_parents[i].mNode pprint.pprint([s_targetForward,s_targetBack]) mAimGroup = mObj.doGroup(True,asMeta=True,typeModifier = 'aim') mObj.parent = False if b_first: const = mc.orientConstraint([s_targetBack, s_targetForward], mAimGroup.mNode, maintainOffset = True)[0] else: const = mc.orientConstraint([s_targetForward, s_targetBack], mAimGroup.mNode, maintainOffset = True)[0] d_blendReturn = NODEFACTORY.createSingleBlendNetwork([mObj.mNode,'followRoot'], [mObj.mNode,'resultRootFollow'], [mObj.mNode,'resultAimFollow'], keyable=True) targetWeights = mc.orientConstraint(const,q=True, 
weightAliasList=True,maintainOffset=True) #Connect d_blendReturn['d_result1']['mi_plug'].doConnectOut('%s.%s' % (const,targetWeights[0])) d_blendReturn['d_result2']['mi_plug'].doConnectOut('%s.%s' % (const,targetWeights[1])) d_blendReturn['d_result1']['mi_plug'].p_hidden = True d_blendReturn['d_result2']['mi_plug'].p_hidden = True mObj.parent = mAimGroup#...parent back if mObj in [ml_driven[0],ml_driven[-1]]: mObj.followRoot = 1 else: mObj.followRoot = .5
def create_uvPickerNetwork(target=None, name='iris', mode=1, enums=None, count=9, split=3):
    """
    Build a node network that drives a res_<name>U / res_<name>V float pair on
    target from picker enum attribute(s) (e.g. for picking a cell in a UV grid).

    :parameters:
        target(str) - node to add attrs to; a default group is created if None
        name(str) - base name for the attrs and the created nodes
        mode(int) - 2: two enum attrs divided by split through a multiplyDivide;
            otherwise: one enum attr routed through condition nodes summed by a
            plusMinusAverage
        enums(list) - enum attr names for mode 2; generated from name if None
        count(int) - number of picker values; only 9 has a value table in 1-attr mode
        split(int) - grid split; count/split must equal split (e.g. 9/3 == 3)
    """
    _str_func = 'create_uvPickerNetwork'
    log.debug("|{0}| >> ".format(_str_func) + '-' * 80)

    # count must be a perfect square of split (integer division under Python 2)
    if count / split != split:
        raise ValueError, "{0} || Split must divide evently to count. count: {1} | split: {2}".format(
            _str_func, count, split)

    if not target:
        target = mc.group(em=True, name='uvPickerDefault')

    if mode == 2:
        # NOTE(review): 'cgmGen' (lowercase) differs from 'cgmGEN' used elsewhere
        # in this module — confirm this alias is actually imported
        log.debug(cgmGen.logString_msg(_str_func, '2 Attr mode'))
        if not enums:
            enums = ['{0}_{1}'.format(name, i) for i in range(2)]
        for a in enums:
            ATTR.add(target, a, 'enum', enumOptions=[str(i) for i in range(split)])
        # Output attrs the network drives
        for a in 'U', 'V':
            _a = 'res_{0}{1}'.format(name, a)
            ATTR.add(target, _a, 'float', keyable=False, hidden=False)
            ATTR.set_hidden(target, _a, False)

        # enum value / split -> normalized UV via a divide-mode multiplyDivide
        mMD = cgmMeta.cgmNode(name="{0}_picker_md".format(name),
                              nodeType='multiplyDivide')
        mMD.operation = 2
        mMD.input2X = split
        mMD.input2Y = split
        mMD.doConnectIn('input1X', "{0}.{1}".format(target, enums[0]))
        mMD.doConnectIn('input1Y', "{0}.{1}".format(target, enums[1]))
        mMD.doConnectOut('outputX', '{0}.res_{1}U'.format(target, name))
        mMD.doConnectOut('outputY', '{0}.res_{1}V'.format(target, name))
    else:
        log.debug(cgmGen.logString_msg(_str_func, '1 Attr mode'))
        # Precomputed [U, V] values per picker index; only a 9-cell table exists
        _d_values = {9: [[.999, .666], [.333, .666], [.666, .666],
                         [.999, .333], [.333, .333], [.666, .333],
                         [.999, .999], [.333, .999], [.666, .999],
                         ]}
        l_dat = _d_values.get(count)
        if not l_dat:
            raise ValueError, "{0} | count {1} not supported".format(
                _str_func, count)

        # Output attrs the network drives
        for a in 'U', 'V':
            _a = 'res_{0}{1}'.format(name, a)
            ATTR.add(target, _a, 'float', keyable=False, hidden=False)
            ATTR.set_hidden(target, _a, False)

        # Condition-node outputs are summed here; only the matching index is nonzero
        mPMA = cgmMeta.cgmNode(name="{0}_picker_pma".format(name),
                               nodeType='plusMinusAverage')
        mPMA.operation = 1

        ATTR.add(target, name, 'enum', enumOptions=[str(i) for i in range(9)])
        mAttr = cgmMeta.cgmAttr(target, name)

        for i, vSet in enumerate(l_dat):
            _iterBase = "{0}_{1}".format(name, i)
            # Rebuild from scratch if a condition node already exists for this index
            if mc.objExists('%s_condNode' % _iterBase):
                mc.delete('%s_condNode' % _iterBase)
            mNode = cgmMeta.cgmNode(name="{0}_condNode".format(_iterBase),
                                    nodeType='condition')
            mNode.secondTerm = i
            mNode.colorIfTrueR = vSet[0]
            mNode.colorIfTrueG = vSet[1]
            mNode.colorIfFalseR = 0
            mNode.colorIfFalseG = 0
            mAttr.doConnectOut('%s.firstTerm' % mNode.mNode)
            ATTR.connect('%s.outColor' % mNode.mNode,
                         "{0}.input3D[{1}]".format(mPMA.mNode, i))
            #attributes.doConnectAttr('%s.outColorR'%mNode.mNode,'%s.%s'%(c,self.connectToAttr))

        mPMA.doConnectOut('output3Dx', '{0}.res_{1}U'.format(target, name))
        mPMA.doConnectOut('output3Dy', '{0}.res_{1}V'.format(target, name))
def attach_toShape(obj=None, targetShape=None, connectBy='parent', driver=None):
    """
    Attach a transform to a shape (mesh/nurbsSurface via follicle, curve via
    pointOnCurveInfo) at the closest point, optionally driving the attach
    parameter from a separate driver transform.

    :parameters:
        obj - transform to attach
        targetShape(str) - Curve, Nurbs, Mesh
        connectBy(str)
            parent - parent to track transform
            parentGroup - parent to group and have group follow
            conPoint - just point contraint
            conPointGroup - pointConstrain group
            conPointOrientGroup - point/orient constrain group
            conParentGroup - parent Constrain group
            None - just the tracker nodes
        driver - optional transform whose position drives the follicle/curve
            parameter via a closest-point network
    :returns:
        resulting dat - list of created node names, or (list, dict of meta
        objects) when a driver network was built
    """
    try:
        _str_func = 'attach_toShape'
        mObj = cgmMeta.validateObjArg(obj, 'cgmObject')
        mDriver = cgmMeta.validateObjArg(driver, 'cgmObject', noneValid=True)
        targetShape = VALID.mNodeString(targetShape)
        log.debug("targetShape: {0}".format(targetShape))

        #Get our data...
        d_closest = DIST.get_closest_point_data(targetShape, mObj.mNode)
        log.debug("|{0}| >> jnt: {1} | {2}".format(_str_func, mObj.mNode,
                                                   d_closest))
        #pprint.pprint(d_closest)
        md_res = {}

        if d_closest['type'] in ['mesh', 'nurbsSurface']:
            # Surface shapes: ride a follicle pinned at the closest UV
            log.debug("|{0}| >> Follicle mode...".format(_str_func))
            _shape = SHAPES.get_nonintermediate(d_closest['shape'])
            log.debug("_shape: {0}".format(_shape))
            l_follicleInfo = NODES.createFollicleOnMesh(_shape)

            i_follicleTrans = cgmMeta.asMeta(l_follicleInfo[1], 'cgmObject',
                                             setClass=True)
            i_follicleShape = cgmMeta.asMeta(l_follicleInfo[0], 'cgmNode')

            #> Name...
            i_follicleTrans.doStore('cgmName', mObj)
            i_follicleTrans.doStore('cgmTypeModifier', 'surfaceTrack')
            i_follicleTrans.doName()
            _trackTransform = i_follicleTrans.mNode

            #>Set follicle value...
            # mesh follicles take raw UV; nurbs follicles take normalized UV
            if d_closest['type'] == 'mesh':
                i_follicleShape.parameterU = d_closest['parameterU']
                i_follicleShape.parameterV = d_closest['parameterV']
            else:
                i_follicleShape.parameterU = d_closest['normalizedU']
                i_follicleShape.parameterV = d_closest['normalizedV']

            _res = [i_follicleTrans.mNode, i_follicleShape.mNode]
            md_res['mFollicle'] = i_follicleTrans
            md_res['mFollicleShape'] = i_follicleShape
        else:
            # Curve shapes: track via a pointOnCurveInfo at the closest parameter
            log.debug("|{0}| >> Curve mode...".format(_str_func))
            #d_returnBuff = distance.returnNearestPointOnCurveInfo(obj,crv)
            _shape = SHAPES.get_nonintermediate(d_closest['shape'])

            mPOCI = cgmMeta.cgmNode(nodeType='pointOnCurveInfo')
            mc.connectAttr("%s.worldSpace" % _shape,
                           "%s.inputCurve" % mPOCI.mNode)
            mPOCI.parameter = d_closest['parameter']

            mTrack = mObj.doCreateAt()
            mTrack.doStore('cgmName', mObj)
            mTrack.doStore('cgmType', 'surfaceTrack')
            mTrack.doName()

            _trackTransform = mTrack.mNode

            mc.connectAttr("%s.position" % mPOCI.mNode,
                           "%s.t" % _trackTransform)
            mPOCI.doStore('cgmName', mObj)
            mPOCI.doName()
            _res = [mTrack.mNode, mPOCI.mNode]

        if mDriver:
            if d_closest['type'] in ['nurbsSurface']:
                # Drive the follicle UV from the driver's closest point on surface.
                # NOTE(review): this branch assumes the follicle vars above exist,
                # i.e. it is never hit for 'mesh' — confirm intended
                mFollicle = i_follicleTrans
                mFollShape = i_follicleShape

                # Surface parameter ranges, needed to normalize for the follicle
                minU = ATTR.get(_shape, 'minValueU')
                maxU = ATTR.get(_shape, 'maxValueU')
                minV = ATTR.get(_shape, 'minValueV')
                maxV = ATTR.get(_shape, 'maxValueV')

                mDriverLoc = mDriver.doLoc()
                mc.pointConstraint(mDriver.mNode, mDriverLoc.mNode)

                #mLoc = mObj.doLoc()
                str_baseName = "{0}_to_{1}".format(mDriver.p_nameBase,
                                                   mObj.p_nameBase)
                mPlug_normalizedU = cgmMeta.cgmAttr(
                    mDriverLoc.mNode,
                    "{0}_normalizedU".format(str_baseName),
                    attrType='float',
                    hidden=False,
                    lock=False)
                mPlug_sumU = cgmMeta.cgmAttr(mDriverLoc.mNode,
                                             "{0}_sumU".format(str_baseName),
                                             attrType='float',
                                             hidden=False,
                                             lock=False)
                mPlug_normalizedV = cgmMeta.cgmAttr(
                    mDriverLoc.mNode,
                    "{0}_normalizedV".format(str_baseName),
                    attrType='float',
                    hidden=False,
                    lock=False)
                mPlug_sumV = cgmMeta.cgmAttr(mDriverLoc.mNode,
                                             "{0}_sumV".format(str_baseName),
                                             attrType='float',
                                             hidden=False,
                                             lock=False)

                #res_closest = DIST.create_closest_point_node(mLoc.mNode, mCrv_reparam.mNode,True)
                log.debug("|{0}| >> Closest info {1}".format(_str_func, _res))

                # Live closest-point sampling of the driver loc on the surface
                srfNode = mc.createNode('closestPointOnSurface')
                mc.connectAttr("%s.worldSpace[0]" % _shape,
                               "%s.inputSurface" % srfNode)
                mc.connectAttr("%s.translate" % mDriverLoc.mNode,
                               "%s.inPosition" % srfNode)
                #mc.connectAttr("%s.position" % srfNode, "%s.translate" % mLoc.mNode, f=True)

                #mClosestPoint = cgmMeta.validateObjArg(srfNode,setClass=True)
                #mClosestPoint.doStore('cgmName',mObj)
                #mClosestPoint.doName()

                log.debug("|{0}| >> paramU {1}.parameterU | {2}".format(
                    _str_func, srfNode, ATTR.get(srfNode, 'parameterU')))
                log.debug("|{0}| >> paramV {1}.parameterV | {2}".format(
                    _str_func, srfNode, ATTR.get(srfNode, 'parameterV')))

                # Build the normalization math as node-graph expressions:
                # normalized = (min + parameter) / (max - min) per axis
                l_argBuild = []
                mPlug_uSize = cgmMeta.cgmAttr(mDriverLoc.mNode,
                                              "{0}_uSize".format(str_baseName),
                                              attrType='float',
                                              hidden=False,
                                              lock=False)
                mPlug_vSize = cgmMeta.cgmAttr(mDriverLoc.mNode,
                                              "{0}_vSize".format(str_baseName),
                                              attrType='float',
                                              hidden=False,
                                              lock=False)
                l_argBuild.append("{0} = {1} - {2}".format(
                    mPlug_vSize.p_combinedName, maxV, minV))
                l_argBuild.append("{0} = {1} - {2}".format(
                    mPlug_uSize.p_combinedName, maxU, minU))
                l_argBuild.append("{0} = {1} + {2}.parameterU".format(
                    mPlug_sumU.p_combinedName, minU, srfNode))
                l_argBuild.append("{0} = {1} / {2}".format(
                    mPlug_normalizedU.p_combinedName,
                    mPlug_sumU.p_combinedName, mPlug_uSize.p_combinedName))
                l_argBuild.append("{0} = {1} + {2}.parameterV".format(
                    mPlug_sumV.p_combinedName, minV, srfNode))
                l_argBuild.append("{0} = {1} / {2}".format(
                    mPlug_normalizedV.p_combinedName,
                    mPlug_sumV.p_combinedName, mPlug_vSize.p_combinedName))

                for arg in l_argBuild:
                    log.debug("|{0}| >> Building arg: {1}".format(
                        _str_func, arg))
                    NODEFACTORY.argsToNodes(arg).doBuild()

                # Feed the normalized result into the follicle's UV
                ATTR.connect(mPlug_normalizedU.p_combinedShortName,
                             '{0}.parameterU'.format(mFollShape.mNode))
                ATTR.connect(mPlug_normalizedV.p_combinedShortName,
                             '{0}.parameterV'.format(mFollShape.mNode))

                md_res['mDriverLoc'] = mDriverLoc

            elif d_closest['type'] in ['curve', 'nurbsCurve']:
                # Curve driver: closest-point node tracks the driver loc along the curve
                mDriverLoc = mDriver.doLoc()
                mc.pointConstraint(mDriver.mNode, mDriverLoc.mNode)
                _resClosest = DIST.create_closest_point_node(
                    mDriverLoc.mNode, _shape, True)
                _loc = _resClosest[0]
                md_res['mDriverLoc'] = mDriverLoc
                md_res['mDrivenLoc'] = cgmMeta.asMeta(_loc)
                md_res['mTrack'] = mTrack
            else:
                log.warning(
                    cgmGEN.logString_msg(
                        _str_func,
                        "Shape type not currently supported for driver setup. Type: {0}"
                        .format(d_closest['type'])))

        #if connectBy is None:
            #return _res

        # Hook the driven object to the track transform per connectBy
        if connectBy == 'parent':
            mObj.p_parent = _trackTransform
        elif connectBy == 'conPoint':
            mc.pointConstraint(_trackTransform, mObj.mNode,
                               maintainOffset=True)
        elif connectBy == 'conParent':
            mc.parentConstraint(_trackTransform, mObj.mNode,
                                maintainOffset=True)
        elif connectBy == 'parentGroup':
            mGroup = mObj.doGroup(asMeta=True)
            #_grp = TRANS.group_me(obj,True)
            #TRANS.parent_set(_grp,_trackTransform)
            mGroup.p_parent = _trackTransform
            _res = _res + [mGroup.mNode]
        elif connectBy == 'conPointGroup':
            mLoc = mObj.doLoc()
            mLoc.p_parent = _trackTransform
            mGroup = mObj.doGroup(asMeta=True)
            mc.pointConstraint(mLoc.mNode, mGroup.mNode)
            _res = _res + [mGroup.mNode]
        elif connectBy == 'conPointOrientGroup':
            mLoc = mObj.doLoc()
            mLoc.p_parent = _trackTransform
            mGroup = mObj.doGroup(asMeta=True)
            mc.pointConstraint(mLoc.mNode, mGroup.mNode)
            mc.orientConstraint(mLoc.mNode, mGroup.mNode)
            _res = _res + [mGroup.mNode]
        elif connectBy == 'conParentGroup':
            mLoc = mObj.doLoc()
            mLoc.p_parent = _trackTransform
            mGroup = mObj.doGroup(asMeta=True)
            mc.parentConstraint(mLoc.mNode, mGroup.mNode)
            _res = _res + [mGroup.mNode]
        elif connectBy is None:
            pass
        else:
            raise NotImplementedError, "|{0}| >>invalid connectBy: {1}".format(
                _str_func, connectBy)

        if md_res:
            return _res, md_res
        return _res
        #pprint.pprint(vars())
    except Exception, err:
        cgmGEN.cgmExceptCB(Exception, err)
def getControlShader(direction='center', controlType='main', transparent=False, proxy=False, directProxy=False, shaderNode='phong'): """ Proxy mode modifies the base value and setups up a different shader """ if directProxy: _node = "cgmShader_directProxy" else: if controlType == 'puppetmesh': _node = "cgmShader_{0}".format(controlType.capitalize()) else: _node = "cgmShader_{0}{1}".format(direction, controlType.capitalize()) if transparent: _node = _node + '_trans' if proxy: _node = _node + '_proxy' log.debug(_node) _set = False if not mc.objExists(_node): _node = mc.shadingNode(shaderNode, n=_node, asShader=True) _set = mc.sets(renderable=True, noSurfaceShader=True, em=True, name=_node + 'SG') ATTR.connect("{0}.outColor".format(_node), "{0}.surfaceShader".format(_set)) if directProxy: ATTR.set(_node, 'transparency', 1) ATTR.set(_node, 'ambientColorR', 0) ATTR.set(_node, 'ambientColorG', 0) ATTR.set(_node, 'ambientColorB', 0) ATTR.set(_node, 'transparency', .5) ATTR.set(_node, 'incandescence', 0) else: if controlType == 'puppetmesh': _rgb = [.5, .5, .5] _d = { 'diffuse': .65, 'specularColor': [0.142857, 0.142857, 0.142857] } for a, v in _d.iteritems(): try: ATTR.set(_node, a, v) except Exception, err: log.error(err) else: _color = SHARED._d_side_colors[direction][controlType] _rgb = SHARED._d_colors_to_RGB[_color] ATTR.set(_node, 'diffuse', 1.0) if proxy: #_rgb = [v * .75 for v in _rgb] _hsv = [v for v in get_HSV_fromRGB(_rgb[0], _rgb[1], _rgb[2])] _hsv[1] = .5 _rgb = get_RGB_fromHSV(_hsv[0], _hsv[1], _hsv[2]) ATTR.set(_node, 'diffuse', .75) ATTR.set(_node, 'colorR', _rgb[0]) ATTR.set(_node, 'colorG', _rgb[1]) ATTR.set(_node, 'colorB', _rgb[2]) if transparent: ATTR.set(_node, 'ambientColorR', _rgb[0] * .1) ATTR.set(_node, 'ambientColorG', _rgb[1] * .1) ATTR.set(_node, 'ambientColorB', _rgb[2] * .1) ATTR.set(_node, 'transparency', .5) ATTR.set(_node, 'incandescence', 0)