def set_influence_weights(self, dag_path, components):
    """Sets all the influence weights.

    :param dag_path: MDagPath of the deformed geometry.
    :param components: Component MObject of the deformed components.
    """
    # Influences currently driving the skinCluster.
    inf_dags = OpenMaya.MDagPathArray()
    inf_count = self.fn.influenceObjects(inf_dags)

    # Indices of the deformed components.
    comp_ids = OpenMaya.MIntArray()
    comp_fn = OpenMaya.MFnSingleIndexedComponent(components)
    comp_fn.getElements(comp_ids)
    vert_count = comp_ids.length()

    # Flat weight array laid out as [component][influence].
    flat_weights = OpenMaya.MDoubleArray(vert_count * inf_count)

    for src_name, src_weights in self.data["weights"].items():
        short_name = src_name.split("|")[-1]
        for col in range(inf_dags.length()):
            current_name = inf_dags[col].partialPathName()
            if shortcuts.remove_namespace_from_name(current_name) != short_name:
                continue
            # Matching influence found: copy its weights into the flat array.
            for row in range(vert_count):
                flat_weights.set(src_weights[comp_ids[row]], row * inf_count + col)
            break

    # Apply the weights for every influence index in one call (no normalize).
    all_indices = OpenMaya.MIntArray(inf_count)
    for col in range(inf_count):
        all_indices.set(col, col)
    self.fn.setWeights(dag_path, components, all_indices, flat_weights, False)
def set_influence_weights(self, dag_path, components):
    """Sets all the influence weights.

    :param dag_path: MDagPath of the deformed geometry.
    :param components: Component MObject of the deformed components.
    """
    # Start from the current skinCluster weights so influences that get no
    # imported data keep their existing values.
    weights = self.__get_current_weights(dag_path, components)
    influence_paths = OpenMaya.MDagPathArray()
    influence_count = self.fn.influenceObjects(influence_paths)
    # Weights are stored flat as [component][influence].
    components_per_influence = weights.length() // influence_count
    # Keep track of which imported influences aren't used
    unused_imports = []
    # Keep track of which existing influences don't get anything imported
    no_match = [
        influence_paths[ii].partialPathName()
        for ii in range(influence_paths.length())
    ]
    for imported_influence, imported_weights in self.data["weights"].items(
    ):
        # Match on the short (last path component) name.
        imported_influence = imported_influence.split("|")[-1]
        for ii in range(influence_paths.length()):
            influence_name = influence_paths[ii].partialPathName()
            influence_without_namespace = shortcuts.remove_namespace_from_name(
                influence_name)
            if influence_without_namespace == imported_influence:
                # Store the imported weights into the MDoubleArray
                for jj in range(components_per_influence):
                    weights.set(imported_weights[jj],
                                jj * influence_count + ii)
                no_match.remove(influence_name)
                break
        else:
            # for-else: no existing influence matched this imported name.
            # NOTE(review): this appends the short name; the lookup
            # self.data["weights"][src] below assumes the stored keys equal
            # that short name — verify keys never contain "|".
            unused_imports.append(imported_influence)
    if unused_imports and no_match:
        # Let the user map leftover imported influences onto the existing
        # influences that received nothing.
        mapping_dialog = WeightRemapDialog()
        mapping_dialog.set_influences(unused_imports, no_match)
        mapping_dialog.exec_()
        for src, dst in mapping_dialog.mapping.items():
            for ii in range(influence_paths.length()):
                if influence_paths[ii].partialPathName() == dst:
                    for jj in range(components_per_influence):
                        weights.set(self.data["weights"][src][jj],
                                    jj * influence_count + ii)
                    break
    # setWeights takes the full influence index list; False = no normalize.
    influence_indices = OpenMaya.MIntArray(influence_count)
    for ii in range(influence_count):
        influence_indices.set(ii, ii)
    self.fn.setWeights(dag_path, components, influence_indices, weights,
                       False)
def get_joints_that_need_remapping(joints_in_file):
    """Compare joints stored in a skin file against the joints in the scene.

    :param joints_in_file: Iterable of joint names from the skin file.
    :return: Tuple of (joints from the file with no scene match,
        scene joints that would receive no weights).
    """
    missing_from_scene = []
    # Scene joints by short name, namespaces stripped.
    scene_joints = {
        shortcuts.remove_namespace_from_name(name)
        for name in cmds.ls(type="joint")
    }
    for file_joint in joints_in_file:
        short_name = file_joint.split("|")[-1]
        if short_name in scene_joints:
            # Consumed: this scene joint will get weights.
            scene_joints.remove(short_name)
        else:
            missing_from_scene.append(short_name)
    return missing_from_scene, scene_joints
def set_influence_weights(self, dag_path, components):
    """Sets all the influence weights.

    Imported weights are matched to existing influences by short name with
    namespaces stripped; leftover imports and unmatched influences are
    offered to the user for manual remapping.

    :param dag_path: MDagPath of the deformed geometry.
    :param components: Component MObject of the deformed components.
    """
    weights = self.__get_current_weights(dag_path, components)
    influence_paths = OpenMaya.MDagPathArray()
    influence_count = self.fn.influenceObjects(influence_paths)
    # Floor division: '/' yields a float on Python 3, which would make the
    # range() calls below raise TypeError.
    components_per_influence = weights.length() // influence_count
    # Keep track of which imported influences aren't used
    unused_imports = []
    # Keep track of which existing influences don't get anything imported
    no_match = [influence_paths[ii].partialPathName()
                for ii in range(influence_paths.length())]
    for imported_influence, imported_weights in self.data['weights'].items():
        for ii in range(influence_paths.length()):
            influence_name = influence_paths[ii].partialPathName()
            influence_without_namespace = shortcuts.remove_namespace_from_name(
                influence_name)
            if influence_without_namespace == imported_influence:
                # Store the imported weights into the MDoubleArray
                for jj in range(components_per_influence):
                    weights.set(imported_weights[jj],
                                jj * influence_count + ii)
                no_match.remove(influence_name)
                break
        else:
            # for-else: no existing influence matched this imported name.
            unused_imports.append(imported_influence)
    if unused_imports and no_match:
        # Ask the user to map leftover imports onto unmatched influences.
        mapping_dialog = WeightRemapDialog()
        mapping_dialog.set_influences(unused_imports, no_match)
        mapping_dialog.exec_()
        for src, dst in mapping_dialog.mapping.items():
            for ii in range(influence_paths.length()):
                if influence_paths[ii].partialPathName() == dst:
                    for jj in range(components_per_influence):
                        weights.set(self.data['weights'][src][jj],
                                    jj * influence_count + ii)
                    break
    # setWeights takes the full influence index list; False = no normalize.
    influence_indices = OpenMaya.MIntArray(influence_count)
    for ii in range(influence_count):
        influence_indices.set(ii, ii)
    self.fn.setWeights(dag_path, components, influence_indices, weights,
                       False)
def gather_influence_weights(self, dag_path, components):
    """Gathers all the influence weights

    :param dag_path: MDagPath of the deformed geometry.
    :param components: Component MObject of the deformed components.
    """
    weights = self.__get_current_weights(dag_path, components)
    influence_paths = OpenMaya.MDagPathArray()
    influence_count = self.fn.influenceObjects(influence_paths)
    # Floor division: '/' yields a float on Python 3, which would make the
    # range() call below raise TypeError.
    components_per_influence = weights.length() // influence_count
    for ii in range(influence_paths.length()):
        influence_name = influence_paths[ii].partialPathName()
        # We want to store the weights by influence without the namespace so
        # it is easier to import if the namespace is different
        influence_without_namespace = shortcuts.remove_namespace_from_name(
            influence_name)
        # Weights are stored flat as [component][influence]; pull out the
        # column for this influence.
        self.data['weights'][influence_without_namespace] = [
            weights[jj * influence_count + ii]
            for jj in range(components_per_influence)
        ]
def import_skin(file_path=None, shapes=None, to_selected_shapes=False):
    """Creates a skinCluster on the specified shape if one does not already exist
    and then import the weight data.

    :param file_path: Optional path to the skin file; prompts with a file
        dialog when omitted.
    :param shapes: Optional shape name or list of shape names to restrict
        the import to.
    :param to_selected_shapes: If True, only import onto the currently
        selected shapes.
    :raises RuntimeError: If the mesh and file vertex counts differ.
    """
    selected_shapes = cmds.ls(sl=True) if to_selected_shapes else None
    if file_path is None:
        file_path = cmds.fileDialog2(
            dialogStyle=2, fileMode=1,
            fileFilter='Skin Files (*{0})'.format(EXTENSION))
    if not file_path:
        return
    if not isinstance(file_path, basestring):
        # fileDialog2 returns a list of paths.
        file_path = file_path[0]
    # Read in the file.
    # NOTE(review): pickle.load can execute arbitrary code — only load skin
    # files from trusted sources.
    with open(file_path, 'rb') as fh:
        data = pickle.load(fh)
    # Normalize a single shape name into a list.  The original test was
    # inverted ('not isinstance'), which wrapped lists and left strings bare.
    if shapes and isinstance(shapes, basestring):
        shapes = [shapes]
    for skin_data in data:
        shape = skin_data['shape']
        if not cmds.objExists(shape):
            continue
        if selected_shapes and shape not in selected_shapes:
            continue
        elif not to_selected_shapes and shapes and shape not in shapes:
            continue
        # Make sure the vertex count is the same
        mesh_vertex_count = cmds.polyEvaluate(shape, vertex=True)
        imported_vertex_count = len(skin_data['blendWeights'])
        if mesh_vertex_count != imported_vertex_count:
            raise RuntimeError('Vertex counts do not match. %d != %d' %
                               (mesh_vertex_count, imported_vertex_count))
        # Check if the shape has a skinCluster
        skins = get_skin_clusters(shape)
        if skins:
            skin_cluster = SkinCluster(skins[0])
        else:
            # Create a new skinCluster
            joints = skin_data['weights'].keys()
            # Make sure all the joints exist
            unused_imports = []
            no_match = set([shortcuts.remove_namespace_from_name(x)
                            for x in cmds.ls(type='joint')])
            for j in joints:
                if j in no_match:
                    no_match.remove(j)
                else:
                    unused_imports.append(j)
            # If there were unmapped influences ask the user to map them
            if unused_imports and no_match:
                mapping_dialog = WeightRemapDialog()
                mapping_dialog.set_influences(unused_imports, no_match)
                mapping_dialog.exec_()
                for src, dst in mapping_dialog.mapping.items():
                    # Swap the mapping
                    skin_data['weights'][dst] = skin_data['weights'][src]
                    del skin_data['weights'][src]
            # Create the skinCluster with post normalization so setting the
            # weights does not normalize all the weights
            joints = list(skin_data['weights'].keys())
            skin = cmds.skinCluster(joints, shape, tsb=True, nw=2,
                                    n=skin_data['name'])[0]
            skin_cluster = SkinCluster(skin)
        skin_cluster.set_data(skin_data)
    logging.info('Imported %s', file_path)
def import_skin(file_path=None, shape=None, to_selected_shapes=False):
    """Creates a skinCluster on the specified shape if one does not already exist
    and then import the weight data.

    :param file_path: Optional path to the skin json file; prompts with a
        file dialog when omitted.
    :param shape: Optional shape to import onto; defaults to the shape
        stored in the file.
    :param to_selected_shapes: If True, import onto the first selected shape.
    :raises RuntimeError: If the shape does not exist or the vertex counts
        differ.
    """
    if file_path is None:
        file_path = cmds.fileDialog2(
            dialogStyle=2, fileMode=1,
            fileFilter='Skin Files (*{0})'.format(EXTENSION))
    if not file_path:
        return
    if not isinstance(file_path, string_types):
        # fileDialog2 returns a list of paths.
        file_path = file_path[0]
    # Read in the file
    with open(file_path, 'r') as fh:
        data = json.load(fh)
    if to_selected_shapes:
        shape = cmds.ls(sl=True)
        if shape:
            shape = shape[0]
    if shape is None:
        shape = data['shape']
    if not cmds.objExists(shape):
        raise RuntimeError(
            'Cannot import skin, {} does not exist'.format(shape))
    # Make sure the vertex count is the same
    mesh_vertex_count = cmds.polyEvaluate(shape, vertex=True)
    imported_vertex_count = len(data['blendWeights'])
    if mesh_vertex_count != imported_vertex_count:
        raise RuntimeError('Vertex counts do not match. Mesh %d != File %d' %
                           (mesh_vertex_count, imported_vertex_count))
    # Check if the shape has a skinCluster
    skins = get_skin_clusters(shape)
    if skins:
        skin_cluster = SkinCluster(skins[0])
    else:
        # Create a new skinCluster
        joints = data['weights'].keys()
        # Make sure all the joints exist
        unused_imports = []
        no_match = set([
            shortcuts.remove_namespace_from_name(x)
            for x in cmds.ls(type='joint')
        ])
        for j in joints:
            if j in no_match:
                no_match.remove(j)
            else:
                unused_imports.append(j)
        # If there were unmapped influences ask the user to map them
        if unused_imports and no_match:
            mapping_dialog = WeightRemapDialog()
            mapping_dialog.set_influences(unused_imports, no_match)
            mapping_dialog.exec_()
            for src, dst in mapping_dialog.mapping.items():
                # Swap the mapping
                data['weights'][dst] = data['weights'][src]
                del data['weights'][src]
        # Create the skinCluster with post normalization so setting the
        # weights does not normalize all the weights.
        # list(): keys() is a view on Python 3 and the command expects a
        # concrete sequence.
        joints = list(data['weights'].keys())
        kwargs = {}
        if data['maintainMaxInfluences']:
            kwargs['obeyMaxInfluences'] = True
            kwargs['maximumInfluences'] = data['maxInfluences']
        skin = cmds.skinCluster(joints, shape, tsb=True, nw=2,
                                n=data['name'], **kwargs)[0]
        skin_cluster = SkinCluster(skin)
    skin_cluster.set_data(data)
    logging.info('Imported %s', file_path)
def import_skin(file_path=None, shape=None, to_selected_shapes=False):
    """Creates a skinCluster on the specified shape if one does not already exist
    and then import the weight data.

    :param file_path: Optional path to the skin json file; prompts with a
        file dialog when omitted.
    :param shape: Optional shape to import onto; defaults to the shape
        stored in the file.
    :param to_selected_shapes: If True, import onto the first selected shape.
    :raises RuntimeError: If the mesh and file vertex counts differ.
    """
    if file_path is None:
        file_path = cmds.fileDialog2(
            dialogStyle=2, fileMode=1,
            fileFilter="Skin Files (*{0})".format(EXTENSION))
    if not file_path:
        return
    if not isinstance(file_path, string_types):
        # fileDialog2 returns a list of paths.
        file_path = file_path[0]
    # Read in the file
    with open(file_path, "r") as fh:
        data = json.load(fh)
    # Some cases the skinningMethod may have been set to -1
    if data.get("skinningMethod", 0) < 0:
        data["skinningMethod"] = 0
    if to_selected_shapes:
        shape = cmds.ls(sl=True)
        if shape:
            shape = shape[0]
    if shape is None:
        shape = data["shape"]
    if not cmds.objExists(shape):
        logging.warning("Cannot import skin, {} does not exist".format(shape))
        return
    # Make sure the vertex count is the same
    mesh_vertex_count = cmds.polyEvaluate(shape, vertex=True)
    imported_vertex_count = len(data["blendWeights"])
    if mesh_vertex_count != imported_vertex_count:
        # The original literal contained a raw newline (a syntax error);
        # the message is now a single line.
        raise RuntimeError(
            "Vertex counts do not match. Mesh {} != File {}".format(
                mesh_vertex_count, imported_vertex_count))
    # Check if the shape has a skinCluster
    skins = get_skin_clusters(shape)
    if skins:
        skin_cluster = SkinCluster(skins[0])
    else:
        # Create a new skinCluster
        joints = data["weights"].keys()
        # Make sure all the joints exist
        unused_imports = []
        no_match = set([
            shortcuts.remove_namespace_from_name(x)
            for x in cmds.ls(type="joint")
        ])
        for j in joints:
            # Match on the short (last path component) name.
            j = j.split("|")[-1]
            if j in no_match:
                no_match.remove(j)
            else:
                unused_imports.append(j)
        # If there were unmapped influences ask the user to map them
        if unused_imports and no_match:
            mapping_dialog = WeightRemapDialog(file_path)
            mapping_dialog.set_influences(unused_imports, no_match)
            mapping_dialog.exec_()
            for src, dst in mapping_dialog.mapping.items():
                # Swap the mapping
                data["weights"][dst] = data["weights"][src]
                del data["weights"][src]
        # Create the skinCluster with post normalization so setting the
        # weights does not normalize all the weights.
        # list(): keys() is a view on Python 3 and the command expects a
        # concrete sequence.
        joints = list(data["weights"].keys())
        kwargs = {}
        if data["maintainMaxInfluences"]:
            kwargs["obeyMaxInfluences"] = True
            kwargs["maximumInfluences"] = data["maxInfluences"]
        skin = cmds.skinCluster(joints, shape, tsb=True, nw=2,
                                n=data["name"], **kwargs)[0]
        skin_cluster = SkinCluster(skin)
    skin_cluster.set_data(data)
    logging.info("Imported %s", file_path)