Exemple #1
0
def exportPreset(presetName, visHierarchyTop, locale=DEFAULT_LOCALE):
    '''
    exports a vis hierarchy preset file - this file contains a node hierarchy which represents a vis hierarchy.
    each empty transform node in the group represents a set, and each volume in the structure represents a face
    selection used to determine vis set membership
    '''
    exportDict = api.writeExportDict(TOOL_NAME, TOOL_VERSION)

    def getVolumesAndEmptyGroups(node):
        '''
        classifies the immediate transform children of <node>: children with a
        nurbsSurface shape are returned as "volumes" (with their exportVolume
        type, worldspace position/rotation and scale), shape-less children are
        returned as "groups"
        '''
        #NOTE: listRelatives returns None (not an empty list) when nothing matches
        children = cmd.listRelatives(node, type='transform', path=True) or []
        volumes = []
        groups = []
        for child in children:
            shapes = cmd.listRelatives(child,
                                       shapes=True,
                                       type='nurbsSurface',
                                       path=True)
            if shapes:
                #renamed from "type" - don't shadow the builtin
                volumeType = int(cmd.getAttr('%s.exportVolume' % child))
                pos = cmd.xform(child, q=True, ws=True, rp=True)
                rot = cmd.xform(child, q=True, ws=True, ro=True)
                scale = cmd.getAttr('%s.s' % child)[0]
                volumes.append((child, volumeType, pos, rot, scale))
            else:
                groups.append(child)

        return volumes, groups

    topVolumes, topGroups = getVolumesAndEmptyGroups(visHierarchyTop)

    #create the first entry - the hierarchy root has no parent
    toExport = [(visHierarchyTop, None, topVolumes)]

    #breadth-first walk of the group hierarchy - topGroups doubles as the work queue
    while topGroups:
        curNode = topGroups.pop(0)
        curParent = cmd.listRelatives(curNode, p=True)[0]
        curVolumes, curChildren = getVolumesAndEmptyGroups(curNode)
        topGroups.extend(curChildren)
        toExport.append((curNode, curParent, curVolumes))

    exportDict['preset'] = toExport
    thePreset = filesystem.Preset(locale, TOOL_NAME, presetName, EXTENSION)
    thePreset.pickle(exportDict)
Exemple #2
0
    def write(self, objects, **kwargs):
        '''
        bakes the given objects into a clip of this preset's type, pickles it
        to disk along with export metadata, then generates an icon for it
        '''
        clipType = self.getType()

        #assemble the export dict describing the clip
        exportDict = api.writeExportDict(TOOL_NAME, VER)
        exportDict[kEXPORT_DICT_CLIP_TYPE] = clipType
        exportDict[kEXPORT_DICT_OBJECTS] = objects
        exportDict[kEXPORT_DICT_WORLDSPACE] = False

        #instantiate the clip class for this type and bake the objects into it
        clip = self.TYPE_CLASSES[clipType]()
        clip.generate(objects, **kwargs)
        exportDict[kEXPORT_DICT_THE_CLIP] = clip

        #write the preset file to disk
        self.pickle(exportDict)

        #generate the icon for the clip and add it to perforce if appropriate
        icon = generateIcon(self)
	def write( self, objects, **kwargs ):
		'''
		generates a clip of this preset's type from the given objects, writes
		it (with export metadata) to this preset, and builds an icon for it
		'''
		clipType = self.getType()

		#build the export dict that describes the clip
		exportDict = api.writeExportDict( TOOL_NAME, VER )
		exportDict[ kEXPORT_DICT_CLIP_TYPE ] = clipType
		exportDict[ kEXPORT_DICT_OBJECTS ] = objects
		exportDict[ kEXPORT_DICT_WORLDSPACE ] = False

		#instantiate the appropriate clip class and bake the objects into it
		clip = self.TYPE_CLASSES[ clipType ]()
		clip.generate( objects, **kwargs )
		exportDict[ kEXPORT_DICT_THE_CLIP ] = clip

		#write the preset file to disk
		self.pickle( exportDict )

		#generate the icon for the clip and add it to perforce if appropriate
		icon = generateIcon( self )
def savePostTraceScheme(presetName):
    '''
    stores all post trace commands found in the current scene out to disk
    '''
    #find every node in the scene carrying a post trace command attribute
    postTraceNodes = cmd.ls("*.%s" % POST_TRACE_ATTR_NAME, r=True)

    #key each command value by its node name, stripped of namespace and attribute
    postTraceDict = {}
    for attrPath in postTraceNodes:
        nodeName = attrPath.split(':')[-1].split('.')[0]
        postTraceDict[nodeName] = cmd.getAttr(attrPath)

    xportDict = api.writeExportDict(TOOL_NAME, 0)

    #bundle export metadata with the command dict and pickle to a global preset
    p = Preset(GLOBAL, TOOL_NAME, presetName, EXTENSION)
    p.pickle((xportDict, postTraceDict))
Exemple #5
0
def savePostTraceScheme( presetName ):
	'''
	stores all post trace commands found in the current scene out to disk
	'''
	#every attribute path in the scene matching the post trace attr name
	postTraceNodes = cmd.ls( "*.%s" % POST_TRACE_ATTR_NAME, r=True )

	#map node name (namespace and attribute stripped) -> command value
	postTraceDict = {}
	for attrPath in postTraceNodes:
		nodeName = attrPath.split( ':' )[-1]
		nodeName = nodeName.split( '.' )[0]
		postTraceDict[ nodeName ] = cmd.getAttr( attrPath )

	xportDict = api.writeExportDict( TOOL_NAME, 0 )

	#pickle the export metadata together with the gathered commands
	thePreset = Preset( GLOBAL, TOOL_NAME, presetName, EXTENSION )
	thePreset.pickle( (xportDict, postTraceDict) )
def exportPreset( presetName, visHierarchyTop, locale=DEFAULT_LOCALE ):
	'''
	exports a vis hierarchy preset file - this file contains a node hierarchy which represents a vis hierarchy.
	each empty transform node in the group represents a set, and each volume in the structure represents a face
	selection used to determine vis set membership
	'''
	exportDict = api.writeExportDict(TOOL_NAME, TOOL_VERSION)

	def getVolumesAndEmptyGroups( node ):
		'''
		splits the immediate transform children of <node> into volumes (children
		with a nurbsSurface shape, returned with their exportVolume type and
		worldspace pos/rot/scale) and empty groups (shape-less children)
		'''
		#NOTE: listRelatives returns None (not []) when there are no children
		children = cmd.listRelatives(node, type='transform', path=True) or []
		volumes = []
		groups = []
		for child in children:
			shapes = cmd.listRelatives(child, shapes=True, type='nurbsSurface', path=True)
			if shapes:
				#renamed from "type" - don't shadow the builtin
				volumeType = int( cmd.getAttr('%s.exportVolume' % child) )
				pos = cmd.xform(child, q=True, ws=True, rp=True)
				rot = cmd.xform(child, q=True, ws=True, ro=True)
				scale = cmd.getAttr('%s.s' % child)[0]
				volumes.append( (child, volumeType, pos, rot, scale) )
			else:
				groups.append( child )

		return volumes, groups

	topVolumes, topGroups = getVolumesAndEmptyGroups(visHierarchyTop)

	#create the first entry - the hierarchy root has no parent
	toExport = [(visHierarchyTop, None, topVolumes)]

	#breadth-first walk of the group hierarchy - topGroups doubles as the work queue
	while topGroups:
		curNode = topGroups.pop(0)
		curParent = cmd.listRelatives(curNode, p=True)[0]
		curVolumes, curChildren = getVolumesAndEmptyGroups(curNode)
		topGroups.extend(curChildren)
		toExport.append( (curNode, curParent, curVolumes) )

	exportDict['preset'] = toExport
	thePreset = filesystem.Preset(locale, TOOL_NAME, presetName, EXTENSION)
	thePreset.pickle(exportDict)
Exemple #7
0
	def write( self, objects, **kwargs ):
		'''
		generates a clip of this preset's type from the given objects; on
		success the clip is pickled to disk (with export metadata) and an
		icon is generated for it - on failure an error is printed instead
		'''
		clipType = self.getType()

		#build the export dict that describes the clip
		exportDict = api.writeExportDict( TOOL_NAME, VER )
		exportDict[ kEXPORT_DICT_CLIP_TYPE ] = clipType
		exportDict[ kEXPORT_DICT_OBJECTS ] = objects
		exportDict[ kEXPORT_DICT_WORLDSPACE ] = False

		#bake the objects into a fresh clip instance; bail if generation fails
		clip = self.TYPE_CLASSES[ clipType ]()
		if not clip.generate( objects, **kwargs ):
			printErrorStr( "Failed to generate clip!" )
			return

		exportDict[ kEXPORT_DICT_THE_CLIP ] = clip

		#write the preset file to disk
		self.pickle( exportDict )

		#generate the icon for the clip and add it to perforce if appropriate
		icon = generateIcon( self )
		#icon.asP4().add()

		printInfoStr( "Generated clip!" )
Exemple #8
0
    def write(self, objects, **kwargs):
        '''
        generates a clip of this preset's type from the given objects; on
        success it is written to disk with export metadata and an icon is
        generated, otherwise an error is printed and nothing is written
        '''
        clipType = self.getType()

        #assemble the export dict describing the clip
        exportDict = api.writeExportDict(TOOL_NAME, VER)
        exportDict[kEXPORT_DICT_CLIP_TYPE] = clipType
        exportDict[kEXPORT_DICT_OBJECTS] = objects
        exportDict[kEXPORT_DICT_WORLDSPACE] = False

        #bake the objects into a clip instance; bail if generation fails
        clip = self.TYPE_CLASSES[clipType]()
        if not clip.generate(objects, **kwargs):
            printErrorStr("Failed to generate clip!")
            return

        exportDict[kEXPORT_DICT_THE_CLIP] = clip

        #write the preset file to disk
        self.pickle(exportDict)

        #generate the icon for the clip and add it to perforce if appropriate
        icon = generateIcon(self)
        #icon.asP4().add()

        printInfoStr("Generated clip!")
	def saveMappingToFile( self, filepath ):
		'''
		pickles this object's mapping (paired with export metadata) to
		<filepath>, which is first coerced to the tool's file extension
		'''
		outPath = Path( filepath ).setExtension( EXT )
		#the file payload is a (metadata, mapping) tuple
		outPath.pickle( (api.writeExportDict( TOOL_NAME, TOOL_VER ), self.getMapping()) )
Exemple #10
0
def saveWeights(geos, filepath=None):
    '''
    gathers per-vertex skinning data for the given geometry and pickles it to disk.

    geos      a list of meshes to export - skinned via a skinCluster, or rigid
              bound by being parented under a joint
    filepath  destination file; when None a default path is generated

    returns the Path the weights were written to.  geometry with more than one
    skinCluster is skipped with a warning, as is geometry with neither a
    skinCluster nor a joint parent.  raises SkinWeightException when a vertex
    query yields no joints (e.g. post-skinCluster history)
    '''
    start = time.clock()
    miscData = api.writeExportDict(TOOL_NAME, TOOL_VERSION)

    #if filepath is None, then generate a default filepath based on the location of the file
    if filepath is None:
        filepath = getDefaultPath()
    else:
        filepath = Path(filepath)

    #bind the hot Maya calls to locals - they run once per vertex
    skinPercent = cmd.skinPercent
    xform = cmd.xform

    #define the data we're gathering
    masterJointList = []
    weightData = []

    #data gathering time!
    rigidBindObjects = []
    for geo in geos:
        skinClusters = cmd.ls(cmd.listHistory(geo), type='skinCluster')
        if len(skinClusters) > 1:
            api.melWarning("more than one skinCluster found on %s" % geo)
            continue

        #so the geo isn't skinned in the traditional way - check to see if it is parented to a joint.  if so,
        #stuff it into the rigid bind list to be dealt with outside this loop, and continue
        if not skinClusters:
            dealtWith = False
            for p in iterParents(geo):
                if cmd.nodeType(p) == 'joint':
                    rigidBindObjects.append((geo, p))
                    masterJointList.append(p)
                    masterJointList = removeDupes(masterJointList)
                    dealtWith = True
                    break

            if not dealtWith:
                api.melWarning("cannot find a skinCluster for %s" % geo)

            continue

        skinCluster = skinClusters[0]
        masterJointList += cmd.skinCluster(skinCluster, q=True, inf=True)
        masterJointList = removeDupes(masterJointList)

        #map joint name -> master index once per geo instead of calling
        #list.index() per joint per vertex, which is quadratic in joint count
        jointToIdx = dict((j, n) for n, j in enumerate(masterJointList))

        verts = cmd.ls(cmd.polyListComponentConversion(geo, toVertex=True),
                       fl=True)
        for idx, vert in enumerate(verts):
            jointList = skinPercent(skinCluster,
                                    vert,
                                    ib=1e-4,
                                    q=True,
                                    transform=None)
            weightList = skinPercent(skinCluster,
                                     vert,
                                     ib=1e-4,
                                     q=True,
                                     value=True)
            if jointList is None:
                raise SkinWeightException(
                    "I can't find any joints - sorry.  do you have any post skin cluster history???"
                )

            pos = xform(vert, q=True, ws=True, t=True)
            vertData = VertSkinWeight(pos)
            vertData.populate(geo, idx, [jointToIdx[j] for j in jointList],
                              weightList)
            weightData.append(vertData)

    #deal with rigid bind objects
    for geo, j in rigidBindObjects:
        #the joint index is constant for the whole object - look it up once
        jIdx = masterJointList.index(j)

        verts = cmd.ls(cmd.polyListComponentConversion(geo, toVertex=True),
                       fl=True)
        for idx, vert in enumerate(verts):
            pos = xform(vert, q=True, ws=True, t=True)
            vertData = VertSkinWeight(pos)
            #rigid bind: the single parent joint gets full weight
            vertData.populate(geo, idx, [jIdx], [1])
            weightData.append(vertData)

    #sort the weightData by ascending x values so we can search faster
    weightData.sort()

    #turn the masterJointList into a dict keyed by index
    joints = {}
    for n, j in enumerate(masterJointList):
        joints[n] = j

    #generate joint hierarchy data - so if joints are missing on load we can find the best match
    jointHierarchies = {}
    for n, j in joints.iteritems():
        jointHierarchies[n] = getAllParents(j)

    toWrite = miscData, joints, jointHierarchies, weightData

    filepath = Path(filepath)
    filepath.pickle(toWrite, False)
    melPrint('Weights Successfully Saved to %s: time taken %.02f seconds' %
             (filepath, time.clock() - start))

    return filepath
	def saveMappingToFile( self, filepath ):
		'''
		writes this object's mapping, bundled with export metadata, to
		<filepath> (forced to the tool's standard file extension)
		'''
		targetPath = Path( filepath ).setExtension( EXT )
		exportData = api.writeExportDict( TOOL_NAME, TOOL_VER )
		targetPath.pickle( (exportData, self.getMapping()) )
def saveWeights( geos, filepath=None, mode=kAPPEND ):
	'''
	gathers per-vertex skinning data for the given geometry and pickles it to disk

	geos      list of skinned meshes to export
	filepath  destination file; when None a default path is generated
	mode      kAPPEND merges with data already in the file, otherwise the file
	          contents are replaced

	returns the Path the weights were written to.  each exported geo also gets
	a weightSaveFile string attribute recording where its weight file lives
	(relative to the current scene file where possible)
	'''
	reportUsageToAuthor()
	start = time.clock()
	miscData = api.writeExportDict(TOOL_NAME, TOOL_VERSION)

	#if filepath is None, then generate a default filepath based on the location of the file
	if filepath is None:
		filepath = getDefaultPath()
	else:
		filepath = Path(filepath)

	#bind the hot Maya calls to locals - they run once per vertex
	skinPercent = cmd.skinPercent
	xform = cmd.xform

	#define the data we're gathering
	joints = {}
	jointHierarchies = {}
	weightData = []

	#does the weight file already exist?  if so, load it up and append data if append mode is true
	if filepath.exists and mode == kAPPEND:
		tmpA, joints, jointHierarchies, weightData = filepath.unpickle()

	#data gathering time!
	for geo in geos:
		geoNode = geo
		verts = cmd.ls(cmd.polyListComponentConversion(geo, toVertex=True), fl=True)
		skinClusters = cmd.ls(cmd.listHistory(geo), type='skinCluster')
		if len(skinClusters) > 1:
			api.melWarning("more than one skinCluster found on %s" % geo)
			continue

		try:
			skinCluster = skinClusters[0]
		except IndexError:
			api.melWarning("cannot find a skinCluster for %s" % geo)
			#BUGFIX: without this continue the vert loop below would run with
			#an undefined (or stale, from a previous geo) skinCluster
			continue

		for idx, vert in enumerate(verts):
			jointList = skinPercent(skinCluster, vert, ib=1e-4, q=True, transform=None)
			weightList = skinPercent(skinCluster, vert, ib=1e-4, q=True, value=True)
			if jointList is None:
				raise SkinWeightException("I can't find any joints - sorry.  do you have any post skin cluster history???")

			#so this is kinda dumb - but using a dict here we can easily remap on restore if joint names
			#are different by storing the dict's value as the joint to use, and the key as the joint that
			#the vert was originally weighted to
			for j in jointList:
				joints[j] = j

			pos = xform(vert, q=True, ws=True, t=True)
			vertData = VertSkinWeight( pos )
			vertData.populate(geo, idx, jointList, weightList)
			weightData.append(vertData)

		#lastly, add an attribute to the object describing where the weight file exists
		dirOfCurFile = Path(cmd.file(q=True, sn=True)).up()
		#if geo is a component path, trim back to the node name
		if geoNode.find('.') != -1:
			geoNode = geo.split('.')[0]
		if not cmd.objExists('%s.weightSaveFile' % geoNode):
			cmd.addAttr(geoNode, ln='weightSaveFile', dt='string')

		#prefer a path relative to the current scene; fall back to the absolute path
		relToCur = filepath.relativeTo(dirOfCurFile)
		if relToCur is None:
			relToCur = filepath
		cmd.setAttr('%s.weightSaveFile' % geoNode, relToCur.asfile(), type='string')

	#sort the weightData by ascending x values so we can search faster
	weightData = sortByIdx(weightData)

	#generate joint hierarchy data - so if joints are missing on load we can find the best match
	for j in joints.keys():
		jointHierarchies[j] = getAllParents(j)

	toWrite = miscData, joints, jointHierarchies, weightData

	filepath = Path(filepath)
	filepath.pickle(toWrite, False)
	melPrint('Weights Successfully %s to %s: time taken %.02f seconds' % ('Saved' if mode == kREPLACE else 'Appended', filepath.resolve(), time.clock()-start))

	return filepath
def saveWeights( geos, filepath=None ):
	'''
	gathers per-vertex skinning data for the given geometry and pickles it to disk

	geos      list of meshes to export - skinned via a skinCluster, or rigid
	          bound by being parented under a joint
	filepath  destination file; when None a default path is generated

	returns the Path the weights were written to.  geometry with more than one
	skinCluster is skipped with a warning, as is geometry with neither a
	skinCluster nor a joint parent.  raises SkinWeightException when a vertex
	query yields no joints (e.g. post-skinCluster history)
	'''
	start = time.clock()
	miscData = api.writeExportDict(TOOL_NAME, TOOL_VERSION)

	#if filepath is None, then generate a default filepath based on the location of the file
	if filepath is None:
		filepath = getDefaultPath()
	else:
		filepath = Path(filepath)

	#bind the hot Maya calls to locals - they run once per vertex
	skinPercent = cmd.skinPercent
	xform = cmd.xform

	#define the data we're gathering
	masterJointList = []
	weightData = []

	#data gathering time!
	rigidBindObjects = []
	for geo in geos:
		skinClusters = cmd.ls( cmd.listHistory( geo ), type='skinCluster' )
		if len( skinClusters ) > 1:
			api.melWarning("more than one skinCluster found on %s" % geo)
			continue

		#so the geo isn't skinned in the traditional way - check to see if it is parented to a joint.  if so,
		#stuff it into the rigid bind list to be dealt with outside this loop, and continue
		if not skinClusters:
			dealtWith = False
			for p in iterParents( geo ):
				if cmd.nodeType( p ) == 'joint':
					rigidBindObjects.append( (geo, p) )
					masterJointList.append( p )
					masterJointList = removeDupes( masterJointList )
					dealtWith = True
					break

			if not dealtWith:
				api.melWarning( "cannot find a skinCluster for %s" % geo )

			continue

		skinCluster = skinClusters[ 0 ]
		masterJointList += cmd.skinCluster( skinCluster, q=True, inf=True )
		masterJointList = removeDupes( masterJointList )

		#map joint name -> master index once per geo instead of calling
		#list.index() per joint per vertex, which is quadratic in joint count
		jointToIdx = dict( (j, n) for n, j in enumerate( masterJointList ) )

		verts = cmd.ls(cmd.polyListComponentConversion(geo, toVertex=True), fl=True)
		for idx, vert in enumerate(verts):
			jointList = skinPercent(skinCluster, vert, ib=1e-4, q=True, transform=None)
			weightList = skinPercent(skinCluster, vert, ib=1e-4, q=True, value=True)
			if jointList is None:
				raise SkinWeightException("I can't find any joints - sorry.  do you have any post skin cluster history???")

			pos = xform(vert, q=True, ws=True, t=True)
			vertData = VertSkinWeight( pos )
			vertData.populate( geo, idx, [ jointToIdx[ j ] for j in jointList ], weightList )
			weightData.append( vertData )


	#deal with rigid bind objects
	for geo, j in rigidBindObjects:
		#the joint index is constant for the whole object - look it up once
		jIdx = masterJointList.index( j )

		verts = cmd.ls( cmd.polyListComponentConversion(geo, toVertex=True), fl=True )
		for idx, vert in enumerate( verts ):
			pos = xform( vert, q=True, ws=True, t=True )
			vertData = VertSkinWeight( pos )
			#rigid bind: the single parent joint gets full weight
			vertData.populate( geo, idx, [jIdx], [1] )
			weightData.append( vertData )


	#sort the weightData by ascending x values so we can search faster
	weightData.sort()

	#turn the masterJointList into a dict keyed by index
	joints = {}
	for n, j in enumerate( masterJointList ):
		joints[ n ] = j

	#generate joint hierarchy data - so if joints are missing on load we can find the best match
	jointHierarchies = {}
	for n, j in joints.iteritems():
		jointHierarchies[ n ] = getAllParents( j )

	toWrite = miscData, joints, jointHierarchies, weightData

	filepath = Path( filepath )
	filepath.pickle( toWrite, False )
	melPrint( 'Weights Successfully Saved to %s: time taken %.02f seconds' % (filepath, time.clock()-start) )

	return filepath
def saveWeights(geos, filepath=None, mode=kAPPEND):
    '''
    gathers per-vertex skinning data for the given geometry and pickles it to disk

    geos      list of skinned meshes to export
    filepath  destination file; when None a default path is generated
    mode      kAPPEND merges with data already in the file, otherwise the file
              contents are replaced

    returns the Path the weights were written to.  each exported geo also gets
    a weightSaveFile string attribute recording where its weight file lives
    (relative to the current scene file where possible)
    '''
    reportUsageToAuthor()
    start = time.clock()
    miscData = api.writeExportDict(TOOL_NAME, TOOL_VERSION)

    #if filepath is None, then generate a default filepath based on the location of the file
    if filepath is None:
        filepath = getDefaultPath()
    else:
        filepath = Path(filepath)

    #bind the hot Maya calls to locals - they run once per vertex
    skinPercent = cmd.skinPercent
    xform = cmd.xform

    #define the data we're gathering
    joints = {}
    jointHierarchies = {}
    weightData = []

    #does the weight file already exist?  if so, load it up and append data if append mode is true
    if filepath.exists and mode == kAPPEND:
        tmpA, joints, jointHierarchies, weightData = filepath.unpickle()

    #data gathering time!
    for geo in geos:
        geoNode = geo
        verts = cmd.ls(cmd.polyListComponentConversion(geo, toVertex=True),
                       fl=True)
        skinClusters = cmd.ls(cmd.listHistory(geo), type='skinCluster')
        if len(skinClusters) > 1:
            api.melWarning("more than one skinCluster found on %s" % geo)
            continue

        try:
            skinCluster = skinClusters[0]
        except IndexError:
            api.melWarning("cannot find a skinCluster for %s" % geo)
            #BUGFIX: without this continue the vert loop below would run with
            #an undefined (or stale, from a previous geo) skinCluster
            continue

        for idx, vert in enumerate(verts):
            jointList = skinPercent(skinCluster,
                                    vert,
                                    ib=1e-4,
                                    q=True,
                                    transform=None)
            weightList = skinPercent(skinCluster,
                                     vert,
                                     ib=1e-4,
                                     q=True,
                                     value=True)
            if jointList is None:
                raise SkinWeightException(
                    "I can't find any joints - sorry.  do you have any post skin cluster history???"
                )

            #so this is kinda dumb - but using a dict here we can easily remap on restore if joint names
            #are different by storing the dict's value as the joint to use, and the key as the joint that
            #the vert was originally weighted to
            for j in jointList:
                joints[j] = j

            pos = xform(vert, q=True, ws=True, t=True)
            vertData = VertSkinWeight(pos)
            vertData.populate(geo, idx, jointList, weightList)
            weightData.append(vertData)

        #lastly, add an attribute to the object describing where the weight file exists
        dirOfCurFile = Path(cmd.file(q=True, sn=True)).up()
        #if geo is a component path, trim back to the node name
        if geoNode.find('.') != -1: geoNode = geo.split('.')[0]
        if not cmd.objExists('%s.weightSaveFile' % geoNode):
            cmd.addAttr(geoNode, ln='weightSaveFile', dt='string')

        #prefer a path relative to the current scene; fall back to the absolute path
        relToCur = filepath.relativeTo(dirOfCurFile)
        if relToCur is None: relToCur = filepath
        cmd.setAttr('%s.weightSaveFile' % geoNode,
                    relToCur.asfile(),
                    type='string')

    #sort the weightData by ascending x values so we can search faster
    weightData = sortByIdx(weightData)

    #generate joint hierarchy data - so if joints are missing on load we can find the best match
    for j in joints.keys():
        jointHierarchies[j] = getAllParents(j)

    toWrite = miscData, joints, jointHierarchies, weightData

    filepath = Path(filepath)
    filepath.pickle(toWrite, False)
    melPrint('Weights Successfully %s to %s: time taken %.02f seconds' %
             ('Saved' if mode == kREPLACE else 'Appended', filepath.resolve(),
              time.clock() - start))

    return filepath