def test_AnchorRelativePath(self):
    """Verify AnchorRelativePath for empty, relative, and absolute inputs."""
    resolver = Ar.GetResolver()
    cases = [
        # (anchor, relative path, expected result)
        ('', '', ''),
        ('', 'RelPath', 'RelPath'),
        ('RelAnchor', '', ''),
        ('RelAnchor', 'RelPath', 'RelPath'),
        # An absolute path is returned unchanged regardless of the anchor.
        ('/AbsoluteAnchor', '/AbsolutePath', '/AbsolutePath'),
        # './' and '../' are resolved against the anchor's directory.
        ('/AbsoluteAnchor/ParentFile.txt', './Subdir/FileRel.txt',
         '/AbsoluteAnchor/Subdir/FileRel.txt'),
        ('/AbsoluteAnchor/ParentDir/ParentFile.txt', '../Subdir/FileRel.txt',
         '/AbsoluteAnchor/Subdir/FileRel.txt'),
    ]
    for anchor, relPath, expected in cases:
        self.assertEqual(expected,
                         resolver.AnchorRelativePath(anchor, relPath))
def GetResolverContext(self, usdFile):
    """Create and return the ArResolverContext used to Open the Stage
    for the given usdFile.

    The base implementation configures the resolver for the asset and
    returns a default asset context; derived classes can do more
    sophisticated resolver and context configuration.  Called each time
    a new stage is opened.

    Providing a context is optional -- a UsdStage uses reasonable
    fallback behavior without one -- but configuring an asset context
    by default is reasonable for usdview, and lets clients that embed
    usdview achieve different behavior when needed.
    """
    from pxr import Ar
    resolver = Ar.GetResolver()
    resolver.ConfigureResolverForAsset(usdFile)
    return resolver.CreateDefaultContextForAsset(usdFile)
def test_ResolveSearchPaths(self):
    """Search-path resolution locates files under configured search dirs.

    Creates test1/test2/test_ResolveWithContext.txt and verifies that
    both a partial search-path query and a bare filename resolve to it.
    """
    testDir = os.path.abspath('test1/test2')
    # Start from a clean directory so stale files can't mask failures.
    if os.path.isdir(testDir):
        shutil.rmtree(testDir)
    os.makedirs(testDir)

    testFileName = 'test_ResolveWithContext.txt'
    testFilePath = os.path.join(testDir, testFileName)
    with open(testFilePath, 'w') as ofp:
        # Fixed: the original used the Python 2 statement
        # `print >> ofp, 'Garbage'`, which is a syntax error in Python 3.
        print('Garbage', file=ofp)

    resolver = Ar.GetResolver()

    self.assertPathsEqual(
        os.path.abspath('test1/test2/test_ResolveWithContext.txt'),
        resolver.Resolve('test2/test_ResolveWithContext.txt'))

    self.assertPathsEqual(
        os.path.abspath('test1/test2/test_ResolveWithContext.txt'),
        resolver.Resolve('test_ResolveWithContext.txt'))
def test_StageIds(self):
    """Insert stages into a StageCache and look them up by Id.

    Exercises lookup by stage, by Id, and Id round-trips through the
    string and integer representations.  (Fix: the original created an
    unused Usd.StageCache before immediately creating a second one.)
    """
    # sameRoot1 and sameRoot2 share root layers.
    sameRoot1 = Usd.Stage.CreateInMemory()
    sameRoot2 = Usd.Stage.Open(sameRoot1.GetRootLayer())

    # same1 and same2 share both root and session layers and have null
    # path resolver contexts.
    same1 = Usd.Stage.CreateInMemory()
    same2 = Usd.Stage.Open(same1.GetRootLayer(), same1.GetSessionLayer())

    # prDiff1 and prDiff2 share root and session layers, but have
    # different path resolver contexts.
    prDiff1 = Usd.Stage.CreateInMemory()
    assetFile = 'testUsdStageCache/asset.usd'
    prDiff2 = Usd.Stage.Open(
        prDiff1.GetRootLayer(), prDiff1.GetSessionLayer(),
        Ar.GetResolver().CreateDefaultContextForAsset(assetFile))

    # Create a cache and insert all the above stages.
    allStages = [sameRoot1, sameRoot2, same1, same2, prDiff1, prDiff2]
    cache = Usd.StageCache()
    ids = [cache.Insert(stage) for stage in allStages]
    assert all(ids)

    for stage, stageId in zip(allStages, ids):
        assert cache.GetId(stage) == stageId
        assert cache.Find(stageId) == stage
        # Ids round trip from/to string and int.
        assert cache.Find(
            Usd.StageCache.Id.FromLongInt(stageId.ToLongInt())) == stage
        assert cache.Find(
            Usd.StageCache.Id.FromString(stageId.ToString())) == stage

    assert all(cache.Erase(i) for i in ids)
    assert cache.IsEmpty() and cache.Size() == 0
def test_RefreshContext(self):
    """RefreshContext makes a context pick up external changes to the
    versions file and broadcasts an Ar.Notice.ResolverChanged notice."""
    resolver = Ar.GetResolver()

    # Pin "Buzz" to version 1 in the context's versions file.
    self.writeVersionsDict(
        "refreshContext.json",
        { "Buzz" : "1" })

    ctx = UsdResolverExample.ResolverContext("refreshContext.json")
    with Ar.ResolverContextBinder(ctx):
        self.assertEqual(
            resolver.Resolve("asset:Buzz/{$VERSION}/Buzz.usd"),
            Ar.ResolvedPath("asset:Buzz/1/Buzz.usd"))

    # Rewrite the versions file on disk...
    self.writeVersionsDict(
        "refreshContext.json",
        { "Buzz" : "latest" })

    # ...but resolution through the same context still yields the old
    # version until RefreshContext is called.
    with Ar.ResolverContextBinder(ctx):
        self.assertEqual(
            resolver.Resolve("asset:Buzz/{$VERSION}/Buzz.usd"),
            Ar.ResolvedPath("asset:Buzz/1/Buzz.usd"))

    # Listener that records whether a ResolverChanged notice was sent.
    class _Listener(object):
        def __init__(self):
            self._key = Tf.Notice.RegisterGlobally(
                Ar.Notice.ResolverChanged, self._HandleNotice)
            self.receivedNotice = False

        def _HandleNotice(self, notice, sender):
            self.receivedNotice = True

    l = _Listener()
    resolver.RefreshContext(ctx)
    self.assertTrue(l.receivedNotice)

    # After the refresh, resolution reflects the updated versions file.
    with Ar.ResolverContextBinder(ctx):
        self.assertEqual(
            resolver.Resolve("asset:Buzz/{$VERSION}/Buzz.usd"),
            Ar.ResolvedPath("asset:Buzz/latest/Buzz.usd"))
def InitializeResolver():
    """Initialize the resolver so that search paths pointing to schema.usda
    files are resolved to the directories where those files are installed."""
    from pxr import Ar, Plug

    # Force ArDefaultResolver so its search-path machinery is available.
    Ar.SetPreferredResolver('ArDefaultResolver')

    # Collect the resource directory of every plugin that provides a
    # schema; these become the search-path prefixes.
    registry = Plug.Registry()
    resourcePaths = {
        plugin.resourcePath
        for plugin in (registry.GetPluginForType(t)
                       for t in registry.GetAllDerivedTypes('UsdSchemaBase'))
        if plugin
    }

    # Order shouldn't matter; sort only for consistency across runs.
    Ar.DefaultResolver.SetDefaultSearchPath(sorted(resourcePaths))
def test_JoinPackageRelativePath(self):
    """Test Ar.JoinPackageRelativePath"""
    cases = [
        ([""], ""),
        (["foo.pack"], "foo.pack"),
        (["foo.pack", ""], "foo.pack"),
        (["foo.pack", "bar.file"], "foo.pack[bar.file]"),
        (["foo.pack", "bar.pack", "baz.file"],
         "foo.pack[bar.pack[baz.file]]"),
        (["foo.pack[bar.pack]", "baz.file"],
         "foo.pack[bar.pack[baz.file]]"),
        (["foo[0].pack", "baz.file"], "foo[0].pack[baz.file]"),
        # Corner case: delimiter characters in paths must be escaped
        # when they become enclosed in delimiters by the join.
        (["foo]a.pack", "bar[b.pack", "baz]c.file"],
         "foo]a.pack[bar\\[b.pack[baz\\]c.file]]"),
    ]
    for parts, expected in cases:
        self.assertEqual(Ar.JoinPackageRelativePath(parts), expected)
def test_Resolve(self):
    """Resolution dispatches to the test URI resolver by scheme."""
    resolver = Ar.GetResolver()

    # The test URI resolver handles asset paths of the form "test:..."
    # and simply returns the path unchanged; we use that to verify it is
    # getting invoked.

    # Non-URI paths go to the default resolver; since these files don't
    # exist, Resolve returns "".
    for missing in ("doesnotexist", "doesnotexist.package[foo.file]"):
        self.assertEqual(resolver.Resolve(missing), "")

    # These hit the URI resolver, which returns the paths unchanged.
    # Schemes are case-insensitive, so upper-cased variants match too.
    uriPaths = [
        "test://foo",
        "test://foo.package[bar.file]",
        "test_other://foo",
        "test_other://foo.package[bar.file]",
        "TEST://foo",
        "TEST://foo.package[bar.file]",
        "TEST_OTHER://foo",
        "TEST_OTHER://foo.package[bar.file]",
    ]
    for uriPath in uriPaths:
        self.assertEqual(resolver.Resolve(uriPath), uriPath)
def expandPath(path, parentPath=None, sdf_format_args=None):
    """ Expand and normalize a path that may have variables in it.
    Do not use this for URLs with query strings.

    :Parameters:
        path : `str`
            File path
        parentPath : `str` | None
            Parent file path this file is defined in relation to.
            Helps with asset resolution.
        sdf_format_args : `dict` | None
            Dictionary of key/value `str` pairs from a path's
            :SDF_FORMAT_ARGS:. Currently unused; accepted for interface
            compatibility.
    :Returns:
        Normalized path with variables expanded.
    :Rtype:
        `str`
    """
    path = os.path.expanduser(os.path.normpath(path))
    if resolver is not None:
        try:
            resolver.ConfigureResolverForAsset(path)
            context = resolver.CreateDefaultContextForAsset(path)
            with Ar.ResolverContextBinder(context):
                # Anchor to the parent file when one was provided.
                anchoredPath = path if parentPath is None else \
                    resolver.AnchorRelativePath(parentPath, path)
                resolved = resolver.Resolve(anchoredPath)
        except Exception:
            # Fixed: logger.warn is a deprecated alias of logger.warning.
            # Use lazy %-style args and keep the traceback for debugging.
            logger.warning(
                "Failed to resolve Asset path %s with parent %s",
                path, parentPath, exc_info=True)
        else:
            if resolved:
                return resolved

    # Return this best-attempt if all else fails.
    return os.path.expandvars(path)
def test_SplitPackageRelativePathOuter(self):
    """Test Ar.SplitPackageRelativePathOuter"""
    cases = [
        ("", ("", "")),
        ("foo.file", ("foo.file", "")),
        ("foo.pack[bar.file]", ("foo.pack", "bar.file")),
        ("foo.pack[bar.pack[baz.file]]",
         ("foo.pack", "bar.pack[baz.file]")),
        ("foo[0].pack[bar.file]", ("foo[0].pack", "bar.file")),
        # Corner case: delimiter characters in the outermost packaged
        # path are unescaped when split out of the delimiters.
        ("foo]a.pack[bar\\[b.pack[baz\\]c.file]]",
         ("foo]a.pack", "bar[b.pack[baz\\]c.file]")),
    ]
    for joined, expectedSplit in cases:
        self.assertEqual(Ar.SplitPackageRelativePathOuter(joined),
                         expectedSplit)
def _findFiles(args):
    '''Return a 3-tuple of lists: (baseline-only, matching, comparison-only).
    baseline-only and comparison-only are lists of individual files, while
    matching is a list of corresponding pairs of files.'''
    import os
    import stat
    from pxr import Ar

    join = os.path.join
    # NOTE(review): basename and exists are bound but unused below.
    basename = os.path.basename
    exists = os.path.exists

    def listFiles(dirpath):
        # Set of every file path under dirpath, expressed relative to it.
        ret = []
        for root, _, files in os.walk(dirpath):
            ret += [ os.path.relpath(join(root, file), dirpath)
                     for file in files ]
        return set(ret)

    # Must have FILE FILE, DIR DIR, DIR FILES... or FILES... DIR.
    err = ValueError("Error: File arguments must be one of: "
                     "FILE FILE, DIR DIR, DIR FILES..., or FILES... DIR.")
    if len(args) < 2:
        raise err

    # For speed, since filestats can be slow, stat all args once, then reuse
    # that to determine isdir/isfile
    resolver = Ar.GetResolver()
    stats = []
    for arg in args:
        try:
            st = os.stat(arg)
        except (OSError, IOError):
            # Not on disk -- acceptable only if the resolver can find it;
            # the stat entry is recorded as None in that case.
            if not resolver.Resolve(arg):
                raise ValueError("Error: %s does not exist, and cannot be "
                                 "resolved" % arg)
            st = None
        stats.append(st)

    def isdir(st):
        # A None stat (resolver-only asset) is not a directory.
        return st and stat.S_ISDIR(st.st_mode)

    # if any of the directory forms are used, no paths may be unresolved assets
    def validateFiles():
        for i, st in enumerate(stats):
            if st is None:
                raise ValueError(
                    "Error: %s did not exist on disk, and using a "
                    "directory comparison form" % args[i])

    # DIR FILES...: the directory is the baseline; the named files are
    # the comparison side.
    if isdir(stats[0]) and not any(map(isdir, stats[1:])):
        validateFiles()
        dirpath = args[0]
        files = set(map(os.path.relpath, args[1:]))
        dirfiles = listFiles(dirpath)
        return ([],
                [(join(dirpath, p), p) for p in files & dirfiles],
                [p for p in files - dirfiles])
    # FILES... DIR: the named files are the baseline; the directory is
    # the comparison side.
    elif not any(map(isdir, stats[:-1])) and isdir(stats[-1]):
        validateFiles()
        dirpath = args[-1]
        files = set(map(os.path.relpath, args[:-1]))
        dirfiles = listFiles(dirpath)
        return ([p for p in files - dirfiles],
                [(p, join(dirpath, p)) for p in files & dirfiles],
                [])
    # FILE FILE or DIR DIR
    elif len(args) == 2:
        # DIR DIR: compare the recursive file listings of both trees.
        if all(map(isdir, stats)):
            ldir, rdir = args[0], args[1]
            lhs, rhs = map(listFiles, args)
            return (
                # baseline only
                sorted([join(ldir, p) for p in lhs - rhs]),
                # corresponding
                sorted([(join(ldir, p), join(rdir, p)) for p in lhs & rhs]),
                # comparison only
                sorted([join(rdir, p) for p in rhs - lhs]))
        # FILE FILE: a single corresponding pair.
        elif not any(map(isdir, stats)):
            return ([], [(args[0], args[1])], [])
    raise err
if Qt.IsPySide: import pysideuic as uic elif Qt.IsPySide2: import pyside2uic as uic else: uic = Qt._uic from .constants import USD_EXTS # Set up logging. logger = logging.getLogger(__name__) logging.basicConfig() try: from pxr import Ar resolver = Ar.GetResolver() except ImportError: logger.warn( "Unable to create AssetResolver - Asset links may not work correctly") resolver = None def expandPath(path, parentPath=None, sdf_format_args=None): """ Expand and normalize a path that may have variables in it. Do not use this for URLs with query strings. :Parameters: path : `str` File path parentPath : `str` | None Parent file path this file is defined in relation to.
def test(usdfile):
    """Walk a stage's prims, printing payloads, variant sets, and clips.

    Also demonstrates anchoring a payload's asset path to the layer that
    introduced it and resolving it under the stage's resolver context.

    Fixed: the original mixed Python 2 print statements (a syntax error
    in Python 3) with print() calls; all output now uses print().
    """
    print('test'.center(40, '-'))

    stage_ref = Usd.Stage.Open(usdfile)

    for prim_ref in stage_ref.Traverse():
        print(prim_ref.GetPath())

        if prim_ref.HasPayload():
            print('payloads'.center(40, '-'))
            # this is apparently hacky, but it works, yah?
            payloads = prim_ref.GetMetadata("payload")
            # so there's lots of lists
            for x in dir(payloads):
                if x.endswith('Items'):
                    print(x, getattr(payloads, x))

            for payload in payloads.appendedItems:
                pathToResolve = payload.assetPath
                print('assetPath:', pathToResolve)
                # get the layer from the prim
                primSpec = prim_ref.GetPrimStack()[0]
                anchorPath = primSpec.layer.identifier

                with Ar.ResolverContextBinder(
                        stage_ref.GetPathResolverContext()):
                    resolver = Ar.GetResolver()
                    # relative to layer path?
                    pathToResolve = resolver.AnchorRelativePath(
                        anchorPath, pathToResolve)
                    print('pathToResolve', pathToResolve)
                    # this should probably work, but no
                    resolvedPath = resolver.Resolve(pathToResolve)
                    print('resolvedPath', resolvedPath)

        if prim_ref.HasAuthoredPayloads():
            payloads = prim_ref.GetPayloads()
            # There is currently no facility for listing the currently
            # authored payloads on a prim... the problem is somewhat
            # ill-defined, and requires some thought.

        # does this prim have variant sets?
        if prim_ref.HasVariantSets():
            print('variantsets'.center(30, '-'))
            # list all the variant sets available on this prim
            sets = prim_ref.GetVariantSets()
            # You can't iterate over the sets themselves
            # (TypeError: 'VariantSets' object is not iterable); get the
            # names and call GetVariantSet(<set name>) instead.
            for varset in sets.GetNames():
                print('variant set name:', varset)
                # get the variant set by name
                thisvarset = prim_ref.GetVariantSet(varset)
                # the available variants
                print(thisvarset.GetVariantNames())
                # the current variant
                print(thisvarset.GetVariantSelection())
                print(varset)

        # gotta get a clip on each prim and then test it for paths?
        clips = Usd.ClipsAPI(prim_ref)
        if clips.GetClipAssetPaths():
            print('CLIPS'.center(30, '-'))
            # dict of clip info, full of everything; key is the clip *name*
            print(clips.GetClips())
            # this is a good one - resolved asset paths too
            for path in clips.GetClipAssetPaths():
                print(path, type(path))
                print(path.resolvedPath)

    print('end test'.center(40, '-'))
def _IsPackageOrPackagedLayer(layer):
    """Return True if the layer's file format is a package format, or if
    its identifier is a package-relative path (e.g. 'foo.usdz[bar.usd]')."""
    if layer.GetFileFormat().IsPackage():
        return True
    return Ar.IsPackageRelativePath(layer.identifier)
def _getAbsPath(self, path):
    """Anchor the given (possibly relative) path against this layer's
    real path and return the result."""
    return Ar.GetResolver().AnchorRelativePath(self.layer.realPath,
                                               str(path))
def _RP(path=None):
    # Build an Ar.ResolvedPath from the absolute form of `path`
    # (the current working directory when no path is given).
    if not path:
        path = ""
    return Ar.ResolvedPath(os.path.abspath(path))
def GetFlattenedUsdData(filePath):
    """Configure the resolver for ``filePath`` and return its opened stage.

    Asserts if the stage cannot be opened.
    """
    from pxr import Ar, Usd

    resolver = Ar.GetResolver()
    resolver.ConfigureResolverForAsset(filePath)

    stage = Usd.Stage.Open(filePath)
    assert stage, 'Failed to open %s' % filePath
    return stage
def test_ExplicitConstruction(self):
    """Ar.ResolverContext construction from empty and wrapped values."""
    emptyCtx = Ar.ResolverContext()

    # None and empty sequences all produce an empty Ar.ResolverContext.
    for emptyValue in (None, (), []):
        self.assertEqual(Ar.ResolverContext(emptyValue), emptyCtx)

    # A Python-wrapped context object -- bare, or inside a tuple/list --
    # produces an Ar.ResolverContext holding that object.
    ctxObj = Ar.DefaultResolverContext(["/test"])
    ctx = Ar.ResolverContext(ctxObj)
    for wrapped in (ctxObj, (ctxObj,), [ctxObj]):
        self.assertEqual(Ar.ResolverContext(wrapped), ctx)

    # Objects that haven't been registered as context objects are
    # rejected with a TypeError.
    for badValue in (1, (1,), [1], (1, ctxObj), [1, ctxObj]):
        with self.assertRaises(TypeError):
            Ar.ResolverContext(badValue)
def test_Get(self):
    """ResolverContext.Get returns the wrapped context objects."""
    innerCtx = Ar.DefaultResolverContext(["/test"])
    wrapper = Ar.ResolverContext(innerCtx)
    self.assertEqual(wrapper.Get(), [innerCtx])
def GetFlattenedLayerStack(filePath):
    """Open ``filePath`` (without loading payloads) and return its
    flattened layer stack."""
    from pxr import Ar, Sdf, Pcp, Usd, UsdUtils

    Ar.GetResolver().ConfigureResolverForAsset(filePath)

    stage = Usd.Stage.Open(filePath, Usd.Stage.LoadNone)
    return UsdUtils.FlattenLayerStack(stage)
def main():
    """usdzip entry point: create a .usdz package from input files or an
    asset, and/or list/dump the contents of an existing .usdz file.

    Returns 0 on success, 1 on failure.

    Fixed user-facing message defects from the original: "containging"
    -> "containing", a missing ')' in the inputFiles/asset error, and
    "creatinga" -> "creating a".
    """
    parser = argparse.ArgumentParser(
        description='Utility for creating a .usdz '
        'file containing USD assets and for inspecting existing .usdz files.')
    parser.add_argument('usdzFile', type=str, nargs='?',
                        help='Name of the .usdz file to create or to inspect '
                        'the contents of.')
    parser.add_argument('inputFiles', type=str, nargs='*',
                        help='Files to include in the .usdz file.')
    parser.add_argument('-r', '--recurse', dest='recurse', action='store_true',
                        help='If specified, files in sub-directories are '
                        'recursively added to the package.')
    parser.add_argument(
        '-a', '--asset', dest='asset', type=str,
        help='Resolvable asset path pointing to the root layer '
        'of the asset to be isolated and copied into the '
        'package.')
    parser.add_argument("--arkitAsset", dest="arkitAsset", type=str,
                        help="Similar to the --asset option, the --arkitAsset "
                        "option packages all of the dependencies of the named "
                        "scene file. Assets targeted at the initial usdz "
                        "implementation in ARKit operate under greater "
                        "constraints than usdz files for more general 'in "
                        "house' uses, and this option attempts to ensure that "
                        "these constraints are honored; this may involve more "
                        "transformations to the data, which may cause loss of "
                        "features such as VariantSets.")
    parser.add_argument(
        '-c', '--checkCompliance', dest='checkCompliance',
        action='store_true', help='Perform compliance checking '
        'of the input files. If the input asset or \"root\" '
        'layer fails any of the compliance checks, the package '
        'is not created and the program fails.')
    parser.add_argument(
        '-l', '--list', dest='listTarget', type=str,
        nargs='?', default=None, const='-',
        help='List contents of the specified usdz file. If '
        'a file-path argument is provided, the list is output '
        'to a file at the given path. If no argument is '
        'provided or if \'-\' is specified as the argument, the'
        ' list is output to stdout.')
    parser.add_argument(
        '-d', '--dump', dest='dumpTarget', type=str,
        nargs='?', default=None, const='-',
        help='Dump contents of the specified usdz file. If '
        'a file-path argument is provided, the contents are '
        'output to a file at the given path. If no argument is '
        'provided or if \'-\' is specified as the argument, the'
        ' contents are output to stdout.')
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                        help='Enable verbose mode, which causes messages '
                        'regarding files being added to the package to be '
                        'output to stdout.')

    args = parser.parse_args()
    usdzFile = args.usdzFile
    inputFiles = args.inputFiles

    if args.asset and args.arkitAsset:
        parser.error("Specify either --asset or --arkitAsset, not both.")
    elif (args.arkitAsset or args.asset) and len(inputFiles) > 0:
        # Fixed: the original message was missing the closing parenthesis.
        parser.error("Specify either inputFiles or an asset (via --asset or "
                     "--arkitAsset), not both.")

    # If usdzFile is not specified directly as an argument, check if it has
    # been specified as an argument to the --list or --dump options. In these
    # cases, output the list or the contents to stdout.
    if not usdzFile:
        if args.listTarget and args.listTarget != '-' and \
           args.listTarget.endswith('.usdz') and \
           os.path.exists(args.listTarget):
            usdzFile = args.listTarget
            args.listTarget = '-'
        elif args.dumpTarget and args.dumpTarget != '-' and \
             args.dumpTarget.endswith('.usdz') and \
             os.path.exists(args.dumpTarget):
            usdzFile = args.dumpTarget
            args.dumpTarget = '-'
        else:
            parser.error("No usdz file specified!")

    # Check if we're in package creation mode and verbose mode is enabled,
    # print some useful information.
    if (args.asset or args.arkitAsset or len(inputFiles) > 0):
        # Ensure that the usdz file has the right extension.
        if not usdzFile.endswith('.usdz'):
            usdzFile += '.usdz'

        if args.verbose:
            if os.path.exists(usdzFile):
                print("File at path '%s' already exists. Overwriting file."
                      % usdzFile)

            if args.inputFiles:
                print('Creating package \'%s\' with files %s.' %
                      (usdzFile, inputFiles))

            if args.asset or args.arkitAsset:
                Tf.Debug.SetDebugSymbolsByName("USDUTILS_CREATE_USDZ_PACKAGE",
                                               1)

            if not args.recurse:
                print('Not recursing into sub-directories.')
    else:
        if args.checkCompliance:
            # Fixed: "creatinga" -> "creating a".
            parser.error(
                "--checkCompliance should only be specified when "
                "creating a usdz package. Please use 'usdchecker' to check "
                "compliance of an existing .usdz file.")

    success = True
    if len(inputFiles) > 0:
        success = _CreateUsdzPackage(usdzFile, inputFiles, args.recurse,
                                     args.checkCompliance,
                                     args.verbose) and success
    elif args.asset:
        r = Ar.GetResolver()
        resolvedAsset = r.Resolve(args.asset)
        if args.checkCompliance:
            success = _CheckCompliance(resolvedAsset, arkit=False) and success

        context = r.CreateDefaultContextForAsset(resolvedAsset)
        with Ar.ResolverContextBinder(context):
            # Create the package only if the compliance check was passed.
            success = success and UsdUtils.CreateNewUsdzPackage(
                Sdf.AssetPath(args.asset), usdzFile)
    elif args.arkitAsset:
        r = Ar.GetResolver()
        resolvedAsset = r.Resolve(args.arkitAsset)
        if args.checkCompliance:
            success = _CheckCompliance(resolvedAsset, arkit=True) and success

        context = r.CreateDefaultContextForAsset(resolvedAsset)
        with Ar.ResolverContextBinder(context):
            # Create the package only if the compliance check was passed.
            success = success and UsdUtils.CreateNewARKitUsdzPackage(
                Sdf.AssetPath(args.arkitAsset), usdzFile)

    if args.listTarget or args.dumpTarget:
        if os.path.exists(usdzFile):
            zipFile = Usd.ZipFile.Open(usdzFile)
            if zipFile:
                if args.dumpTarget:
                    if args.dumpTarget == usdzFile:
                        _Err("The file into which to dump the contents of the "
                             "usdz file '%s' must be different from the file "
                             "itself." % usdzFile)
                        return 1
                    _DumpContents(args.dumpTarget, zipFile)
                if args.listTarget:
                    if args.listTarget == usdzFile:
                        _Err("The file into which to list the contents of the "
                             "usdz file '%s' must be different from the file "
                             "itself." % usdzFile)
                        return 1
                    _ListContents(args.listTarget, zipFile)
            else:
                _Err("Failed to open usdz file at path '%s'." % usdzFile)
        else:
            _Err("Can't find usdz file at path '%s'." % usdzFile)

    return 0 if success else 1
def main():
    """usdedit entry point: convert a usd file to usd ascii in a temporary
    location, invoke an editor on it, then (unless in no-effect mode)
    convert the edited result back over the original file."""
    import argparse
    parser = argparse.ArgumentParser(
        prog=os.path.basename(sys.argv[0]),
        description='Convert a usd-readable file to the usd ascii format in \n'
        'a temporary location and invoke an editor on it. After \n'
        'saving and quitting the editor, the edited file will be \n'
        'converted back to the original format and OVERWRITE the \n'
        'original file, unless you supply the "-n" (--noeffect) flag, \n'
        'in which case no changes will be saved back to the original '
        'file. \n'
        'The editor to use will be looked up as follows: \n'
        ' - USD_EDITOR environment variable \n'
        ' - EDITOR environment variable \n'
        ' - emacs in PATH \n'
        ' - vim in PATH \n'
        ' - notepad in PATH \n'
        '\n\n')
    parser.add_argument('-n', '--noeffect', dest='readOnly',
                        action='store_true',
                        help='Do not edit the file.')
    parser.add_argument('-f', '--forcewrite', dest='forceWrite',
                        action='store_true',
                        help='Override file permissions to allow writing.')
    parser.add_argument('-p', '--prefix', dest='prefix',
                        action='store', type=str, default=None,
                        help='Provide a prefix for the temporary file name.')
    parser.add_argument('usdFileName', help='The usd file to edit.')
    results = parser.parse_args()

    # pull args from result map so we don't need to write result. for each
    readOnly, forceWrite, usdFileName, prefix = (results.readOnly,
                                                 results.forceWrite,
                                                 results.usdFileName,
                                                 results.prefix)

    # verify our usd file exists, and permissions args are sane
    if readOnly and forceWrite:
        sys.exit("Error: Cannot set read only(-n) and force "
                 " write(-f) together.")

    from pxr import Ar
    resolvedPath = Ar.GetResolver().Resolve(usdFileName)
    if not resolvedPath:
        sys.exit("Error: Cannot find file %s" % usdFileName)

    # Layers in packages cannot be written using the Sdf API.
    from pxr import Ar, Sdf
    (package, packaged) = Ar.SplitPackageRelativePathOuter(resolvedPath)
    extension = Sdf.FileFormat.GetFileExtension(package)
    fileFormat = Sdf.FileFormat.FindByExtension(extension)
    if not fileFormat:
        sys.exit("Error: Unknown file format")
    if fileFormat.IsPackage():
        # Force no-effect mode: edits to a packaged layer can't be saved.
        print("Warning: Edits cannot be saved to layers in %s files. "
              "Starting in no-effect mode." % extension)
        readOnly = True
        forceWrite = False

    writable = os.path.isfile(usdFileName) and os.access(usdFileName, os.W_OK)
    if not (writable or readOnly or forceWrite):
        sys.exit("Error: File isn't writable, and "
                 "readOnly(-n)/forceWrite(-f) haven't been marked.")

    # ensure we have both a text editor and usdcat available
    usdcatCmd, editorCmd = _findEditorTools(usdFileName, readOnly)

    # generate our temporary file with proper permissions and edit.
    usdaFile, usdaFileName = _generateTemporaryFile(usdcatCmd, usdFileName,
                                                    readOnly, prefix)

    tempFileChanged = _editTemporaryFile(editorCmd, usdaFileName)

    if (not readOnly or forceWrite) and tempFileChanged:
        # note that we need not overwrite usdFileName's write permissions
        # because we will be creating a new layer at that path.
        if not _writeOutChanges(temporaryFileName=usdaFileName,
                                permanentFileName=usdFileName):
            # Deliberately leaves the temp file behind so edits aren't lost.
            sys.exit("Error: Unable to save edits back to the original file %s"
                     ". Your edits can be found in %s. " \
                     %(usdFileName, usdaFileName))

    # Clean up the temporary file on the success / no-edit paths.
    os.close(usdaFile)
    os.remove(usdaFileName)
def main():
    """usdtree entry point: print the prim tree of a USD file, optionally
    flattened or with its layer stack flattened.  Returns a process exit
    code (0 success, 1 failure)."""
    parser = argparse.ArgumentParser(
        description=
        '''Writes the tree structure of a USD file. The default is to inspect a single USD file. Use the --flatten argument to see the flattened (or composed) Stage tree. Special metadata "kind" and "active" are always shown if authored unless --simple is provided.'''
    )
    parser.add_argument('inputPath')
    parser.add_argument('--unloaded', action='store_true', dest='unloaded',
                        help='Do not load payloads')
    parser.add_argument('--attributes', '-a', action='store_true',
                        dest='attributes',
                        help='Display authored attributes')
    parser.add_argument(
        '--metadata', '-m', action='store_true', dest='metadata',
        help=
        'Display authored metadata (active and kind are part of the label and not shown as individual items)'
    )
    parser.add_argument(
        '--simple', '-s', action='store_true', dest='simple',
        help='Only display prim names: no specifier, kind or active state.')
    parser.add_argument(
        '--flatten', '-f', action='store_true',
        help='Compose the stage with the '
        'input file as root layer and write the flattened content.')
    parser.add_argument(
        '--flattenLayerStack', action='store_true',
        help='Flatten the layer stack with the given root layer. '
        'Unlike --flatten, this does not flatten composition arcs (such as references).'
    )
    parser.add_argument('--mask', action='store', dest='populationMask',
                        metavar='PRIMPATH[,PRIMPATH...]',
                        help='Limit stage population to these prims, '
                        'their descendants and ancestors. To specify '
                        'multiple paths, either use commas with no spaces '
                        'or quote the argument and separate paths by '
                        'commas and/or spaces. Requires --flatten.')

    args = parser.parse_args()

    # split args.populationMask into paths.
    if args.populationMask:
        if not args.flatten:
            # You can only mask a stage, not a layer.
            _Err("%s: error: --mask requires --flatten" % parser.prog)
            return 1
        # Accept both comma- and whitespace-separated path lists.
        args.populationMask = args.populationMask.replace(',', ' ').split()

    from pxr import Ar
    resolver = Ar.GetResolver()
    try:
        resolver.ConfigureResolverForAsset(args.inputPath)
        resolverContext = resolver.CreateDefaultContextForAsset(args.inputPath)
        # Bind the asset's default context while resolving and printing.
        with Ar.ResolverContextBinder(resolverContext):
            resolved = resolver.Resolve(args.inputPath)
            if not resolved or not os.path.exists(resolved):
                _Err('Cannot resolve inputPath %r' % resolved)
                return 1
            PrintTree(args, resolved)
    except Exception as e:
        _Err("Failed to process '%s' - %s" % (args.inputPath, e))
        return 1

    return 0
def test_ImplicitConversion(self):
    """Test implicit conversion of a Python-wrapped context object when
    passed to a C++ function that takes an ArResolverContext."""
    emptyCtx = Ar.ResolverContext()

    # None and empty sequences implicitly convert to an empty context.
    for emptyValue in (None, (), []):
        self.assertEqual(Ar._TestImplicitConversion(emptyValue), emptyCtx)

    # A Python-wrapped context object -- bare, or inside a tuple/list --
    # implicitly converts to an Ar.ResolverContext holding that object.
    ctxObj = Ar.DefaultResolverContext(["/test"])
    ctx = Ar.ResolverContext(ctxObj)
    for wrapped in (ctxObj, (ctxObj,), [ctxObj]):
        self.assertEqual(Ar._TestImplicitConversion(wrapped), ctx)

    # Objects that haven't been registered as context objects are
    # rejected with a TypeError.
    for badValue in (1, (1,), [1], (1, ctxObj), [1, ctxObj]):
        with self.assertRaises(TypeError):
            Ar._TestImplicitConversion(badValue)
def GetFlattenedUsdData(filePath):
    """Configure the resolver for ``filePath`` and return the opened stage."""
    from pxr import Ar, Usd

    resolver = Ar.GetResolver()
    resolver.ConfigureResolverForAsset(filePath)
    return Usd.Stage.Open(filePath)
parser.add_argument('--arkit', dest='arkit', action='store_true') parser.add_argument('--check', dest='check', action='store_true') parser.add_argument('--numFailedChecks', dest='numFailedChecks', default=0, type=int, action='store') parser.add_argument('--numErrors', dest='numErrors', default=0, type=int, action='store') args = parser.parse_args() context = Ar.GetResolver().CreateDefaultContextForAsset(args.assetPath) with Ar.ResolverContextBinder(context): if not args.arkit: assert UsdUtils.CreateNewUsdzPackage( Sdf.AssetPath(args.assetPath), args.usdzFile, args.rename if args.rename else '') else: assert UsdUtils.CreateNewARKitUsdzPackage( Sdf.AssetPath(args.assetPath), args.usdzFile, args.rename if args.rename else '') zipFile = Usd.ZipFile.Open(args.usdzFile) assert zipFile with stream(args.outfile, 'w') as ofp: for fileName in zipFile.GetFileNames():
def main():
    """Entry point for the usdresolve command-line tool.

    Parses command-line arguments, configures the asset resolver (using
    whichever configuration entry points this version of Ar provides),
    resolves the input asset path, and prints the result.

    Returns:
        int: 0 on success, 1 if the path could not be resolved.
    """
    parser = argparse.ArgumentParser(
        description=
        'Resolves an asset path using a fully configured USD Asset Resolver.')
    parser.add_argument(
        'inputPath',
        help="An asset path to be resolved by the USD Asset Resolver.")

    # The available configuration flags depend on the Ar version in use.
    if _HasConfigureResolverForAsset():
        parser.add_argument(
            '--configureAssetPath',
            help="Run ConfigureResolverForAsset on the given asset path.")
    else:
        subparser = parser.add_mutually_exclusive_group()
        subparser.add_argument(
            '--createContextForAsset',
            help=("Run CreateDefaultContextForAsset with the given asset path "
                  "to create the context used for resolution."))
        subparser.add_argument(
            '--createContextFromString',
            action='append',
            help=(
                "Run CreateContextFromString with the given string to create "
                "the context used for resolution. This accepts strings like "
                "[<URI Scheme>:]<Configuration String> and may be specified "
                "multiple times.\n\n"
                "ex: usdresolve --createContextFromString 'config_primary' "
                "--createContextFromString 'my_uri_scheme:config_uri'"))

    if _HasCreateIdentifier():
        parser.add_argument(
            '--anchorPath',
            help=("Run CreateIdentifier with the input path and this anchor "
                  "asset path and resolve the result.\n\n"
                  "ex: usdresolve --anchorPath /asset/asset.usd sublayer.usd"))
    else:
        parser.add_argument(
            '--anchorPath',
            help="Run AnchorRelativePath on the given asset path.")

    args = parser.parse_args()

    resolver = Ar.GetResolver()

    try:
        resolverContext = _ConfigureAssetResolver(args, resolver)
        with Ar.ResolverContextBinder(resolverContext):
            inputPath = _AnchorRelativePath(args, resolver)
            resolved = resolver.Resolve(inputPath)
    except Exception as e:
        # BUG FIX: 'resolved' was left unbound when an exception was
        # raised, so the 'if not resolved' check below crashed with a
        # NameError instead of exiting cleanly. Return the failure exit
        # code immediately (also avoids printing two error messages).
        _Err("Failed to resolve '%s' - %s" % (args.inputPath, e))
        return 1

    if not resolved:
        _Err("Failed to resolve '%s'" % args.inputPath)
        return 1

    print(resolved)
    return 0
# Unless required by applicable law or agreed to in writing, software # distributed under the Apache License with the above modification is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the Apache License for the specific # language governing permissions and limitations under the Apache License. import os, platform, itertools, sys, unittest # Initialize Ar to use Sdf_TestResolver unless a different implementation # is specified via the TEST_SDF_LAYER_RESOLVER to allow testing with other # filesystem-based resolvers. preferredResolver = os.environ.get("TEST_SDF_LAYER_RESOLVER", "Sdf_TestResolver") from pxr import Ar Ar.SetPreferredResolver(preferredResolver) # Import other modules from pxr after Ar to ensure we don't pull on Ar # before the preferred resolver has been specified. from pxr import Sdf, Tf, Plug class TestSdfLayer(unittest.TestCase): @classmethod def setUpClass(cls): # Register dso plugins. testRoot = os.path.join(os.path.dirname(__file__), 'SdfPlugins') testPluginsDso = testRoot + '/lib' testPluginsDsoSearch = testPluginsDso + '/*/Resources/' Plug.Registry().RegisterPlugins(testPluginsDsoSearch)
def convert_to_usd(gltf_file, usd_file, fps, scale, arkit=False, verbose=False,
                   use_euler_rotation=False, optimize_textures=False,
                   generate_texture_transform_texture=True):
    """Converts a glTF file to USD

    Arguments:
        gltf_file {str} -- path to glTF file
        usd_file {str} -- path to write USD file (.usda/.usdc/.usdz)
        fps -- frame rate passed to GLTF2USD
        scale -- scene scale passed to GLTF2USD

    Keyword Arguments:
        arkit {bool} -- require ARKit compliance before packaging a .usdz
            (default: {False})
        verbose {bool} -- [description] (default: {False})
        use_euler_rotation {bool} -- passed through to GLTF2USD; semantics
            defined there (default: {False})
        optimize_textures {bool} -- passed through to GLTF2USD (default: {False})
        generate_texture_transform_texture {bool} -- passed through to
            GLTF2USD (default: {True})
    """
    usd = GLTF2USD(
        gltf_file=gltf_file,
        usd_file=usd_file,
        fps=fps,
        scale=scale,
        verbose=verbose,
        use_euler_rotation=use_euler_rotation,
        optimize_textures=optimize_textures,
        generate_texture_transform_texture=generate_texture_transform_texture)

    if usd.stage:
        asset = usd.stage.GetRootLayer()
        usd.logger.info('Conversion complete!')

        asset.Save()
        usd.logger.info('created {}'.format(asset.realPath))

        if usd_file.endswith('.usdz') or usd_file.endswith('.usdc'):
            # Export a binary (usdc) copy alongside the saved layer; for a
            # .usdz request this becomes the payload packaged below.
            usdc_file = '%s.%s' % (os.path.splitext(usd_file)[0], 'usdc')
            asset.Export(usdc_file, args=dict(format='usdc'))
            usd.logger.info('created {}'.format(usdc_file))

        if usd_file.endswith('.usdz'):
            #change to directory of the generated usd files to avoid issues with
            # relative paths with CreateNewUsdzPackage
            # NOTE(review): os.chdir is a process-wide side effect, and
            # usd_file is rebound to its basename from here on.
            os.chdir(os.path.dirname(usdc_file))
            usd_file = ntpath.basename(usd_file)

            r = Ar.GetResolver()
            resolved_asset = r.Resolve(ntpath.basename(usdc_file))
            context = r.CreateDefaultContextForAsset(resolved_asset)

            # Compliance is checked first; with arkit=True a failing check
            # aborts packaging entirely.
            success = check_usd_compliance(resolved_asset, arkit=arkit)
            with Ar.ResolverContextBinder(context):
                if arkit and not success:
                    usd.logger.warning('USD is not ARKit compliant')
                    return

                # Non-ARKit mode still packages, but the combined result
                # reflects both packaging and the earlier compliance check.
                success = UsdUtils.CreateNewUsdzPackage(
                    resolved_asset, usd_file) and success
                if success:
                    usd.logger.info(
                        'created package {} with contents:'.format(usd_file))
                    zip_file = Usd.ZipFile.Open(usd_file)
                    file_names = zip_file.GetFileNames()
                    for file_name in file_names:
                        usd.logger.info('\t{}'.format(file_name))
                else:
                    usd.logger.error('could not create {}'.format(usd_file))
def walkStagePrims(self, usdfile):
    """Walk every prim of the stage at *usdfile* and record payload,
    reference and value-clip dependencies.

    Populates self.nodes (resolved path -> dict with 'online', 'path'
    and 'type' keys) and self.edges (lists of [source, target, kind]).
    Relies on the module-level 'digitSearch' regex for clip frame-number
    extraction -- defined elsewhere in this file.
    """
    # print 'test'.center(40, '-')
    stage = Usd.Stage.Open(usdfile)
    for prim in stage.TraverseAll():
        # print(prim.GetPath())
        # from the docs:
        """Return a list of PrimSpecs that provide opinions for this prim
        (i.e. the prim's metadata fields, including composition metadata).
        specs are ordered from strongest to weakest opinion."""
        primStack = prim.GetPrimStack()
        for spec in primStack:
            if spec.hasPayloads:
                # A payload list has several item buckets; inspect all of
                # them so no authored payload arc is missed.
                payloadList = spec.payloadList
                for itemlist in [payloadList.appendedItems,
                                 payloadList.explicitItems,
                                 payloadList.addedItems,
                                 payloadList.prependedItems,
                                 payloadList.orderedItems]:
                    if itemlist:
                        for payload in itemlist:
                            payload_path = payload.assetPath
                            # print payload, payload_path
                            with Ar.ResolverContextBinder(
                                    stage.GetPathResolverContext()):
                                resolver = Ar.GetResolver()
                                # we resolve the payload path relative to the
                                # primSpec layer path (layer.identifier)
                                # far more likely to be correct. i hope
                                resolvedpath = resolver.AnchorRelativePath(
                                    spec.layer.identifier, payload_path)
                            # print 'payload resolvedpath', resolvedpath
                            info = {}
                            info['online'] = os.path.isfile(resolvedpath)
                            info['path'] = resolvedpath
                            info['type'] = 'payload'
                            self.nodes[resolvedpath] = info
                            # Avoid self-edges and duplicate edges.
                            if spec.layer.identifier != resolvedpath:
                                if not [spec.layer.identifier, resolvedpath,
                                        'payload'] in self.edges:
                                    self.edges.append(
                                        [spec.layer.identifier, resolvedpath,
                                         'payload'])

            # the docs say there's a HasSpecializes method
            # no, there is not. at least in this build of houdini 18.0.453
            # if spec.HasSpecializes:
            # let's just ignore specialize for the time being
            """
            specializesList = spec.specializesList
            spec_paths = []
            for itemlist in [specializesList.appendedItems,
                             specializesList.explicitItems,
                             specializesList.addedItems,
                             specializesList.prependedItems,
                             specializesList.orderedItems]:
                if itemlist:
                    for specialize in itemlist:
                        specialize_path = specialize.assetPath
                        with Ar.ResolverContextBinder(stage.GetPathResolverContext()):
                            resolver = Ar.GetResolver()
                            resolvedpath = resolver.AnchorRelativePath(spec.layer.identifier, specialize_path)
                            spec_paths.append(resolvedpath)
                            ret.append(resolvedpath)
            if spec_paths:
                print 'specializesList', spec.specializesList
            """

            # references operate the same to payloads
            if spec.hasReferences:
                reflist = spec.referenceList
                for itemlist in [reflist.appendedItems,
                                 reflist.explicitItems,
                                 reflist.addedItems,
                                 reflist.prependedItems,
                                 reflist.orderedItems]:
                    if itemlist:
                        for reference in itemlist:
                            reference_path = reference.assetPath
                            if reference_path:
                                # print reference_path
                                with Ar.ResolverContextBinder(
                                        stage.GetPathResolverContext()):
                                    resolver = Ar.GetResolver()
                                    # we resolve the payload path relative to
                                    # the primSpec layer path
                                    # (layer.identifier)
                                    # far more likely to be correct. i hope
                                    resolvedpath = \
                                        resolver.AnchorRelativePath(
                                            spec.layer.identifier,
                                            reference_path)
                                info = {}
                                info['online'] = os.path.isfile(resolvedpath)
                                info['path'] = resolvedpath
                                info['type'] = 'reference'
                                self.nodes[resolvedpath] = info
                                if spec.layer.identifier != resolvedpath:
                                    if not [spec.layer.identifier,
                                            resolvedpath,
                                            'reference'] in self.edges:
                                        self.edges.append(
                                            [spec.layer.identifier,
                                             resolvedpath, 'reference'])

            if spec.variantSets:
                for varset in spec.variantSets:
                    thisvarset = prim.GetVariantSet(varset.name)
                    current_variant_name = thisvarset.GetVariantSelection()
                    current_variant = varset.variants[current_variant_name]
                    for variant_name in varset.variants.keys():
                        variant = varset.variants[variant_name]
                        # todo: put variant info onto layer
                        # for key in variant.GetMetaDataInfoKeys():
                        #     print key, variant.GetInfo(key)
                        # variants that are linked to payloads
                        # variants can have other mechanisms, but sometimes
                        # they're a payload
                        payloads = variant.GetInfo('payload')
                        for itemlist in [payloads.appendedItems,
                                         payloads.explicitItems,
                                         payloads.addedItems,
                                         payloads.prependedItems,
                                         payloads.orderedItems]:
                            for payload in itemlist:
                                pathToResolve = payload.assetPath
                                # Anchor to the variant's own layer, not the
                                # spec's.
                                anchorPath = variant.layer.identifier
                                with Ar.ResolverContextBinder(
                                        stage.GetPathResolverContext()):
                                    resolver = Ar.GetResolver()
                                    resolvedpath = \
                                        resolver.AnchorRelativePath(
                                            anchorPath, pathToResolve)
                                if not [anchorPath, resolvedpath,
                                        'payload'] in self.edges:
                                    self.edges.append(
                                        [anchorPath, resolvedpath, 'payload'])

            # def, over or class
            # print 'GetSpecifier', spec.specifier
            # component,
            # print 'GetKind', spec.kind
            # print '--'

        # clips - this seems to be the way to do things
        # clips are not going to be picked up by the stage layers
        # inspection stuff
        # apparently they're expensive. whatever.
        # no prim stack shennanigans for us
        # gotta get a clip on each prim and then test it for paths?
        clips = Usd.ClipsAPI(prim)
        if clips.GetClipAssetPaths():
            # print 'CLIPS'.center(30, '-')
            # dict of clip info. full of everything
            # key is the clip *name*
            clip_dict = clips.GetClips()
            # print clip_dict
            """
            @todo: subframe handling
            integer frames: path/basename.###.usd
            subinteger frames: path/basename.##.##.usd.
            @todo: non-1 increments
            """
            # don't use resolved path in case either the first or last
            # file is missing from disk
            firstFile = str(clips.GetClipAssetPaths()[0].path)
            lastFile = str(clips.GetClipAssetPaths()[-1].path)
            # Last run of digits in the filename is taken as the frame
            # number (digitSearch is a module-level regex).
            firstFileNum = digitSearch.findall(firstFile)[-1]
            lastFileNum = digitSearch.findall(lastFile)[-1]
            digitRange = str(firstFileNum + '-' + lastFileNum)
            # Collapse the sequence into a single display name like
            # path/basename.<first>-<last>.usd
            nodeName = ''
            firstFileParts = firstFile.split(firstFileNum)
            for i in range(len(firstFileParts) - 1):
                nodeName += str(firstFileParts[i])
            nodeName += digitRange
            nodeName += firstFileParts[-1]

            # The clip sequence counts as 'online' only when every clip
            # file resolved on disk.
            allFilesFound = True
            for path in clips.GetClipAssetPaths():
                if (path.resolvedPath == ''):
                    allFilesFound = False
                    break

            # TODO : make more efficient - looping over everything currently
            # TODO: validate presence of all files in the clip seq. bg thread?
            # GetClipSets seems to be crashing this houdini build -
            # clips.GetClipSets()
            clip_sets = clips.GetClips().keys()
            # print 'GetClipManifestAssetPath',
            #     clips.GetClipManifestAssetPath().resolvedPath
            # this is a good one - resolved asset paths too
            for clipSet in clip_sets:
                for path in clips.GetClipAssetPaths(clipSet):
                    # print path, type(path)
                    # print path.resolvedPath
                    pass

            # layer that hosts list clip
            # but this is the MANIFEST path
            # not really correct. it'll have to do for now.
            layer = clips.GetClipManifestAssetPath().resolvedPath
            if not nodeName in self.nodes:
                info = {}
                info['online'] = allFilesFound
                info['path'] = nodeName
                info['type'] = 'clip'
                self.nodes[nodeName] = info
            if not [layer, nodeName, 'clip'] in self.edges:
                self.edges.append([layer, nodeName, 'clip'])