def testRanges( self ) :

	# `Range()` visits only the direct children of a parent, while
	# `RecursiveRange()` also descends into nested containers such as Boxes.
	script = Gaffer.ScriptNode()
	script["collect"] = GafferScene.CollectScenes()
	script["box"] = Gaffer.Box()
	script["box"]["collect"] = GafferScene.CollectScenes()

	self.assertEqual(
		list( GafferScene.CollectScenes.Range( script ) ),
		[ script["collect"] ],
	)
	self.assertEqual(
		list( GafferScene.CollectScenes.RecursiveRange( script ) ),
		[ script["collect"], script["box"]["collect"] ],
	)
def testSetRespectsRootName( self ) :

	light = GafferSceneTest.TestLight()

	# Two collects share the same input. Each must report set members
	# under its own root names, never those of the other node.
	collectAB = GafferScene.CollectScenes()
	collectAB["in"].setInput( light["out"] )
	collectAB["rootNames"].setValue( IECore.StringVectorData( [ "a", "b" ] ) )
	self.assertEqual(
		collectAB["out"].set( "__lights" ).value,
		IECore.PathMatcher( [ "/a/light", "/b/light" ] )
	)

	collectCD = GafferScene.CollectScenes()
	collectCD["in"].setInput( light["out"] )
	collectCD["rootNames"].setValue( IECore.StringVectorData( [ "c", "d" ] ) )
	self.assertEqual(
		collectCD["out"].set( "__lights" ).value,
		IECore.PathMatcher( [ "/c/light", "/d/light" ] )
	)
def testContextVariableForParent( self ) :

	sphere = GafferScene.Sphere()
	sphere["sets"].setValue( "set" )

	collect = GafferScene.CollectScenes()
	collect["rootNames"].setValue( IECore.StringVectorData( [ "a", "b" ] ) )

	parent = GafferScene.Parent()
	parent["in"].setInput( collect["out"] )
	parent["children"][0].setInput( sphere["out"] )
	parent["parent"].setValue( "${parent}" )

	# The `parent` plug value is substituted from the context, so
	# changing the variable must re-route the child scene to a
	# different collected root.
	with Gaffer.Context() as context :
		for root in ( "a", "b" ) :
			context["parent"] = "/" + root
			self.assertEqual(
				parent["out"].set( "set" ).value.paths(),
				[ "/{}/sphere".format( root ) ]
			)
def testCollectInvalidLocation( self ) :

	sphere = GafferScene.Sphere()
	sphere["sets"].setValue( "set1" )

	group = GafferScene.Group()
	group["in"][0].setInput( sphere["out"] )

	collect = GafferScene.CollectScenes()
	collect["rootNames"].setValue( IECore.StringVectorData( [ "A" ] ) )
	collect["in"].setInput( group["out"] )

	# With the default source root, the whole input hierarchy appears
	# under "/A".
	self.assertSceneValid( collect["out"] )
	for location, childNames in (
		( "/", [ "A" ] ),
		( "/A", [ "group" ] ),
		( "/A/group", [ "sphere" ] ),
		( "/A/group/sphere", [] ),
	) :
		self.assertEqual(
			collect["out"].childNames( location ),
			IECore.InternedStringVectorData( childNames )
		)

	# Pointing `sourceRoot` at a location that doesn't exist should
	# yield an empty (but still valid) scene below the root.
	collect["sourceRoot"].setValue( "iDontExist" )
	self.assertSceneValid( collect["out"] )
	self.assertEqual( collect["out"].childNames( "/" ), IECore.InternedStringVectorData( [ "A" ] ) )
	self.assertEqual( collect["out"].childNames( "/A" ), IECore.InternedStringVectorData() )
def testSignalThreadSafety( self ) :

	# Regression test : evaluating many CollectScenes roots in parallel
	# used to crash because Capsules accessed signals in a
	# non-threadsafe way. The scene is evaluated in a `gaffer stats`
	# subprocess, and `check_output` throws if that subprocess crashes.

	script = Gaffer.ScriptNode()

	# The sphere radius depends (via the Random node) on the
	# `collect:rootName` context variable, so every collected root
	# yields a distinct encapsulated scene.
	script["random"] = Gaffer.Random()
	script["random"]["contextEntry"].setValue("collect:rootName")

	script["sphere"] = GafferScene.Sphere()
	script["sphere"]["radius"].setInput(script["random"]["outFloat"])

	script["filter"] = GafferScene.PathFilter()
	script["filter"]["paths"].setValue(IECore.StringVectorData(["/sphere"]))

	# Encapsulation is what creates the Capsule objects involved in
	# the original crash.
	script["encapsulate"] = GafferScene.Encapsulate()
	script["encapsulate"]["in"].setInput(script["sphere"]["out"])
	script["encapsulate"]["filter"].setInput(script["filter"]["out"])

	script["collect"] = GafferScene.CollectScenes()
	script["collect"]["in"].setInput(script["encapsulate"]["out"])
	# 100 roots give plenty of opportunity for concurrent evaluation.
	script["collect"]["rootNames"].setValue(
		IECore.StringVectorData([str(x) for x in range(0, 100)]))

	script["fileName"].setValue(
		os.path.join(self.temporaryDirectory(), "test.gfr"))
	script.save()

	# This exposed a crash caused by non-threadsafe access to signals from Capsules. It
	# will throw if the subprocess crashes.
	subprocess.check_output([
		"gaffer", "stats", script["fileName"].getValue(), "-scene", "collect"
	])
def testGlobals( self ) :

	# An option whose value references `${collect:rootName}` must be
	# substituted with the first root name when read through the collect.
	options = GafferScene.CustomOptions()
	options["options"].addMember( "user:test", "${collect:rootName}" )
	self.assertIn( "option:user:test", options["out"]["globals"].getValue() )

	collect = GafferScene.CollectScenes()
	collect["in"].setInput( options["out"] )
	collect["rootNames"].setValue( IECore.StringVectorData( [ "a", "b" ] ) )

	self.assertEqual(
		collect["out"]["globals"].getValue()["option:user:test"],
		IECore.StringData( "a" )
	)
def testCacheReuse( self ) :

	sphere = GafferScene.Sphere()

	collect = GafferScene.CollectScenes()
	collect["in"].setInput( sphere["out"] )
	collect["rootNames"].setValue( IECore.StringVectorData( [ "test" ] ) )

	# Matching hashes mean the collected location can share cache
	# entries with its source...
	self.assertPathHashesEqual( sphere["out"], "/sphere", collect["out"], "/test/sphere" )

	# ...so the very same object instance should come back from both.
	sourceObject = sphere["out"].object( "/sphere", _copy = False )
	collectedObject = collect["out"].object( "/test/sphere", _copy = False )
	self.assertTrue( sourceObject.isSame( collectedObject ) )
def testMergeGlobals(self):

	# Exercises the `mergeGlobals` plug : with merging off, globals
	# come from the first root only; with merging on, globals from all
	# roots are combined, later roots taking precedence.
	script = Gaffer.ScriptNode()

	# Three options, each valued with the root name that produced it.
	script["options"] = GafferScene.CustomOptions()
	script["options"]["options"]["test1"] = Gaffer.NameValuePlug(
		"user:test1", "${collect:rootName}", True)
	script["options"]["options"]["test2"] = Gaffer.NameValuePlug(
		"user:test2", "${collect:rootName}", True)
	script["options"]["options"]["test3"] = Gaffer.NameValuePlug(
		"user:test3", "${collect:rootName}", True)

	# A spreadsheet keyed on the root name drives each option's
	# `enabled` plug, so different roots expose different options :
	# root "a" enables test1 + test2, root "b" enables test2 + test3.
	script["spreadsheet"] = Gaffer.Spreadsheet()
	script["spreadsheet"]["selector"].setValue("${collect:rootName}")
	for option in script["options"]["options"].children():
		script["spreadsheet"]["rows"].addColumn(option["enabled"], option.getName())
		option["enabled"].setInput(
			script["spreadsheet"]["out"][option.getName()])
	for rowName in ("a", "b"):
		row = script["spreadsheet"]["rows"].addRow()
		row["name"].setValue(rowName)
		row["cells"]["test1"]["value"].setValue(rowName == "a")
		row["cells"]["test2"]["value"].setValue(True)
		row["cells"]["test3"]["value"].setValue(rowName == "b")

	script["collect"] = GafferScene.CollectScenes()
	script["collect"]["in"].setInput(script["options"]["out"])
	script["collect"]["rootNames"].setInput(
		script["spreadsheet"]["activeRowNames"])

	# Merging off : only the first root ("a") contributes globals.
	self.assertEqual(
		script["collect"]["out"].globals(),
		IECore.CompoundObject({
			"option:user:test1": IECore.StringData("a"),
			"option:user:test2": IECore.StringData("a"),
		}))

	# Merging on : every root contributes, with "b" overriding "a"
	# for the options both define.
	script["collect"]["mergeGlobals"].setValue(True)
	self.assertEqual(
		script["collect"]["out"].globals(),
		IECore.CompoundObject({
			"option:user:test1": IECore.StringData("a"),
			"option:user:test2": IECore.StringData("b"),
			"option:user:test3": IECore.StringData("b"),
		}))
def testRecursion(self):

	# Test the generation of a set from a SetFilter referencing
	# paths generated by an upstream set generated from a recursive
	# PathFilter. Naive implementations of locked compute can
	# deadlock here, so make sure our implementation isn't too
	# naive :)

	plane = GafferScene.Plane()

	# 10000 roots, each containing a copy of the plane.
	collectScenes = GafferScene.CollectScenes()
	collectScenes["in"].setInput(plane["out"])
	collectScenes["rootNames"].setValue(
		IECore.StringVectorData([str(x) for x in range(0, 10000)]))

	# Set "A" matches every plane via a recursive wildcard filter.
	pathFilter = GafferScene.PathFilter()
	pathFilter["paths"].setValue(IECore.StringVectorData(["/.../plane"]))

	setA = GafferScene.Set("setA")
	setA["in"].setInput(collectScenes["out"])
	setA["filter"].setInput(pathFilter["out"])
	setA["name"].setValue("A")

	# Set "B" is derived from "A" via a set expression, creating the
	# upstream set dependency described above.
	setFilter = GafferScene.SetFilter()
	setFilter["setExpression"].setValue("A - /0/plane")

	setB = GafferScene.Set("setB")
	setB["in"].setInput(setA["out"])
	setB["filter"].setInput(setFilter["out"])
	setB["name"].setValue("B")

	with Gaffer.PerformanceMonitor() as pm:
		self.assertEqual(
			setB["out"].set("B").value,
			IECore.PathMatcher(
				["/{}/plane".format(x) for x in range(1, 10000)]))

	# Each internal filter result must have been hashed and computed
	# exactly once - repeated counts would indicate the locked compute
	# was not shared between the collaborating threads.
	self.assertEqual(
		pm.plugStatistics(
			setA["__FilterResults"]["__internalOut"]).hashCount, 1)
	self.assertEqual(
		pm.plugStatistics(
			setA["__FilterResults"]["__internalOut"]).computeCount, 1)
	self.assertEqual(
		pm.plugStatistics(
			setB["__FilterResults"]["__internalOut"]).hashCount, 1)
	self.assertEqual(
		pm.plugStatistics(
			setB["__FilterResults"]["__internalOut"]).computeCount, 1)
def testSpreadsheetAndCollect(self):

	# Verifies that the TranslateTool can trace a transform edit on a
	# collected root back to the correct spreadsheet row cell, and that
	# this still works after the network is promoted into a Box.
	script = Gaffer.ScriptNode()

	script["sphere"] = GafferScene.Sphere()

	# Spreadsheet keyed on `collect:rootName` provides a per-root
	# transform for the sphere.
	script["spreadsheet"] = Gaffer.Spreadsheet()
	script["spreadsheet"]["rows"].addColumn(script["sphere"]["transform"])
	script["sphere"]["transform"].setInput(
		script["spreadsheet"]["out"]["transform"])
	script["spreadsheet"]["rows"].addRow()["name"].setValue("sphere1")
	script["spreadsheet"]["rows"].addRow()["name"].setValue("sphere2")
	script["spreadsheet"]["selector"].setValue("${collect:rootName}")

	script["collect"] = GafferScene.CollectScenes()
	script["collect"]["in"].setInput(script["sphere"]["out"])
	script["collect"]["rootNames"].setInput(
		script["spreadsheet"]["activeRowNames"])

	self.assertEqual(
		script["collect"]["out"].childNames("/"),
		IECore.InternedStringVectorData(["sphere1", "sphere2"]))

	view = GafferSceneUI.SceneView()
	view["in"].setInput(script["collect"]["out"])

	tool = GafferSceneUI.TranslateTool(view)
	tool["active"].setValue(True)

	# Selecting each root must point the tool at the cell of the
	# matching row (the rows added above are rows[1] and rows[2]).
	GafferSceneUI.ContextAlgo.setSelectedPaths(
		view.getContext(), IECore.PathMatcher(["/sphere1"]))
	self.assertEqual(
		tool.selection()[0].transformPlug,
		script["spreadsheet"]["rows"][1]["cells"]["transform"]["value"])

	GafferSceneUI.ContextAlgo.setSelectedPaths(
		view.getContext(), IECore.PathMatcher(["/sphere2"]))
	self.assertEqual(
		tool.selection()[0].transformPlug,
		script["spreadsheet"]["rows"][2]["cells"]["transform"]["value"])

	# Check that we can work with promoted plugs too
	box = Gaffer.Box.create(
		script,
		Gaffer.StandardSet(
			[script["collect"], script["sphere"], script["spreadsheet"]]))
	promotedRowsPlug = Gaffer.PlugAlgo.promote(box["spreadsheet"]["rows"])

	self.assertEqual(tool.selection()[0].transformPlug,
		promotedRowsPlug[2]["cells"]["transform"]["value"])
def testDifferentSetsPerParent( self ) :

	sphere = GafferScene.Sphere()
	sphere["sets"].setValue( "roundThings" )

	cube = GafferScene.Cube()
	cube["sets"].setValue( "squareThings" )

	# A NameSwitch keyed on the `parent` variable delivers a different
	# child scene (and hence different sets) for each collected root.
	switch = Gaffer.NameSwitch()
	switch.setup( sphere["out"] )
	switch["selector"].setValue( "${parent}" )
	switch["in"].resize( 3 )
	switch["in"][1]["name"].setValue( "/a" )
	switch["in"][1]["value"].setInput( sphere["out"] )
	switch["in"][2]["name"].setValue( "/b" )
	switch["in"][2]["value"].setInput( cube["out"] )

	collect = GafferScene.CollectScenes()
	collect["rootNames"].setValue( IECore.StringVectorData( [ "a", "b" ] ) )

	rootFilter = GafferScene.PathFilter()
	rootFilter["paths"].setValue( IECore.StringVectorData( [ "/a", "/b" ] ) )

	parent = GafferScene.Parent()
	parent["in"].setInput( collect["out"] )
	parent["children"][0].setInput( switch["out"]["value"] )
	parent["filter"].setInput( rootFilter["out"] )
	parent["parentVariable"].setValue( "parent" )

	def assertExpectedSets( setsAndMembers ) :

		self.assertEqual(
			set( str( n ) for n in parent["out"].setNames() ),
			set( setsAndMembers.keys() )
		)
		for setName, members in setsAndMembers.items() :
			self.assertEqual(
				parent["out"].set( setName ).value,
				IECore.PathMatcher( members )
			)

	assertExpectedSets( {
		"roundThings" : [ "/a/sphere" ],
		"squareThings" : [ "/b/cube" ],
	} )

	# Renaming a source object must update the set members...
	cube["name"].setValue( "box" )
	assertExpectedSets( {
		"roundThings" : [ "/a/sphere" ],
		"squareThings" : [ "/b/box" ],
	} )

	# ...and changing a set name must update the set names.
	sphere["sets"].setValue( "balls" )
	assertExpectedSets( {
		"balls" : [ "/a/sphere" ],
		"squareThings" : [ "/b/box" ],
	} )
def testCollectObject( self ) :

	sphere = GafferScene.Sphere()
	sphere["sets"].setValue( "sphereSet" )

	collect = GafferScene.CollectScenes()
	collect["in"].setInput( sphere["out"] )
	collect["rootNames"].setValue( IECore.StringVectorData( [ "test" ] ) )
	# Collect the sphere location itself directly onto the root.
	collect["sourceRoot"].setValue( "sphere" )

	# The root location stands in for the source sphere : hash, object
	# and set membership all transfer onto "/test".
	self.assertPathHashesEqual( sphere["out"], "/sphere", collect["out"], "/test" )
	self.assertEqual( collect["out"].object( "/test" ), sphere["out"].object( "/sphere" ) )
	self.assertEqual( collect["out"].set( "sphereSet" ).value.paths(), [ "/test" ] )
def testSubstitutions( self ) :

	sphere = GafferScene.Sphere()

	primitiveVariables = GafferScene.PrimitiveVariables()
	primitiveVariables["in"].setInput( sphere["out"] )
	primitiveVariables["primitiveVariables"].addMember( "color", "${collect:rootName}" )

	collect = GafferScene.CollectScenes()
	collect["in"].setInput( primitiveVariables["out"] )
	collect["rootNames"].setValue( IECore.StringVectorData( [ "red", "green", "blue" ] ) )

	# Each branch must see its own root name substituted into the
	# primitive variable value.
	for rootName in collect["rootNames"].getValue() :
		primitive = collect["out"].object( "/{}/sphere".format( rootName ) )
		self.assertEqual( primitive["color"].data.value, rootName )
def testDuplicateRoots(self):

	# Root names that are duplicates, or that normalise to the same
	# path, must be uniquefied rather than producing duplicate
	# locations, sets or globals.
	sphere = GafferScene.Sphere()
	sphere["sets"].setValue("${collect:rootName}")

	sphereFilter = GafferScene.PathFilter()
	sphereFilter["paths"].setValue(IECore.StringVectorData(["/sphere"]))

	# An attribute, an option and a set all echo the root name, letting
	# us check which of the duplicate spellings won.
	attributes = GafferScene.CustomAttributes()
	attributes["in"].setInput(sphere["out"])
	attributes["filter"].setInput(sphereFilter["out"])
	attributes["attributes"]["test"] = Gaffer.NameValuePlug(
		"test", "${collect:rootName}")

	options = GafferScene.CustomOptions()
	options["in"].setInput(attributes["out"])
	options["options"]["test"] = Gaffer.NameValuePlug(
		"test", "${collect:rootName}")

	collect = GafferScene.CollectScenes()
	collect["in"].setInput(options["out"])

	# The user might enter the exact same value multiple times by accident.
	# And because the values represent paths, they may even enter _different_
	# values that map to the same path.
	collect["rootNames"].setValue(
		IECore.StringVectorData([
			"A/B",
			"/A/B",
			"/A/B/",
			"A/B",
		]))

	# We automatically uniquefy the names, using the first string as the
	# value of `collect:rootName`.
	self.assertEqual(collect["out"].childNames("/"),
		IECore.InternedStringVectorData(["A"]))
	self.assertEqual(collect["out"].childNames("/A"),
		IECore.InternedStringVectorData(["B"]))
	self.assertEqual(
		collect["out"].attributes("/A/B/sphere"),
		IECore.CompoundObject({"test": IECore.StringData("A/B")}))
	self.assertEqual(collect["out"].setNames(),
		IECore.InternedStringVectorData(["A/B"]))
	self.assertEqual(collect["out"].set("A/B").value,
		IECore.PathMatcher(["/A/B/sphere"]))
	self.assertEqual(
		collect["out"].globals(),
		IECore.CompoundObject({"option:test": IECore.StringData("A/B")}))
def testCollaboratePerf(self):

	# Performance test : many parallel evaluations depending on the
	# same expensive CollectScenes output, timed by the test runner's
	# PerformanceScope below.

	# Set up a scene with lots of spheres with different UVs
	uvSphere = GafferScene.Sphere()
	uvSphere["divisions"].setValue(imath.V2i(2000))
	uvSphere["expression"] = Gaffer.Expression()
	uvSphere["expression"].setExpression(
		'parent["transform"]["translate"]["y"] = 2 * int( context["collect:rootName"] )'
	)

	camera = GafferScene.Camera()
	camera["projection"].setValue('orthographic')

	parent = GafferScene.Parent()
	parent["parent"].setValue('/')
	parent["children"][0].setInput(camera["out"])
	parent["in"].setInput(uvSphere["out"])

	sphereFilter = GafferScene.PathFilter()
	sphereFilter["paths"].setValue(IECore.StringVectorData(['/sphere']))

	# The projected UVs depend on each sphere's (per-root) transform,
	# so every root's UVs are distinct and expensive to compute.
	mapProjection = GafferScene.MapProjection()
	mapProjection["in"].setInput(parent["out"])
	mapProjection["filter"].setInput(sphereFilter["out"])
	mapProjection["camera"].setValue('/camera')

	collectScenes = GafferScene.CollectScenes()
	collectScenes["in"].setInput(mapProjection["out"])
	collectScenes["rootNames"].setValue(
		IECore.StringVectorData([str(i) for i in range(50)]))

	allFilter = GafferScene.PathFilter()
	allFilter["paths"].setValue(IECore.StringVectorData(['/...']))

	# Set up query
	query = GafferScene.UDIMQuery()
	query["in"].setInput(collectScenes["out"])
	query["filter"].setInput(allFilter["out"])

	# The `iteration` context variable keeps each parallel evaluation
	# below in a distinct context while still depending on the query.
	query["outInt"] = Gaffer.IntPlug()
	query["outIntExpression"] = Gaffer.Expression()
	query["outIntExpression"].setExpression(
		'parent["outInt"] = len( parent["out"] ) + context["iteration"]')

	with GafferTest.TestRunner.PerformanceScope():
		GafferTest.parallelGetValue(query["outInt"], 400, "iteration")
def testNonLeafRoots( self ) :

	# Nested roots are not supported : whichever order they are
	# declared in, querying below the outer root must raise.
	sphere = GafferScene.Sphere()

	collect = GafferScene.CollectScenes()
	collect["in"].setInput( sphere["out"] )

	for rootNames in (
		[ "/root", "/root/nested" ],
		[ "/root/nested", "/root" ],
	) :
		collect["rootNames"].setValue( IECore.StringVectorData( rootNames ) )
		# `assertRaisesRegex` is the native Python 3 `unittest` API,
		# replacing the legacy `six.assertRaisesRegex` shim.
		with self.assertRaisesRegex( Gaffer.ProcessException, '"/root" contains nested roots' ) :
			collect["out"].childNames( "/root" )
def testDeepRoots( self ) :

	sphere = GafferScene.Sphere()
	sphere["transform"]["translate"].setValue( imath.V3f( 1, 2, 3 ) )
	sphere["sets"].setValue( "setA" )

	collect = GafferScene.CollectScenes()
	collect["in"].setInput( sphere["out"] )
	collect["rootNames"].setValue(
		IECore.StringVectorData( [
			"/world/ball",
			"/world/sphere",
			"/world/marbles/one",
			"/world/marbles/two",
		] )
	)

	# Intermediate locations are created to host the roots, with
	# children ordered as declared.
	for location, childNames in (
		( "/", [ "world" ] ),
		( "/world", [ "ball", "sphere", "marbles" ] ),
		( "/world/marbles", [ "one", "two" ] ),
	) :
		self.assertEqual(
			collect["out"].childNames( location ),
			IECore.InternedStringVectorData( childNames )
		)

	self.assertSceneValid( collect["out"] )

	# Each root is a plain passthrough of the input scene : identity
	# transform, no attributes, and the input hierarchy below it.
	subTree = GafferScene.SubTree()
	subTree["in"].setInput( collect["out"] )

	for root in collect["rootNames"].getValue() :
		self.assertEqual( collect["out"].transform( root ), imath.M44f() )
		self.assertEqual( collect["out"].attributes( root ), IECore.CompoundObject() )
		subTree["root"].setValue( root )
		self.assertScenesEqual( subTree["out"], sphere["out"] )
def testBadCachePolicyHang(self):

	# Using the legacy cache policy for OSLImage.shadingPlug creates a hang due to tbb task stealing,
	# though it's a bit hard to actually demonstrate

	constant = GafferImage.Constant()
	constant["format"].setValue(GafferImage.Format(128, 128, 1.000))

	# Need a slow to compute OSL code in order to trigger hang
	mandelbrotCode = self.mandelbrotNode()

	# In order to trigger the hang, we need to mix threads which are stuck waiting for an expression which
	# uses the Standard policy with threads that are actually finishing, so that tbb tries to start up new
	# threads while we're waiting for the expression result. To do this, we use the "var" context variable
	# to create two versions of this OSLCode
	mandelbrotCode["varExpression"] = Gaffer.Expression()
	mandelbrotCode["varExpression"].setExpression(
		'parent.parameters.iterations = 100000 + context( "var", 0 );', "OSL")

	# Image with all three channels driven by the slow OSL network.
	oslImage = GafferOSL.OSLImage()
	oslImage["channels"].addChild(
		Gaffer.NameValuePlug(
			"",
			Gaffer.Color3fPlug(
				"value",
				defaultValue=imath.Color3f(1, 1, 1),
				flags=Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic,
			),
			True, "channel",
			Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic))
	oslImage["in"].setInput(constant["out"])
	oslImage["channels"]["channel"]["value"][0].setInput(
		mandelbrotCode["out"]["outFloat"])
	oslImage["channels"]["channel"]["value"][1].setInput(
		mandelbrotCode["out"]["outFloat"])
	oslImage["channels"]["channel"]["value"][2].setInput(
		mandelbrotCode["out"]["outFloat"])

	# This imageStats is use to create non-blocking slow calculations
	imageStats = GafferImage.ImageStats()
	imageStats["in"].setInput(oslImage["out"])
	imageStats["area"].setValue(
		imath.Box2i(imath.V2i(0, 0), imath.V2i(64, 64)))

	# This box does the non-blocking slow calculation, followed by a blocking slow calculation.
	# This ensures that tasks which do just the non-block calculation will start finishing while
	# the blocking slow calculation is still running, allowing tbb to try running more threads
	# on the blocking calcluation, realizing they can't run, and stealing tasks onto those threads
	# which can hit the Standard policy lock on the expression upstream and deadlock, unless the
	# OSLImage isolates its threads correctly
	expressionBox = Gaffer.Box()
	expressionBox.addChild(
		Gaffer.FloatVectorDataPlug("inChannelData",
			defaultValue=IECore.FloatVectorData([])))
	expressionBox.addChild(Gaffer.FloatPlug("inStat"))
	expressionBox.addChild(
		Gaffer.FloatPlug("out", direction=Gaffer.Plug.Direction.Out))
	expressionBox["inChannelData"].setInput(oslImage["out"]["channelData"])
	expressionBox["inStat"].setInput(imageStats["average"]["r"])

	# ContextVariables node pins the image context variables needed to
	# evaluate `channelData` from inside the expression below.
	expressionBox["contextVariables"] = Gaffer.ContextVariables()
	expressionBox["contextVariables"].setup(
		Gaffer.FloatVectorDataPlug("in",
			defaultValue=IECore.FloatVectorData([])))
	expressionBox["contextVariables"]["variables"].addChild(
		Gaffer.NameValuePlug("image:tileOrigin",
			Gaffer.V2iPlug("value"), True, "member1"))
	expressionBox["contextVariables"]["variables"].addChild(
		Gaffer.NameValuePlug("image:channelName",
			Gaffer.StringPlug("value", defaultValue='R'), True,
			"member2"))
	expressionBox["contextVariables"]["variables"].addChild(
		Gaffer.NameValuePlug("var",
			Gaffer.IntPlug("value", defaultValue=1), True, "member3"))
	expressionBox["contextVariables"]["in"].setInput(
		expressionBox["inChannelData"])

	expressionBox["expression"] = Gaffer.Expression()
	expressionBox["expression"].setExpression(
		inspect.cleandoc("""
		d = parent["contextVariables"]["out"]
		parent["out"] = d[0] + parent["inStat"]
		"""))

	# Create a switch to mix which tasks perform the non-blocking or blocking calculation - we need a mixture
	# to trigger the hang
	switch = Gaffer.Switch()
	switch.setup(Gaffer.IntPlug(
		"in",
		defaultValue=0,
	))
	switch["in"][0].setInput(expressionBox["out"])
	switch["in"][1].setInput(imageStats["average"]["r"])

	switch["switchExpression"] = Gaffer.Expression()
	switch["switchExpression"].setExpression(
		'parent.index = ( stoi( context( "testContext", "0" ) ) % 10 ) > 5;', "OSL")

	# In order to evaluate this expression a bunch of times at once with different values of "testContext",
	# we set up a simple scene that can be evaluated with GafferSceneTest.traversScene.
	# In theory, we could use a simple function that used a parallel_for to evaluate switch["out"], but for
	# some reason we don't entirely understand, this does not trigger the hang
	import GafferSceneTest
	import GafferScene

	sphere = GafferScene.Sphere()

	pathFilter = GafferScene.PathFilter()
	pathFilter["paths"].setValue(IECore.StringVectorData(['/sphere']))

	customAttributes = GafferScene.CustomAttributes()
	customAttributes["attributes"].addChild(
		Gaffer.NameValuePlug("foo", Gaffer.FloatPlug("value"), True,
			"member1"))
	customAttributes["attributes"]["member1"]["value"].setInput(
		switch["out"])
	customAttributes["in"].setInput(sphere["out"])
	customAttributes["filter"].setInput(pathFilter["out"])

	collectScenes = GafferScene.CollectScenes()
	collectScenes["in"].setInput(customAttributes["out"])
	collectScenes["rootNames"].setValue(
		IECore.StringVectorData([str(i) for i in range(1000)]))
	collectScenes["rootNameVariable"].setValue('testContext')

	# When OSLImage.shadingPlug is not correctly isolated, and grain size on ShadingEngine is smaller than the
	# image tile size, this fails about 50% of the time. Running it 5 times makes the failure pretty consistent.
	for i in range(5):
		Gaffer.ValuePlug.clearCache()
		Gaffer.ValuePlug.clearHashCache()
		GafferSceneTest.traverseScene(collectScenes["out"])
def test( self ) :

	# Builds the following hierarchy, with per-group variation driven
	# by an expression, then checks that Encapsulate/Unencapsulate
	# round-trip correctly whether the capsules are made before or
	# after the CollectScenes :
	#
	# - groupA
	#    - group1
	#       - sphere
	#       - cube
	#    - group2
	#       - sphere
	#       - cube
	#       - sometimesCube
	#    - group3
	#       - sphere
	#       - cube
	#    - group4
	#       - sphere
	#       - cube

	box = Gaffer.Node()
	box["sphere"] = GafferScene.Sphere()
	box["sphere"]["sets"].setValue( "sphereSet" )
	box["cube"] = GafferScene.Cube()
	box["cube"]["sets"].setValue( "cubeSet" )
	box["sometimesCube"] = GafferScene.Cube()
	box["sometimesCube"]["name"].setValue( "sometimesCube" )
	box["sometimesCube"]["sets"].setValue( "cubeSet" )
	box["group"] = GafferScene.Group()
	box["group"]["in"][0].setInput( box["sphere"]["out"] )
	box["group"]["in"][1].setInput( box["cube"]["out"] )
	# Fix : connect `sometimesCube` to the next free array element. The
	# original connected `in[1]` a second time, which replaced the cube
	# connection and contradicted the hierarchy documented above.
	box["group"]["in"][2].setInput( box["sometimesCube"]["out"] )

	# Per-root variation keyed on `collect:rootName`, including
	# enabling `sometimesCube` for "group2" only.
	box["e"] = Gaffer.Expression()
	box["e"].setExpression( inspect.cleandoc(
		"""
		n = context["collect:rootName"]
		i = int( n[-1] ) - 1
		parent["sphere"]["radius"] = 1 + i * 0.1
		parent["sphere"]["transform"]["translate"] = imath.V3f( 1 + i, 0, 0 )
		parent["cube"]["transform"]["translate"] = imath.V3f( 0, 1 + i, 0 )
		parent["sometimesCube"]["enabled"] = n == "group2"
		parent["group"]["transform"]["translate"] = imath.V3f( 0, 0, 1 + i )
		"""
	) )

	collect = GafferScene.CollectScenes()
	collect["in"].setInput( box["group"]["out"] )
	collect["rootNames"].setValue( IECore.StringVectorData( [ "group1", "group2", "group3", "group4" ] ) )
	collect["sourceRoot"].setValue( "/group" )

	groupA = GafferScene.Group()
	groupA["name"].setValue( "groupA" )
	groupA["in"][0].setInput( collect["out"] )

	# Path 1 : encapsulate after the collect.
	encapsulateFilter = GafferScene.PathFilter()
	encapsulateFilter["paths"].setValue( IECore.StringVectorData( [ "/groupA/*" ] ) )

	encapsulateCollect = GafferScene.Encapsulate()
	encapsulateCollect["in"].setInput( groupA["out"] )
	encapsulateCollect["filter"].setInput( encapsulateFilter["out"] )

	# Path 2 : encapsulate before the collect.
	preEncapsulateFilter = GafferScene.PathFilter()
	preEncapsulateFilter["paths"].setValue( IECore.StringVectorData( [ "/group" ] ) )

	preEncapsulate = GafferScene.Encapsulate()
	preEncapsulate["in"].setInput( box["group"]["out"] )
	preEncapsulate["filter"].setInput( preEncapsulateFilter["out"] )

	collectEncapsulate = GafferScene.CollectScenes()
	collectEncapsulate["in"].setInput( preEncapsulate["out"] )
	collectEncapsulate["rootNames"].setValue( IECore.StringVectorData( [ "group1", "group2", "group3", "group4" ] ) )
	collectEncapsulate["sourceRoot"].setValue( "/group" )

	collectEncapsulateGroup = GafferScene.Group()
	collectEncapsulateGroup["name"].setValue( "groupA" )
	collectEncapsulateGroup["in"][0].setInput( collectEncapsulate["out"] )

	unencapsulateFilter = GafferScene.PathFilter()

	unencapsulate1 = GafferScene.Unencapsulate()
	unencapsulate1["in"].setInput( encapsulateCollect["out"] )
	unencapsulate1["filter"].setInput( unencapsulateFilter["out"] )

	unencapsulate2 = GafferScene.Unencapsulate()
	unencapsulate2["in"].setInput( collectEncapsulateGroup["out"] )
	unencapsulate2["filter"].setInput( unencapsulateFilter["out"] )

	# We can reverse the encapsulate by unencapsulating everything
	unencapsulateFilter["paths"].setValue( IECore.StringVectorData( [ "..." ] ) )
	self.assertScenesEqual( groupA["out"], unencapsulate1["out"] )

	# Unencapsulate should work the same whether the capsules come from before or after the collect
	self.assertScenesEqual( unencapsulate1["out"], unencapsulate2["out"] )

	# Or just unencapsulate one thing
	unencapsulateFilter["paths"].setValue( IECore.StringVectorData( [ "/groupA/group3" ] ) )
	self.assertScenesEqual( encapsulateCollect["out"], unencapsulate1["out"], pathsToPrune = [ "/groupA/group3" ] )
	self.assertScenesEqual( groupA["out"], unencapsulate1["out"], pathsToPrune = [ "/groupA/group1", "/groupA/group2", "/groupA/group4" ] )

	# Whichever place we encapsulate, we still get the same results, except that the capsule objects themselves
	# which weren't encapsulated will appear different ( because they were computed in different places, and
	# reference different source plugs )
	self.assertScenesEqual( unencapsulate1["out"], unencapsulate2["out"], checks = self.allSceneChecks - { "object" } )
	self.assertScenesEqual( unencapsulate1["out"], unencapsulate2["out"], pathsToPrune = [ "/groupA/group1", "/groupA/group2", "/groupA/group4" ] )

	unencapsulateFilter["paths"].setValue( IECore.StringVectorData( [ "..." ] ) )

	# Test modifying the hierarchy after making capsules by duplicating a location
	duplicate = GafferScene.Duplicate()
	duplicate["target"].setValue( "/groupA/group3" )
	duplicate["in"].setInput( collectEncapsulateGroup["out"] )

	unencapsulateDuplicated = GafferScene.Unencapsulate()
	unencapsulateDuplicated["in"].setInput( duplicate["out"] )
	unencapsulateDuplicated["filter"].setInput( unencapsulateFilter["out"] )

	# This copies group3 as group5
	self.assertEqual(
		unencapsulateDuplicated["out"].fullTransform( "/groupA/group5/sphere" ),
		groupA["out"].fullTransform( "/groupA/group3/sphere" )
	)

	# Sanity check that groups do have unique transforms
	self.assertNotEqual(
		unencapsulateDuplicated["out"].fullTransform( "/groupA/group5/sphere" ),
		groupA["out"].fullTransform( "/groupA/group4/sphere" )
	)

	# This should be same result as copying group3 to group5 without any encapsulation
	preDuplicate = GafferScene.Duplicate()
	preDuplicate["target"].setValue( "/groupA/group3" )
	preDuplicate["in"].setInput( groupA["out"] )

	self.assertScenesEqual( unencapsulateDuplicated["out"], preDuplicate["out"] )

	# Some tests where we merge an extra location into the scene amongst the capsules,
	# which should give the same result whether it's done before or after unencapsulating
	extraSphere = GafferScene.Sphere()
	extraSphere["name"].setValue( "extra" )
	extraSphere["sets"].setValue( "sphereSet" )

	extraSpherePostParent = GafferScene.Parent()
	extraSpherePostParent["in"].setInput( unencapsulate2["out"] )
	extraSpherePostParent["children"][0].setInput( extraSphere["out"] )

	extraSpherePreParent = GafferScene.Parent()
	extraSpherePreParent["in"].setInput( collectEncapsulateGroup["out"] )
	extraSpherePreParent["children"][0].setInput( extraSphere["out"] )

	unencapsulateAfter = GafferScene.Unencapsulate()
	unencapsulateAfter["in"].setInput( extraSpherePreParent["out"] )
	unencapsulateAfter["filter"].setInput( unencapsulateFilter["out"] )

	# Test parenting in a sphere at the same level as a capsule
	extraSpherePostParent["parent"].setValue( "/groupA" )
	extraSpherePreParent["parent"].setValue( "/groupA" )
	self.assertScenesEqual( extraSpherePostParent["out"], unencapsulateAfter["out"], checks = self.allSceneChecks - { "childNames" } )

	# Test a weird case: parenting the sphere under a capsule, so that when the capsule is expanded,
	# it gets merged with the children of the capsule. It's arguable that this shouldn't need to
	# work, and maybe there would be some extra optimizations available if it wasn't allowed, but for
	# the moment, it works
	extraSpherePostParent["parent"].setValue( "/groupA/group2" )
	extraSpherePreParent["parent"].setValue( "/groupA/group2" )
	self.assertScenesEqual( extraSpherePostParent["out"], unencapsulateAfter["out"], checks = self.allSceneChecks - { "childNames" } )
def testDefaultScene( self ) :

	# With no root names specified, CollectScenes outputs an empty
	# scene identical to a default-constructed ScenePlug.
	node = GafferScene.CollectScenes()
	self.assertScenesEqual( node["out"], GafferScene.ScenePlug() )
def __init__(self, name="__CameraSetup"):
    # Internal node for ArnoldTextureBake : partitions the (mesh, udim)
    # pairs to bake into per-task chunks, builds one UV-projection camera
    # per chunk entry, and declares the matching image outputs in the
    # scene globals.  `renderFileList` reports the files this task writes.
    GafferScene.FilteredSceneProcessor.__init__(self, name)

    # Public plugs
    self["cameraGroup"] = Gaffer.StringPlug("cameraGroup", Gaffer.Plug.Direction.In, "__TEXTUREBAKE_CAMERAS")
    self["bakeDirectory"] = Gaffer.StringPlug("bakeDirectory", Gaffer.Plug.Direction.In, "")
    self["defaultFileName"] = Gaffer.StringPlug(
        "defaultFileName", Gaffer.Plug.Direction.In, "${bakeDirectory}/<AOV>/<AOV>.<UDIM>.exr")
    self["defaultResolution"] = Gaffer.IntPlug(
        "defaultResolution", Gaffer.Plug.Direction.In, 512)
    self["uvSet"] = Gaffer.StringPlug("uvSet", Gaffer.Plug.Direction.In, "uv")
    self["normalOffset"] = Gaffer.FloatPlug("normalOffset", Gaffer.Plug.Direction.In, 0.1)
    self["aovs"] = Gaffer.StringPlug("aovs", Gaffer.Plug.Direction.In, "beauty:rgba")
    self["tasks"] = Gaffer.IntPlug("tasks", Gaffer.Plug.Direction.In, 1)
    self["taskIndex"] = Gaffer.IntPlug("taskIndex", Gaffer.Plug.Direction.In, 0)

    # Output : the list of image files this task will render.  Not
    # serialisable, because it is driven entirely by the expression below.
    self["renderFileList"] = Gaffer.StringVectorDataPlug(
        "renderFileList", Gaffer.Plug.Direction.Out, defaultValue=IECore.StringVectorData())
    self["renderFileList"].setFlags(Gaffer.Plug.Flags.Serialisable, False)

    # Private internal network

    # Collect every (udim, mesh) pair visible to the filter, plus any
    # per-mesh bake attribute overrides.
    self["__udimQuery"] = GafferScene.UDIMQuery()
    self["__udimQuery"]["in"].setInput(self["in"])
    self["__udimQuery"]["uvSet"].setInput(self["uvSet"])
    self["__udimQuery"]["attributes"].setValue( "bake:resolution bake:fileName")
    self["__udimQuery"]["filter"].setInput(self["filter"])

    # Per-chunk bake metadata, computed by __chunkExpression below.
    self["__chunkedBakeInfo"] = Gaffer.CompoundObjectPlug(
        "__chunkedBakeInfo", Gaffer.Plug.Direction.In, IECore.CompoundObject())
    self["__chunkedBakeInfo"].setFlags(Gaffer.Plug.Flags.Serialisable, False)

    # NOTE(review): inside the expression below, `chunkStart`/`chunkEnd` use
    # `/` division and are then used as slice indices.  Under Python 3 that
    # produces floats and slicing raises TypeError ( a later revision of
    # this node uses `//` ) — confirm the targeted Python version.
    self["__chunkExpression"] = Gaffer.Expression()
    self["__chunkExpression"].setExpression( inspect.cleandoc("""
        # Locate the next point in the list of files to bake where we can split the list into chunks without
        # seperating two files that need to get combined into the same texture
        def nextChunkBreak( i, l ):
            while i > 0 and i < len( l ) and (
                l[i - 1].get("udim") == l[i].get("udim") and
                l[i - 1].get("fileName") == l[i].get("fileName") ):
                i += 1
            return i

        rawInfo = parent["__udimQuery"]["out"]

        defaultFileName = parent["defaultFileName"]
        defaultResolution = parent["defaultResolution"]

        listInfo = []
        for udim, meshes in rawInfo.items():
            for mesh, extraAttributes in meshes.items():
                resolution = defaultResolution
                if "bake:resolution" in extraAttributes:
                    resolution = extraAttributes["bake:resolution"].value
                fileName = defaultFileName
                if "bake:fileName" in extraAttributes:
                    fileName = extraAttributes["bake:fileName"].value
                listInfo.append( { "udim" : int( udim ), "mesh" : mesh, "resolution" : resolution, "fileName" : fileName } )

        listInfo.sort( key = lambda i: (i["fileName"], i["udim"] ) )

        info = IECore.CompoundObject()

        numTasks = parent["tasks"]
        taskIndex = parent["taskIndex"]

        chunkStart = nextChunkBreak( ( taskIndex * len( listInfo ) ) / numTasks, listInfo )
        chunkEnd = nextChunkBreak( ( ( taskIndex + 1 ) * len( listInfo ) ) / numTasks, listInfo )

        dupeCount = 0
        prevFileName = ""
        for i in listInfo[chunkStart:chunkEnd]:
            o = IECore.CompoundObject()
            o["mesh"] = IECore.StringData( i["mesh"] )
            o["udim"] = IECore.IntData( i["udim"] )
            o["resolution"] = IECore.IntData( i["resolution"] )
            udimStr = str( i["udim"] )
            fileName = i["fileName"].replace( "<UDIM>", udimStr )
            if fileName == prevFileName:
                dupeCount += 1
                fileName = fileName + ".layer" + str( dupeCount )
            else:
                prevFileName = fileName
                dupeCount = 0
            o["fileName"] = IECore.StringData( fileName )

            name = o["mesh"].value.replace( "/", "_" ) + "." + udimStr
            info[ name ] = o
        parent["__chunkedBakeInfo"] = info

        fileList = []
        for name, i in info.items():
            fileName = i["fileName"].value
            for nameAndAov in parent["aovs"].strip( " " ).split( " " ):
                fileList.append( i["fileName"].value.replace( "<AOV>", nameAndAov.split(":")[0] ) )
        parent["renderFileList"] = IECore.StringVectorData( fileList )
    """), "python")

    # Re-parents the camera group built below onto the input scene, passing
    # the remaining scene components straight through from `in`.
    self["__parent"] = GafferScene.Parent()
    self["__parent"]["parent"].setValue("/")
    for c in [ 'bound', 'transform', 'attributes', 'object', 'childNames', 'setNames', 'set' ]:
        self["__parent"]["in"][c].setInput(self["in"][c])

    # NOTE(review): the RuntimeError raised inside this expression contains
    # a `%s` placeholder that is never formatted with the offending aov
    # string — the message will be emitted verbatim.
    self["__outputExpression"] = Gaffer.Expression()
    self["__outputExpression"].setExpression( inspect.cleandoc("""
        import IECoreScene

        # Transfer all input globals except for outputs
        inGlobals = parent["in"]["globals"]
        outGlobals = IECore.CompoundObject()
        for key, value in inGlobals.items():
            if not key.startswith( "output:" ):
                outGlobals[key] = value

        # Make our own outputs
        info = parent["__chunkedBakeInfo"]
        for cameraName, i in info.items():
            params = IECore.CompoundData()
            fileName = i["fileName"].value
            params["camera"] = IECore.StringData( "/" + parent["cameraGroup"] + "/" + cameraName )
            for nameAndAov in parent["aovs"].strip( " " ).split( " " ):
                tokens = nameAndAov.split( ":" )
                if len( tokens ) != 2:
                    raise RuntimeError( "Invalid bake aov specification: %s It should contain a : between name and data." )
                ( aovName, aov ) = tokens
                aovFileName = fileName.replace( "<AOV>", aovName )
                outGlobals["output:" + cameraName + "." + aov] = IECoreScene.Output(
                    aovFileName,
                    "exr",
                    aov + " RGBA",
                    params
                )
        parent["__parent"]["in"]["globals"] = outGlobals
    """), "python")

    # Base camera, specialised into a uv_camera per chunk by the tweaks
    # below ( projection, resolution, udim offsets, target mesh and uv set ).
    self["__camera"] = GafferScene.Camera()
    self["__camera"]["projection"].setValue("orthographic")

    self["__cameraTweaks"] = GafferScene.CameraTweaks()
    self["__cameraTweaks"]["in"].setInput(self["__camera"]["out"])
    self["__cameraTweaks"]["tweaks"][
        "projection"] = GafferScene.TweakPlug("projection", "uv_camera")
    self["__cameraTweaks"]["tweaks"][
        "resolution"] = GafferScene.TweakPlug("resolution", imath.V2i(0))
    self["__cameraTweaks"]["tweaks"][
        "u_offset"] = GafferScene.TweakPlug("u_offset", 0.0)
    self["__cameraTweaks"]["tweaks"][
        "v_offset"] = GafferScene.TweakPlug("v_offset", 0.0)
    self["__cameraTweaks"]["tweaks"]["mesh"] = GafferScene.TweakPlug( "mesh", "")
    self["__cameraTweaks"]["tweaks"]["uv_set"] = GafferScene.TweakPlug( "uv_set", "")
    self["__cameraTweaks"]["tweaks"][
        "extend_edges"] = GafferScene.TweakPlug("extend_edges", False)
    self["__cameraTweaks"]["tweaks"]["offset"] = GafferScene.TweakPlug( "offset", 0.1)
    self["__cameraTweaks"]["tweaks"]["offset"]["value"].setInput( self["normalOffset"])

    self["__cameraTweaksFilter"] = GafferScene.PathFilter()
    self["__cameraTweaksFilter"]["paths"].setValue( IECore.StringVectorData(['/camera']))
    self["__cameraTweaks"]["filter"].setInput( self["__cameraTweaksFilter"]["out"])

    # One camera per chunk entry, keyed by "collect:cameraName".
    self["__collectScenes"] = GafferScene.CollectScenes()
    self["__collectScenes"]["sourceRoot"].setValue("/camera")
    self["__collectScenes"]["rootNameVariable"].setValue( "collect:cameraName")
    self["__collectScenes"]["in"].setInput( self["__cameraTweaks"]["out"])

    # Group the collected cameras under the ( configurable ) camera group name.
    self["__group"] = GafferScene.Group()
    self["__group"]["in"][0].setInput(self["__collectScenes"]["out"])
    self["__group"]["name"].setInput(self["cameraGroup"])

    self["__parent"]["children"][0].setInput(self["__group"]["out"])

    # Drive the camera names from the chunk info computed above.
    self["__collectSceneRootsExpression"] = Gaffer.Expression()
    self["__collectSceneRootsExpression"].setExpression( inspect.cleandoc("""
        info = parent["__chunkedBakeInfo"]
        parent["__collectScenes"]["rootNames"] = IECore.StringVectorData( info.keys() )
    """), "python")

    # Configure each camera for its udim tile, resolution and target mesh.
    self["__cameraSetupExpression"] = Gaffer.Expression()
    self["__cameraSetupExpression"].setExpression( inspect.cleandoc("""
        cameraName = context["collect:cameraName"]
        info = parent["__chunkedBakeInfo"]
        i = info[cameraName]
        udimOffset = i["udim"].value - 1001
        parent["__cameraTweaks"]["tweaks"]["resolution"]["value"] = imath.V2i( i["resolution"].value )
        parent["__cameraTweaks"]["tweaks"]["u_offset"]["value"] = -( udimOffset % 10 )
        parent["__cameraTweaks"]["tweaks"]["v_offset"]["value"] = -( udimOffset / 10 )
        parent["__cameraTweaks"]["tweaks"]["mesh"]["value"] = i["mesh"].value
        parent["__cameraTweaks"]["tweaks"]["uv_set"]["value"] = parent["uvSet"] if parent["uvSet"] != "uv" else ""
    """), "python")

    self["out"].setFlags(Gaffer.Plug.Flags.Serialisable, False)
    self["out"].setInput(self["__parent"]["out"])
def testMerging(self):
    # End-to-end test of ArnoldTextureBake merging several bake variants :
    # a fully covered bake, a partially covered bake ( holes must be
    # filled ), and a mixed-resolution bake.  Builds the geometry with OSL,
    # dispatches the bake, then inspects the resulting images.
    allFilter = GafferScene.PathFilter()
    allFilter["paths"].setValue(IECore.StringVectorData(['/...']))

    plane = GafferScene.Plane()
    plane["divisions"].setValue(imath.V2i(20, 20))

    # Assign a basic gradient shader
    uvGradientCode = GafferOSL.OSLCode()
    uvGradientCode["out"].addChild( Gaffer.Color3fPlug("out", direction=Gaffer.Plug.Direction.Out))
    uvGradientCode["code"].setValue('out = color( u, v, 0.5 );')

    shaderAssignment = GafferScene.ShaderAssignment()
    shaderAssignment["in"].setInput(plane["out"])
    shaderAssignment["filter"].setInput(allFilter["out"])
    shaderAssignment["shader"].setInput(uvGradientCode["out"]["out"])

    # Set up a random id from 0 - 3 on each face
    randomCode = GafferOSL.OSLCode()
    randomCode["out"].addChild( Gaffer.IntPlug("randomId", direction=Gaffer.Plug.Direction.Out))
    randomCode["code"].setValue( 'randomId = int(cellnoise( P * 100 ) * 4);')

    outInt = GafferOSL.OSLShader()
    outInt.loadShader("ObjectProcessing/OutInt")
    outInt["parameters"]["name"].setValue('randomId')
    outInt["parameters"]["value"].setInput(randomCode["out"]["randomId"])

    outObject = GafferOSL.OSLShader()
    outObject.loadShader("ObjectProcessing/OutObject")
    outObject["parameters"]["in0"].setInput( outInt["out"]["primitiveVariable"])

    # interpolation 2 == per-face ( uniform ) primitive variable output.
    oSLObject = GafferOSL.OSLObject()
    oSLObject["in"].setInput(shaderAssignment["out"])
    oSLObject["filter"].setInput(allFilter["out"])
    oSLObject["shader"].setInput(outObject["out"])
    oSLObject["interpolation"].setValue(2)

    # Create 4 meshes by picking each of the 4 ids
    deleteContextVariables = Gaffer.DeleteContextVariables()
    deleteContextVariables.setup(GafferScene.ScenePlug())
    deleteContextVariables["variables"].setValue('collect:rootName')
    deleteContextVariables["in"].setInput(oSLObject["out"])

    # Cull any face whose randomId differs from the target id, which is
    # taken from the collect:rootName context variable by the expression.
    pickCode = GafferOSL.OSLCode()
    pickCode["parameters"].addChild(Gaffer.IntPlug("targetId"))
    pickCode["out"].addChild( Gaffer.IntPlug("cull", direction=Gaffer.Plug.Direction.Out))
    pickCode["code"].setValue( 'int randomId; getattribute( "randomId", randomId ); cull = randomId != targetId;')

    expression = Gaffer.Expression()
    pickCode.addChild(expression)
    expression.setExpression( 'parent.parameters.targetId = stoi( context( "collect:rootName", "0" ) );', "OSL")

    outInt1 = GafferOSL.OSLShader()
    outInt1.loadShader("ObjectProcessing/OutInt")
    outInt1["parameters"]["name"].setValue('deleteFaces')
    outInt1["parameters"]["value"].setInput(pickCode["out"]["cull"])

    outObject1 = GafferOSL.OSLShader()
    outObject1.loadShader("ObjectProcessing/OutObject")
    outObject1["parameters"]["in0"].setInput( outInt1["out"]["primitiveVariable"])

    oSLObject1 = GafferOSL.OSLObject()
    oSLObject1["in"].setInput(deleteContextVariables["out"])
    oSLObject1["filter"].setInput(allFilter["out"])
    oSLObject1["shader"].setInput(outObject1["out"])
    oSLObject1["interpolation"].setValue(2)

    deleteFaces = GafferScene.DeleteFaces()
    deleteFaces["in"].setInput(oSLObject1["out"])
    deleteFaces["filter"].setInput(allFilter["out"])

    collectScenes = GafferScene.CollectScenes()
    collectScenes["in"].setInput(deleteFaces["out"])
    collectScenes["rootNames"].setValue( IECore.StringVectorData(['0', '1', '2', '3']))
    collectScenes["sourceRoot"].setValue('/plane')

    # First variant: bake everything, covering the whole 1001 UDIM
    customAttributes1 = GafferScene.CustomAttributes()
    customAttributes1["attributes"].addMember( 'bake:fileName', IECore.StringData( '${bakeDirectory}/complete/<AOV>/<AOV>.<UDIM>.exr'))
    customAttributes1["in"].setInput(collectScenes["out"])

    # Second variant: bake just 2 of the 4 meshes, leaving lots of holes that will need filling
    pruneFilter = GafferScene.PathFilter()
    pruneFilter["paths"].setValue(IECore.StringVectorData(['/2', '/3']))

    prune = GafferScene.Prune()
    prune["in"].setInput(collectScenes["out"])
    prune["filter"].setInput(pruneFilter["out"])

    customAttributes2 = GafferScene.CustomAttributes()
    customAttributes2["attributes"].addMember( 'bake:fileName', IECore.StringData( '${bakeDirectory}/incomplete/<AOV>/<AOV>.<UDIM>.exr'))
    customAttributes2["in"].setInput(prune["out"])

    # Third variant: bake everything, but with one mesh at a higher resolution
    customAttributes3 = GafferScene.CustomAttributes()
    customAttributes3["attributes"].addMember( 'bake:fileName', IECore.StringData( '${bakeDirectory}/mismatch/<AOV>/<AOV>.<UDIM>.exr'))
    customAttributes3["in"].setInput(collectScenes["out"])

    pathFilter2 = GafferScene.PathFilter()
    pathFilter2["paths"].setValue(IECore.StringVectorData(['/2']))

    # Override the resolution on mesh /2 only.
    customAttributes = GafferScene.CustomAttributes()
    customAttributes["attributes"].addMember('bake:resolution', IECore.IntData(200))
    customAttributes["filter"].setInput(pathFilter2["out"])
    customAttributes["in"].setInput(customAttributes3["out"])

    # Merge the 3 variants
    mergeGroup = GafferScene.Group()
    mergeGroup["in"][-1].setInput(customAttributes["out"])
    mergeGroup["in"][-1].setInput(customAttributes1["out"])
    mergeGroup["in"][-1].setInput(customAttributes2["out"])

    arnoldTextureBake = GafferArnold.ArnoldTextureBake()
    arnoldTextureBake["in"].setInput(mergeGroup["out"])
    arnoldTextureBake["filter"].setInput(allFilter["out"])
    arnoldTextureBake["bakeDirectory"].setValue(self.temporaryDirectory() + '/bakeMerge/')
    arnoldTextureBake["defaultResolution"].setValue(128)

    # We want to check the intermediate results
    arnoldTextureBake["cleanupIntermediateFiles"].setValue(False)

    # Dispatch the bake
    script = Gaffer.ScriptNode()
    script.addChild(arnoldTextureBake)
    dispatcher = GafferDispatch.LocalDispatcher()
    dispatcher["jobsDirectory"].setValue(self.temporaryDirectory())
    dispatcher.dispatch([arnoldTextureBake])

    # Check results
    imageReader = GafferImage.ImageReader()

    # Re-evaluate the reference gradient on the baked image and diff.
    outLayer = GafferOSL.OSLShader()
    outLayer.loadShader("ImageProcessing/OutLayer")
    outLayer["parameters"]["layerColor"].setInput( uvGradientCode["out"]["out"])

    outImage = GafferOSL.OSLShader()
    outImage.loadShader("ImageProcessing/OutImage")
    outImage["parameters"]["in0"].setInput(outLayer["out"]["layer"])

    oSLImage = GafferOSL.OSLImage()
    oSLImage["in"].setInput(imageReader["out"])
    oSLImage["shader"].setInput(outImage["out"])

    # operation 10 == difference between the reference and the baked image.
    merge3 = GafferImage.Merge()
    merge3["in"]["in0"].setInput(oSLImage["out"])
    merge3["in"]["in1"].setInput(imageReader["out"])
    merge3["operation"].setValue(10)

    edgeDetect = self.SimpleEdgeDetect()
    edgeDetect["in"].setInput(imageReader["out"])

    edgeStats = GafferImage.ImageStats()
    edgeStats["in"].setInput(edgeDetect["out"])

    refDiffStats = GafferImage.ImageStats()
    refDiffStats["in"].setInput(merge3["out"])

    # Mask the diff to the pixels actually covered by the bake ( alpha ~ 1 )
    # so we can check covered pixels separately from filled holes.
    oneLayerReader = GafferImage.ImageReader()

    grade = GafferImage.Grade()
    grade["in"].setInput(oneLayerReader["out"])
    grade["channels"].setValue('[A]')
    grade["blackPoint"].setValue(imath.Color4f(0, 0, 0, 0.999899983))

    copyChannels = GafferImage.CopyChannels()
    copyChannels["in"]["in0"].setInput(merge3["out"])
    copyChannels["in"]["in1"].setInput(grade["out"])
    copyChannels["channels"].setValue('[A]')

    premultiply = GafferImage.Premultiply()
    premultiply["in"].setInput(copyChannels["out"])

    refDiffCoveredStats = GafferImage.ImageStats()
    refDiffCoveredStats["in"].setInput(premultiply["out"])

    # We are testing 3 different cases:
    # complete : Should be an exact match.
    # incomplete : Expect some mild variance of slopes and some error, because we have to
    #              reconstruct a lot of missing data.
    # mismatch : We should get a larger image, sized to the highest override on any mesh.
    #            Match won't be as perfect, because we're combining source images at
    #            different resolutions
    for name, expectedSize, maxEdge, maxRefDiff, maxMaskedDiff in [
            ("complete", 128, 0.01, 0.000001, 0.000001),
            ("incomplete", 128, 0.05, 0.15, 0.000001),
            ("mismatch", 200, 0.01, 0.01, 0.01) ]:
        imageReader["fileName"].setValue(self.temporaryDirectory() + "/bakeMerge/" + name + "/beauty/beauty.1001.tx")
        oneLayerReader["fileName"].setValue(self.temporaryDirectory() + "/bakeMerge/" + name + "/beauty/beauty.1001.exr")

        self.assertEqual(imageReader["out"]["format"].getValue().width(), expectedSize)
        self.assertEqual(imageReader["out"]["format"].getValue().height(), expectedSize)

        edgeStats["area"].setValue( imath.Box2i(imath.V2i(1), imath.V2i(expectedSize - 1)))
        refDiffStats["area"].setValue( imath.Box2i(imath.V2i(1), imath.V2i(expectedSize - 1)))
        refDiffCoveredStats["area"].setValue( imath.Box2i(imath.V2i(0), imath.V2i(expectedSize)))

        # Blue channel is constant, so everything should line up perfectly
        self.assertEqual(0, edgeStats["max"].getValue()[2])
        self.assertEqual(0, refDiffStats["max"].getValue()[2])
        self.assertEqual(0, refDiffCoveredStats["max"].getValue()[2])

        # Check the two varying channels ( R and G ).
        for i in range(2):
            # Make sure we've got actual data, by checking that we have some error ( we're not expecting
            # to perfectly reconstruct the gradient when the input is incomplete )
            self.assertGreater(edgeStats["max"].getValue()[i], 0.005)

            if name == "incomplete":
                self.assertGreater(edgeStats["max"].getValue()[i], 0.03)
                self.assertGreater(refDiffStats["max"].getValue()[i], 0.06)

            self.assertLess(edgeStats["max"].getValue()[i], maxEdge)
            self.assertLess(refDiffStats["max"].getValue()[i], maxRefDiff)
            self.assertLess(refDiffCoveredStats["max"].getValue()[i], maxMaskedDiff)
def test(self):
    """CollectScenes should build one new root per rootName, pulling each
    input scene via the collect:rootName context variable, and prefixing
    set contents with the new roots."""

    # A few source scenes to collect, all routed through a switch.
    script = Gaffer.ScriptNode()

    script["sphere"] = GafferScene.Sphere()
    script["sphere"]["sets"].setValue("spheres")

    script["cube"] = GafferScene.Cube()
    script["cube"]["sets"].setValue("cubes")

    script["group"] = GafferScene.Group()
    script["group"]["in"][0].setInput(script["sphere"]["out"])
    script["group"]["in"][1].setInput(script["cube"]["out"])

    script["switch"] = Gaffer.Switch()
    script["switch"].setup(GafferScene.ScenePlug())
    for index, source in enumerate(["sphere", "cube", "group"]):
        script["switch"]["in"][index].setInput(script[source]["out"])

    # With no root names configured, the collected scene is empty.
    script["collect"] = GafferScene.CollectScenes()
    script["collect"]["in"].setInput(script["switch"]["out"])

    collectedOut = script["collect"]["out"]
    self.assertSceneValid(collectedOut)
    self.assertEqual(collectedOut.childNames("/"), IECore.InternedStringVectorData())

    # Collect all three inputs, driving the switch index from the
    # collect:rootName context variable.
    script["collect"]["rootNames"].setValue(IECore.StringVectorData(["sphere", "cube", "group"]))

    script["expression"] = Gaffer.Expression()
    script["expression"].setExpression(inspect.cleandoc("""
        scenes = parent["collect"]["rootNames"]
        parent["switch"]["index"] = scenes.index( context.get( "collect:rootName", "sphere" ) )
    """))

    # One child per root name, and the output remains valid.
    self.assertEqual(collectedOut.childNames("/"), IECore.InternedStringVectorData(["sphere", "cube", "group"]))
    self.assertSceneValid(collectedOut)

    # Each collected subtree matches the scene it came from.
    script["subTree"] = GafferScene.SubTree()
    script["subTree"]["in"].setInput(collectedOut)
    for rootName in ["sphere", "cube", "group"]:
        script["subTree"]["root"].setValue("/" + rootName)
        self.assertScenesEqual(script["subTree"]["out"], script[rootName]["out"])

    # Check the sets too : set members gain the new root prefix.
    self.assertEqual(collectedOut["setNames"].getValue(), IECore.InternedStringVectorData(["spheres", "cubes"]))
    self.assertEqual(collectedOut.set("spheres").value, IECore.PathMatcher(["/sphere/sphere", "/group/group/sphere"]))
    self.assertEqual(collectedOut.set("cubes").value, IECore.PathMatcher(["/cube/cube", "/group/group/cube"]))
def testInPlug(self):
    # The node's scene input must be exposed as a ScenePlug named "in".
    node = GafferScene.CollectScenes()
    self.assertIsInstance(node["in"], GafferScene.ScenePlug)
def testRoot(self):
    """Exercises the `sourceRoot` plug, varying it per root name so each
    collected root pulls a different subtree of a different input scene."""

    script = Gaffer.ScriptNode()

    script["sphere"] = GafferScene.Sphere()
    script["sphere"]["sets"].setValue("sphereSet")

    script["group"] = GafferScene.Group()
    script["group"]["in"][0].setInput(script["sphere"]["out"])

    script["cube"] = GafferScene.Cube()
    script["cube"]["sets"].setValue("cubeSet")

    script["switch"] = Gaffer.Switch()
    script["switch"].setup(GafferScene.ScenePlug())
    script["switch"]["in"][0].setInput(script["group"]["out"])
    script["switch"]["in"][1].setInput(script["cube"]["out"])

    script["collect"] = GafferScene.CollectScenes()
    script["collect"]["in"].setInput(script["switch"]["out"])
    script["collect"]["rootNames"].setValue(IECore.StringVectorData(["0", "1", "2", "3"]))

    # Roots "0" and "1" collect from the grouped scene, "2" and "3" from
    # the cube scene, each from a different source root.
    script["expression"] = Gaffer.Expression()
    script["expression"].setExpression(inspect.cleandoc("""
        root = context.get( "collect:rootName", "0" )
        parent["switch"]["index"] = int( root ) > 1
        parent["collect"]["sourceRoot"] = { "0" : "", "1" : "/group", "2" : "/", "3" : "/cube" }[root]
    """))

    out = script["collect"]["out"]
    sphereObject = script["sphere"]["out"].object("/sphere")
    cubeObject = script["cube"]["out"].object("/cube")

    # Hierarchy : each root exposes the children of its source root.
    for path, children in [
        ("/", ["0", "1", "2", "3"]),
        ("/0", ["group"]),
        ("/1", ["sphere"]),
        ("/2", ["cube"]),
        ("/3", []),
        ("/0/group", ["sphere"]),
        ("/1/sphere", []),
        ("/2/cube", []),
        ("/0/group/sphere", []),
    ]:
        self.assertEqual(out.childNames(path), IECore.InternedStringVectorData(children))

    # Objects : only locations that map onto source objects carry one.
    for path, expectedObject in [
        ("/0", IECore.NullObject()),
        ("/1", IECore.NullObject()),
        ("/2", IECore.NullObject()),
        ("/3", cubeObject),
        ("/0/group", IECore.NullObject()),
        ("/1/sphere", sphereObject),
        ("/2/cube", cubeObject),
        ("/0/group/sphere", sphereObject),
    ]:
        self.assertEqual(out.object(path), expectedObject)

    # Sets are collected and re-rooted accordingly.
    self.assertEqual(out["setNames"].getValue(), IECore.InternedStringVectorData(["sphereSet", "cubeSet"]))
    self.assertEqual(set(out.set("sphereSet").value.paths()), {"/0/group/sphere", "/1/sphere"})
    self.assertEqual(set(out.set("cubeSet").value.paths()), {"/2/cube", "/3"})
def testParentContextVariable(self):
    # Tests Parent's `parentVariable` plug : the destination parent path is
    # published as a context variable, here used to drive a NameSwitch that
    # chooses which child scene gets parented at each location.

    # Parent a sphere at `/a` and a grid at `/b`.
    sphere = GafferScene.Sphere()
    sphere["transform"]["translate"]["x"].setValue(1)
    sphere["sets"].setValue("set1")

    grid = GafferScene.Grid()
    grid["transform"]["translate"]["x"].setValue(2)

    # Selector reads the `parent` variable published by the Parent node below.
    switch = Gaffer.NameSwitch()
    switch.setup(sphere["out"])
    switch["selector"].setValue("${parent}")
    switch["in"].resize(3)
    switch["in"][1]["name"].setValue("/a")
    switch["in"][1]["value"].setInput(sphere["out"])
    switch["in"][2]["name"].setValue("/b")
    switch["in"][2]["value"].setInput(grid["out"])

    collect = GafferScene.CollectScenes()
    collect["rootNames"].setValue(IECore.StringVectorData(["a", "b"]))

    filter = GafferScene.PathFilter()
    filter["paths"].setValue(IECore.StringVectorData(["/a", "/b"]))

    parent = GafferScene.Parent()
    parent["in"].setInput(collect["out"])
    parent["children"][0].setInput(switch["out"]["value"])
    parent["filter"].setInput(filter["out"])
    parent["parentVariable"].setValue("parent")

    # Check the scene is as we expect
    self.assertSceneValid(parent["out"])
    self.assertEqual(parent["out"].childNames("/a"), IECore.InternedStringVectorData(["sphere"]))
    self.assertEqual(parent["out"].childNames("/a/sphere"), IECore.InternedStringVectorData())
    self.assertEqual(parent["out"].childNames("/b"), IECore.InternedStringVectorData(["grid"]))
    self.assertEqual(
        parent["out"].childNames("/b/grid"),
        IECore.InternedStringVectorData( ["gridLines", "centerLines", "borderLines"]))

    # Each parented subtree must match its source, hash-identically.
    self.assertScenesEqual(sphere["out"], parent["out"], scenePlug2PathPrefix="/a")
    self.assertPathHashesEqual(
        sphere["out"], "/sphere",
        parent["out"], "/a/sphere",
    )
    self.assertScenesEqual(
        grid["out"], parent["out"], scenePlug2PathPrefix="/b",
        # Don't want to check sets, because the grid has no sets.
        checks=self.allSceneChecks - {"sets"})
    for path in [ "/grid", "/grid/centerLines", "/grid/gridLines", "/grid/borderLines" ]:
        self.assertPathHashesEqual(
            grid["out"], path,
            parent["out"], "/b" + path,
        )

    # Rename the parent variable.  This should dirty all the output plugs
    # and make the NameSwitch output an empty scene.
    # ( The slot must be connected before setValue so the dirty signals are captured. )
    cs = GafferTest.CapturingSlot(parent.plugDirtiedSignal())
    parent["parentVariable"].setValue("x")
    self.assertLessEqual(
        # Equivalent to `assertTrue( a.issubset( b ) )`, but with more informative errors
        { parent["out"][n] for n in [ "bound", "transform", "attributes", "object", "childNames", "set" ] },
        { x[0] for x in cs }
    )
    self.assertSceneValid(parent["out"])
    self.assertEqual(parent["out"].childNames("/a"), IECore.InternedStringVectorData())
    self.assertEqual(parent["out"].childNames("/b"), IECore.InternedStringVectorData())
def __init__( self, name = "__CameraSetup" ) :
	"""Build the internal node network that generates one UV bake camera
	per ( fileName, udim ) chunk assigned to this task, grouped under
	`cameraGroup` and parented into the input scene.

	The work is split across `tasks` tasks; `taskIndex` selects which
	contiguous chunk of the file list this instance handles. The resulting
	image file names are published on the read-only `renderFileList`
	output plug.
	"""

	GafferScene.FilteredSceneProcessor.__init__( self, name )

	# Public plugs

	self["cameraGroup"] = Gaffer.StringPlug( "cameraGroup", Gaffer.Plug.Direction.In, "__TEXTUREBAKE_CAMERAS" )
	self["bakeDirectory"] = Gaffer.StringPlug( "bakeDirectory", Gaffer.Plug.Direction.In, "" )
	self["defaultFileName"] = Gaffer.StringPlug( "defaultFileName", Gaffer.Plug.Direction.In, "${bakeDirectory}/<AOV>/<AOV>.<UDIM>.exr" )
	self["defaultResolution"] = Gaffer.IntPlug( "defaultResolution", Gaffer.Plug.Direction.In, 512 )
	self["uvSet"] = Gaffer.StringPlug( "uvSet", Gaffer.Plug.Direction.In, "uv" )
	self["udims"] = Gaffer.StringPlug( "udims", Gaffer.Plug.Direction.In, "" )
	self["normalOffset"] = Gaffer.FloatPlug( "normalOffset", Gaffer.Plug.Direction.In, 0.1 )
	self["aovs"] = Gaffer.StringPlug( "aovs", Gaffer.Plug.Direction.In, "beauty:rgba" )
	self["tasks"] = Gaffer.IntPlug( "tasks", Gaffer.Plug.Direction.In, 1 )
	self["taskIndex"] = Gaffer.IntPlug( "taskIndex", Gaffer.Plug.Direction.In, 0 )

	# Output plug listing every image file this task's render will produce.
	# Not serialised - it is wholly computed by the expression below.
	self["renderFileList"] = Gaffer.StringVectorDataPlug( "renderFileList", Gaffer.Plug.Direction.Out, defaultValue = IECore.StringVectorData() )
	self["renderFileList"].setFlags( Gaffer.Plug.Flags.Serialisable, False )

	# Private internal network

	# Query which UDIMs the filtered meshes occupy, along with any
	# per-mesh bake overrides authored as attributes.
	self["__udimQuery"] = GafferScene.UDIMQuery()
	self["__udimQuery"]["in"].setInput( self["in"] )
	self["__udimQuery"]["uvSet"].setInput( self["uvSet"] )
	self["__udimQuery"]["attributes"].setValue( "bake:resolution bake:fileName" )
	self["__udimQuery"]["filter"].setInput( self["filter"] )

	# Holds this task's chunk of the bake info, keyed by camera name.
	# Computed by `__chunkExpression`, so not serialised.
	self["__chunkedBakeInfo"] = Gaffer.CompoundObjectPlug( "__chunkedBakeInfo", Gaffer.Plug.Direction.In, IECore.CompoundObject() )
	self["__chunkedBakeInfo"].setFlags( Gaffer.Plug.Flags.Serialisable, False )

	# Groups the UDIM query results by ( fileName, udim ), slices the
	# sorted list into `tasks` chunks, and publishes this task's chunk on
	# `__chunkedBakeInfo` together with the matching `renderFileList`.
	self["__chunkExpression"] = Gaffer.Expression()
	self["__chunkExpression"].setExpression( inspect.cleandoc(
		"""
		import collections
		import re

		rawInfo = parent["__udimQuery"]["out"]

		defaultFileName = parent["defaultFileName"]
		defaultResolution = parent["defaultResolution"]

		selectUdimsStr = parent["udims"]

		# FrameList really ought to take care of this check, instead of just doing
		# something obviously wrong
		if re.match( ".*[0-9] +[0-9].*", selectUdimsStr ):
			raise RuntimeError( "ArnoldTextureBake : Udim list must be comma separated." )
		selectUdims = set( IECore.FrameList.parse( selectUdimsStr ).asList() )

		allMeshes = collections.defaultdict( lambda : [] )
		for udim, meshes in rawInfo.items():
			if selectUdims and not int( udim ) in selectUdims:
				continue

			for mesh, extraAttributes in meshes.items():
				resolution = defaultResolution
				if "bake:resolution" in extraAttributes:
					resolution = extraAttributes["bake:resolution"].value

				fileName = defaultFileName
				if "bake:fileName" in extraAttributes:
					fileName = extraAttributes["bake:fileName"].value

				allMeshes[ (fileName, udim) ].append( { "mesh" : mesh, "resolution" : resolution } )

		fileList = sorted( allMeshes.keys() )

		info = IECore.CompoundObject()

		numTasks = min( parent["tasks"], len( fileList ) )
		taskIndex = parent["taskIndex"]

		if taskIndex < numTasks:

			chunkStart = ( taskIndex * len( fileList ) ) // numTasks
			chunkEnd = ( ( taskIndex + 1 ) * len( fileList ) ) // numTasks

			dupeCount = 0
			prevFileName = ""
			for fileNameTemplate, udim in fileList[chunkStart:chunkEnd]:
				for meshData in allMeshes[(fileNameTemplate, udim)]:
					o = IECore.CompoundObject()
					o["mesh"] = IECore.StringData( meshData["mesh"] )
					o["udim"] = IECore.IntData( int( udim ) )
					o["resolution"] = IECore.IntData( meshData["resolution"] )

					udimStr = str( udim )
					fileName = fileNameTemplate.replace( "<UDIM>", udimStr )

					# Several meshes may map to the same file - disambiguate
					# subsequent ones with a ".layerN" suffix.
					if fileName == prevFileName:
						dupeCount += 1
						fileName = fileName + ".layer" + str( dupeCount )
					else:
						prevFileName = fileName
						dupeCount = 0

					o["fileName"] = IECore.StringData( fileName )

					name = o["mesh"].value.replace( "/", "_" ) + "." + udimStr
					info[ name ] = o
		parent["__chunkedBakeInfo"] = info

		fileList = []
		for name, i in info.items():
			fileName = i["fileName"].value
			for nameAndAov in parent["aovs"].strip( " " ).split( " " ):
				fileList.append( fileName.replace( "<AOV>", nameAndAov.split(":")[0] ) )
		parent["renderFileList"] = IECore.StringVectorData( fileList )
		"""
	), "python" )

	# Parents the group of bake cameras into the input scene at the root.
	self["__parent"] = GafferScene.Parent()
	self["__parent"]["parent"].setValue( "/" )
	for c in ['bound', 'transform', 'attributes', 'object', 'childNames', 'setNames', 'set']:
		self["__parent"]["in"][c].setInput( self["in"][c] )

	# Rewrites the globals so that the only outputs are the bake outputs
	# we declare for each chunk camera / AOV.
	self["__outputExpression"] = Gaffer.Expression()
	self["__outputExpression"].setExpression( inspect.cleandoc(
		"""
		import IECoreScene

		# Transfer all input globals except for outputs
		inGlobals = parent["in"]["globals"]
		outGlobals = IECore.CompoundObject()
		for key, value in inGlobals.items():
			if not key.startswith( "output:" ):
				outGlobals[key] = value

		# Make our own outputs
		info = parent["__chunkedBakeInfo"]
		for cameraName, i in info.items():
			params = IECore.CompoundData()
			fileName = i["fileName"].value
			params["camera"] = IECore.StringData( "/" + parent["cameraGroup"] + "/" + cameraName )
			for nameAndAov in parent["aovs"].strip( " " ).split( " " ):
				tokens = nameAndAov.split( ":" )
				if len( tokens ) != 2:
					# Previously the %s placeholder was never substituted, so the
					# offending specification was missing from the error message.
					raise RuntimeError( "Invalid bake aov specification: %s It should contain a : between name and data." % nameAndAov )
				( aovName, aov ) = tokens
				aovFileName = fileName.replace( "<AOV>", aovName )
				outGlobals["output:" + cameraName + "." + aov] = IECoreScene.Output(
					aovFileName,
					"exr",
					aov + " RGBA",
					params
				)
		parent["__parent"]["in"]["globals"] = outGlobals
		"""
	), "python" )

	# Template camera, specialised per chunk into an Arnold "uv_camera"
	# by the tweaks below.
	self["__camera"] = GafferScene.Camera()
	self["__camera"]["projection"].setValue( "orthographic" )

	self["__cameraTweaks"] = GafferScene.CameraTweaks()
	self["__cameraTweaks"]["in"].setInput( self["__camera"]["out"] )
	self["__cameraTweaks"]["tweaks"]["projection"] = GafferScene.TweakPlug( "projection", "uv_camera" )
	self["__cameraTweaks"]["tweaks"]["resolution"] = GafferScene.TweakPlug( "resolution", imath.V2i( 0 ) )
	self["__cameraTweaks"]["tweaks"]["u_offset"] = GafferScene.TweakPlug( "u_offset", 0.0 )
	self["__cameraTweaks"]["tweaks"]["v_offset"] = GafferScene.TweakPlug( "v_offset", 0.0 )
	self["__cameraTweaks"]["tweaks"]["mesh"] = GafferScene.TweakPlug( "mesh", "" )
	self["__cameraTweaks"]["tweaks"]["uv_set"] = GafferScene.TweakPlug( "uv_set", "" )
	self["__cameraTweaks"]["tweaks"]["extend_edges"] = GafferScene.TweakPlug( "extend_edges", False )
	self["__cameraTweaks"]["tweaks"]["offset"] = GafferScene.TweakPlug( "offset", 0.1 )

	self["__cameraTweaks"]["tweaks"]["offset"]["value"].setInput( self["normalOffset"] )

	self["__cameraTweaksFilter"] = GafferScene.PathFilter()
	self["__cameraTweaksFilter"]["paths"].setValue( IECore.StringVectorData( [ '/camera' ] ) )
	self["__cameraTweaks"]["filter"].setInput( self["__cameraTweaksFilter"]["out"] )

	# One tweaked camera per chunk, collected under the chunk's name.
	self["__collectScenes"] = GafferScene.CollectScenes()
	self["__collectScenes"]["sourceRoot"].setValue( "/camera" )
	self["__collectScenes"]["rootNameVariable"].setValue( "collect:cameraName" )
	self["__collectScenes"]["in"].setInput( self["__cameraTweaks"]["out"] )

	self["__group"] = GafferScene.Group()
	self["__group"]["in"][0].setInput( self["__collectScenes"]["out"] )
	self["__group"]["name"].setInput( self["cameraGroup"] )

	self["__parent"]["children"][0].setInput( self["__group"]["out"] )

	# Drive the collected root names from the chunk info keys.
	self["__collectSceneRootsExpression"] = Gaffer.Expression()
	self["__collectSceneRootsExpression"].setExpression( inspect.cleandoc(
		"""
		info = parent["__chunkedBakeInfo"]
		parent["__collectScenes"]["rootNames"] = IECore.StringVectorData( info.keys() )
		"""
	), "python" )

	# Configure each camera for its chunk : resolution, the UV offset of
	# its UDIM tile, and the mesh / uvSet it bakes.
	self["__cameraSetupExpression"] = Gaffer.Expression()
	self["__cameraSetupExpression"].setExpression( inspect.cleandoc(
		"""
		cameraName = context["collect:cameraName"]

		info = parent["__chunkedBakeInfo"]

		i = info[cameraName]
		udimOffset = i["udim"].value - 1001
		parent["__cameraTweaks"]["tweaks"]["resolution"]["value"] = imath.V2i( i["resolution"].value )
		parent["__cameraTweaks"]["tweaks"]["u_offset"]["value"] = -( udimOffset % 10 )
		parent["__cameraTweaks"]["tweaks"]["v_offset"]["value"] = -( udimOffset // 10 )

		parent["__cameraTweaks"]["tweaks"]["mesh"]["value"] = i["mesh"].value
		parent["__cameraTweaks"]["tweaks"]["uv_set"]["value"] = parent["uvSet"] if parent["uvSet"] != "uv" else ""
		"""
	), "python" )

	self["out"].setFlags( Gaffer.Plug.Flags.Serialisable, False )
	self["out"].setInput( self["__parent"]["out"] )