def testDefaultValueClashes(self):
    """Checks that a referenced plug restored to its default keeps that
    default across a script save/load round trip, even though the
    exported reference stores a non-default value."""

    # Build a Box with a promoted plug, move the plug off its default,
    # and export the Box for referencing.
    sourceScript = Gaffer.ScriptNode()
    sourceScript["b"] = Gaffer.Box()
    sourceScript["b"]["n"] = GafferTest.AddNode()
    promoted = sourceScript["b"].promotePlug(sourceScript["b"]["n"]["op1"])
    promoted.setValue(10)
    sourceScript["b"].exportForReference("/tmp/test.grf")

    # Reference the export into a new script, set the value back to
    # its default, and save the script.
    referencingScript = Gaffer.ScriptNode()
    referencingScript["r"] = Gaffer.Reference()
    referencingScript["r"].load("/tmp/test.grf")
    referencedPlug = referencingScript["r"].descendant(promoted.relativeName(sourceScript["b"]))
    self.assertEqual(referencedPlug.getValue(), 10)
    referencedPlug.setToDefault()
    self.assertEqual(referencedPlug.getValue(), referencedPlug.defaultValue())
    referencingScript["fileName"].setValue("/tmp/test.gfr")
    referencingScript.save()

    # Load the saved script, and check that the value is at the default.
    reloadedScript = Gaffer.ScriptNode()
    reloadedScript["fileName"].setValue("/tmp/test.gfr")
    reloadedScript.load()
    reloadedPlug = reloadedScript["r"].descendant(promoted.relativeName(sourceScript["b"]))
    self.assertEqual(reloadedPlug.getValue(), reloadedPlug.defaultValue())
def testConnectNode( self ) :
    """Exercises GraphLayout.connectNode(), checking that connections are only
    made through plugs that are represented in the node graph by a Nodule for
    that exact plug. Note that the Nodule registrations below are global side
    effects, so the order of the statements is significant."""

    s = Gaffer.ScriptNode()
    s["add1"] = GafferTest.AddNode()
    s["add2"] = GafferTest.AddNode()

    ng = GafferUI.NodeGraph( s )
    g = ng.graphGadget()

    # check we can connect to a top level plug
    g.getLayout().connectNode( g, s["add2"], Gaffer.StandardSet( [ s["add1"] ] ) )
    self.assertTrue( s["add2"]["op1"].getInput().isSame( s["add1"]["sum"] ) )

    # check we can connect to a nested plug, but only provided it is represented
    # in the node graph by a nodule for that exact plug. without a registration
    # for "p", no connection is expected.
    s["compound"] = GafferTest.CompoundPlugNode()
    g.getLayout().connectNode( g, s["compound"], Gaffer.StandardSet( [ s["add2"] ] ) )
    self.assertEqual( s["compound"]["p"]["f"].getInput(), None )

    # registering a CompoundNodule for "p" exposes its children, so a node
    # created after the registration can receive the connection.
    GafferUI.Nodule.registerNodule( GafferTest.CompoundPlugNode, "p", GafferUI.CompoundNodule )
    s["compound2"] = GafferTest.CompoundPlugNode()
    g.getLayout().connectNode( g, s["compound2"], Gaffer.StandardSet( [ s["add2"] ] ) )
    self.assertTrue( s["compound2"]["p"]["f"].getInput().isSame( s["add2"]["sum"] ) )

    # check we can connect from a nested plug, but only provided it is represented
    # in the node graph by a nodule for that exact plug.
    s["add3"] = GafferTest.AddNode()
    g.getLayout().connectNode( g, s["add3"], Gaffer.StandardSet( [ s["compound2"] ] ) )
    self.assertEqual( s["add3"]["op1"].getInput(), None )

    # same pattern for the output side : register a nodule for "o" and retry
    # with a freshly created node.
    GafferUI.Nodule.registerNodule( GafferTest.CompoundPlugNode, "o", GafferUI.CompoundNodule )
    s["compound3"] = GafferTest.CompoundPlugNode()
    g.getLayout().connectNode( g, s["add3"], Gaffer.StandardSet( [ s["compound3"] ] ) )
    self.assertTrue( s["add3"]["op1"].getInput().isSame( s["compound3"]["o"]["f"] ) )
def __visitationGraph(self):
    """Returns a ScriptNode holding the network below, used by the
    traversal tests.

    L1_1    L1_2
     |       |\\
     |       | \\
     |       |  \\
    L2_1    L2_2 L2_3
     |\\      |   /
     | \\     |  /
     |  \\    | /
     |   \\   |/
    L3_1    L3_2
    """

    script = Gaffer.ScriptNode()

    script["L1_1"] = GafferTest.MultiplyNode()
    script["L1_2"] = GafferTest.AddNode()
    script["L2_1"] = GafferTest.AddNode()
    script["L2_2"] = GafferTest.MultiplyNode()
    script["L2_3"] = GafferTest.AddNode()
    script["L3_1"] = GafferTest.AddNode()
    script["L3_2"] = GafferTest.MultiplyNode()
    # extra input so L3_2 can receive from all three middle-layer nodes
    script["L3_2"]["op3"] = Gaffer.IntPlug()

    # ( destination node, destination plug, source node, source plug )
    for dstNode, dstPlug, srcNode, srcPlug in (
        ("L2_1", "op1", "L1_1", "product"),
        ("L2_2", "op1", "L1_2", "sum"),
        ("L2_3", "op1", "L1_2", "sum"),
        ("L3_1", "op1", "L2_1", "sum"),
        ("L3_2", "op1", "L2_1", "sum"),
        ("L3_2", "op2", "L2_2", "product"),
        ("L3_2", "op3", "L2_3", "sum"),
    ):
        script[dstNode][dstPlug].setInput(script[srcNode][srcPlug])

    return script
def testUpstreamNodeGadgets( self ) :
    """Checks GraphGadget.upstreamNodeGadgets() returns exactly the visible
    upstream nodes, and that filtered-out (invisible) nodes are skipped."""

    script = Gaffer.ScriptNode()

    # a -> b -> c -> e -> f
    #           ^
    #           |
    #           d
    for nodeName in ( "a", "b", "c", "d", "e", "f" ) :
        script[nodeName] = GafferTest.AddNode()

    script["b"]["op1"].setInput( script["a"]["sum"] )
    script["c"]["op1"].setInput( script["b"]["sum"] )
    script["c"]["op2"].setInput( script["d"]["sum"] )
    script["e"]["op1"].setInput( script["c"]["sum"] )
    script["f"]["op1"].setInput( script["e"]["sum"] )

    graphGadget = GafferUI.GraphGadget( script )

    def upstreamNames( node ) :
        return [ gadget.node().relativeName( script ) for gadget in graphGadget.upstreamNodeGadgets( node ) ]

    names = upstreamNames( script["c"] )
    self.assertEqual( len( names ), 3 )
    self.assertEqual( set( names ), { "a", "b", "d" } )

    names = upstreamNames( script["f"] )
    self.assertEqual( len( names ), 5 )
    self.assertEqual( set( names ), { "a", "b", "d", "c", "e" } )

    # filtered nodes should be ignored
    graphGadget.setFilter( Gaffer.StandardSet( [ script["f"], script["e"], script["a"] ] ) )

    self.assertEqual( upstreamNames( script["f"] ), [ "e" ] )
def testHash(self):
    """A default MultiplyNode must produce valid hashes for its outputs."""
    self.assertHashesValid(GafferTest.MultiplyNode())
def testCacheReuse(self):
    """Checks that a Catalogue serves channel data straight from the display
    driver's cache entries rather than rereading the image it saved to disk."""
    # Send an image to the catalogue, and also
    # capture the display driver that we used to
    # send it.
    c = GafferImage.Catalogue()
    c["directory"].setValue(
        os.path.join(self.temporaryDirectory(), "catalogue"))
    drivers = GafferTest.CapturingSlot(
        GafferImage.Display.driverCreatedSignal())
    r = GafferImage.ImageReader()
    r["fileName"].setValue(
        "${GAFFER_ROOT}/python/GafferImageTest/images/checker.exr")
    self.sendImage(r["out"], c)
    self.assertEqual(len(drivers), 1)
    # The image will have been saved to disk so it can persist between sessions,
    # and the Catalogue should have dropped any reference it has to the driver,
    # in order to save memory.
    self.assertEqual(len(c["images"]), 1)
    self.assertEqual(
        os.path.dirname(c["images"][0]["fileName"].getValue()),
        c["directory"].getValue())
    # refCount of 1 means only our capture slot holds the driver now.
    self.assertEqual(drivers[0][0].refCount(), 1)
    # But we don't want the Catalogue to immediately reload the image from
    # disk, because for large images with many AOVs this is a huge overhead.
    # We want to temporarily reuse the cache entries that were created from
    # the data in the display driver. These should be identical to a regular
    # Display node containing the same driver.
    display = GafferImage.Display()
    display.setDriver(drivers[0][0])
    self.assertEqual(display["out"].channelDataHash("R", imath.V2i(0)),
                     c["out"].channelDataHash("R", imath.V2i(0)))
    # _copy=False exposes the underlying data object, so isSame() can verify
    # that both nodes return the identical object, not just equal values.
    self.assertTrue(display["out"].channelData(
        "R", imath.V2i(0), _copy=False).isSame(c["out"].channelData("R", imath.V2i(0), _copy=False)))
    # This applies to copies too
    c["images"].addChild(
        GafferImage.Catalogue.Image(flags=Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic))
    self.assertEqual(len(c["images"]), 2)
    c["images"][1].copyFrom(c["images"][0])
    c["imageIndex"].setValue(1)
    self.assertEqual(display["out"].channelDataHash("R", imath.V2i(0)),
                     c["out"].channelDataHash("R", imath.V2i(0)))
    self.assertTrue(display["out"].channelData(
        "R", imath.V2i(0), _copy=False).isSame(c["out"].channelData("R", imath.V2i(0), _copy=False)))
def testManyContexts( self ) :
    """Forwards to the C++ stress test implemented in the GafferTest extension module."""
    GafferTest.testManyContexts()
def testConnectedNodeGadgets( self ) :
    """Exercises GraphGadget.connectedNodeGadgets() : bidirectional and
    directional traversal, the degreesOfSeparation limit, and the exclusion
    of filtered (invisible) nodes."""

    script = Gaffer.ScriptNode()

    # a -> b -> c -> e -> f
    #           |
    #           v
    #           d
    script["a"] = GafferTest.AddNode()
    script["b"] = GafferTest.AddNode()
    script["c"] = GafferTest.AddNode()
    script["d"] = GafferTest.AddNode()
    script["e"] = GafferTest.AddNode()
    script["f"] = GafferTest.AddNode()

    script["b"]["op1"].setInput( script["a"]["sum"] )
    script["c"]["op1"].setInput( script["b"]["sum"] )
    script["d"]["op1"].setInput( script["c"]["sum"] )
    script["e"]["op1"].setInput( script["c"]["sum"] )
    script["f"]["op1"].setInput( script["e"]["sum"] )

    g = GafferUI.GraphGadget( script )

    # test traversing in both directions
    u = [ x.node().relativeName( script ) for x in g.connectedNodeGadgets( script["b"] ) ]
    self.assertEqual( set( u ), set( [ "a", "c", "d", "e", "f" ] ) )

    u = [ x.node().relativeName( script ) for x in g.connectedNodeGadgets( script["e"] ) ]
    self.assertEqual( set( u ), set( [ "a", "b", "c", "d", "f" ] ) )

    # degreesOfSeparation = 1 limits the search to immediate neighbours
    u = [ x.node().relativeName( script ) for x in g.connectedNodeGadgets( script["c"], degreesOfSeparation = 1 ) ]
    self.assertEqual( set( u ), set( [ "b", "d", "e" ] ) )

    # test traversing upstream
    u = [ x.node().relativeName( script ) for x in g.connectedNodeGadgets( script["c"], direction = Gaffer.Plug.Direction.In ) ]
    self.assertEqual( set( u ), set( [ "a", "b" ] ) )

    u = [ x.node().relativeName( script ) for x in g.connectedNodeGadgets( script["c"], direction = Gaffer.Plug.Direction.In, degreesOfSeparation = 1 ) ]
    self.assertEqual( set( u ), set( [ "b" ] ) )

    # test traversing downstream
    u = [ x.node().relativeName( script ) for x in g.connectedNodeGadgets( script["c"], direction = Gaffer.Plug.Direction.Out ) ]
    self.assertEqual( set( u ), set( [ "d", "e", "f" ] ) )

    u = [ x.node().relativeName( script ) for x in g.connectedNodeGadgets( script["c"], direction = Gaffer.Plug.Direction.Out, degreesOfSeparation = 1 ) ]
    self.assertEqual( set( u ), set( [ "d", "e" ] ) )

    # test that invisible nodes are ignored
    g.setFilter( Gaffer.StandardSet( [ script["f"], script["e"], script["c"] ] ) )

    u = [ x.node().relativeName( script ) for x in g.connectedNodeGadgets( script["e"] ) ]
    self.assertEqual( set( u ), set( [ "f", "c" ] ) )

    u = [ x.node().relativeName( script ) for x in g.connectedNodeGadgets( script["e"], direction = Gaffer.Plug.Direction.In ) ]
    self.assertEqual( set( u ), set( [ "c" ] ) )

    u = [ x.node().relativeName( script ) for x in g.connectedNodeGadgets( script["e"], direction = Gaffer.Plug.Direction.Out ) ]
    self.assertEqual( set( u ), set( [ "f" ] ) )
def createWriter(text):
    # Builds an appending TextWriter whose output includes the current frame.
    # NOTE(review): `fileName` is a free variable resolved from the enclosing
    # scope — confirm it is defined wherever this helper is used.
    writer = GafferTest.TextWriter()
    writer["fileName"].setValue(fileName)
    writer["mode"].setValue("a")
    writer["text"].setValue(text + " on ${frame};")
    return writer
def testContentionForOneItemTaskParallel( self ) :
    """Forwards to the C++ LRUCache contention test, using the "taskParallel" policy."""
    GafferTest.testLRUCacheContentionForOneItem( "taskParallel" )
def testContentionForOneItemSerial( self ) :
    """Forwards to the C++ LRUCache contention test, using the "serial" policy."""
    GafferTest.testLRUCacheContentionForOneItem( "serial" )
def test(self):
    """Forwards to the DownstreamIterator test implemented in C++."""
    # call through to c++ test.
    GafferTest.testDownstreamIterator()
def testAcquireOr( self ) :
    """Forwards to the C++ TaskMutex acquireOr() test in the GafferTest module."""
    GafferTest.testTaskMutexAcquireOr()
def testWorkerRecursion( self ) :
    """Forwards to the C++ TaskMutex worker-recursion test in the GafferTest module."""
    GafferTest.testTaskMutexWorkerRecursion()
def testHeavyContentionWithoutWorkAcceptance( self ) :
    """Forwards to the C++ TaskMutex heavy-contention test, with work acceptance disabled."""
    GafferTest.testTaskMutexHeavyContention( False )
def testThreading(self):
    """Forwards to the C++ ComputeNode threading test in the GafferTest module."""
    GafferTest.testComputeNodeThreading()
def testRecursionParallel( self ) :
    """Forwards to the C++ LRUCache recursion test, using the "parallel" policy.

    maxCost equals numValues, so no evictions occur during this variant."""
    GafferTest.testLRUCacheRecursion( "parallel", numIterations = 100000, numValues = 10000, maxCost = 10000 )
class ArnoldTextureBakeTest( GafferSceneTest.SceneTestCase ) :
    """Tests for the ArnoldTextureBake node : baking many UDIM tiles, splitting
    work into tasks, and merging bake variants at differing resolutions."""

    class SimpleEdgeDetect( GafferImage.ImageProcessor ):
        """Utility processor that highlights pixel-to-pixel discontinuities by
        differencing the input with one-pixel shifted copies of itself."""

        def __init__( self, name = "SimpleEdgeDetect" ) :

            GafferImage.ImageProcessor.__init__( self, name )

            # Horizontal difference : input vs itself shifted one pixel in x.
            self["HorizTransform"] = GafferImage.ImageTransform()
            self["HorizTransform"]["in"].setInput( self["in"] )
            self["HorizTransform"]["transform"]["translate"].setValue( imath.V2f( 1, 0 ) )
            self["HorizDiff"] = GafferImage.Merge()
            self["HorizDiff"]["in"]["in0"].setInput( self["HorizTransform"]["out"] )
            self["HorizDiff"]["in"]["in1"].setInput( self["in"] )
            # NOTE(review): operation 10 appears to be Difference — confirm against GafferImage.Merge.Operation.
            self["HorizDiff"]["operation"].setValue( 10 )

            # Vertical difference : input vs itself shifted one pixel in y.
            self["VertTransform"] = GafferImage.ImageTransform()
            self["VertTransform"]["in"].setInput( self["in"] )
            self["VertTransform"]["transform"]["translate"].setValue( imath.V2f( 0, 1 ) )
            self["VertDiff"] = GafferImage.Merge()
            self["VertDiff"]["in"]["in0"].setInput( self["VertTransform"]["out"] )
            self["VertDiff"]["in"]["in1"].setInput( self["in"] )
            self["VertDiff"]["operation"].setValue( 10 )

            # Combine the two differences.
            # NOTE(review): operation 13 appears to be Max — confirm against GafferImage.Merge.Operation.
            self["Max"] = GafferImage.Merge()
            self["Max"]["in"]["in0"].setInput( self["HorizDiff"]["out"] )
            self["Max"]["in"]["in1"].setInput( self["VertDiff"]["out"] )
            self["Max"]["operation"].setValue( 13 )

            self["out"].setInput( self["Max"]["out"] )
            # "out" is driven by the internal network, so its input is not serialised.
            self["out"].setFlags( Gaffer.Plug.Flags.Serialisable, False )

    def testManyImages( self ):
        """Bakes two groups of lit spheres across 8 UDIM tiles and two AOVs,
        then reassembles the tiles and compares the diffuse against a small
        reference image."""

        allFilter = GafferScene.PathFilter()
        allFilter["paths"].setValue( IECore.StringVectorData( [ '/...' ] ) )

        sphere = GafferScene.Sphere()
        sphere["transform"]["translate"].setValue( imath.V3f( -3, 0, 0 ) )

        standardSurface = GafferArnold.ArnoldShader()
        standardSurface.loadShader( "standard_surface" )

        shaderAssignment = GafferScene.ShaderAssignment()
        shaderAssignment["in"].setInput( sphere["out"] )
        shaderAssignment["filter"].setInput( allFilter["out"] )
        shaderAssignment["shader"].setInput( standardSurface["out"] )

        # Scale UVs by 2 so the sphere spans 4 UDIM tiles.
        uvScaleCode = GafferOSL.OSLCode()
        uvScaleCode["out"].addChild( Gaffer.V3fPlug( "uvScaled", direction = Gaffer.Plug.Direction.Out ) )
        uvScaleCode["code"].setValue( 'uvScaled = vector( u * 2, v * 2, 0 );' )

        outUV = GafferOSL.OSLShader()
        outUV.loadShader( "ObjectProcessing/OutUV" )
        outUV["parameters"]["value"].setInput( uvScaleCode["out"]["uvScaled"] )

        outObject2 = GafferOSL.OSLShader()
        outObject2.loadShader( "ObjectProcessing/OutObject" )
        outObject2["parameters"]["in0"].setInput( outUV["out"]["primitiveVariable"] )

        uvScaleOSL = GafferOSL.OSLObject()
        uvScaleOSL["in"].setInput( shaderAssignment["out"] )
        uvScaleOSL["filter"].setInput( allFilter["out"] )
        uvScaleOSL["shader"].setInput( outObject2["out"]["out"] )
        uvScaleOSL["interpolation"].setValue( 5 )

        # A second copy, offset into the 1033+ UDIM range.
        mapOffset = GafferScene.MapOffset()
        mapOffset["in"].setInput( uvScaleOSL["out"] )
        mapOffset["filter"].setInput( allFilter["out"] )
        mapOffset["udim"].setValue( 1033 )

        offsetGroup = GafferScene.Group()
        offsetGroup["in"]["in0"].setInput( mapOffset["out"] )
        offsetGroup["name"].setValue( 'offset' )
        offsetGroup["transform"]["translate"].setValue( imath.V3f( 6, 0, 3 ) )

        combineGroup = GafferScene.Group()
        combineGroup["in"]["in0"].setInput( uvScaleOSL["out"] )
        combineGroup["in"]["in1"].setInput( offsetGroup["out"] )

        # Three coloured distant lights from three directions.
        lights = []
        for color, rotate in [ ( ( 1, 0, 0 ), ( 0, 0, 0) ), ( ( 0, 1, 0 ), ( 0, 90, 0 ) ), ( ( 0, 0, 1 ), ( -90, 0, 0 ) ) ] :
            light = GafferArnold.ArnoldLight()
            light.loadShader( "distant_light" )
            light["parameters"]["color"].setValue( imath.Color3f( *color ) )
            light["transform"]["rotate"].setValue( imath.V3f( *rotate ) )
            combineGroup["in"][-1].setInput( light["out"] )
            lights.append( light )

        arnoldTextureBake = GafferArnold.ArnoldTextureBake()
        arnoldTextureBake["in"].setInput( combineGroup["out"] )
        arnoldTextureBake["filter"].setInput( allFilter["out"] )
        arnoldTextureBake["bakeDirectory"].setValue( self.temporaryDirectory() + '/bakeSpheres/' )
        arnoldTextureBake["defaultResolution"].setValue( 32 )
        arnoldTextureBake["aovs"].setValue( 'beauty:RGBA diffuse:diffuse' )
        arnoldTextureBake["tasks"].setValue( 3 )
        arnoldTextureBake["cleanupIntermediateFiles"].setValue( True )

        # Dispatch the bake
        script = Gaffer.ScriptNode()
        script.addChild( arnoldTextureBake )
        dispatcher = GafferDispatch.LocalDispatcher()
        dispatcher["jobsDirectory"].setValue( self.temporaryDirectory() )
        dispatcher.dispatch( [ arnoldTextureBake ] )

        # Test that we are writing all expected files, and that we have cleaned up all temp files
        expectedUdims = [ i + j for j in [ 1001, 1033 ] for i in [ 0, 1, 10, 11 ] ]
        self.assertEqual( sorted( os.listdir( self.temporaryDirectory() + '/bakeSpheres/' ) ), [ "beauty", "diffuse" ] )
        self.assertEqual( sorted( os.listdir( self.temporaryDirectory() + '/bakeSpheres/beauty' ) ), [ "beauty.%i.tx"%i for i in expectedUdims ] )
        self.assertEqual( sorted( os.listdir( self.temporaryDirectory() + '/bakeSpheres/diffuse' ) ), [ "diffuse.%i.tx"%i for i in expectedUdims ] )

        # Read back in the 4 udim tiles of a sphere
        reader = GafferImage.ImageReader()
        imageTransform = GafferImage.ImageTransform()
        imageTransform["in"].setInput( reader["out"] )

        # The expression picks the tile from the loop index and the layer from
        # the CollectImages context, and offsets each tile into position.
        exprBox = Gaffer.Box()
        expression = Gaffer.Expression()
        exprBox.addChild( reader )
        exprBox.addChild( imageTransform )
        exprBox.addChild( expression )
        expression.setExpression( inspect.cleandoc(
            """
            i = context.get( "loop:index", 0 )
            layer = context.get( "collect:layerName", "beauty" )
            x = i % 2
            y = i // 2
            parent["ImageReader"]["fileName"] = '""" + self.temporaryDirectory() + """/bakeSpheres/%s/%s.%i.tx' % ( layer, layer, 1001 + x + y * 10 )
            parent["ImageTransform"]["transform"]["translate"] = imath.V2f( 32 * x, 32 * y )
            """
        ), "python" )

        udimLoop = Gaffer.Loop()
        udimLoop.setup( GafferImage.ImagePlug() )
        udimLoop["iterations"].setValue( 4 )

        udimMerge = GafferImage.Merge()
        udimMerge["in"]["in0"].setInput( imageTransform["out"] )
        udimMerge["in"]["in1"].setInput( udimLoop["previous"] )
        udimLoop["next"].setInput( udimMerge["out"] )

        aovCollect = GafferImage.CollectImages()
        aovCollect["in"].setInput( udimLoop["out"] )
        aovCollect["rootLayers"].setValue( IECore.StringVectorData( [ 'beauty', 'diffuse' ] ) )

        # We have a little reference image for how the diffuse should look
        imageReaderRef = GafferImage.ImageReader()
        imageReaderRef["fileName"].setValue( os.path.dirname( __file__ ) + "/images/sphereLightBake.exr" )

        resizeRef = GafferImage.Resize()
        resizeRef["in"].setInput( imageReaderRef["out"] )
        resizeRef["format"].setValue( GafferImage.Format( 64, 64, 1.000 ) )

        # Duplicate the reference RGB into both layers so it lines up with aovCollect's output.
        shuffleRef = GafferImage.Shuffle()
        shuffleRef["in"].setInput( resizeRef["out"] )
        for layer in [ "beauty", "diffuse" ]:
            for channel in [ "R", "G", "B" ]:
                shuffleRef["channels"].addChild( GafferImage.Shuffle.ChannelPlug() )
                shuffleRef["channels"][-1]["in"].setValue( channel )
                shuffleRef["channels"][-1]["out"].setValue( layer + "." + channel )

        differenceMerge = GafferImage.Merge()
        differenceMerge["in"]["in0"].setInput( aovCollect["out"] )
        differenceMerge["in"]["in1"].setInput( shuffleRef["out"] )
        differenceMerge["operation"].setValue( GafferImage.Merge.Operation.Difference )

        stats = GafferImage.ImageStats()
        stats["in"].setInput( differenceMerge["out"] )
        stats["area"].setValue( imath.Box2i( imath.V2i( 0, 0 ), imath.V2i( 64, 64 ) ) )

        # We should get a very close match to our single tile low res reference bake
        stats["channels"].setValue( IECore.StringVectorData( [ 'diffuse.R', 'diffuse.G', 'diffuse.B', 'diffuse.A' ] ) )
        for i in range( 3 ):
            self.assertLess( stats["average"].getValue()[i], 0.002 )
            self.assertLess( stats["max"].getValue()[i], 0.02 )

        # The beauty should be mostly a close match, but with a high max difference due to the spec pings
        stats["channels"].setValue( IECore.StringVectorData( [ 'beauty.R', 'beauty.G', 'beauty.B', 'beauty.A' ] ) )
        for i in range( 3 ):
            self.assertLess( stats["average"].getValue()[i], 0.1 )
            self.assertGreater( stats["max"].getValue()[i], 0.3 )

    def testTasks( self ):
        """Checks that with tasks = 3 the 16 images to write are split into
        three roughly equal BAKE_FILE_INDEX task files."""

        allFilter = GafferScene.PathFilter()
        allFilter["paths"].setValue( IECore.StringVectorData( [ '/...' ] ) )

        sphere = GafferScene.Sphere()
        sphere["transform"]["translate"].setValue( imath.V3f( -3, 0, 0 ) )

        # Scale UVs by 2 so the sphere spans 4 UDIM tiles.
        uvScaleCode = GafferOSL.OSLCode()
        uvScaleCode["out"].addChild( Gaffer.V3fPlug( "uvScaled", direction = Gaffer.Plug.Direction.Out ) )
        uvScaleCode["code"].setValue( 'uvScaled = vector( u * 2, v * 2, 0 );' )

        outUV = GafferOSL.OSLShader()
        outUV.loadShader( "ObjectProcessing/OutUV" )
        outUV["parameters"]["value"].setInput( uvScaleCode["out"]["uvScaled"] )

        outObject2 = GafferOSL.OSLShader()
        outObject2.loadShader( "ObjectProcessing/OutObject" )
        outObject2["parameters"]["in0"].setInput( outUV["out"]["primitiveVariable"] )

        uvScaleOSL = GafferOSL.OSLObject()
        uvScaleOSL["in"].setInput( sphere["out"] )
        uvScaleOSL["filter"].setInput( allFilter["out"] )
        uvScaleOSL["shader"].setInput( outObject2["out"]["out"] )
        uvScaleOSL["interpolation"].setValue( 5 )

        mapOffset = GafferScene.MapOffset()
        mapOffset["in"].setInput( uvScaleOSL["out"] )
        mapOffset["filter"].setInput( allFilter["out"] )
        mapOffset["udim"].setValue( 1033 )

        offsetGroup = GafferScene.Group()
        offsetGroup["in"]["in0"].setInput( mapOffset["out"] )
        offsetGroup["name"].setValue( 'offset' )
        offsetGroup["transform"]["translate"].setValue( imath.V3f( 6, 0, 3 ) )

        combineGroup = GafferScene.Group()
        combineGroup["in"]["in0"].setInput( uvScaleOSL["out"] )
        combineGroup["in"]["in1"].setInput( offsetGroup["out"] )

        arnoldTextureBake = GafferArnold.ArnoldTextureBake()
        arnoldTextureBake["in"].setInput( combineGroup["out"] )
        arnoldTextureBake["filter"].setInput( allFilter["out"] )
        arnoldTextureBake["bakeDirectory"].setValue( self.temporaryDirectory() + '/bakeSpheres/' )
        arnoldTextureBake["defaultResolution"].setValue( 1 )
        arnoldTextureBake["aovs"].setValue( 'beauty:RGBA diffuse:diffuse' )
        arnoldTextureBake["tasks"].setValue( 3 )
        # Keep the intermediate task files so we can inspect them below.
        arnoldTextureBake["cleanupIntermediateFiles"].setValue( False )

        # Dispatch the bake
        script = Gaffer.ScriptNode()
        script.addChild( arnoldTextureBake )
        dispatcher = GafferDispatch.LocalDispatcher()
        dispatcher["jobsDirectory"].setValue( self.temporaryDirectory() )
        dispatcher.dispatch( [ arnoldTextureBake ] )

        self.assertEqual( sorted( os.listdir( self.temporaryDirectory() + '/bakeSpheres/' ) ),
            [ "BAKE_FILE_INDEX_0.0001.txt", "BAKE_FILE_INDEX_1.0001.txt", "BAKE_FILE_INDEX_2.0001.txt", "beauty", "diffuse" ] )

        # Make sure the 16 images that need writing get divided into very approximate thirds
        for i in range( 3 ):
            l = len( open( self.temporaryDirectory() + '/bakeSpheres/BAKE_FILE_INDEX_%i.0001.txt'%i ).readlines() )
            self.assertGreater( l, 2 )
            self.assertLess( l, 8 )

    @unittest.skipIf( GafferTest.inCI() or os.environ.get( "ARNOLD_LICENSE_ORDER" ) == "none", "Arnold license not available" )
    def testMerging( self ):
        """Bakes three variants of a gradient-shaded plane split into 4 meshes —
        complete, incomplete (holes to fill), and mixed-resolution — and checks
        the merged results against expected error bounds."""

        allFilter = GafferScene.PathFilter()
        allFilter["paths"].setValue( IECore.StringVectorData( [ '/...' ] ) )

        plane = GafferScene.Plane()
        plane["divisions"].setValue( imath.V2i( 20, 20 ) )

        # Assign a basic gradient shader
        uvGradientCode = GafferOSL.OSLCode()
        uvGradientCode["out"].addChild( Gaffer.Color3fPlug( "out", direction = Gaffer.Plug.Direction.Out ) )
        uvGradientCode["code"].setValue( 'out = color( u, v, 0.5 );' )

        shaderAssignment = GafferScene.ShaderAssignment()
        shaderAssignment["in"].setInput( plane["out"] )
        shaderAssignment["filter"].setInput( allFilter["out"] )
        shaderAssignment["shader"].setInput( uvGradientCode["out"]["out"] )

        # Set up a random id from 0 - 3 on each face
        randomCode = GafferOSL.OSLCode()
        randomCode["out"].addChild( Gaffer.IntPlug( "randomId", direction = Gaffer.Plug.Direction.Out ) )
        randomCode["code"].setValue( 'randomId = int(cellnoise( P * 100 ) * 4);' )

        outInt = GafferOSL.OSLShader()
        outInt.loadShader( "ObjectProcessing/OutInt" )
        outInt["parameters"]["name"].setValue( 'randomId' )
        outInt["parameters"]["value"].setInput( randomCode["out"]["randomId"] )

        outObject = GafferOSL.OSLShader()
        outObject.loadShader( "ObjectProcessing/OutObject" )
        outObject["parameters"]["in0"].setInput( outInt["out"]["primitiveVariable"] )

        oSLObject = GafferOSL.OSLObject()
        oSLObject["in"].setInput( shaderAssignment["out"] )
        oSLObject["filter"].setInput( allFilter["out"] )
        oSLObject["shader"].setInput( outObject["out"]["out"] )
        oSLObject["interpolation"].setValue( 2 )

        # Create 4 meshes by picking each of the 4 ids
        deleteContextVariables = Gaffer.DeleteContextVariables()
        deleteContextVariables.setup( GafferScene.ScenePlug() )
        deleteContextVariables["variables"].setValue( 'collect:rootName' )
        deleteContextVariables["in"].setInput( oSLObject["out"] )

        # Cull every face whose randomId does not match the CollectScenes root name.
        pickCode = GafferOSL.OSLCode()
        pickCode["parameters"].addChild( Gaffer.IntPlug( "targetId" ) )
        pickCode["out"].addChild( Gaffer.IntPlug( "cull", direction = Gaffer.Plug.Direction.Out ) )
        pickCode["code"].setValue( 'int randomId; getattribute( "randomId", randomId ); cull = randomId != targetId;' )

        expression = Gaffer.Expression()
        pickCode.addChild( expression )
        expression.setExpression( 'parent.parameters.targetId = stoi( context( "collect:rootName", "0" ) );', "OSL" )

        outInt1 = GafferOSL.OSLShader()
        outInt1.loadShader( "ObjectProcessing/OutInt" )
        outInt1["parameters"]["name"].setValue( 'deleteFaces' )
        outInt1["parameters"]["value"].setInput( pickCode["out"]["cull"] )

        outObject1 = GafferOSL.OSLShader()
        outObject1.loadShader( "ObjectProcessing/OutObject" )
        outObject1["parameters"]["in0"].setInput( outInt1["out"]["primitiveVariable"] )

        oSLObject1 = GafferOSL.OSLObject()
        oSLObject1["in"].setInput( deleteContextVariables["out"] )
        oSLObject1["filter"].setInput( allFilter["out"] )
        oSLObject1["shader"].setInput( outObject1["out"]["out"] )
        oSLObject1["interpolation"].setValue( 2 )

        deleteFaces = GafferScene.DeleteFaces()
        deleteFaces["in"].setInput( oSLObject1["out"] )
        deleteFaces["filter"].setInput( allFilter["out"] )

        collectScenes = GafferScene.CollectScenes()
        collectScenes["in"].setInput( deleteFaces["out"] )
        collectScenes["rootNames"].setValue( IECore.StringVectorData( [ '0', '1', '2', '3' ] ) )
        collectScenes["sourceRoot"].setValue( '/plane' )

        # First variant: bake everything, covering the whole 1001 UDIM
        customAttributes1 = GafferScene.CustomAttributes()
        customAttributes1["attributes"].addChild( Gaffer.NameValuePlug( 'bake:fileName', IECore.StringData( '${bakeDirectory}/complete/<AOV>/<AOV>.<UDIM>.exr' ) ) )
        customAttributes1["in"].setInput( collectScenes["out"] )

        # Second variant: bake just 2 of the 4 meshes, leaving lots of holes that will need filling
        pruneFilter = GafferScene.PathFilter()
        pruneFilter["paths"].setValue( IECore.StringVectorData( [ '/2', '/3' ] ) )
        prune = GafferScene.Prune()
        prune["in"].setInput( collectScenes["out"] )
        prune["filter"].setInput( pruneFilter["out"] )
        customAttributes2 = GafferScene.CustomAttributes()
        customAttributes2["attributes"].addChild( Gaffer.NameValuePlug( 'bake:fileName', IECore.StringData( '${bakeDirectory}/incomplete/<AOV>/<AOV>.<UDIM>.exr' ) ) )
        customAttributes2["in"].setInput( prune["out"] )

        # Third variant: bake everything, but with one mesh at a higher resolution
        customAttributes3 = GafferScene.CustomAttributes()
        customAttributes3["attributes"].addChild( Gaffer.NameValuePlug( 'bake:fileName', IECore.StringData( '${bakeDirectory}/mismatch/<AOV>/<AOV>.<UDIM>.exr' ) ) )
        customAttributes3["in"].setInput( collectScenes["out"] )
        pathFilter2 = GafferScene.PathFilter()
        pathFilter2["paths"].setValue( IECore.StringVectorData( [ '/2' ] ) )
        customAttributes = GafferScene.CustomAttributes()
        customAttributes["attributes"].addChild( Gaffer.NameValuePlug( 'bake:resolution', IECore.IntData( 200 ) ) )
        customAttributes["filter"].setInput( pathFilter2["out"] )
        customAttributes["in"].setInput( customAttributes3["out"] )

        # Merge the 3 variants
        mergeGroup = GafferScene.Group()
        mergeGroup["in"][-1].setInput( customAttributes["out"] )
        mergeGroup["in"][-1].setInput( customAttributes1["out"] )
        mergeGroup["in"][-1].setInput( customAttributes2["out"] )

        arnoldTextureBake = GafferArnold.ArnoldTextureBake()
        arnoldTextureBake["in"].setInput( mergeGroup["out"] )
        arnoldTextureBake["filter"].setInput( allFilter["out"] )
        arnoldTextureBake["bakeDirectory"].setValue( self.temporaryDirectory() + '/bakeMerge/' )
        arnoldTextureBake["defaultResolution"].setValue( 128 )
        # We want to check the intermediate results
        arnoldTextureBake["cleanupIntermediateFiles"].setValue( False )

        # Dispatch the bake
        script = Gaffer.ScriptNode()
        script.addChild( arnoldTextureBake )
        dispatcher = GafferDispatch.LocalDispatcher()
        dispatcher["jobsDirectory"].setValue( self.temporaryDirectory() )
        dispatcher.dispatch( [ arnoldTextureBake ] )

        # Check results
        imageReader = GafferImage.ImageReader()

        # Re-synthesize the expected gradient in image space and difference it
        # against what was actually baked.
        outLayer = GafferOSL.OSLShader()
        outLayer.loadShader( "ImageProcessing/OutLayer" )
        outLayer["parameters"]["layerColor"].setInput( uvGradientCode["out"]["out"] )
        outImage = GafferOSL.OSLShader()
        outImage.loadShader( "ImageProcessing/OutImage" )
        outImage["parameters"]["in0"].setInput( outLayer["out"]["layer"] )
        oSLImage = GafferOSL.OSLImage()
        oSLImage["in"].setInput( imageReader["out"] )
        oSLImage["shader"].setInput( outImage["out"]["out"] )

        # NOTE(review): operation 10 appears to be Difference — confirm against GafferImage.Merge.Operation.
        merge3 = GafferImage.Merge()
        merge3["in"]["in0"].setInput( oSLImage["out"] )
        merge3["in"]["in1"].setInput( imageReader["out"] )
        merge3["operation"].setValue( 10 )

        edgeDetect = self.SimpleEdgeDetect()
        edgeDetect["in"].setInput( imageReader["out"] )

        edgeStats = GafferImage.ImageStats()
        edgeStats["in"].setInput( edgeDetect["out"] )

        refDiffStats = GafferImage.ImageStats()
        refDiffStats["in"].setInput( merge3["out"] )

        # Mask the difference to only the pixels actually covered by the bake
        # ( alpha very close to 1 ), via the intermediate .exr.
        oneLayerReader = GafferImage.ImageReader()
        grade = GafferImage.Grade()
        grade["in"].setInput( oneLayerReader["out"] )
        grade["channels"].setValue( '[A]' )
        grade["blackPoint"].setValue( imath.Color4f( 0, 0, 0, 0.999899983 ) )

        copyChannels = GafferImage.CopyChannels()
        copyChannels["in"]["in0"].setInput( merge3["out"] )
        copyChannels["in"]["in1"].setInput( grade["out"] )
        copyChannels["channels"].setValue( '[A]' )

        premultiply = GafferImage.Premultiply()
        premultiply["in"].setInput( copyChannels["out"] )

        refDiffCoveredStats = GafferImage.ImageStats()
        refDiffCoveredStats["in"].setInput( premultiply["out"] )

        # We are testing 3 different cases:
        # complete : Should be an exact match.
        # incomplete : Expect some mild variance of slopes and some error, because we have to
        #              reconstruct a lot of missing data.
        # mismatch : We should get a larger image, sized to the highest override on any mesh.
        #            Match won't be as perfect, because we're combining source images at
        #            different resolutions
        for name, expectedSize, maxEdge, maxRefDiff, maxMaskedDiff in [
                ( "complete", 128, 0.01, 0.000001, 0.000001 ),
                ( "incomplete", 128, 0.05, 0.15, 0.000001 ),
                ( "mismatch", 200, 0.01, 0.01, 0.01 ) ]:
            imageReader["fileName"].setValue( self.temporaryDirectory() + "/bakeMerge/" + name + "/beauty/beauty.1001.tx" )
            oneLayerReader["fileName"].setValue( self.temporaryDirectory() + "/bakeMerge/" + name + "/beauty/beauty.1001.exr" )

            self.assertEqual( imageReader["out"]["format"].getValue().width(), expectedSize )
            self.assertEqual( imageReader["out"]["format"].getValue().height(), expectedSize )

            edgeStats["area"].setValue( imath.Box2i( imath.V2i( 1 ), imath.V2i( expectedSize - 1 ) ) )
            refDiffStats["area"].setValue( imath.Box2i( imath.V2i( 1 ), imath.V2i( expectedSize - 1 ) ) )
            refDiffCoveredStats["area"].setValue( imath.Box2i( imath.V2i( 0 ), imath.V2i( expectedSize ) ) )

            # Blue channel is constant, so everything should line up perfectly
            self.assertEqual( 0, edgeStats["max"].getValue()[2] )
            self.assertEqual( 0, refDiffStats["max"].getValue()[2] )
            self.assertEqual( 0, refDiffCoveredStats["max"].getValue()[2] )

            for i in range(2):
                # Make sure we've got actual data, by checking that we have some error ( we're not expecting
                # to perfectly reconstruct the gradient when the input is incomplete )
                self.assertGreater( edgeStats["max"].getValue()[i], 0.005 )
                if name == "incomplete":
                    self.assertGreater( edgeStats["max"].getValue()[i], 0.03 )
                    self.assertGreater( refDiffStats["max"].getValue()[i], 0.06 )
                self.assertLess( edgeStats["max"].getValue()[i], maxEdge )
                self.assertLess( refDiffStats["max"].getValue()[i], maxRefDiff )
                self.assertLess( refDiffCoveredStats["max"].getValue()[i], maxMaskedDiff )
def testRecursionWithEvictionsSerial( self ) :
    """Forwards to the C++ LRUCache recursion test, using the "serial" policy.

    maxCost is far below numValues, so evictions occur during recursion."""
    GafferTest.testLRUCacheRecursion( "serial", numIterations = 100000, numValues = 1000, maxCost = 100 )
def testManyEnvironmentSubstitutions( self ) :
    """Forwards to the C++ environment-substitution stress test in the GafferTest module."""
    GafferTest.testManyEnvironmentSubstitutions()
def testRecursionWithEvictionsTaskParallel( self ) :
    """Forwards to the C++ LRUCache recursion test, using the "taskParallel" policy.

    maxCost is far below numValues, so evictions occur during recursion."""
    GafferTest.testLRUCacheRecursion( "taskParallel", numIterations = 100000, numValues = 1000, maxCost = 100 )
def testDispatchThroughSubgraphs(self):
	"""Dispatching must recurse into Boxes and References : promoted
	requirement plugs pull in upstream tasks, and nodes requiring
	sequence execution run as one contiguous block of frames.

	Fix : the original used the Python-2-only `file()` builtin, which
	was removed in Python 3 - replaced with `open()`. The repeated
	dispatch-and-verify sequence is also factored into a local helper.
	"""

	dispatcher = Gaffer.Dispatcher.create("testDispatcher")
	dispatcher["framesMode"].setValue(Gaffer.Dispatcher.FramesMode.CustomRange)
	frameList = IECore.FrameList.parse("2-6x2")
	dispatcher["frameRange"].setValue(str(frameList))
	fileName = "/tmp/dispatcherTest/result.txt"

	s = Gaffer.ScriptNode()

	# All writers append ( mode "a" ) to the same file, so the file
	# contents record the exact execution order.
	s["n1"] = GafferTest.TextWriter()
	s["n1"]["mode"].setValue("a")
	s["n1"]["fileName"].setValue(fileName)
	s["n1"]["text"].setValue("n1 on ${frame};")

	s["b"] = Gaffer.Box()
	s["b"]["n2"] = GafferTest.TextWriter()
	s["b"]["n2"]["mode"].setValue("a")
	s["b"]["n2"]["fileName"].setValue(fileName)
	s["b"]["n2"]["text"].setValue("n2 on ${frame};")

	# n3 requires sequence execution, so its frames must appear as one
	# contiguous run rather than interleaved with other nodes.
	s["b"]["n3"] = GafferTest.TextWriter(requiresSequenceExecution=True)
	s["b"]["n3"]["mode"].setValue("a")
	s["b"]["n3"]["fileName"].setValue(fileName)
	s["b"]["n3"]["text"].setValue("n3 on ${frame};")

	s["n4"] = GafferTest.TextWriter()
	s["n4"]["mode"].setValue("a")
	s["n4"]["fileName"].setValue(fileName)
	s["n4"]["text"].setValue("n4 on ${frame};")

	# Wire requirements : n4 -> box -> n3 -> ( n1 via promoted plug, n2 ).
	s["b"].promotePlug(s["b"]["n3"]["requirements"]["requirement0"])
	s["b"]["requirements_requirement0"].setInput(s["n1"]["requirement"])
	s["b"]["n3"]["requirements"][1].setInput(s["b"]["n2"]["requirement"])
	s["b"].promotePlug(s["b"]["n3"]["requirement"])
	s["n4"]["requirements"][0].setInput(s["b"]["requirement"])

	# export a reference too
	s["b"].exportForReference("/tmp/dispatcherTest/test.grf")
	s["r"] = Gaffer.Reference()
	s["r"].load("/tmp/dispatcherTest/test.grf")
	s["r"]["requirements_requirement0"].setInput(s["n1"]["requirement"])

	def assertDispatched(nodes, expectedText):
		# Dispatches `nodes` over the custom frame range and verifies the
		# exact text written to fileName. Expects fileName to be absent
		# on entry, so each call starts from a clean slate.
		self.assertEqual(os.path.isfile(fileName), False)
		dispatcher.dispatch(nodes)
		shutil.rmtree(dispatcher.jobDirectory())
		self.assertEqual(os.path.isfile(fileName), True)
		# `open()` rather than the Python-2-only `file()` builtin.
		with open(fileName, "r") as f:
			self.assertEqual(f.read(), expectedText)

	# dispatch an Executable that requires a Box : all frames of n1 and n2
	# interleaved, followed by the n3 sequence, followed by n4 on all frames
	assertDispatched(
		[s["n4"]],
		"n1 on 2;n2 on 2;n1 on 4;n2 on 4;n1 on 6;n2 on 6;n3 on 2;n3 on 4;n3 on 6;n4 on 2;n4 on 4;n4 on 6;"
	)

	# dispatch the box directly : all frames of n1 and n2 interleaved,
	# followed by the n3 sequence
	os.remove(fileName)
	assertDispatched(
		[s["b"]],
		"n1 on 2;n2 on 2;n1 on 4;n2 on 4;n1 on 6;n2 on 6;n3 on 2;n3 on 4;n3 on 6;"
	)

	# only the promoted requirement dispatches : all frames of n1,
	# followed by the n3 sequence
	s["b"]["n3"]["requirements"][1].setInput(None)
	os.remove(fileName)
	assertDispatched(
		[s["b"]],
		"n1 on 2;n1 on 4;n1 on 6;n3 on 2;n3 on 4;n3 on 6;"
	)

	# promoting a requirement doesn't dispatch unless it's connected
	s["b"]["out2"] = s["b"]["n2"]["requirement"].createCounterpart("out2", Gaffer.Plug.Direction.Out)
	os.remove(fileName)
	assertDispatched(
		[s["b"]],
		"n1 on 2;n1 on 4;n1 on 6;n3 on 2;n3 on 4;n3 on 6;"
	)

	# multiple promoted requirements will dispatch : all frames of n1,
	# followed by the n3 sequence, followed by all frames of n2
	s["b"]["out3"] = s["b"]["n2"]["requirement"].createCounterpart("out3", Gaffer.Plug.Direction.Out)
	s["b"]["out3"].setInput(s["b"]["n2"]["requirement"])
	os.remove(fileName)
	assertDispatched(
		[s["b"]],
		"n1 on 2;n1 on 4;n1 on 6;n3 on 2;n3 on 4;n3 on 6;n2 on 2;n2 on 4;n2 on 6;"
	)

	# dispatch an Executable that requires a Reference : all frames of
	# n1, n2, n3 and n4 interleaved. Note that n3 is now interleaved
	# because TextWriter isn't serializing the requiresSequenceExecution
	# value, so s['r']['n3'] is now parallel.
	os.remove(fileName)
	s["n4"]["requirements"][0].setInput(s["r"]["requirement"])
	assertDispatched(
		[s["n4"]],
		"n1 on 2;n2 on 2;n3 on 2;n4 on 2;n1 on 4;n2 on 4;n3 on 4;n4 on 4;n1 on 6;n2 on 6;n3 on 6;n4 on 6;"
	)

	# dispatch the Reference directly : all frames of n1, n2 and n3
	# interleaved ( n3 again parallel for the reason above ).
	os.remove(fileName)
	assertDispatched(
		[s["r"]],
		"n1 on 2;n2 on 2;n3 on 2;n1 on 4;n2 on 4;n3 on 4;n1 on 6;n2 on 6;n3 on 6;"
	)
def testRecursionOnOneItemSerial( self ) :

	# C++-side check that recursive gets on a single cache item terminate.
	policy = "serial"
	GafferTest.testLRUCacheRecursionOnOneItem( policy )
def testEncapsulateDeformationBlur( self ) :
	"""Capsules must be sampled at the frame rather than at shutter open :
	renders of an Encapsulate with and without deformation blur should
	match ( within noise )."""

	s = Gaffer.ScriptNode()

	# Make a sphere where the red channel has the value of the current frame.
	s["sphere"] = GafferScene.Sphere()

	s["sphereFilter"] = GafferScene.PathFilter()
	s["sphereFilter"]["paths"].setValue( IECore.StringVectorData( [ "/sphere" ] ) )

	s["frame"] = GafferTest.FrameNode()

	s["flat"] = GafferArnold.ArnoldShader()
	s["flat"].loadShader( "flat" )
	s["flat"]["parameters"]["color"].setValue( imath.Color3f( 0 ) )
	# Red channel driven directly by the frame number.
	s["flat"]["parameters"]["color"]["r"].setInput( s["frame"]["output"] )

	s["assignment"] = GafferScene.ShaderAssignment()
	s["assignment"]["in"].setInput( s["sphere"]["out"] )
	s["assignment"]["shader"].setInput( s["flat"]["out"] )
	s["assignment"]["filter"].setInput( s["sphereFilter"]["out"] )

	# Put the sphere in a capsule.
	s["group"] = GafferScene.Group()
	s["group"]["in"][0].setInput( s["assignment"]["out"] )

	s["groupFilter"] = GafferScene.PathFilter()
	s["groupFilter"]["paths"].setValue( IECore.StringVectorData( [ "/group" ] ) )

	s["encapsulate"] = GafferScene.Encapsulate()
	s["encapsulate"]["in"].setInput( s["group"]["out"] )
	s["encapsulate"]["filter"].setInput( s["groupFilter"]["out"] )

	# Do a render at frame 1, with deformation blur off.
	s["outputs"] = GafferScene.Outputs()
	s["outputs"].addOutput(
		"beauty",
		IECoreScene.Output(
			os.path.join( self.temporaryDirectory(), "deformationBlurOff.exr" ),
			"exr",
			"rgba",
			{
			}
		)
	)
	s["outputs"]["in"].setInput( s["encapsulate"]["out"] )

	s["options"] = GafferScene.StandardOptions()
	s["options"]["in"].setInput( s["outputs"]["out"] )

	s["arnoldOptions"] = GafferArnold.ArnoldOptions()
	s["arnoldOptions"]["in"].setInput( s["options"]["out"] )
	s["arnoldOptions"]["options"]["aaSamples"]["enabled"].setValue( True )
	s["arnoldOptions"]["options"]["aaSamples"]["value"].setValue( 6 )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["arnoldOptions"]["out"] )
	s["render"]["task"].execute()

	# Do another render at frame 1, but with deformation blur on.
	s["options"]["options"]["deformationBlur"]["enabled"].setValue( True )
	s["options"]["options"]["deformationBlur"]["value"].setValue( True )
	s["options"]["options"]["shutter"]["enabled"].setValue( True )
	s["options"]["options"]["shutter"]["value"].setValue( imath.V2f( -0.5, 0.5 ) )
	s["outputs"]["outputs"][0]["fileName"].setValue( os.path.join( self.temporaryDirectory(), "deformationBlurOn.exr" ) )
	s["render"]["task"].execute()

	# Check that the renders are the same.
	s["deformationOff"] = GafferImage.ImageReader()
	s["deformationOff"]["fileName"].setValue( os.path.join( self.temporaryDirectory(), "deformationBlurOff.exr" ) )

	s["deformationOn"] = GafferImage.ImageReader()
	s["deformationOn"]["fileName"].setValue( os.path.join( self.temporaryDirectory(), "deformationBlurOn.exr" ) )

	# The `maxDifference` is huge to account for noise and watermarks, but is still low enough to check what
	# we want, since if the Encapsulate was sampled at shutter open and not the frame, the difference would be
	# 0.5.
	self.assertImagesEqual( s["deformationOff"]["out"], s["deformationOn"]["out"], maxDifference = 0.25, ignoreMetadata = True )
def testRecursionOnOneItemTaskParallel( self ) :

	# Task-parallel counterpart of testRecursionOnOneItemSerial.
	policy = "taskParallel"
	GafferTest.testLRUCacheRecursionOnOneItem( policy )
def testClearFromGetSerial( self ) :

	# Clearing the cache from within a getter, serial policy.
	policy = "serial"
	GafferTest.testLRUCacheClearFromGet( policy )
def testHeavyContentionWithWorkAcceptance( self ) :

	# Heavy TaskMutex contention, with waiting threads accepting work.
	acceptWork = True
	GafferTest.testTaskMutexHeavyContention( acceptWork )
def testClearFromGetTaskParallel( self ) :

	# Clearing the cache from within a getter, task-parallel policy.
	policy = "taskParallel"
	GafferTest.testLRUCacheClearFromGet( policy )
def testPlugNotSet(self):

	# BadNode's compute never sets out3; asking for its value must
	# surface as an error rather than returning stale or default data.
	node = GafferTest.BadNode()
	with self.assertRaises(RuntimeError):
		node["out3"].getValue()
def testExceptionsSerial( self ) :

	# Exception propagation from cache getters, serial policy.
	policy = "serial"
	GafferTest.testLRUCacheExceptions( policy )
def testHashForPythonDerivedClasses(self):

	# assertHashesValid() ( from the test base class ) checks the node's
	# output hashes behave sensibly.
	node = GafferTest.AddNode()
	self.assertHashesValid(node)
def testExceptionsParallel( self ) :

	# Exception propagation from cache getters, parallel policy.
	policy = "parallel"
	GafferTest.testLRUCacheExceptions( policy )
def testThreading( self ) :

	# Concurrent Metadata access is exercised on the C++ side.
	threadedTest = GafferTest.testMetadataThreading
	threadedTest()
def testExceptionsTaskParallel( self ) :

	# Exception propagation from cache getters, task-parallel policy.
	policy = "taskParallel"
	GafferTest.testLRUCacheExceptions( policy )
import Gaffer import GafferArnold import GafferImage import GafferImageUI import GafferScene import GafferSceneTest import GafferTest import GafferUI import GafferUITest import GafferImageTest from Qt import QtCore @unittest.skipIf(GafferTest.inCI(), "Performance not relevant on CI platform") class InteractiveArnoldRenderPerformanceTest(GafferUITest.TestCase): # Arnold outputs licensing warnings that would cause failures failureMessageLevel = IECore.MessageHandler.Level.Error def runInteractive(self, useUI, useBlur, resolution): script = Gaffer.ScriptNode() script["Camera"] = GafferScene.Camera() script["Camera"]["transform"]["translate"]["z"].setValue(6) script["Sphere"] = GafferScene.Sphere("Sphere") script["Sphere"]["radius"].setValue(10)
def test90PercentOfWorkingSetParallel( self ) :

	# Cache sized to hold 90% of the working set, parallel policy.
	options = { "numIterations" : 100000, "numValues" : 100, "maxCost" : 90 }
	GafferTest.testLRUCache( "parallel", **options )
def testManySubstitutions( self ) :

	# Implemented in C++ for speed; raises if any substitution misbehaves.
	stressTest = GafferTest.testManySubstitutions
	stressTest()
def test2PercentOfWorkingSetSerial( self ) :

	# Tiny cache ( 2% of working set ) forces constant eviction, serial policy.
	options = { "numIterations" : 100000, "numValues" : 100, "maxCost" : 2 }
	GafferTest.testLRUCache( "serial", **options )
def testEditableScope( self ) :

	# Exercised on the C++ side; raises on failure.
	cppTest = GafferTest.testEditableScope
	cppTest()
def test2PercentOfWorkingSetTaskParallel( self ) :

	# Tiny cache ( 2% of working set ), task-parallel policy. Fewer
	# iterations than the serial variant.
	options = { "numIterations" : 10000, "numValues" : 100, "maxCost" : 2 }
	GafferTest.testLRUCache( "taskParallel", **options )
def testRemovalCallbackSerial( self ) :

	# Removal callbacks must fire for evicted items, serial policy.
	policy = "serial"
	GafferTest.testLRUCacheRemovalCallback( policy )
def testUndoMerging(self):
	"""Successive edits made with the same UndoScope `mergeGroup` should
	merge into a single entry in the undo queue, while a different group
	starts a fresh entry."""

	s = Gaffer.ScriptNode()
	s["n"] = Gaffer.Node()
	s["n"]["p"] = Gaffer.IntPlug()
	self.assertEqual(s["n"]["p"].getValue(), 0)
	self.assertFalse(s.undoAvailable())

	# Capture plugSetSignal so we can also count value changes.
	cs = GafferTest.CapturingSlot(s["n"].plugSetSignal())

	with Gaffer.UndoScope(s, mergeGroup="test"):
		s["n"]["p"].setValue(1)
	self.assertEqual(len(cs), 1)
	self.assertEqual(s["n"]["p"].getValue(), 1)
	self.assertTrue(s.undoAvailable())

	# Same merge group - this should merge with the previous edit.
	with Gaffer.UndoScope(s, mergeGroup="test"):
		s["n"]["p"].setValue(2)
	self.assertEqual(len(cs), 2)
	self.assertEqual(s["n"]["p"].getValue(), 2)
	self.assertTrue(s.undoAvailable())

	# Different merge group - starts a new undo entry.
	with Gaffer.UndoScope(s, mergeGroup="test2"):
		s["n"]["p"].setValue(3)
	self.assertEqual(len(cs), 3)
	self.assertEqual(s["n"]["p"].getValue(), 3)
	self.assertTrue(s.undoAvailable())

	# Undoing the "test2" entry returns us to 2.
	s.undo()
	self.assertEqual(len(cs), 4)
	self.assertEqual(s["n"]["p"].getValue(), 2)
	self.assertTrue(s.undoAvailable())

	# Undoing the merged "test" entry jumps straight back to 0,
	# skipping the intermediate value of 1.
	s.undo()
	self.assertEqual(len(cs), 5)
	self.assertEqual(s["n"]["p"].getValue(), 0)
	self.assertFalse(s.undoAvailable())

	s.redo()
	self.assertEqual(len(cs), 6)
	self.assertEqual(s["n"]["p"].getValue(), 2)
	self.assertTrue(s.undoAvailable())

	s.undo()
	self.assertEqual(len(cs), 7)
	self.assertEqual(s["n"]["p"].getValue(), 0)
	self.assertFalse(s.undoAvailable())

	# Redoing twice reapplies both entries, landing on 3.
	s.redo()
	s.redo()
	self.assertEqual(len(cs), 9)
	self.assertEqual(s["n"]["p"].getValue(), 3)
	self.assertTrue(s.undoAvailable())

	# And undoing twice empties the queue again.
	s.undo()
	s.undo()
	self.assertEqual(len(cs), 11)
	self.assertEqual(s["n"]["p"].getValue(), 0)
	self.assertFalse(s.undoAvailable())
def testRemovalCallbackTaskParallel( self ) :

	# Removal callbacks must fire for evicted items, task-parallel policy.
	policy = "taskParallel"
	GafferTest.testLRUCacheRemovalCallback( policy )
def testUnsavedChanges(self):
	"""The "unsavedChanges" plug should become True on any undoable edit
	( including undo/redo themselves ) and reset to False on save/load."""

	s = Gaffer.ScriptNode()
	self.assertEqual(s["unsavedChanges"].getValue(), False)

	# the unsaved changes flag only reacts to undoable changes
	# so this shouldn't set the flag
	s["nonUndoableNode"] = GafferTest.AddNode()
	self.assertEqual(s["unsavedChanges"].getValue(), False)

	# but this should.
	with Gaffer.UndoScope(s):
		s["node"] = GafferTest.AddNode()
	self.assertEqual(s["unsavedChanges"].getValue(), True)

	# saving resets the flag.
	s["fileName"].setValue(self.temporaryDirectory() + "/test.gfr")
	s.save()
	self.assertEqual(s["unsavedChanges"].getValue(), False)

	# an undoable value change sets it again.
	with Gaffer.UndoScope(s):
		s["node"]["op1"].setValue(10)
	self.assertEqual(s["unsavedChanges"].getValue(), True)

	s.save()
	self.assertEqual(s["unsavedChanges"].getValue(), False)

	with Gaffer.UndoScope(s):
		s["node"]["op1"].setValue(20)
	self.assertEqual(s["unsavedChanges"].getValue(), True)

	s.save()
	self.assertEqual(s["unsavedChanges"].getValue(), False)

	# undo and redo also count as changes relative to the saved state.
	s.undo()
	self.assertEqual(s["unsavedChanges"].getValue(), True)

	s.save()
	self.assertEqual(s["unsavedChanges"].getValue(), False)

	s.redo()
	self.assertEqual(s["unsavedChanges"].getValue(), True)

	s.save()
	self.assertEqual(s["unsavedChanges"].getValue(), False)

	# adding a node undoably sets the flag.
	with Gaffer.UndoScope(s):
		s["node2"] = GafferTest.AddNode()
	self.assertEqual(s["unsavedChanges"].getValue(), True)

	s.save()
	self.assertEqual(s["unsavedChanges"].getValue(), False)

	# so does making a connection undoably.
	with Gaffer.UndoScope(s):
		s["node2"]["op1"].setInput(s["node"]["sum"])
	self.assertEqual(s["unsavedChanges"].getValue(), True)

	s.save()
	self.assertEqual(s["unsavedChanges"].getValue(), False)

	# loading a script leaves it clean.
	s.load()
	self.assertEqual(s["unsavedChanges"].getValue(), False)
def testClearAndGetSerial( self ) :

	# Interleave periodic clear() calls with gets, serial policy.
	options = { "numIterations" : 100000, "numValues" : 1000, "maxCost" : 90, "clearFrequency" : 20 }
	GafferTest.testLRUCache( "serial", **options )
def testAffects( self ) :

	# Changing the "paths" plug must dirty the filter's output.
	pathFilter = GafferScene.PathFilter()
	dirtied = GafferTest.CapturingSlot( pathFilter.plugDirtiedSignal() )

	pathFilter["paths"].setValue( IECore.StringVectorData( [ "/a" ] ) )

	dirtiedPlugs = [ c[0] for c in dirtied ]
	self.assertIn( pathFilter["out"], dirtiedPlugs )
def testClearAndGetTaskParallel( self ) :

	# Interleave periodic clear() calls with gets, task-parallel policy.
	options = { "numIterations" : 10000, "numValues" : 1000, "maxCost" : 90, "clearFrequency" : 20 }
	GafferTest.testLRUCache( "taskParallel", **options )
def testDependencyNode(self): s = Gaffer.ScriptNode() # Make a reference, and check it's a DependencyNode s["r"] = Gaffer.Reference() self.assertTrue(isinstance(s["r"], Gaffer.DependencyNode)) self.assertTrue(s["r"].isInstanceOf( Gaffer.DependencyNode.staticTypeId())) self.assertTrue(isinstance(s["r"], Gaffer.SubGraph)) self.assertTrue(s["r"].isInstanceOf(Gaffer.SubGraph.staticTypeId())) # create a box with a promoted output: s["b"] = Gaffer.Box() s["b"]["n"] = GafferTest.AddNode() s["b"].promotePlug(s["b"]["n"]["sum"]) s["b"].exportForReference(self.temporaryDirectory() + "/test.grf") # load onto reference: s["r"].load(self.temporaryDirectory() + "/test.grf") self.assertEqual(s["r"].correspondingInput(s["r"]["sum"]), None) self.assertEqual(s["r"].enabledPlug(), None) # Wire it up to support enabledPlug() and correspondingInput() s["b"].promotePlug(s["b"]["n"]["op1"]) s["b"]["n"]["op2"].setValue(10) s["b"].exportForReference(self.temporaryDirectory() + "/test.grf") # reload reference and test: s["r"].load(self.temporaryDirectory() + "/test.grf") self.assertEqual(s["r"].correspondingInput(s["r"]["sum"]), None) self.assertEqual(s["r"].enabledPlug(), None) # add an enabled plug: s["b"]["enabled"] = Gaffer.BoolPlug(flags=Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic) s["b"].exportForReference(self.temporaryDirectory() + "/test.grf") # reload reference and test that's now visible via enabledPlug(): s["r"].load(self.temporaryDirectory() + "/test.grf") self.assertEqual(s["r"].correspondingInput(s["r"]["sum"]), None) self.assertTrue(s["r"].enabledPlug().isSame(s["r"]["enabled"])) # hook up the enabled plug inside the box: s["b"]["n"]["enabled"].setInput(s["b"]["enabled"]) s["b"].exportForReference(self.temporaryDirectory() + "/test.grf") # reload reference and test that's now visible via enabledPlug(): s["r"].load(self.temporaryDirectory() + "/test.grf") self.assertTrue(s["r"].enabledPlug().isSame(s["r"]["enabled"])) 
self.assertTrue(s["r"].correspondingInput(s["r"]["sum"]).isSame( s["r"]["op1"])) # Connect it into a network, delete it, and check that we get nice auto-reconnect behaviour s["a"] = GafferTest.AddNode() s["r"]["op1"].setInput(s["a"]["sum"]) s["c"] = GafferTest.AddNode() s["c"]["op1"].setInput(s["r"]["sum"]) s.deleteNodes(filter=Gaffer.StandardSet([s["r"]])) self.assertTrue(s["c"]["op1"].getInput().isSame(s["a"]["sum"]))
def testSubstitutionsFromExpressionInput(self):
	"""Substitutions are applied by the plug used to access a value, not
	by the upstream source : an expression outputs an unsubstituted
	string, and each receiving plug decides whether to substitute."""

	s = Gaffer.ScriptNode()

	# Should output a substituted version of the input.
	s["substitionsOn"] = GafferTest.StringInOutNode()

	# Should pass through the input directly, without substitutions.
	s["substitionsOff"] = GafferTest.StringInOutNode(
		substitutions=Gaffer.Context.Substitutions.NoSubstitutions)

	# The third case is trickier. The "in" plug on the node
	# itself requests no substitutions, but it receives its
	# input via an indirect connection with substitutions
	# turned on. We resolve this by defining substitutions
	# to occur only when observing a value inside a compute,
	# and to always be determined by the plug used to access
	# the value. A chain of connections can be thought of as
	# carrying an unsubstituted string all the way along
	# internally, with each plug along the way determining
	# the substitutions applied when peeking in to see the value
	# at that point.
	#
	# In practice this works best because typically it is only
	# nodes that know when a substitution is relevant, and the
	# user shouldn't be burdened with the job of thinking about
	# them when making intermediate connections to that node.
	s["substitionsOnIndirectly"] = GafferTest.StringInOutNode(
		substitutions=Gaffer.Context.Substitutions.NoSubstitutions)
	s["substitionsOnIndirectly"]["user"]["in"] = Gaffer.StringPlug()
	s["substitionsOnIndirectly"]["in"].setInput(
		s["substitionsOnIndirectly"]["user"]["in"])

	# All three nodes above receive their input from this expression
	# which outputs a sequence value to be substituted (or not).
	s["e"] = Gaffer.Expression()
	s["e"].setExpression(
		inspect.cleandoc("""
		parent["substitionsOn"]["in"] = "test.#.exr"
		parent["substitionsOff"]["in"] = "test.#.exr"
		parent["substitionsOnIndirectly"]["user"]["in"] = "test.#.exr"
		"""))

	with Gaffer.Context() as c:

		# Frame 1
		#########

		c.setFrame(1)

		# The output of the expression itself is not substituted.
		# Substitutions occur only on input plugs.
		self.assertEqual(s["substitionsOn"]["in"].getInput().getValue(),
			"test.#.exr")
		self.assertEqual(s["substitionsOff"]["in"].getInput().getValue(),
			"test.#.exr")
		self.assertEqual(
			s["substitionsOnIndirectly"]["user"]
			["in"].getInput().getValue(), "test.#.exr")

		# We should get frame numbers out of the substituting node.
		self.assertEqual(s["substitionsOn"]["out"].getValue(), "test.1.exr")
		substitutionsOnHash1 = s["substitionsOn"]["out"].hash()
		self.assertEqual(
			s["substitionsOn"]["out"].getValue(
				_precomputedHash=substitutionsOnHash1), "test.1.exr")

		# We should get sequences out of the non-substituting node.
		self.assertEqual(s["substitionsOff"]["out"].getValue(), "test.#.exr")
		substitutionsOffHash1 = s["substitionsOff"]["out"].hash()
		self.assertEqual(
			s["substitionsOff"]["out"].getValue(
				_precomputedHash=substitutionsOffHash1), "test.#.exr")
		self.assertNotEqual(substitutionsOnHash1, substitutionsOffHash1)

		# We shouldn't get frame numbers out of the third node, because the
		# requirements of the node (no substitutions) trump any upstream opinions.
		# Substitutions are performed by the plug during value access, and do not
		# affect the actual data flow.
		self.assertEqual(s["substitionsOnIndirectly"]["out"].getValue(),
			"test.#.exr")
		substitionsOnIndirectlyHash1 = s["substitionsOnIndirectly"][
			"out"].hash()
		self.assertEqual(
			s["substitionsOnIndirectly"]["out"].getValue(
				_precomputedHash=substitionsOnIndirectlyHash1), "test.#.exr")

		# Frame 2
		#########

		c.setFrame(2)

		# The output of the expression itself is not substituted.
		# Substitutions occur only on input plugs.
		self.assertEqual(s["substitionsOn"]["in"].getInput().getValue(),
			"test.#.exr")
		self.assertEqual(s["substitionsOff"]["in"].getInput().getValue(),
			"test.#.exr")
		self.assertEqual(
			s["substitionsOnIndirectly"]["user"]
			["in"].getInput().getValue(), "test.#.exr")

		# We should get frame numbers out of the substituting node.
		# The hash must has changed to make this possible.
		self.assertEqual(s["substitionsOn"]["out"].getValue(), "test.2.exr")
		substitutionsOnHash2 = s["substitionsOn"]["out"].hash()
		self.assertEqual(
			s["substitionsOn"]["out"].getValue(
				_precomputedHash=substitutionsOnHash2), "test.2.exr")
		self.assertNotEqual(substitutionsOnHash2, substitutionsOnHash1)

		# We should still get sequences out of the non-substituting node,
		# and it should have the same output hash as it had on frame 1.
		self.assertEqual(s["substitionsOff"]["out"].getValue(), "test.#.exr")
		substitutionsOffHash2 = s["substitionsOff"]["out"].hash()
		self.assertEqual(
			s["substitionsOff"]["out"].getValue(
				_precomputedHash=substitutionsOffHash2), "test.#.exr")
		self.assertEqual(substitutionsOffHash1, substitutionsOffHash2)
		self.assertNotEqual(substitutionsOnHash2, substitutionsOffHash2)

		# The third node should still be non-substituting.
		self.assertEqual(s["substitionsOnIndirectly"]["out"].getValue(),
			"test.#.exr")
		substitionsOnIndirectlyHash2 = s["substitionsOnIndirectly"][
			"out"].hash()
		self.assertEqual(
			s["substitionsOnIndirectly"]["out"].getValue(
				_precomputedHash=substitionsOnIndirectlyHash2), "test.#.exr")
		self.assertEqual(substitionsOnIndirectlyHash2,
			substitionsOnIndirectlyHash1)
def testMixedImmediateAndBackground( self ) :
	"""Tasks marked "immediate" ( and their whole upstream trees ) must run
	in the foreground during dispatch, while the rest run in a background
	job afterwards.

	Fixes : replaced the Python-2-only `file()` builtin with `open()`, and
	removed a redundant duplicate `s = Gaffer.ScriptNode()` assignment.
	"""

	preCs = GafferTest.CapturingSlot( GafferDispatch.LocalDispatcher.preDispatchSignal() )
	self.assertEqual( len( preCs ), 0 )
	dispatchCs = GafferTest.CapturingSlot( GafferDispatch.LocalDispatcher.dispatchSignal() )
	self.assertEqual( len( dispatchCs ), 0 )
	postCs = GafferTest.CapturingSlot( GafferDispatch.LocalDispatcher.postDispatchSignal() )
	self.assertEqual( len( postCs ), 0 )

	fileName = self.temporaryDirectory() + "/result.txt"

	def createWriter( text ) :
		# Appending writer, so the file contents record execution order.
		node = GafferDispatchTest.TextWriter()
		node["mode"].setValue( "a" )
		node["fileName"].setValue( fileName )
		node["text"].setValue( text + " on ${frame};" )
		return node

	# Create a tree of dependencies for execution:
	# n1 requires:
	# - n2 requires:
	#    -n2a
	#    -n2b
	# - n3
	s = Gaffer.ScriptNode()
	s["n1"] = createWriter( "n1" )
	s["n2"] = createWriter( "n2" )
	# force the entire n2 tree to execute in the foreground
	s["n2"]["dispatcher"]["immediate"].setValue( True )
	s["n2a"] = createWriter( "n2a" )
	s["n2b"] = createWriter( "n2b" )
	s["n3"] = createWriter( "n3" )
	s["n1"]["preTasks"][0].setInput( s["n2"]["task"] )
	s["n1"]["preTasks"][1].setInput( s["n3"]["task"] )
	s["n2"]["preTasks"][0].setInput( s["n2a"]["task"] )
	s["n2"]["preTasks"][1].setInput( s["n2b"]["task"] )

	dispatcher = self.__createLocalDispatcher()
	dispatcher["executeInBackground"].setValue( True )
	dispatcher["framesMode"].setValue( GafferDispatch.Dispatcher.FramesMode.CustomRange )
	frameList = IECore.FrameList.parse( "2-6x2" )
	dispatcher["frameRange"].setValue( str(frameList) )

	dispatcher.dispatch( [ s["n1"] ] )

	# the dispatching started and finished
	self.assertEqual( len( preCs ), 1 )
	self.assertEqual( len( dispatchCs ), 1 )
	self.assertEqual( len( postCs ), 1 )

	# all the foreground execution has finished
	self.assertEqual( os.path.isfile( fileName ), True )
	# `open()` rather than the Python-2-only `file()` builtin.
	with open( fileName, "r" ) as f :
		text = f.read()
	expectedText = ""
	for frame in frameList.asList() :
		context = Gaffer.Context( s.context() )
		context.setFrame( frame )
		expectedText += context.substitute( "n2a on ${frame};n2b on ${frame};n2 on ${frame};" )
	self.assertEqual( text, expectedText )

	# wait long enough for background execution to finish
	self.assertEqual( len(dispatcher.jobPool().jobs()), 1 )
	dispatcher.jobPool().waitForAll()
	self.assertEqual( len(dispatcher.jobPool().jobs()), 0 )

	self.assertEqual( os.path.isfile( fileName ), True )
	with open( fileName, "r" ) as f :
		text = f.read()
	# don't reset the expectedText since we're still appending
	for frame in frameList.asList() :
		context = Gaffer.Context( s.context() )
		context.setFrame( frame )
		expectedText += context.substitute( "n3 on ${frame};n1 on ${frame};" )
	self.assertEqual( text, expectedText )
import unittest import imath import inspect import IECore import IECoreScene import IECoreGL import Gaffer import GafferTest import GafferImage import GafferScene import GafferSceneTest @unittest.skipIf(GafferTest.inCI(), "OpenGL not set up") class OpenGLShaderTest(GafferSceneTest.SceneTestCase): def test(self): s = GafferScene.OpenGLShader() s.loadShader("Texture") self.assertEqual(len(s["parameters"]), 3) self.assertTrue(isinstance(s["parameters"]["mult"], Gaffer.FloatPlug)) self.assertTrue(isinstance(s["parameters"]["tint"], Gaffer.Color4fPlug)) self.assertTrue( isinstance(s["parameters"]["texture"], GafferImage.ImagePlug)) s["parameters"]["mult"].setValue(0.5) s["parameters"]["tint"].setValue(imath.Color4f(1, 0.5, 0.25, 1))
def testJoiningOuterTasks( self ) :

	# TaskMutex interaction with tasks spawned outside the mutex,
	# exercised on the C++ side.
	joinTest = GafferTest.testTaskMutexJoiningOuterTasks
	joinTest()
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import os import unittest import Gaffer import GafferTest import GafferScene import GafferSceneTest import GafferOSL import GafferDelight @unittest.skipIf(GafferTest.inCI(), "No license available in cloud") class InteractiveDelightRenderTest(GafferSceneTest.InteractiveRenderTest): # Temporarily disable this test (which is implemented in the # base class) because it fails. The issue is that we're automatically # instancing the geometry for the two lights, and that appears to # trigger a bug in 3delight where the sampling goes awry. @unittest.skip("Awaiting feedback from 3delight developers") def testAddLight(self): pass # Disable this test for now as we don't have light linking support in # 3Delight, yet. @unittest.skip("No light linking support just yet") def testLightLinking(self):
def testThreading( self ) :

	# Concurrent ComputeNode evaluation is exercised on the C++ side.
	threadedCompute = GafferTest.testComputeNodeThreading
	threadedCompute()
def testCreate( self ) :
	"""Box.create() should move the selected nodes into a new Box,
	rerouting connections that cross the Box boundary through promoted
	plugs, and the whole operation must be undoable and redoable."""

	s = Gaffer.ScriptNode()

	# A chain of four nodes. n2 and n3 will be boxed, so the n1->n2 and
	# n3->n4 connections will end up crossing the Box boundary.
	s["n1"] = GafferTest.AddNode()
	s["n2"] = GafferTest.AddNode()
	s["n3"] = GafferTest.AddNode()
	s["n4"] = GafferTest.AddNode()

	s["n2"]["op1"].setInput( s["n1"]["sum"] )
	s["n2"]["op2"].setInput( s["n1"]["sum"] )
	s["n3"]["op1"].setInput( s["n2"]["sum"] )
	s["n4"]["op1"].setInput( s["n3"]["sum"] )
	s["n4"]["op2"].setInput( s["n3"]["sum"] )

	def assertPreConditions() :
		# The original flat network, before boxing ( and after undo ).
		self.assertTrue( "Box" not in s )
		self.assertTrue( s["n2"]["op1"].getInput().isSame( s["n1"]["sum"] ) )
		self.assertTrue( s["n2"]["op2"].getInput().isSame( s["n1"]["sum"] ) )
		self.assertTrue( s["n3"]["op1"].getInput().isSame( s["n2"]["sum"] ) )
		self.assertTrue( s["n4"]["op1"].getInput().isSame( s["n3"]["sum"] ) )
		self.assertTrue( s["n4"]["op2"].getInput().isSame( s["n3"]["sum"] ) )

	assertPreConditions()

	with Gaffer.UndoContext( s ) :
		b = Gaffer.Box.create( s, Gaffer.StandardSet( [ s["n2"], s["n3"] ] ) )

	def assertPostConditions() :
		# n2 and n3 now live inside the Box, with the boundary-crossing
		# connections routed via promoted plugs on the Box itself.
		self.assertTrue( isinstance( b, Gaffer.Box ) )
		self.assertTrue( b.parent().isSame( s ) )

		self.assertTrue( "n2" not in s )
		self.assertTrue( "n3" not in s )

		self.assertTrue( "n2" in b )
		self.assertTrue( "n3" in b )

		self.assertTrue( b["n3"]["op1"].getInput().isSame( b["n2"]["sum"] ) )

		# Inputs from n1 arrive via plugs on the Box; both ops share the
		# same promoted plug since they had the same source.
		self.assertTrue( b["n2"]["op1"].getInput().node().isSame( b ) )
		self.assertTrue( b["n2"]["op2"].getInput().node().isSame( b ) )

		self.assertTrue( b["n2"]["op1"].getInput().getInput().isSame( s["n1"]["sum"] ) )
		self.assertTrue( b["n2"]["op2"].getInput().getInput().isSame( s["n1"]["sum"] ) )
		self.assertTrue( b["n2"]["op1"].getInput().isSame( b["n2"]["op2"].getInput() ) )

		# Likewise n4's inputs now come from a single promoted output.
		self.assertTrue( s["n4"]["op1"].getInput().node().isSame( b ) )
		self.assertTrue( s["n4"]["op2"].getInput().node().isSame( b ) )

		self.assertTrue( s["n4"]["op1"].getInput().isSame( s["n4"]["op2"].getInput() ) )

	assertPostConditions()

	# Boxing must round-trip through undo and redo.
	s.undo()
	assertPreConditions()

	s.redo()
	assertPostConditions()
def testWrongPlugSet(self):

	# BadNode's compute misbehaves when asked for out1; this must
	# surface as an error to the caller.
	node = GafferTest.BadNode()
	with self.assertRaises(RuntimeError):
		node["out1"].getValue()