def testOnePixelBlur( self ) :
    """Blurring a single white pixel must produce a symmetrical falloff
    centred on the original pixel.

    Fix : the vertical-symmetry assertion was duplicated verbatim; the
    duplicate has been removed (coverage is unchanged).
    """

    # A 1x1 white "dot" at (10,10) : constant white, cropped to one pixel
    # without changing the display window.
    constant = GafferImage.Constant()
    constant["color"].setValue( IECore.Color4f( 1 ) )

    crop = GafferImage.Crop()
    crop["in"].setInput( constant["out"] )
    crop["area"].setValue( IECore.Box2i( IECore.V2i( 10 ), IECore.V2i( 11 ) ) )
    crop["affectDisplayWindow"].setValue( False )

    blur = GafferImage.Blur()
    blur["in"].setInput( crop["out"] )
    blur["radius"].setValue( IECore.V2f( 1 ) )
    # Expand so the blurred falloff outside the 1x1 crop is kept.
    blur["expandDataWindow"].setValue( True )

    sampler = GafferImage.Sampler( blur["out"], "R", IECore.Box2i( IECore.V2i( 0 ), IECore.V2i( 20 ) ) )

    # Centre is brightest
    self.assertGreater( sampler.sample( 10, 10 ), sampler.sample( 11, 10 ) )

    # Corners are least bright, but still non-zero
    self.assertGreater( sampler.sample( 11, 10 ), sampler.sample( 11, 11 ) )
    self.assertGreater( sampler.sample( 11, 11 ), 0 )

    # Shape is symmetrical : edges match left/right and up/down,
    # and all four diagonal corners match.
    self.assertEqual( sampler.sample( 11, 10 ), sampler.sample( 9, 10 ) )
    self.assertEqual( sampler.sample( 10, 11 ), sampler.sample( 10, 9 ) )
    self.assertEqual( sampler.sample( 9, 9 ), sampler.sample( 11, 9 ) )
    self.assertEqual( sampler.sample( 9, 9 ), sampler.sample( 11, 11 ) )
    self.assertEqual( sampler.sample( 9, 9 ), sampler.sample( 9, 11 ) )
def testDestroyWhileProcessing(self):
    """Show an expensive blur in an ImageGadget, then destroy the UI and the
    script while computation may still be in flight."""

    # A 2000x2000 constant pushed through a radius-400 blur is deliberately
    # slow, so processing is still running when we tear everything down.
    script = Gaffer.ScriptNode()
    script["c"] = GafferImage.Constant()
    script["c"]["format"].setValue(GafferImage.Format(2000, 2000))
    script["b"] = GafferImage.Blur()
    script["b"]["in"].setInput(script["c"]["out"])
    script["b"]["radius"].setValue(imath.V2f(400))

    gadget = GafferImageUI.ImageGadget()
    gadget.setImage(script["b"]["out"])

    with GafferUI.Window() as window:
        GafferUI.GadgetWidget(gadget)
    window.setVisible(True)

    # If this computer doesn't support floating point textures, the ImageGadget will warn about
    # this the first time it tries to render. Don't fail because of this
    with IECore.CapturingMessageHandler() as mh:
        self.waitForIdle(1000)
        if len(mh.messages):
            self.assertEqual(len(mh.messages), 1)
            self.assertEqual(mh.messages[0].context, "ImageGadget")
            self.assertEqual(
                mh.messages[0].message,
                "Could not find supported floating point texture format in OpenGL. GPU image viewer path will be low quality, recommend switching to CPU display transform, or resolving graphics driver issue."
            )

    # UI first, then the script it was viewing.
    del gadget, window
    del script
def testEnergyPreservation(self):
    """Blurring must neither create nor destroy energy : the average over a
    region enclosing a blurred single pixel stays constant for any radius."""

    # One white pixel at (10,10), display window untouched.
    constant = GafferImage.Constant()
    constant["color"].setValue(IECore.Color4f(1))

    crop = GafferImage.Crop()
    crop["in"].setInput(constant["out"])
    crop["area"].setValue(IECore.Box2i(IECore.V2i(10), IECore.V2i(11)))
    crop["affectDisplayWindow"].setValue(False)

    blur = GafferImage.Blur()
    blur["in"].setInput(crop["out"])
    blur["expandDataWindow"].setValue(True)

    # Average the red channel over a 10x10 window centred on the pixel.
    stats = GafferImage.ImageStats()
    stats["in"].setInput(blur["out"])
    stats["regionOfInterest"].setValue(
        IECore.Box2i(IECore.V2i(5), IECore.V2i(15)))

    # One unit of energy spread over a 100-pixel stats region averages to
    # 1/100, whatever the blur radius.
    for step in range(0, 10):
        blur["radius"].setValue(IECore.V2f(step * 0.5))
        self.assertAlmostEqual(
            stats["average"]["r"].getValue(), 1 / 100., delta=0.0001)
def testBlurRange( self ):
    """Render a strip of a 5x5 dot blurred with radii 0.0 .. 1.8 (via an
    ImageLoop) and compare it against a reference image on disk."""

    # A 5x5 white image...
    constant = GafferImage.Constant()
    constant["format"].setValue( GafferImage.Format( 5, 5, 1.000 ) )
    constant["color"].setValue( IECore.Color4f( 1, 1, 1, 1 ) )

    # ...cropped down to a single-pixel dot at (2,2).
    cropDot = GafferImage.Crop()
    cropDot["area"].setValue( IECore.Box2i( IECore.V2i( 2, 2 ), IECore.V2i( 3, 3 ) ) )
    cropDot["affectDisplayWindow"].setValue( False )
    cropDot["in"].setInput( constant["out"] )

    blur = GafferImage.Blur()
    blur["expandDataWindow"].setValue( True )
    blur["in"].setInput( cropDot["out"] )
    # Keep the blur isotropic by ganging y to x.
    blur["radius"]["y"].setInput( blur["radius"]["x"] )

    # Drive the radius from the loop index : iteration i blurs with radius i * 0.2.
    expression = Gaffer.Expression()
    blur.addChild( expression )
    expression.setExpression( 'parent["radius"]["x"] = context[ "loop:index" ] * 0.2', "python" )

    loopInit = GafferImage.Constant()
    loopInit["format"].setValue( GafferImage.Format( 5, 5, 1.000 ) )

    imageLoop = GafferImage.ImageLoop()
    imageLoop["in"].setInput( loopInit["out"] )

    # Each iteration merges the current blur over the accumulated result...
    merge = GafferImage.Merge()
    merge["in"].addChild( GafferImage.ImagePlug( "in2", flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic, ) )
    merge["in"]["in0"].setInput( blur["out"] )
    merge["in"]["in1"].setInput( imageLoop["previous"] )

    # ...then shifts everything 5 pixels left, so successive radii line up
    # side by side in a strip.
    offset = GafferImage.Offset()
    offset["offset"].setValue( IECore.V2i( -5, 0 ) )
    offset["in"].setInput( merge["out"] )

    imageLoop["next"].setInput( offset["out"] )

    # Only the R channel is compared against the reference.
    deleteChannels = GafferImage.DeleteChannels()
    deleteChannels["mode"].setValue( GafferImage.DeleteChannels.Mode.Keep )
    deleteChannels["channels"].setValue( IECore.StringVectorData( [ 'R' ] ) )
    deleteChannels["in"].setInput( imageLoop["out"] )

    # areaSource 1 : crop to the data window — TODO confirm enum meaning.
    finalCrop = GafferImage.Crop()
    finalCrop["areaSource"].setValue( 1 )
    finalCrop["in"].setInput( deleteChannels["out"] )

    # Enable to write out images for visual comparison
    if False:
        testWriter = GafferImage.ImageWriter()
        testWriter["in"].setInput( finalCrop["out"] )
        testWriter["fileName"].setValue( "/tmp/blurRange.exr" )
        testWriter["openexr"]["dataType"].setValue( 'float' )
        testWriter["task"].execute()

    expectedReader = GafferImage.ImageReader()
    expectedReader["fileName"].setValue( os.path.dirname( __file__ ) + "/images/blurRange.exr" )

    self.assertImagesEqual( finalCrop["out"], expectedReader["out"], maxDifference = 0.00001, ignoreMetadata = True )
def testPassThrough( self ) :
    """A zero radius must be an exact pass-through, hashes included.

    NOTE(review) : a later method in this file also defines
    `testPassThrough`; if both live in the same TestCase class only the
    later one runs — confirm they belong to different classes.
    """

    constant = GafferImage.Constant()

    blur = GafferImage.Blur()
    blur["in"].setInput( constant["out"] )
    blur["radius"].setValue( IECore.V2f( 0 ) )

    self.assertEqual( constant["out"].imageHash(), blur["out"].imageHash() )
    self.assertEqual( constant["out"].image(), blur["out"].image() )
def testPassThrough(self):
    """Zero radius must leave both hashes and image data untouched."""

    source = GafferImage.Constant()

    blur = GafferImage.Blur()
    blur["in"].setInput(source["out"])
    blur["radius"].setValue(imath.V2f(0))

    self.assertImageHashesEqual(source["out"], blur["out"])
    self.assertImagesEqual(source["out"], blur["out"])
def testExpandDataWindow( self ) :
    """By default the data window passes through unchanged; with
    `expandDataWindow` on, a radius of 1 grows it by 2 pixels on every side."""

    constant = GafferImage.Constant()

    blur = GafferImage.Blur()
    blur["in"].setInput( constant["out"] )
    blur["radius"].setValue( IECore.V2f( 1 ) )

    # Expansion is off by default, so the windows match exactly.
    self.assertEqual( blur["out"]["dataWindow"].getValue(), constant["out"]["dataWindow"].getValue() )

    blur["expandDataWindow"].setValue( True )

    expanded = blur["out"]["dataWindow"].getValue()
    unblurred = constant["out"]["dataWindow"].getValue()
    self.assertEqual( expanded.min, unblurred.min - IECore.V2i( 2 ) )
    self.assertEqual( expanded.max, unblurred.max + IECore.V2i( 2 ) )
def testDestroyWhileProcessing( self ) :
    """Destroy the viewer UI and script while a heavy blur may still be
    computing for display."""

    # A deliberately expensive image : 2000x2000 with a radius-400 blur.
    script = Gaffer.ScriptNode()
    script["c"] = GafferImage.Constant()
    script["c"]["format"].setValue( GafferImage.Format( 2000, 2000 ) )
    script["b"] = GafferImage.Blur()
    script["b"]["in"].setInput( script["c"]["out"] )
    script["b"]["radius"].setValue( imath.V2f( 400 ) )

    gadget = GafferImageUI.ImageGadget()
    gadget.setImage( script["b"]["out"] )

    with GafferUI.Window() as window :
        GafferUI.GadgetWidget( gadget )
    window.setVisible( True )

    self.waitForIdle( 1000 )

    # Tear down the UI first, then the script it was displaying.
    del gadget, window
    del script
def runInteractive(self, useUI, useBlur, resolution):
    """Drive an interactive Arnold render of a textured sphere while animating
    the camera, measuring either UI tile updates (useUI) or compute counts.

    useUI      : measure through a real Viewer/event loop when True, else via
                 plugDirtied-driven tile processing.
    useBlur    : watch the Blur node's output when True, else the Catalogue's.
    resolution : square render resolution in pixels.
    """

    script = Gaffer.ScriptNode()

    # Scene : a camera looking at a large checker-textured sphere.
    script["Camera"] = GafferScene.Camera()
    script["Camera"]["transform"]["translate"]["z"].setValue(6)
    script["Sphere"] = GafferScene.Sphere("Sphere")
    script["Sphere"]["radius"].setValue(10)
    script["ImageShader"] = GafferArnold.ArnoldShader()
    script["ImageShader"].loadShader("image")
    script["ImageShader"]["parameters"]["filename"].setValue(
        os.path.dirname(__file__) + "/../GafferImageTest/images/GafferChecker.exr")
    script["ImageShader"]["parameters"]["sscale"].setValue(16)
    script["ImageShader"]["parameters"]["tscale"].setValue(16)
    script["ShaderAssignment"] = GafferScene.ShaderAssignment()
    script["ShaderAssignment"]["in"].setInput(script["Sphere"]["out"])
    script["ShaderAssignment"]["shader"].setInput(
        script["ImageShader"]["out"])
    script["Group"] = GafferScene.Group()
    script["Group"]["in"][0].setInput(script["Camera"]["out"])
    script["Group"]["in"][1].setInput(script["ShaderAssignment"]["out"])

    # Render globals : camera and requested resolution.
    script["StandardOptions"] = GafferScene.StandardOptions()
    script["StandardOptions"]["in"].setInput(script["Group"]["out"])
    script["StandardOptions"]["options"]["renderCamera"]["value"].setValue(
        '/group/camera')
    script["StandardOptions"]["options"]["renderCamera"][
        "enabled"].setValue(True)
    script["StandardOptions"]["options"]["renderResolution"][
        "value"].setValue(imath.V2i(resolution, resolution))
    script["StandardOptions"]["options"]["renderResolution"][
        "enabled"].setValue(True)
    script["ArnoldOptions"] = GafferArnold.ArnoldOptions("ArnoldOptions")
    script["ArnoldOptions"]["in"].setInput(
        script["StandardOptions"]["out"])
    # Make sure we leave some CPU available for Gaffer
    script["ArnoldOptions"]["options"]["threads"]["value"].setValue(-1)
    script["ArnoldOptions"]["options"]["threads"]["enabled"].setValue(True)

    # Beauty output sent back to this process's display-driver server.
    script["Outputs"] = GafferScene.Outputs()
    script["Outputs"].addOutput(
        "beauty",
        IECoreScene.Output(
            "Interactive/Beauty",
            "ieDisplay",
            "rgba",
            {
                "quantize": IECore.IntVectorData([0, 0, 0, 0]),
                "driverType": 'ClientDisplayDriver',
                "displayHost": 'localhost',
                "displayPort": str(GafferImage.Catalogue.displayDriverServer().portNumber()),
                "remoteDisplayType": 'GafferImage::GafferDisplayDriver',
                "filter": 'box',
            }))
    script["Outputs"]["in"].setInput(script["ArnoldOptions"]["out"])
    script[
        "InteractiveArnoldRender"] = GafferArnold.InteractiveArnoldRender(
        )
    script["InteractiveArnoldRender"]["in"].setInput(
        script["Outputs"]["out"])

    # Rendered images land in the Catalogue; optionally blurred afterwards.
    script["Catalogue"] = GafferImage.Catalogue("Catalogue")
    script["Catalogue"]["directory"].setValue(self.temporaryDirectory() +
                                              "/catalogues/test")
    script["Blur"] = GafferImage.Blur("Blur")
    script["Blur"]["in"].setInput(script["Catalogue"]["out"])
    script["Blur"]["radius"]["x"].setValue(1.0)
    script["Blur"]["radius"]["y"].setValue(1.0)
    watchNode = script["Blur"] if useBlur else script["Catalogue"]

    if useUI:

        # Measure through a real Viewer : run the main event loop, wiggle the
        # camera from a timer, and count ImageGadget tile updates.
        with GafferUI.Window() as window:
            window.setFullScreen(True)
            viewer = GafferUI.Viewer(script)
        window.setVisible(True)
        viewer.setNodeSet(Gaffer.StandardSet([watchNode]))

        script['InteractiveArnoldRender']['state'].setValue(
            GafferScene.InteractiveRender.State.Running)
        self.waitForIdle(10)
        viewer.view().viewportGadget().frame(
            viewer.view().viewportGadget().getPrimaryChild().bound(),
            imath.V3f(0, 0, 1))

        # Mutable counter shared with the timer callback; stops the event
        # loop after 50 camera moves.
        frameCounter = {'i': 0}

        def testFunc():
            frameCounter['i'] += 1
            script["Camera"]["transform"]["translate"]["x"].setValue(
                math.sin(frameCounter['i'] * 0.1))
            if frameCounter['i'] >= 50:
                GafferUI.EventLoop.mainEventLoop().stop()

        timer = QtCore.QTimer()
        timer.setInterval(20)
        timer.timeout.connect(testFunc)

        GafferImageUI.ImageGadget.resetTileUpdateCount()
        timer.start()

        with GafferTest.TestRunner.PerformanceScope() as ps:
            GafferUI.EventLoop.mainEventLoop().start()
            # Iterations = tiles updated while the loop ran.
            ps.setNumIterations(
                GafferImageUI.ImageGadget.tileUpdateCount())

        script['InteractiveArnoldRender']['state'].setValue(
            GafferScene.InteractiveRender.State.Stopped)

        del window, viewer, timer
        self.waitForIdle(10)

    else:

        # Headless : process tiles whenever the watched output is dirtied,
        # and count channelData computes with a PerformanceMonitor.
        with GafferTest.ParallelAlgoTest.UIThreadCallHandler() as h:
            # Capture (and ignore) any messages emitted during Arnold startup.
            with IECore.CapturingMessageHandler() as mh:
                script['InteractiveArnoldRender']['state'].setValue(
                    GafferScene.InteractiveRender.State.Running)
                h.waitFor(2)
            arnoldStartupErrors = mh.messages

            # Keep tiles being processed for as long as this connection lives.
            tc = Gaffer.ScopedConnection(
                GafferImageTest.connectProcessTilesToPlugDirtiedSignal(
                    watchNode["out"]))

            with GafferTest.TestRunner.PerformanceScope() as ps:
                with Gaffer.PerformanceMonitor() as m:
                    for i in range(250):
                        script["Camera"]["transform"]["translate"][
                            "x"].setValue(math.sin((i + 1) * 0.1))
                        h.waitFor(0.02)
                # Iterations = computes of the watched channelData plug.
                ps.setNumIterations(
                    m.plugStatistics(
                        watchNode["out"]
                        ["channelData"].source()).computeCount)

            script['InteractiveArnoldRender']['state'].setValue(
                GafferScene.InteractiveRender.State.Stopped)
def testConcatenation(self):
    """Two identical transform chains — one with concatenation broken by an
    interposed Blur — must produce practically identical images."""

    # Identical transformation chains, but one
    # with concatenation broken by a Blur node.
    #
    #       checker
    #          |
    #    deleteChannels
    #         / \
    #        /   \
    #      tc1    t1
    #       |     |
    #      tc2   blur
    #             |
    #             t2

    checker = GafferImage.Checkerboard()
    checker["format"].setValue(GafferImage.Format(200, 200))

    deleteChannels = GafferImage.DeleteChannels()
    deleteChannels["in"].setInput(checker["out"])
    deleteChannels["channels"].setValue("A")

    # Concatenating chain : tc1 -> tc2.
    tc1 = GafferImage.ImageTransform()
    tc1["in"].setInput(deleteChannels["out"])
    tc1["filter"].setValue("gaussian")

    tc2 = GafferImage.ImageTransform()
    tc2["in"].setInput(tc1["out"])
    tc2["filter"].setInput(tc1["filter"])

    # Equivalent chain with the same transforms, but broken by a Blur.
    t1 = GafferImage.ImageTransform()
    t1["in"].setInput(deleteChannels["out"])
    t1["transform"].setInput(tc1["transform"])
    t1["filter"].setInput(tc1["filter"])

    blur = GafferImage.Blur()
    blur["in"].setInput(t1["out"])

    t2 = GafferImage.ImageTransform()
    t2["in"].setInput(blur["out"])
    t2["transform"].setInput(tc2["transform"])
    t2["filter"].setInput(tc1["filter"])

    # The blur doesn't do anything except
    # break concatenation. Check that tc2
    # is practically identical to t2 for
    # a range of transforms.
    for i in range(0, 10):

        # Seed per iteration so each transform pair is reproducible.
        random.seed(i)
        translate1 = imath.V2f(random.uniform(-100, 100),
                               random.uniform(-100, 100))
        rotate1 = random.uniform(-360, 360)
        scale1 = imath.V2f(random.uniform(-2, 2), random.uniform(-2, 2))
        tc1["transform"]["translate"].setValue(translate1)
        tc1["transform"]["rotate"].setValue(rotate1)
        tc1["transform"]["scale"].setValue(scale1)

        translate2 = imath.V2f(random.uniform(-100, 100),
                               random.uniform(-100, 100))
        rotate2 = random.uniform(-360, 360)
        scale2 = imath.V2f(random.uniform(-2, 2), random.uniform(-2, 2))
        tc2["transform"]["translate"].setValue(translate2)
        tc2["transform"]["rotate"].setValue(rotate2)
        tc2["transform"]["scale"].setValue(scale2)

        # The `maxDifference` here is surprisingly high, but visual checks
        # show that it is legitimate : differences in filtering are that great.
        # The threshold is still significantly lower than the differences between
        # checker tiles, so does guarantee that tiles aren't getting out of alignment.
        self.assertImagesEqual(tc2["out"],
                               t2["out"],
                               maxDifference=0.11,
                               ignoreDataWindow=True)
def nodeMenuCreateCommand(menu):
    """Node-menu factory : return a new Blur with its radius components
    ganged, so dragging one slider drives both axes."""

    node = GafferImage.Blur()
    node["radius"].gang()

    return node
def testNonFlatThrows(self):
    """Blur only supports flat images : deep input must raise."""

    node = GafferImage.Blur()
    node["radius"].setValue(imath.V2f(1))

    self.assertRaisesDeepNotSupported(node)