def main(argv):


    
    arguments = osg.ArgumentParser(argv)

    # construct the viewer.
    viewer = osgViewer.Viewer(arguments)


    node = osgDB.readNodeFiles(arguments)
    if not node: return 0

    gce = osg.GraphicsCostEstimator()

    viewer.setSceneData(node)

    viewer.realize()

    compileCost = gce.estimateCompileCost(node)
    drawCost = gce.estimateDrawCost(node)

    # The C++ original streams to OSG_NOTICE; print the (CPU, GPU) cost pairs here.
    print("estimateCompileCost(", node.getName(), "), CPU=", compileCost.first, " GPU=", compileCost.second)
    print("estimateDrawCost(", node.getName(), "), CPU=", drawCost.first, " GPU=", drawCost.second)

    return viewer.run()
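
# A minimal entry point for the main() above (a sketch; it assumes the osg,
# osgDB and osgViewer binding modules used above are already imported in the
# enclosing script):

if __name__ == "__main__":
    import sys
    sys.exit(main(sys.argv))
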
Example #2
def test_resultTypes(self):
    g = osg.GraphicsCostEstimator()
    node = osg.Node()
    g.estimateCompileCost(node)

Example #3

# Translated from the C++ example; its #include lines (<osgDB/ReadFile>,
# <osg/GraphicsCostEstimator>) and class declaration are rendered here as Python.

class CalibrateCostEsimator(osg.GraphicsOperation):

    def __init__(self, gce):
        # False: run the operation once rather than keeping it in the queue.
        osg.GraphicsOperation.__init__(self, "CalbirateCostEstimator", False)
        self._gce = gce

    def __call__(self, context):
        renderInfo = osg.RenderInfo(context.getState(), 0)
        self._gce.calibrate(renderInfo)
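
# The class above is only defined in this listing. One plausible way to use it
# (a sketch, not taken from the original example) is to register it as the
# viewer's realize operation so that calibrate() runs once a graphics context
# exists; this assumes the Python bindings wrap
# osgViewer::ViewerBase::setRealizeOperation and can dispatch the overridden
# __call__ from C++:

gce = osg.GraphicsCostEstimator()
viewer = osgViewer.Viewer()
viewer.setRealizeOperation(CalibrateCostEsimator(gce))
viewer.realize()  # the operation is invoked when the context is realized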




def main(argv):


    
    arguments = osg.ArgumentParser(argv)

    # construct the viewer.
    viewer = osgViewer.Viewer(arguments)


    node = osgDB.readNodeFiles(arguments)
Example #4
def test_classAvailable(self):
    g = osg.GraphicsCostEstimator()
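
# The two test_* methods in Examples #2 and #4 reference self, so in the
# original suite they presumably live inside a test class. A minimal sketch of
# such a wrapper, assuming the standard unittest framework and an importable
# osg module (the class name below is hypothetical):

import unittest

class GraphicsCostEstimatorTest(unittest.TestCase):

    def test_classAvailable(self):
        g = osg.GraphicsCostEstimator()
        self.assertIsNotNone(g)

    def test_resultTypes(self):
        g = osg.GraphicsCostEstimator()
        node = osg.Node()
        cost = g.estimateCompileCost(node)
        # Example #1 reads the result via .first/.second (CPU and GPU cost).
        self.assertTrue(hasattr(cost, "first") and hasattr(cost, "second"))

if __name__ == "__main__":
    unittest.main()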