# NOTE(review): this line is the tail of a function that starts before this chunk;
# `return m3` belongs to that (not visible) function and is kept verbatim.
return m3

from dataset_reader import Dataset, Testset

# Plots high sample count last image.
# Script: reads a dataset directory from argv, copies the target mesh to a fixed
# temp slot consumed by the renderer, and builds parameter maps for plotting.
# Assumes `optparse`, `np` (numpy), `copyfile` (shutil) and `toMap` are imported
# earlier in the file — TODO confirm, not visible in this chunk.

parser = optparse.OptionParser()
parser.add_option("-s", "--samples", dest="samples", default=2048, type="int")
parser.add_option("-c", "--distribution", dest="distribution", default=None)
(options, args) = parser.parse_args()

# First positional argument is the dataset directory.
directory = args[0]
dataset = Dataset(directory)

# TODO: Temporary
# Fixed path suggests an external renderer reads the mesh from this slot —
# verify against the rendering pipeline.
copyfile(dataset.testSet().targetMeshPath, "/tmp/mts_mesh_intensity_slot_0.ply")

renderable = dataset.testSet().renderable(1)

# pt0: most recent optimized BSDF; pt1: ground-truth target BSDF.
pt0 = np.array(dataset.lastAvailableBSDF())
pt1 = np.array(dataset.testSet().targetBSDF())

paramList = dataset.testSet().parameterList()
#print paramList
#print pt0
#print toMap(paramList, pt1)

# When adaptive BSDF sampling was used, weight every adaptive parameter at 1.0;
# otherwise leave the map empty.
if dataset.testSet().bsdfAdaptiveSampled:
    adaptiveParamList = dataset.bsdfAdaptiveSamplingParameterList
    print adaptiveParamList
    adaptiveParamMap = toMap(adaptiveParamList, len(adaptiveParamList) * [1.0])
else:
    adaptiveParamMap = {}
# Script chunk: renders per-light target images (direct or indirect) for a dataset.
# Assumes `parser` (optparse.OptionParser), `os`, `Dataset` and `toMap` are in scope
# from earlier in the file — TODO confirm, not visible in this chunk.
parser.add_option("-l", "--linear", dest="linear", action="store_true", default=False)
parser.add_option("-c", "--distribution", dest="distribution", default=None, type="string")
parser.add_option("-r", "--samples", dest="samples", default=128, type="int")
parser.add_option("-d", "--direct-samples", dest="directSamples", default=4, type="int")
parser.add_option("-i", "--iteration", dest="iteration", default=None, type="int")
parser.add_option("-s", "--super-iteration", dest="superiteration", default=0, type="int")
parser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False)
parser.add_option("--direct-only", dest="directOnly", action="store_true", default=False)
(options, args) = parser.parse_args()

# First positional argument is the dataset directory.
directory = args[0]
dataset = Dataset(directory)
testset = dataset.testSet()

# Output directory depends on whether only direct illumination is rendered.
if options.directOnly:
    idirpath = directory + "/renders/direct-targets"
else:
    idirpath = directory + "/renders/indirect-targets"
if not os.path.exists(idirpath):
    # NOTE(review): os.mkdir requires directory + "/renders" to already exist — confirm.
    os.mkdir(idirpath)

meshfile = testset.targetMeshPath
paramlist = testset.parameterList()
# Map parameter names to the target BSDF values.
bsdf = toMap(paramlist, testset.targetBSDF())

# Progress line per light; trailing comma keeps the carriage-return overwrite on one line.
for k in range(dataset.testSet().numLights()):
    print "Light ", k, "/", dataset.testSet().numLights(), "\r",
    # NOTE(review): loop body continues past the end of this chunk (truncated here).
# Script chunk: renders per-iteration images (direct or indirect) over both the
# regular and the BSDF iterations of a dataset.
# Assumes `parser`, `os`, and `Dataset` are in scope from earlier in the file —
# TODO confirm, not visible in this chunk.
parser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False)
parser.add_option("--direct-only", dest="directOnly", action="store_true", default=False)
(options, args) = parser.parse_args()

# First positional argument is the dataset directory.
directory = args[0]
dataset = Dataset(directory)
testset = dataset.testSet()

# Output directory depends on whether only direct illumination is rendered.
if not options.directOnly:
    idirpath = directory + "/renders/indirect"
else:
    idirpath = directory + "/renders/direct"
if not os.path.exists(idirpath):
    # NOTE(review): os.mkdir requires directory + "/renders" to already exist — confirm.
    os.mkdir(idirpath)

# Iterate over regular iterations first, then BSDF iterations; (ii, bi) split a
# flat index i into (regular-iteration, bsdf-iteration) coordinates.
for i in range(dataset.testSet().numIterations() + dataset.testSet().numBSDFIterations()):
    if i >= dataset.testSet().numIterations():
        ii = 0
        bi = i - dataset.testSet().numIterations()
    else:
        # NOTE(review): the else-branch (and the rest of the loop body) continues
        # past the end of this chunk (truncated here).
# Script chunk: renders a chosen BSDF onto an embeddable sphere scene.
# Assumes `parser` (with superIndex/index/dryRun/distribution options added
# earlier), `os`, `sys`, and `Dataset` are in scope — TODO confirm, not visible here.
(options, args) = parser.parse_args()

# First positional argument is the dataset directory.
directory = args[0]
dataset = Dataset(directory)

# -1/-1 acts as a sentinel meaning "use the latest available BSDF"; otherwise
# select the BSDF at the requested (super)iteration.
if options.superIndex == -1 and options.index == -1:
    targetBSDF = dataset.lastAvailableBSDF()
else:
    targetBSDF = dataset.BSDFAt(iteration=options.index, superiteration=options.superIndex)

print targetBSDF
# Dry run: only report the selected BSDF, do not render.
if options.dryRun:
    sys.exit(0)

testset = dataset.testSet()

# Embed the test-set data into the sphere scene template, producing a
# post-processed scene XML next to this script's data directory.
sphereXML = os.path.dirname(__file__) + "/data/sphere-embeddable.xml"
sphereXMLP = os.path.dirname(__file__) + "/data/sphere-postprocessed.xml"
testset.embedOnto(sphereXML, sphereXMLP)

renderable = testset.renderables[0]
renderable.setFile(sphereXMLP)
renderable.setEmbeddedParameter("envmap", "doge.exr")

# Fewer local threads when work is distributed to a cluster — presumably to
# leave cores for the distribution client; verify against renderReadback usage.
if options.distribution is not None:
    localThreads = 2
else:
    localThreads = 8

# NOTE(review): call is truncated at the end of this chunk; its arguments
# continue past the visible source.
sphereImage = renderable.renderReadback(
# Script chunk: prepares per-light gradient-slice renderings and lays out raw
# BSDF gradient images on a layoutRows x layoutColumns grid.
# Assumes `parser`, `os`, `Dataset`, and `xmlAddGradientSliceRendering` are in
# scope from earlier in the file — TODO confirm, not visible in this chunk.
parser.add_option("--layout-columns", dest="layoutColumns", default=5, type="int")
parser.add_option("--layout-rows", dest="layoutRows", default=5, type="int")
parser.add_option("--adaptive-mode", dest="adaptiveMode", default=None, type="string")
(options, args) = parser.parse_args()

# First positional argument is the dataset directory.
directory = args[0]
dataset = Dataset(directory)
testset = dataset.testSet()

# Output directory for the gradient images.
gdirpath = directory + "/images/raw-bsdf-gradients"
lr = options.layoutRows
lc = options.layoutColumns
if not os.path.exists(gdirpath):
    # NOTE(review): os.mkdir requires directory + "/images" to already exist — confirm.
    os.mkdir(gdirpath)

# One gradient slice per BSDF parameter, configured for every light's renderable.
for k in range(dataset.testSet().numLights()):
    xmlAddGradientSliceRendering(testset.gradientRenderables[k],
                                 slices=len(testset.parameterList()))

# Optionally restrict processing to a single requested iteration.
for i in range(dataset.testSet().numBSDFIterations()):
    if options.iteration is not None and i != options.iteration:
        # NOTE(review): body (presumably `continue` and the per-iteration work)
        # continues past the end of this chunk (truncated here).
# from dataset_reader import Dataset (Alternative temporary fix) #execfile(os.path.dirname(__file__) + "/../tools/dataset_reader.py") parser = optparse.OptionParser() (options, args) = parser.parse_args() directory = args[0] dataset = Dataset(directory) profiles = [{ "name": "invAlpha", "sampleWeights": invAlpha(dataset.testSet().bsdfDictionary).tolist(), "type": "bsdf-adaptive" }, { "name": "uniform", "sampleWeights": [1.0] * len(dataset.testSet().bsdfDictionary["elements"]), "type": "bsdf-adaptive" }, { "name": "bsdfWeight", "sampleWeights": dataset.lastAvailableBSDF(), "type": "bsdf-adaptive" }] uniform = np.stack(
# Script chunk: prepares output directories for single-bounce gradient/current
# renderings and reports the total iteration count.
# Assumes `optparse`, `os`, and `Dataset` are in scope from earlier in the file —
# TODO confirm, not visible in this chunk.
import merl_io

parser = optparse.OptionParser()
parser.add_option("-l", "--linear", dest="linear", action="store_true", default=False)
parser.add_option("-c", "--distribution", dest="distribution", default=None, type="string")
parser.add_option("-r", "--samples", dest="samples", default=128, type="int")
parser.add_option("-i", "--iteration", dest="iteration", default=None, type="int")
parser.add_option("-s", "--super-iteration", dest="superiteration", default=0, type="int")
(options, args) = parser.parse_args()

# First positional argument is the dataset directory.
directory = args[0]
dataset = Dataset(directory)
testset = dataset.testSet()

# Output directories: gradient meshes, current images, and gradient renders.
gdirpath = directory + "/meshes/single-bounce-gradients"
idirpath = directory + "/images/single-bounce-currents"
gidirpath = directory + "/renders/single-bounce-gradients"
if not os.path.exists(gdirpath):
    # NOTE(review): os.mkdir requires the parent directories to already exist — confirm.
    os.mkdir(gdirpath)
if not os.path.exists(idirpath):
    os.mkdir(idirpath)
if not os.path.exists(gidirpath):
    os.mkdir(gidirpath)

# Total number of iterations (regular + BSDF) processed by this script.
print(dataset.testSet().numIterations() + dataset.testSet().numBSDFIterations())
print("")
print("")
# NOTE(review): this chunk starts mid-statement — `type="string")` closes an
# add_option call begun before this view; kept verbatim.
               type="string")
parser.add_option("-k", "--resolution", dest="resolution", default=256, type="int")
(options, args) = parser.parse_args()

# First positional argument is the dataset directory.
# Assumes `parser`, `sys`, `np` (numpy), and `Dataset` are in scope from earlier
# in the file — TODO confirm, not visible in this chunk.
directory = args[0]
dataset = Dataset(directory)

# Latest optimized BSDF values.
pt = np.array(dataset.lastAvailableBSDF())

# Variance testing requires the dataset to have been adaptively sampled.
if not dataset.testSet().bsdfAdaptiveSampled:
    print("BSDF Adaptive Sampling should be enabled for variance testing")
    sys.exit(1)

adaptiveParamList = dataset.bsdfAdaptiveSamplingParameterList
paramList = dataset.testSet().parameterList()
testset = dataset.testSet()

###
# Monte Carlo gradient
###

# override
# Force the requested sample count onto every light's gradient renderable.
for k in range(dataset.testSet().numLights()):
    testset.gradientRenderables[k].setEmbeddedParameter(
        "sampleCount", options.samples)
# NOTE(review): this chunk starts mid-statement — `default=False)` closes an
# add_option call begun before this view; kept verbatim.
                  default=False)
(options, args) = parser.parse_args()

# First positional argument is the dataset directory.
# Assumes `parser`, `sys`, `json`, `np` (numpy), and `Dataset` are in scope from
# earlier in the file — TODO confirm, not visible in this chunk.
directory = args[0]
dataset = Dataset(directory)

# Fall back to the dataset's default spatial sample profile file when none given.
if options.sampleFile is None:
    print("Using default sample profile filename")
    options.sampleFile = directory + "/spatial_sample_profiles.json"

sampleProfiles = json.load(open(options.sampleFile, "r"))

# Latest optimized BSDF values.
pt = np.array(dataset.lastAvailableBSDF())

# Variance testing requires the dataset to have been adaptively sampled.
if not dataset.testSet().bsdfAdaptiveSampled:
    print("BSDF Adaptive Sampling should be enabled for variance testing")
    sys.exit(1)

adaptiveParamList = dataset.bsdfAdaptiveSamplingParameterList
paramList = dataset.testSet().parameterList()
testset = dataset.testSet()

# override
# Force the requested render sample count and block size onto every light's
# gradient renderable.
for k in range(dataset.testSet().numLights()):
    testset.gradientRenderables[k].setEmbeddedParameter(
        "sampleCount", options.renderSamples)
    # NOTE(review): block membership reconstructed — setParameter indexes [k],
    # so it is assumed to be inside this loop; confirm against the original file.
    testset.gradientRenderables[k].setParameter("blockSize", 8)

reductors = None