def check_uniform_wavefront_sampler(sampler, res=16, atol=0.5):
    """
    Check that a wavefront-mode sampler produces approximately uniform
    1D and 2D variates by histogramming one full wavefront of samples.
    """
    from mitsuba.core import Float, UInt32, UInt64, Vector2u

    n = sampler.sample_count()
    sampler.set_samples_per_wavefront(n)
    sampler.seed(0, n)

    # Histogram the 1D sample stream over `res` cells
    counts_1d = ek.zero(UInt32, res)
    cell_1d = ek.clamp(sampler.next_1d() * res, 0, res)
    ek.scatter_add(
        target=counts_1d,
        index=UInt32(cell_1d),
        source=UInt32(1.0)
    )

    # Histogram the 2D sample stream over a res x res grid
    counts_2d = ek.zero(UInt32, res * res)
    cell_2d = Vector2u(ek.clamp(sampler.next_2d() * res, 0, res))
    ek.scatter_add(
        target=counts_2d,
        index=UInt32(cell_2d.x * res + cell_2d.y),
        source=UInt32(1.0)
    )

    # Each cell should receive roughly the same number of samples
    assert ek.allclose(Float(counts_1d), float(n) / res, atol=atol)
    assert ek.allclose(Float(counts_2d), float(n) / (res * res), atol=atol)
def test_tea_vectorized(variant_packet_rgb):
    """Vectorized TEA hash evaluation must agree with per-element scalar calls."""
    from mitsuba.core import sample_tea_float32, sample_tea_float64, UInt32

    count = 100
    # Check the float32 and float64 variants with identical logic
    for sample_tea in (sample_tea_float32, sample_tea_float64):
        packet = sample_tea(UInt32.full(1, count), UInt32.arange(count), 4)
        for i in range(count):
            assert packet[i] == sample_tea(1, i, 4)
def ravel(buf, dim=3):
    """
    Gather an interleaved linear buffer into a structure-of-arrays vector.

    Args:
        buf: Flat enoki array with `dim`-interleaved components
             (e.g. x0, y0, z0, x1, y1, z1, ...)
        dim: Number of interleaved components per element (2 or 3)

    Returns:
        Vector2f or Vector3f holding the de-interleaved components.

    Raises:
        ValueError: If ``dim`` is not 2 or 3. (Previously the function
        silently returned ``None`` for unsupported dimensions.)
    """
    # Index of the first component of each element
    idx = dim * UInt32.arange(ek.slices(buf) // dim)
    if dim == 2:
        return Vector2f(ek.gather(buf, idx), ek.gather(buf, idx + 1))
    elif dim == 3:
        return Vector3f(ek.gather(buf, idx), ek.gather(buf, idx + 1),
                        ek.gather(buf, idx + 2))
    else:
        raise ValueError('ravel(): unsupported dimension %i (expected 2 or 3)' % dim)
def test02_ply_triangle(variant_scalar_rgb):
    """Load a one-triangle PLY mesh and verify its raw vertex/face buffers."""
    from mitsuba.core import UInt32
    from mitsuba.core.xml import load_string

    mesh = load_string("""
        <shape type="ply" version="0.5.0">
            <string name="filename" value="data/triangle.ply"/>
            <boolean name="face_normals" value="true"/>
        </shape>
    """)

    positions = mesh.vertex_positions_buffer()
    faces = mesh.faces_buffer()

    assert not mesh.has_vertex_normals()

    # Three vertices stored as a flat, xyz-interleaved buffer (9 floats)
    assert ek.slices(positions) == 9
    expected_vertices = ([0, 0, 0], [0, 0, 1], [0, 1, 0])
    for k, vertex in enumerate(expected_vertices):
        assert ek.allclose(positions[3 * k:3 * k + 3], vertex)

    # One face referencing the three vertices in order
    assert ek.slices(faces) == 3
    for k in range(3):
        assert faces[k] == UInt32(k)
def test02_ply_triangle(variant_scalar_rgb):
    """Load a one-triangle PLY mesh and verify its structured vertex/face arrays."""
    from mitsuba.core import UInt32, Vector3f
    from mitsuba.core.xml import load_string

    shape = load_string("""
        <shape type="ply" version="0.5.0">
            <string name="filename" value="data/triangle.ply"/>
            <boolean name="face_normals" value="true"/>
        </shape>
    """)

    vertices = shape.vertices()
    faces = shape.faces()

    assert not shape.has_vertex_normals()

    # Three vertices in a 1D structured array with x/y/z fields
    assert vertices.ndim == 1
    assert vertices.shape == (3, )
    for field, expected in (('x', [0, 0, 0]),
                            ('y', [0, 0, 1]),
                            ('z', [0, 1, 0])):
        assert ek.allclose(vertices[field], expected)

    # A single face whose index fields reference vertices 0, 1, 2
    assert faces.ndim == 1
    assert faces.shape == (1, )
    for k, field in enumerate(('i0', 'i1', 'i2')):
        assert faces[0][field] == UInt32(k)
def _render_helper(scene, spp=None, sensor_index=0):
    """
    Internally used function: render the specified Mitsuba scene and return a
    floating point array containing RGB values and AOVs, if applicable.

    Parameter ``scene``: Scene to render (must contain at least one sensor).
    Parameter ``spp`` (int or None): Samples per pixel; defaults to the
        sensor sampler's own sample count when ``None``.
    Parameter ``sensor_index`` (int): Which of the scene's sensors to render from.

    Returns: Flat array of per-pixel channel values (weight channel divided out).
    """
    from mitsuba.core import (Float, UInt32, UInt64, Vector2f,
                              is_monochromatic, is_rgb, is_polarized)
    from mitsuba.render import ImageBlock

    sensor = scene.sensors()[sensor_index]
    film = sensor.film()
    sampler = sensor.sampler()
    film_size = film.crop_size()
    if spp is None:
        spp = sampler.sample_count()

    # One sample per (pixel, spp) pair — the whole image is one wavefront
    total_sample_count = ek.hprod(film_size) * spp

    # Re-seed only when the wavefront size changed
    if sampler.wavefront_size() != total_sample_count:
        sampler.seed(ek.arange(UInt64, total_sample_count))

    # Map linear sample index -> integer pixel coordinate
    pos = ek.arange(UInt32, total_sample_count)
    pos //= spp
    scale = Vector2f(1.0 / film_size[0], 1.0 / film_size[1])
    pos = Vector2f(Float(pos % int(film_size[0])),
                   Float(pos // int(film_size[0])))

    # Jitter within each pixel
    pos += sampler.next_2d()

    rays, weights = sensor.sample_ray_differential(
        time=0,
        sample1=sampler.next_1d(),
        sample2=pos * scale,  # normalized film coordinates in [0, 1]^2
        sample3=0
    )

    spec, mask, aovs = scene.integrator().sample(scene, sampler, rays)
    spec *= weights
    del mask

    # In polarized variants, keep only the (0, 0) Stokes component
    if is_polarized:
        from mitsuba.core import depolarize
        spec = depolarize(spec)

    if is_monochromatic:
        rgb = [spec[0]]
    elif is_rgb:
        rgb = spec
    else:
        # Spectral variant: integrate against the CIE curves, then to sRGB
        from mitsuba.core import spectrum_to_xyz, xyz_to_srgb
        xyz = spectrum_to_xyz(spec, rays.wavelengths)
        rgb = xyz_to_srgb(xyz)
        del xyz

    # Channel layout in the block: [weight, rgb..., aovs...]
    aovs.insert(0, Float(1.0))
    for i in range(len(rgb)):
        aovs.insert(i + 1, rgb[i])
    del rgb, spec, weights, rays

    block = ImageBlock(
        size=film.crop_size(),
        channel_count=len(aovs),
        filter=film.reconstruction_filter(),
        warn_negative=False,
        warn_invalid=False,
        border=False
    )

    block.clear()
    # Splat all samples into the image block (reconstruction-filtered)
    block.put(pos, aovs)

    del pos
    del aovs

    data = block.data()

    ch = block.channel_count()
    i = UInt32.arange(ek.hprod(block.size()) * (ch - 1))

    # De-interleave: channel 0 of each pixel is the filter weight,
    # channels 1..ch-1 are the actual values
    weight_idx = i // (ch - 1) * ch
    values_idx = (i * ch) // (ch - 1) + 1

    weight = ek.gather(data, weight_idx)
    values = ek.gather(data, values_idx)

    # Epsilon avoids division by zero for pixels that received no samples
    return values / (weight + 1e-8)
def unravel(source, target, dim=3):
    """
    Scatter the components of a vector array ``source`` into the flat
    buffer ``target`` in interleaved order (x0, y0, z0, x1, ...).
    """
    base = dim * UInt32.arange(ek.slices(source))
    for component in range(dim):
        ek.scatter(target, source[component], base + component)
def render_sample(scene, sampler, rays, bdata, heightmap_pybind, bssrdf=None):
    """
    Sample RTE
    TODO: Support multi channel sampling

    Args:
        scene: Target scene object
        sampler: Sampler object for random number
        rays: Given rays for sampling
        bdata: BSSRDF Data object
        heightmap_pybind: Object for getting height map around incident position.
                          Refer src/librender/python/heightmap.cpp
        bssrdf: Optional BSSRDF model used to sample projected positions
                (required when config.enable_bssrdf is set)

    Returns:
        result: Sampling RTE result
        valid_rays: Mask data whether rays are valid or not
        scatter: Scatter components of Sampling RTE result
        non_scatter: Non scatter components of Sampling RTE result
        invalid_sample: Sampling RTE result with invalid sampled data by VAEBSSRDF
    """
    eta = Float(1.0)
    emission_weight = Float(1.0)
    throughput = Spectrum(1.0)
    result = Spectrum(0.0)
    scatter = Spectrum(0.0)
    non_scatter = Spectrum(0.0)
    invalid_sample = Spectrum(0.0)
    active = True
    is_bssrdf = False

    ##### First interaction #####
    si = scene.ray_intersect(rays, active)
    active = si.is_valid() & active
    valid_rays = si.is_valid()

    emitter = si.emitter(scene, active)

    depth = 0

    # Set channel
    # At and after evaluating BSSRDF, a ray consider only this one channel
    n_channels = 3
    channel = UInt32(
        ek.min(sampler.next_1d(active) * n_channels, n_channels - 1))

    # Outgoing direction/pdf resampled at the BSSRDF exit point; overwrite
    # the BSDF sample when is_bssrdf holds
    d_out_local = Vector3f().zero()
    d_out_pdf = Float(0)

    # Marks rays whose second bounce went through the BSSRDF (for AOVs)
    sss = Mask(False)

    while (True):
        depth += 1
        if config.aovs and depth == 2:
            sss = is_bssrdf

        ##### Interaction with emitters #####
        emission_val = emission_weight * throughput * Emitter.eval_vec(
            emitter, si, active)

        result += ek.select(active, emission_val, Spectrum(0.0))
        invalid_sample += ek.select(active, emission_val, Spectrum(0.0))
        scatter += ek.select(active & sss, emission_val, Spectrum(0.0))
        non_scatter += ek.select(active & ~sss, emission_val, Spectrum(0.0))

        active = active & si.is_valid()

        # Process russian roulette
        if depth > config.rr_depth:
            q = ek.min(ek.hmax(throughput) * ek.sqr(eta), 0.95)
            active = active & (sampler.next_1d(active) < q)
            throughput *= ek.rcp(q)

        # Stop if the number of bounces exceeds the given limit bounce, or
        # all rays are invalid. latter check is done only when the limit
        # bounce is infinite
        if depth >= config.max_depth:
            break

        ##### Emitter sampling #####
        bsdf = si.bsdf(rays)
        ctx = BSDFContext()

        # Only sample emitters from surfaces with a smooth BSDF component
        active_e = active & has_flag(BSDF.flags_vec(bsdf), BSDFFlags.Smooth)
        ds, emitter_val = scene.sample_emitter_direction(
            si, sampler.next_2d(active_e), True, active_e)
        active_e &= ek.neq(ds.pdf, 0.0)

        # Query the BSDF for that emitter-sampled direction
        wo = si.to_local(ds.d)
        bsdf_val = BSDF.eval_vec(bsdf, ctx, si, wo, active_e)
        # Determine density of sampling that same direction using BSDF sampling
        bsdf_pdf = BSDF.pdf_vec(bsdf, ctx, si, wo, active_e)

        # Multiple-importance-sampling weight (delta emitters get weight 1)
        mis = ek.select(ds.delta, Float(1), mis_weight(ds.pdf, bsdf_pdf))

        emission_val = mis * throughput * bsdf_val * emitter_val

        result += ek.select(active, emission_val, Spectrum(0.0))
        invalid_sample += ek.select(active, emission_val, Spectrum(0.0))
        scatter += ek.select(active & sss, emission_val, Spectrum(0.0))
        non_scatter += ek.select(active & ~sss, emission_val, Spectrum(0.0))

        ##### BSDF sampling #####
        bs, bsdf_val = BSDF.sample_vec(bsdf, ctx, si, sampler.next_1d(active),
                                       sampler.next_2d(active), active)

        ##### BSSRDF replacing #####
        if (config.enable_bssrdf):
            # Replace bsdf samples by ones of BSSRDF
            bs.wo = ek.select(is_bssrdf, d_out_local, bs.wo)
            bs.pdf = ek.select(is_bssrdf, d_out_pdf, bs.pdf)
            bs.sampled_component = ek.select(is_bssrdf, UInt32(1),
                                             bs.sampled_component)
            bs.sampled_type = ek.select(is_bssrdf,
                                        UInt32(+BSDFFlags.DeltaTransmission),
                                        bs.sampled_type)
        ############################

        # BSSRDF rays keep their throughput; the weighting is applied later
        throughput *= ek.select(is_bssrdf, Float(1.0), bsdf_val)
        active &= ek.any(ek.neq(throughput, 0))
        eta *= bs.eta

        # Intersect the BSDF ray against the scene geometry
        rays = RayDifferential3f(si.spawn_ray(si.to_world(bs.wo)))
        si_bsdf = scene.ray_intersect(rays, active)

        ##### Checking BSSRDF #####
        if (config.enable_bssrdf):
            # Whether the BSDF is BSS RDF or not?
            # Requires a BSSRDF-flagged BSDF and a ray that enters the surface
            is_bssrdf = (active
                         & has_flag(BSDF.flags_vec(bsdf), BSDFFlags.BSSRDF)
                         & (Frame3f.cos_theta(bs.wo) < Float(0.0))
                         & (Frame3f.cos_theta(si.wi) > Float(0.0)))

            # Decide whether we should use 0-scattering or multiple scattering
            is_zero_scatter = utils_render.check_zero_scatter(
                sampler, si_bsdf, bs, channel, is_bssrdf)
            is_bssrdf = is_bssrdf & ~is_zero_scatter
            throughput *= ek.select(is_bssrdf, ek.sqr(bs.eta), Float(1.0))
        ###########################

        ###### Process for BSSRDF ######
        if (config.enable_bssrdf and not ek.none(is_bssrdf)):
            # Get projected samples from BSSRDF
            projected_si, project_suc, abs_prob = bssrdf.sample_bssrdf(
                scene, bsdf, bs, si, bdata, heightmap_pybind, channel,
                is_bssrdf)

            if config.visualize_invalid_sample and (depth <= 1):
                active = active & (~is_bssrdf | project_suc)
                # Tint failed projections red for visualization
                invalid_sample += ek.select((is_bssrdf & (~project_suc)),
                                            Spectrum([100, 0, 0]),
                                            Spectrum(0.0))

            # Sample outgoing direction from projected position
            d_out_local, d_out_pdf = utils_render.resample_wo(
                sampler, is_bssrdf)
            # Apply absorption probability
            throughput *= ek.select(is_bssrdf, Spectrum(1) - abs_prob,
                                    Spectrum(1))
            # Replace interactions by sampled ones from BSSRDF
            si_bsdf = SurfaceInteraction3f().masked_si(si_bsdf, projected_si,
                                                       is_bssrdf)
        ################################

        # Determine probability of having sampled that same
        # direction using emitter sampling
        emitter = si_bsdf.emitter(scene, active)
        ds = DirectionSample3f(si_bsdf, si)
        ds.object = emitter

        delta = has_flag(bs.sampled_type, BSDFFlags.Delta)
        emitter_pdf = ek.select(delta, Float(0.0),
                                scene.pdf_emitter_direction(si, ds))
        emission_weight = mis_weight(bs.pdf, emitter_pdf)

        si = si_bsdf

    return result, valid_rays, scatter, non_scatter, invalid_sample
def run(self, significance_level=0.01, test_count=1, quiet=False):
    """
    Run the Chi^2 test

    Parameter ``significance_level`` (float):
        Denotes the desired significance level (e.g. 0.01 for a test at the
        1% significance level)

    Parameter ``test_count`` (int):
        Specifies the total number of statistical tests run by the user.
        This value will be used to adjust the provided significance level
        so that the combination of the entire set of tests has the provided
        significance level.

    Returns → bool:
        ``True`` upon success, ``False`` if the null hypothesis was rejected.
    """
    from mitsuba.core import UInt32, Float64
    from mitsuba.core.math import chi2
    from mitsuba.python.math import rlgamma

    # Lazily tabulate the observed histogram and expected density
    if self.histogram is None:
        self.tabulate_histogram()

    if self.pdf is None:
        self.tabulate_pdf()

    index = UInt32(
        [i[0] for i in sorted(enumerate(self.pdf), key=lambda x: x[1])])

    # Sort entries by expected frequency (increasing)
    pdf = Float64(ek.gather(self.pdf, index))
    histogram = Float64(ek.gather(self.histogram, index))

    # Compute chi^2 statistic and pool low-valued cells
    chi2val, dof, pooled_in, pooled_out = \
        chi2(histogram, pdf, 5)

    if dof < 1:
        self._log('Failure: The number of degrees of freedom is too low!')
        self.fail = True

    # A sample landed in a cell whose expected frequency is exactly zero
    if ek.any(ek.eq(pdf, 0) & ek.neq(histogram, 0)):
        self._log('Failure: Found samples in a cell with expected '
                  'frequency 0. Rejecting the null hypothesis!')
        self.fail = True

    if pooled_in > 0:
        self._log('Pooled %i low-valued cells into %i cells to '
                  'ensure sufficiently high expected cell frequencies'
                  % (pooled_in, pooled_out))

    # Timings collected by tabulate_histogram()/tabulate_pdf(), in ms
    pdf_time = (self.pdf_end - self.pdf_start) * 1000
    histogram_time = (self.histogram_end - self.histogram_start) * 1000

    self._log('Histogram sum = %f (%.2f ms), PDF sum = %f (%.2f ms)' %
              (self.histogram_sum, histogram_time, self.pdf_sum, pdf_time))

    self._log('Chi^2 statistic = %f (d.o.f = %i)' % (chi2val, dof))

    # Probability of observing a test statistic at least as
    # extreme as the one here assuming that the distributions match
    self.p_value = 1 - rlgamma(dof / 2, chi2val / 2)

    # Apply the Šidák correction term, since we'll be conducting multiple
    # independent hypothesis tests. This accounts for the fact that the
    # probability of a failure increases quickly when several hypothesis
    # tests are run in sequence.
    significance_level = 1.0 - \
        (1.0 - significance_level) ** (1.0 / test_count)

    if self.fail:
        self._log('Not running the test for reasons listed above. Target '
                  'density and histogram were written to "chi2_data.py')
        result = False
    elif self.p_value < significance_level \
            or not ek.isfinite(self.p_value):
        self._log('***** Rejected ***** the null hypothesis (p-value = %f,'
                  ' significance level = %f). Target density and histogram'
                  ' were written to "chi2_data.py".' %
                  (self.p_value, significance_level))
        result = False
    else:
        self._log('Accepted the null hypothesis (p-value = %f, '
                  'significance level = %f)' %
                  (self.p_value, significance_level))
        result = True

    if not quiet:
        print(self.messages)
        if not result:
            self._dump_tables()

    return result