Commit ae2b9dc5 authored by TheNumbat

Release new version

Features:
    - Particle systems can now specify a maximum dt per step
    - Animation key-framing & timing system now supports objects with simulation
    - Mixture/multiple importance sampling for correct low-variance direct lighting
        - New BSDF, point light, and environment light APIs that separate sampling, evaluation, and pdf
        - Area light sampling infrastructure
        - Removed rectangle area lights; all area lights are now emissive meshes
        - Reworked PathTracer tasks 4-6, adjusted/improved instructions for the other tasks

Bug fixes:
    - Use the full RGB/sRGB conversion equation instead of an approximation
    - Material albedo is now specified in sRGB (matching the displayed color)
    - ImGui input fields that become inactive no longer apply their value to a newly selected object
    - Rendering animations with path tracing correctly steps simulations each frame
    - Rasterization-based renderer no longer inherits the projection matrix from the window
    - Scene file format no longer corrupts particle emitter enable states
    - Documentation videos no longer autoplay
    - Misc. refactoring
    - Misc. documentation website improvements
parent afa3f68f
......@@ -27,6 +27,7 @@ void BVH<Primitive>::build(std::vector<Primitive>&& prims, size_t max_leaf_size)
// to create a new node, don't allocate one yourself - use BVH::new_node, which
// returns the index of a newly added node.
assert(false);
// Keep these
nodes.clear();
primitives = std::move(prims);
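As a hedged starting point (not the reference partitioning), the whole build could be collapsed into one leaf; this sketch assumes new_node takes a bounding box, a primitive range, and left/right child indices (zero children marking a leaf), and that BBox::enclose accepts another BBox:

// Hypothetical single-leaf build, assuming new_node(box, start, size, l, r) -> size_t:
BBox box;
for(const Primitive& prim : primitives) box.enclose(prim.bbox());
new_node(box, 0, primitives.size(), 0, 0);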
......@@ -153,7 +154,7 @@ size_t BVH<Primitive>::visualize(GL::Lines& lines, GL::Lines& active, size_t lev
} else {
for(size_t i = node.start; i < node.start + node.size; i++) {
size_t c = primitives[i].visualize(lines, active, level - lvl, trans);
max_level = std::max(c, max_level);
max_level = std::max(c + lvl, max_level);
}
}
}
......
......@@ -44,7 +44,7 @@
struct Debug_Data {
// Setting it here makes it default to false.
bool normal_colors = false;
bool normal_colors = true;
};
// This tells other code about a global variable of type Debug_Data, allowing
......
#include "../rays/env_light.h"
#include "debug.h"
#include <limits>
namespace PT {
Light_Sample Env_Map::sample() const {
Vec3 Env_Map::sample() const {
Light_Sample ret;
ret.distance = std::numeric_limits<float>::infinity();
// TODO (PathTracer): Task 7
// First, implement Samplers::Sphere::Uniform so the following line works.
// Second, implement Samplers::Sphere::Image and swap to image_sampler
return uniform_sampler.sample();
}
float Env_Map::pdf(Vec3 dir) const {
// TODO (PathTracer): Task 7
// Uniformly sample the sphere. Tip: implement Samplers::Sphere::Uniform
Samplers::Sphere::Uniform uniform;
ret.direction = uniform.sample(ret.pdf);
// Once you've implemented Samplers::Sphere::Image, remove the above and
// uncomment this line to use importance sampling instead.
// ret.direction = sampler.sample(ret.pdf);
// First, return the pdf for a uniform spherical distribution.
// Second, swap to image_sampler.pdf().
ret.radiance = sample_direction(ret.direction);
return ret;
return 0.0f;
}
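A minimal sketch of the two steps the comment describes; 1/(4*pi) is the density of a uniform distribution over the unit sphere, and image_sampler is the member named in the comments above:

float Env_Map::pdf(Vec3 dir) const {
    // Step 1: uniform over the unit sphere -> constant solid-angle density.
    return 1.0f / (4.0f * PI_F);
    // Step 2 (after implementing Samplers::Sphere::Image):
    // return image_sampler.pdf(dir);
}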
Spectrum Env_Map::sample_direction(Vec3 dir) const {
Spectrum Env_Map::evaluate(Vec3 dir) const {
// TODO (PathTracer): Task 7
// Find the incoming light along a given direction by finding the corresponding
// place in the environment image. You should bi-linearly interpolate the value
// between the 4 image pixels nearest to the exact direction.
return Spectrum();
// Compute emitted radiance along a given direction by finding the corresponding
// pixels in the environment image. You should bi-linearly interpolate the value
// between the 4 nearest pixels.
return Spectrum{};
}
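A hedged sketch of the bilinear lookup, assuming a latitude-longitude environment map, an HDR_Image member named image, and an at(x, y) pixel accessor (the member and accessor names are assumptions; std::clamp/std::floor need <algorithm> and <cmath>):

Spectrum Env_Map::evaluate(Vec3 dir) const {
    // Direction -> spherical angles -> continuous pixel coordinates.
    float theta = std::acos(std::clamp(dir.y, -1.0f, 1.0f)); // angle from +Y
    float phi = std::atan2(dir.z, dir.x);
    if(phi < 0.0f) phi += 2.0f * PI_F;
    auto [w, h] = image.dimension();
    float u = phi / (2.0f * PI_F) * (float)w - 0.5f;
    float v = theta / PI_F * (float)h - 0.5f;
    // Bilinearly interpolate between the four nearest pixels (clamped at the borders).
    float x0f = std::clamp(std::floor(u), 0.0f, (float)(w - 1));
    float y0f = std::clamp(std::floor(v), 0.0f, (float)(h - 1));
    size_t x0 = (size_t)x0f, y0 = (size_t)y0f;
    size_t x1 = std::min(x0 + 1, w - 1), y1 = std::min(y0 + 1, h - 1);
    float fx = std::clamp(u - x0f, 0.0f, 1.0f), fy = std::clamp(v - y0f, 0.0f, 1.0f);
    Spectrum top = (1.0f - fx) * image.at(x0, y0) + fx * image.at(x1, y0);
    Spectrum bot = (1.0f - fx) * image.at(x0, y1) + fx * image.at(x1, y1);
    return (1.0f - fy) * top + fy * bot;
}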
Vec3 Env_Hemisphere::sample() const {
return sampler.sample();
}
Light_Sample Env_Hemisphere::sample() const {
Light_Sample ret;
ret.direction = sampler.sample(ret.pdf);
ret.radiance = radiance;
ret.distance = std::numeric_limits<float>::infinity();
return ret;
float Env_Hemisphere::pdf(Vec3 dir) const {
return 1.0f / (2.0f * PI_F);
}
Spectrum Env_Hemisphere::sample_direction(Vec3 dir) const {
Spectrum Env_Hemisphere::evaluate(Vec3 dir) const {
if(dir.y > 0.0f) return radiance;
return {};
}
Light_Sample Env_Sphere::sample() const {
Light_Sample ret;
ret.direction = sampler.sample(ret.pdf);
ret.radiance = radiance;
ret.distance = std::numeric_limits<float>::infinity();
return ret;
Vec3 Env_Sphere::sample() const {
return sampler.sample();
}
float Env_Sphere::pdf(Vec3 dir) const {
return 1.0f / (4.0f * PI_F);
}
Spectrum Env_Sphere::sample_direction(Vec3) const {
Spectrum Env_Sphere::evaluate(Vec3) const {
return radiance;
}
......
......@@ -2,7 +2,22 @@
#include "../scene/particles.h"
#include "../rays/pathtracer.h"
bool Particle::update(const PT::BVH<PT::Object>& scene, float dt, float radius) {
bool Scene_Particles::Particle::update(const PT::Object& scene, float dt, float radius) {
// TODO(Animation): Task 4
// Compute the trajectory of this particle for the next dt seconds.
// (1) Build a ray representing the particle's path if it travelled at constant velocity.
// (2) Intersect the ray with the scene and account for collisions. Be careful when placing
// collision points based on the particle radius. Move the particle to its next position.
// (3) Account for acceleration.
// (4) Repeat until the entire time step has been consumed.
// (5) Decrease the particle's age and return whether it should die.
return false;
}
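A hedged sketch of steps (1)-(5). It assumes the particle stores pos, velocity, and age, that PT::Trace reports hit, distance, and normal, that Ray has a (point, dir) constructor, and that a constant gravity-style acceleration is used; the reflection-based collision response is just one plausible choice:

bool Scene_Particles::Particle::update(const PT::Object& scene, float dt, float radius) {
    const Vec3 acceleration = Vec3(0.0f, -9.8f, 0.0f); // assumed constant acceleration (gravity)
    float t_left = dt;
    while(t_left > EPS_F) {
        float speed = velocity.norm();
        float step = t_left;
        if(speed > EPS_F) {
            // (1) Ray along the constant-velocity trajectory.
            Ray path(pos, velocity / speed);
            // (2) Check for a collision within this step, offset by the particle radius.
            PT::Trace hit = scene.hit(path);
            if(hit.hit) {
                float cos_theta = std::abs(dot(hit.normal.unit(), velocity / speed));
                float t_hit = (hit.distance - radius / std::max(cos_theta, EPS_F)) / speed;
                if(t_hit >= 0.0f && t_hit < step) {
                    pos += velocity * t_hit;
                    // Reflect the velocity about the surface normal (elastic bounce).
                    velocity -= 2.0f * dot(velocity, hit.normal.unit()) * hit.normal.unit();
                    t_left -= t_hit;
                    continue; // (4) keep consuming the remaining time step
                }
            }
        }
        // (3) No collision this step: advance the position, then apply acceleration.
        pos += velocity * step;
        velocity += acceleration * step;
        t_left -= step;
    }
    // (5) Age the particle; it dies once its remaining lifetime runs out.
    age -= dt;
    return age > 0.0f;
}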
......@@ -8,127 +8,135 @@ namespace PT {
Spectrum Pathtracer::trace_pixel(size_t x, size_t y) {
// TODO (PathTracer): Task 1
// Generate a ray that uniformly samples pixel (x,y) and return the incoming light.
// The following code generates a ray at the bottom left of the pixel every time.
// Tip: Samplers::Rect::Uniform
// Tip: log_ray is useful for debugging
Vec2 xy((float)x, (float)y);
Vec2 wh((float)out_w, (float)out_h);
// TODO (PathTracer): Task 1
Ray ray = camera.generate_ray(xy / wh);
ray.depth = max_depth;
// Pathtracer::trace() returns the incoming light split into emissive and reflected components.
auto [emissive, reflected] = trace(ray);
return emissive + reflected;
}
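A hedged sketch of the Task 1 change: jitter the sample location within the pixel instead of always using its bottom-left corner. It assumes Samplers::Rect can be constructed with a Vec2 size and that sample() returns a uniform point inside that rectangle:

// Inside Pathtracer::trace_pixel, replacing the fixed corner sample:
Samplers::Rect pixel_sampler(Vec2(1.0f, 1.0f)); // assumed size constructor
Vec2 jitter = pixel_sampler.sample();           // uniform point in [0,1)^2
Ray ray = camera.generate_ray((xy + jitter) / wh);
ray.depth = max_depth;
auto [emissive, reflected] = trace(ray);
return emissive + reflected;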
// Generate a sample within the pixel with coordinates xy and return the
// incoming light using trace_ray.
Spectrum Pathtracer::sample_indirect_lighting(const Shading_Info& hit) {
// Tip: Samplers::Rect::Uniform
// Tip: you may want to use log_ray for debugging
// TODO (PathTracer): Task 4
// This currently generates a ray at the bottom left of the pixel every time.
// This function computes a single-sample Monte Carlo estimate of the _indirect_
// lighting at our ray intersection point.
Ray out = camera.generate_ray(xy / wh);
return trace_ray(out);
// (1) Randomly sample a new ray direction from the BSDF distribution using BSDF::scatter().
// (2) Create a new world-space ray and call Pathtracer::trace() to get incoming light. You
// should modify time_bounds so that the ray does not intersect at time = 0. Remember to
// set the new depth value.
// (3) Add contribution due to incoming light scaled by BSDF attenuation. Whether you
// compute the BSDF scattering PDF should depend on if the BSDF is a discrete distribution
// (see BSDF::is_discrete()).
// You should only use the indirect component of incoming light (the second value returned
// by Pathtracer::trace()), as the direct component will be computed in
// Pathtracer::sample_direct_lighting().
Spectrum radiance;
return radiance;
}
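A hedged sketch of the single-sample indirect estimate. The Scatter record returned by BSDF::scatter() is assumed to carry a direction and an attenuation, the BSDF pdf is assumed to take (out_dir, in_dir), and the Shading_Info field names follow the aggregate built in trace() below:

Spectrum Pathtracer::sample_indirect_lighting(const Shading_Info& hit) {
    // (1) Sample a new direction from the BSDF in the local shading frame.
    Scatter scatter = hit.bsdf.scatter(hit.out_dir); // assumed: {Vec3 direction; Spectrum attenuation;}
    // (2) Trace a world-space ray in that direction, offset so it can't re-hit at time 0.
    Ray ray(hit.pos, hit.object_to_world.rotate(scatter.direction));
    ray.time_bounds.x = EPS_F; // time_bounds assumed to be a (min, max) pair
    ray.depth = hit.depth - 1;
    auto [emissive, reflected] = trace(ray);
    // (3) Indirect component only, scaled by the BSDF attenuation; discrete BSDFs skip
    //     the cosine/pdf weighting since their pdf is a delta.
    Spectrum radiance = reflected * scatter.attenuation;
    if(!hit.bsdf.is_discrete()) {
        float pdf = hit.bsdf.pdf(hit.out_dir, scatter.direction);
        if(pdf <= 0.0f) return {};
        radiance = (std::abs(scatter.direction.y) / pdf) * radiance;
    }
    return radiance;
}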
Spectrum Pathtracer::trace_ray(const Ray& ray) {
Spectrum Pathtracer::sample_direct_lighting(const Shading_Info& hit) {
// This function computes a Monte Carlo estimate of the _direct_ lighting at our ray
// intersection point by sampling both the BSDF and area lights.
// Point lights are handled separately, as they cannot be intersected by tracing rays
// into the scene.
Spectrum radiance = point_lighting(hit);
// TODO (PathTracer): Task 4
// For task 4, this function should perform almost the same sampling procedure as
// Pathtracer::sample_indirect_lighting(), but instead accumulates the emissive component of
// incoming light (the first value returned by Pathtracer::trace()). Note that since we only
// want emissive, we can trace a ray with depth = 0.
// Trace ray into scene. If nothing is hit, sample the environment
Trace hit = scene.hit(ray);
if(!hit.hit) {
// TODO (PathTracer): Task 6
// For task 6, we want to upgrade our direct light sampling procedure to also
// sample area lights using mixture sampling.
// (1) If the BSDF is discrete, we don't need to bother sampling lights: the behavior
// should be the same as task 4.
// (2) Otherwise, we should randomly choose whether we get our sample from `BSDF::scatter`
// or `Pathtracer::sample_area_lights`. Note that `Pathtracer::sample_area_lights` returns
// a world-space direction pointing toward an area light. Choose between the strategies
// with equal probability.
// (3) Create a new world-space ray and call Pathtracer::trace() to get incoming light. You
// should modify time_bounds so that the ray does not intersect at time = 0. We are again
// only interested in the emissive component, so the ray depth can be zero.
// (4) Add estimate of incoming light scaled by BSDF attenuation. Given a sample,
// we don't know whether it came from the BSDF or the light, so you should use BSDF::evaluate(),
// BSDF::pdf(), and Pathtracer::area_lights_pdf() to compute the proper weighting.
// What is the PDF of our sample, given it could have been produced from either source?
return radiance;
}
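A hedged sketch of the Task 6 mixture sampling described above (Task 4 is the same minus the light-sampling branch). As before, the Scatter and Shading_Info field names and the sample_area_lights/area_lights_pdf signatures are assumptions:

// Inside Pathtracer::sample_direct_lighting, after the point_lighting() term:
if(hit.bsdf.is_discrete()) {
    // (1) Discrete BSDF: light sampling can't hit the delta direction, so follow the
    //     BSDF sample (this is also the Task 4 behavior).
    Scatter scatter = hit.bsdf.scatter(hit.out_dir);
    Ray ray(hit.pos, hit.object_to_world.rotate(scatter.direction));
    ray.time_bounds.x = EPS_F; // don't re-hit the surface at time 0
    ray.depth = 0;             // only the emissive component is needed
    auto [emissive, reflected] = trace(ray);
    radiance += emissive * scatter.attenuation;
} else {
    // (2) Choose BSDF sampling or area-light sampling with equal probability.
    Vec3 in_dir = RNG::coin_flip(0.5f)
                      ? hit.bsdf.scatter(hit.out_dir).direction
                      : hit.world_to_object.rotate(sample_area_lights(hit.pos));
    Vec3 world_dir = hit.object_to_world.rotate(in_dir);
    // (3) Trace for the emissive component only.
    Ray ray(hit.pos, world_dir);
    ray.time_bounds.x = EPS_F;
    ray.depth = 0;
    auto [emissive, reflected] = trace(ray);
    // (4) Mixture pdf: either strategy could have produced this sample.
    float pdf = 0.5f * hit.bsdf.pdf(hit.out_dir, in_dir) +
                0.5f * area_lights_pdf(hit.pos, world_dir); // assumed (point, dir) signature
    if(pdf > 0.0f) {
        Spectrum attenuation = hit.bsdf.evaluate(hit.out_dir, in_dir);
        radiance += (std::abs(in_dir.y) / pdf) * emissive * attenuation;
    }
}
return radiance;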
std::pair<Spectrum, Spectrum> Pathtracer::trace(const Ray& ray) {
// This function orchestrates the path tracing process. For convenience, it
// returns the incoming light along a ray in two components: emitted from the
// surface the ray hits, and reflected through that point from other sources.
// Trace ray into scene.
Trace result = scene.hit(ray);
if(!result.hit) {
// If no surfaces were hit, sample the environment map.
if(env_light.has_value()) {
return env_light.value().sample_direction(ray.dir);
return {env_light.value().evaluate(ray.dir), {}};
}
return {};
}
// If we're using a two-sided material, treat back-faces the same as front-faces
const BSDF& bsdf = materials[hit.material];
if(!bsdf.is_sided() && dot(hit.normal, ray.dir) > 0.0f) {
hit.normal = -hit.normal;
}
// Set up a coordinate frame at the hit point, where the surface normal becomes {0, 1, 0}
// This gives us out_dir and later in_dir in object space, where computations involving the
// normal become much easier. For example, cos(theta) = dot(N,dir) = dir.y!
Mat4 object_to_world = Mat4::rotate_to(hit.normal);
Mat4 world_to_object = object_to_world.T();
Vec3 out_dir = world_to_object.rotate(ray.point - hit.position).unit();
// Debugging: if the normal colors flag is set, return the normal color
if(debug_data.normal_colors) return Spectrum::direction(hit.normal);
// Now we can compute the rendering equation at this point.
// We split it into two stages: sampling lighting (i.e. directly connecting
// the current path to each light in the scene), then sampling the BSDF
// to create a new path segment.
// TODO (PathTracer): Task 5
// The starter code sets radiance_out to (0.5,0.5,0.5) so that you can test your geometry
// queries before you implement path tracing. You should change this to (0,0,0) and accumulate
// the direct and indirect lighting computed below.
Spectrum radiance_out = Spectrum(0.5f);
{
auto sample_light = [&](const auto& light) {
// If the light is discrete (e.g. a point light), then we only need
// one sample, as all samples will be equivalent
int samples = light.is_discrete() ? 1 : (int)n_area_samples;
for(int i = 0; i < samples; i++) {
Light_Sample sample = light.sample(hit.position);
Vec3 in_dir = world_to_object.rotate(sample.direction);
// If the light is below the horizon, ignore it
float cos_theta = in_dir.y;
if(cos_theta <= 0.0f) continue;
// If the BSDF has 0 throughput in this direction, ignore it.
// This is another opportunity to do Russian roulette on low-throughput rays,
// which would allow us to skip the shadow ray cast, increasing efficiency.
Spectrum attenuation = bsdf.evaluate(out_dir, in_dir);
if(attenuation.luma() == 0.0f) continue;
// TODO (PathTracer): Task 4
// Construct a shadow ray and compute whether the intersected surface is
// in shadow. Only accumulate light if not in shadow.
// Tip: since you're creating the shadow ray at the intersection point, it may
// intersect the surface at time=0. Similarly, if the ray is allowed to have
// arbitrary length, it will hit the light it was cast at. Therefore, you should
// modify the time_bounds of your shadow ray to account for this. Using EPS_F is
// recommended.
// Note that along with the typical cos_theta and pdf factors, we divide by samples.
// This is because we're doing another Monte Carlo estimate of the lighting from
// area lights.
radiance_out +=
(cos_theta / (samples * sample.pdf)) * sample.radiance * attenuation;
}
};
// If the BSDF is discrete (i.e. uses dirac deltas/if statements), then we are never
// going to hit the exact right direction by sampling lights, so ignore them.
if(!bsdf.is_discrete()) {
for(const auto& light : lights) sample_light(light);
if(env_light.has_value()) sample_light(env_light.value());
}
const BSDF& bsdf = materials[result.material];
if(!bsdf.is_sided() && dot(result.normal, ray.dir) > 0.0f) {
result.normal = -result.normal;
}
// TODO (PathTracer): Task 5
// Compute an indirect lighting estimate using pathtracing with Monte Carlo.
// TODO (PathTracer): Task 4
// You will want to change the default normal_colors in debug.h, or delete this early out.
if(debug_data.normal_colors) return {Spectrum::direction(result.normal), {}};
// (1) Ray objects have a depth field; if it reaches max_depth, you should
// terminate the path.
// If the BSDF is emissive, stop tracing and return the emitted light
Spectrum emissive = bsdf.emissive();
if(emissive.luma() > 0.0f) return {emissive, {}};
// (2) Randomly select a new ray direction (it may be a reflection or transmission
// ray depending on the surface type) using bsdf.sample()
// If the ray has reached maximum depth, stop tracing
if(ray.depth == 0) return {};
// (3) Compute the throughput of the recursive ray. This should be the current ray's
// throughput scaled by the BSDF attenuation, cos(theta), and inverse BSDF sample PDF.
// Potentially terminate the path using Russian roulette as a function of the new throughput.
// Note that allowing the termination probability to approach 1 may cause extra speckling.
// Set up shading information
Mat4 object_to_world = Mat4::rotate_to(result.normal);
Mat4 world_to_object = object_to_world.T();
Vec3 out_dir = world_to_object.rotate(ray.point - result.position).unit();
// (4) Create new scene-space ray and cast it to get incoming light. As with shadow rays, you
// should modify time_bounds so that the ray does not intersect at time = 0. Remember to
// set the new throughput and depth values.
Shading_Info hit = {bsdf, world_to_object, object_to_world, result.position,
out_dir, result.normal, ray.depth};
// (5) Add contribution due to incoming light with proper weighting. Remember to add in
// the BSDF sample emissive term.
return radiance_out;
// Sample and return light reflected through the intersection
return {{}, sample_direct_lighting(hit) + sample_indirect_lighting(hit)};
}
} // namespace PT
#include "../rays/samplers.h"
#include "../util/rand.h"
#include "debug.h"
namespace Samplers {
Vec2 Rect::Uniform::sample(float& pdf) const {
Vec2 Rect::sample() const {
// TODO (PathTracer): Task 1
// Generate a uniformly random point on a rectangle of size size.x * size.y
// Tip: RNG::unit()
pdf = 1.0f; // the PDF should integrate to 1 over the whole rectangle
return Vec2();
}
Vec3 Hemisphere::Cosine::sample(float& pdf) const {
// TODO (PathTracer): Task 6
// You may implement this, but don't have to.
return Vec3();
return Vec2{};
}
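A minimal sketch; the Vec2 size member comes from the original comment above:

Vec2 Rect::sample() const {
    // Uniformly random point in [0, size.x) x [0, size.y); the pdf is 1 / (size.x * size.y).
    return Vec2(RNG::unit() * size.x, RNG::unit() * size.y);
}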
Vec3 Sphere::Uniform::sample(float& pdf) const {
Vec3 Sphere::Uniform::sample() const {
// TODO (PathTracer): Task 7
// Generate a uniformly random point on the unit sphere (or equivalently, direction)
// Generate a uniformly random point on the unit sphere.
// Tip: start with Hemisphere::Uniform
pdf = 1.0f; // what was the PDF at the chosen direction?
return Vec3();
return Vec3{};
}
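A hedged sketch via inverse-CDF sampling of cos(theta); one could equally sample Hemisphere::Uniform and flip y with probability 1/2, per the tip:

Vec3 Sphere::Uniform::sample() const {
    float Xi1 = RNG::unit();
    float Xi2 = RNG::unit();
    float cos_t = 1.0f - 2.0f * Xi1; // uniform in [-1, 1]
    float sin_t = std::sqrt(std::max(0.0f, 1.0f - cos_t * cos_t));
    float phi = 2.0f * PI_F * Xi2;
    // y-up convention, matching Hemisphere::Uniform below.
    return Vec3(std::cos(phi) * sin_t, cos_t, std::sin(phi) * sin_t);
}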
Sphere::Image::Image(const HDR_Image& image) {
// TODO (PathTracer): Task 7
// Set up importance sampling for a spherical environment map image.
// You may make use of the pdf, cdf, and total members, or create your own
// representation.
// Set up importance sampling data structures for a spherical environment map image.
// You may make use of the _pdf, _cdf, and total members, or create your own.
const auto [_w, _h] = image.dimension();
w = _w;
h = _h;
}
Vec3 Sphere::Image::sample(float& out_pdf) const {
Vec3 Sphere::Image::sample() const {
// TODO (PathTracer): Task 7
// Use your importance sampling data structure to generate a sample direction.
// Tip: std::upper_bound can easily binary search your CDF
// Tip: std::upper_bound
out_pdf = 1.0f; // what was the PDF (again, PMF here) of your chosen sample?
return Vec3();
return Vec3{};
}
Vec3 Point::sample(float& pmf) const {
float Sphere::Image::pdf(Vec3 dir) const {
// TODO (PathTracer): Task 7
// What is the PDF of this distribution at a particular direction?
return 0.0f;
}
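A hedged sketch of one common approach: a flattened CDF over pixels weighted by luminance times sin(theta), which undoes the area distortion of the latitude-longitude parameterization. It uses the _pdf, _cdf, and total members mentioned above (assumed to be vectors of float and a float), assumes an image.at(x, y) accessor, and needs <algorithm> for std::upper_bound:

Sphere::Image::Image(const HDR_Image& image) {
    const auto [_w, _h] = image.dimension();
    w = _w;
    h = _h;
    // Unnormalized probability per pixel plus a running CDF over the row-major pixel array.
    total = 0.0f;
    for(size_t y = 0; y < h; y++) {
        float sin_theta = std::sin(PI_F * ((float)y + 0.5f) / (float)h);
        for(size_t x = 0; x < w; x++) {
            float p = image.at(x, y).luma() * sin_theta; // assumed accessor
            _pdf.push_back(p);
            total += p;
            _cdf.push_back(total);
        }
    }
}

Vec3 Sphere::Image::sample() const {
    // Invert the CDF to pick a pixel, then convert its center to a direction.
    float u = RNG::unit() * total;
    size_t i = (size_t)(std::upper_bound(_cdf.begin(), _cdf.end(), u) - _cdf.begin());
    i = std::min(i, _cdf.size() - 1);
    size_t x = i % w, y = i / w;
    float theta = PI_F * ((float)y + 0.5f) / (float)h;
    float phi = 2.0f * PI_F * ((float)x + 0.5f) / (float)w;
    return Vec3(std::sin(theta) * std::cos(phi), std::cos(theta), std::sin(theta) * std::sin(phi));
}

float Sphere::Image::pdf(Vec3 dir) const {
    // Probability of the pixel containing dir, converted from per-pixel to per-solid-angle:
    // p(w) = p(pixel) * (w * h) / (2 * pi^2 * sin(theta)).
    float theta = std::acos(std::clamp(dir.y, -1.0f, 1.0f));
    float phi = std::atan2(dir.z, dir.x);
    if(phi < 0.0f) phi += 2.0f * PI_F;
    size_t x = std::min((size_t)(phi / (2.0f * PI_F) * w), w - 1);
    size_t y = std::min((size_t)(theta / PI_F * h), h - 1);
    float sin_theta = std::max(std::sin(theta), EPS_F);
    float jacobian = (float)(w * h) / (2.0f * PI_F * PI_F * sin_theta);
    return (_pdf[y * w + x] / total) * jacobian;
}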
pmf = 1.0f;
Vec3 Point::sample() const {
return point;
}
Vec3 Two_Points::sample(float& pmf) const {
if(RNG::unit() < prob) {
pmf = prob;
return p1;
}
pmf = 1.0f - prob;
return p2;
Vec3 Triangle::sample() const {
// The square-root warp yields barycentric coordinates uniformly distributed over the triangle.
float u = std::sqrt(RNG::unit());
float v = RNG::unit();
float a = u * (1.0f - v);
float b = u * v;
return a * v0 + b * v1 + (1.0f - a - b) * v2;
}
Vec3 Hemisphere::Uniform::sample(float& pdf) const {
Vec3 Hemisphere::Uniform::sample() const {
float Xi1 = RNG::unit();
float Xi2 = RNG::unit();
......@@ -82,8 +79,20 @@ Vec3 Hemisphere::Uniform::sample(float& pdf) const {
float ys = std::cos(theta);
float zs = std::sin(theta) * std::sin(phi);
pdf = 1.0f / (2.0f * PI_F);
return Vec3(xs, ys, zs);
}
Vec3 Hemisphere::Cosine::sample() const {
// Cosine-weighted hemisphere sampling: pdf(w) = cos(theta) / pi.
float phi = RNG::unit() * 2.0f * PI_F;
float cos_t = std::sqrt(RNG::unit());
float sin_t = std::sqrt(1 - cos_t * cos_t);
float x = std::cos(phi) * sin_t;
float z = std::sin(phi) * sin_t;
float y = cos_t;
return Vec3(x, y, z);
}
} // namespace Samplers
......@@ -71,17 +71,16 @@ Mat4 Skeleton::joint_to_posed(const Joint* j) const {
return Mat4::I;
}
void Skeleton::find_joints(const GL::Mesh& mesh,
std::unordered_map<unsigned int, std::vector<Joint*>>& map) {
void Skeleton::find_joints(const GL::Mesh& mesh, std::vector<std::vector<Joint*>>& map) {
// TODO(Animation): Task 3
// Construct a mapping from vertex indices to lists of joints in this skeleton
// that should affect the vertex at that index. A joint should affect a vertex
// if it is within Joint::radius distance of the bone's line segment in bind position.
// Construct a mapping: vertex index -> list of joints that should affect the vertex.
// A joint should affect a vertex if it is within Joint::radius distance of the
// bone's line segment in bind position.
const std::vector<GL::Mesh::Vert>& verts = mesh.verts();
(void)verts;
map.resize(verts.size());
// For each i in [0, verts.size()), map[i] should contain the list of joints that
// affect vertex i. Note that i is NOT Vert::id! i is the index in verts.
......@@ -92,7 +91,7 @@ void Skeleton::find_joints(const GL::Mesh& mesh,
}
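A hedged sketch of the search. It assumes the skeleton provides a for_joints traversal and a joint_to_bind(j) transform into object bind space, and that each Joint stores an extent vector and a radius; the point-to-segment distance math is written inline:

// Hypothetical body for Skeleton::find_joints:
map.resize(verts.size());
for_joints([&](Joint* j) {
    Mat4 to_bind = joint_to_bind(j);              // assumed: joint space -> object bind space
    Vec3 base = to_bind * Vec3(0.0f, 0.0f, 0.0f); // bone start
    Vec3 tip = to_bind * j->extent;               // bone end
    for(size_t i = 0; i < verts.size(); i++) {
        // Distance from the vertex to the bone's line segment in bind position.
        Vec3 p = verts[i].pos;
        Vec3 seg = tip - base;
        float t = std::clamp(dot(p - base, seg) / std::max(seg.norm_squared(), EPS_F), 0.0f, 1.0f);
        if((p - (base + t * seg)).norm() <= j->radius) map[i].push_back(j);
    }
});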
void Skeleton::skin(const GL::Mesh& input, GL::Mesh& output,
const std::unordered_map<unsigned int, std::vector<Joint*>>& map) {
const std::vector<std::vector<Joint*>>& map) {
// TODO(Animation): Task 3
......@@ -104,6 +103,7 @@ void Skeleton::skin(const GL::Mesh& input, GL::Mesh& output,
// Currently, this just copies the input to the output without modification.
std::vector<GL::Mesh::Vert> verts = input.verts();
for(size_t i = 0; i < verts.size(); i++) {
// Skin vertex i. Note that its position is given in object bind space.
......
#include "../rays/tri_mesh.h"
#include "debug.h"
#include "../rays/samplers.h"
namespace PT {
BBox Triangle::bbox() const {
// TODO (PathTracer): Task 2
// compute the bounding box of the triangle
// Compute the bounding box of the triangle.
// Beware of flat/zero-volume boxes! You may need to
// account for that here, or later on in BBox::intersect
// account for that here, or later on in BBox::intersect.
BBox box;
return box;
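A minimal sketch, assuming BBox::enclose accepts a point (handling the flat-box caveat in BBox::intersect instead is equally valid):

// Grow the box around the three vertices of the triangle.
BBox box;
box.enclose(vertex_list[v0].position);
box.enclose(vertex_list[v1].position);
box.enclose(vertex_list[v2].position);
return box;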
......@@ -18,7 +18,7 @@ BBox Triangle::bbox() const {
Trace Triangle::hit(const Ray& ray) const {
// Vertices of triangle - has position and surface normal
// Each vertex contains a position and surface normal
Tri_Mesh_Vert v_0 = vertex_list[v0];
Tri_Mesh_Vert v_1 = vertex_list[v1];
Tri_Mesh_Vert v_2 = vertex_list[v2];
......@@ -27,7 +27,7 @@ Trace Triangle::hit(const Ray& ray) const {
(void)v_2;
// TODO (PathTracer): Task 2
// Intersect this ray with a triangle defined by the three above points.
// Intersect the ray with the triangle defined by the three vertices.
Trace ret;
ret.origin = ray.point;
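A hedged sketch of one standard intersection routine (Moller-Trumbore). The Trace field names follow their use elsewhere in this file, and ray.time_bounds is assumed to hold the valid (min, max) hit range:

// Continuing from the Trace ret initialized above:
Vec3 e1 = v_1.position - v_0.position;
Vec3 e2 = v_2.position - v_0.position;
Vec3 p = cross(ray.dir, e2);
float det = dot(e1, p);
if(std::abs(det) < EPS_F) return ret; // ray parallel to the triangle plane
float inv_det = 1.0f / det;
Vec3 s = ray.point - v_0.position;
float u = dot(s, p) * inv_det;
if(u < 0.0f || u > 1.0f) return ret;
Vec3 q = cross(s, e1);
float v = dot(ray.dir, q) * inv_det;
if(v < 0.0f || u + v > 1.0f) return ret;
float t = dot(e2, q) * inv_det;
if(t < ray.time_bounds.x || t > ray.time_bounds.y) return ret;
ret.hit = true;
ret.distance = t;
ret.position = ray.point + t * ray.dir;
// Barycentric-interpolated shading normal.
ret.normal = ((1.0f - u - v) * v_0.normal + u * v_1.normal + v * v_2.normal).unit();
return ret;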
......@@ -43,10 +43,40 @@ Triangle::Triangle(Tri_Mesh_Vert* verts, unsigned int v0, unsigned int v1, unsig
: vertex_list(verts), v0(v0), v1(v1), v2(v2) {
}
void Tri_Mesh::build(const GL::Mesh& mesh) {
Vec3 Triangle::sample(Vec3 from) const {
Tri_Mesh_Vert v_0 = vertex_list[v0];
Tri_Mesh_Vert v_1 = vertex_list[v1];
Tri_Mesh_Vert v_2 = vertex_list[v2];
Samplers::Triangle sampler(v_0.position, v_1.position, v_2.position);
Vec3 pos = sampler.sample();
return (pos - from).unit();
}
float Triangle::pdf(Ray wray, const Mat4& T, const Mat4& iT) const {
Ray tray = wray;
tray.transform(iT);
Trace trace = hit(tray);
if(trace.hit) {
trace.transform(T, iT.T());
Vec3 v_0 = T * vertex_list[v0].position;
Vec3 v_1 = T * vertex_list[v1].position;
Vec3 v_2 = T * vertex_list[v2].position;
// Uniform-by-area pdf (1 / area, with area = |cross| / 2), converted from the area
// measure to the solid-angle measure by the usual distance^2 / cos(theta) factor.
float a = 2.0f / cross(v_1 - v_0, v_2 - v_0).norm();
float g =
(trace.position - wray.point).norm_squared() / std::abs(dot(trace.normal, wray.dir));
return a * g;
}
return 0.0f;
}
void Tri_Mesh::build(const GL::Mesh& mesh, bool bvh) {
use_bvh = bvh;
verts.clear();
triangles.clear();
triangle_bvh.clear();
triangle_list.clear();
for(const auto& v : mesh.verts()) {
verts.push_back({v.pos, v.norm});
......@@ -59,32 +89,54 @@ void Tri_Mesh::build(const GL::Mesh& mesh) {
tris.push_back(Triangle(verts.data(), idxs[i], idxs[i + 1], idxs[i + 2]));
}
triangles.build(std::move(tris), 4);
if(use_bvh) {
triangle_bvh.build(std::move(tris), 4);
} else {
triangle_list = List<Triangle>(std::move(tris));
}
}
Tri_Mesh::Tri_Mesh(const GL::Mesh& mesh) {
build(mesh);
Tri_Mesh::Tri_Mesh(const GL::Mesh& mesh, bool use_bvh) {
build(mesh, use_bvh);
}
Tri_Mesh Tri_Mesh::copy() const {
Tri_Mesh ret;
ret.verts = verts;
ret.triangles = triangles.copy();
ret.triangle_bvh = triangle_bvh.copy();
ret.triangle_list = triangle_list.copy();
ret.use_bvh = use_bvh;
return ret;
}
BBox Tri_Mesh::bbox() const {
return triangles.bbox();
if(use_bvh) return triangle_bvh.bbox();
return triangle_list.bbox();
}
Trace Tri_Mesh::hit(const Ray& ray) const {
Trace t = triangles.hit(ray);
return t;
if(use_bvh) return triangle_bvh.hit(ray);
return triangle_list.hit(ray);
}
size_t Tri_Mesh::visualize(GL::Lines& lines, GL::Lines& active, size_t level,
const Mat4& trans) const {
return triangles.visualize(lines, active, level, trans);
if(use_bvh) return triangle_bvh.visualize(lines, active, level, trans);
return 0;
}
Vec3 Tri_Mesh::sample(Vec3 from) const {
if(use_bvh) {
die("Sampling BVH-based triangle meshes is not yet supported.");
}
return triangle_list.sample(from);
}
float Tri_Mesh::pdf(Ray ray, const Mat4& T, const Mat4& iT) const {
if(use_bvh) {
die("Sampling BVH-based triangle meshes is not yet supported.");
}
return triangle_list.pdf(ray, T, iT);
}
} // namespace PT
......@@ -121,8 +121,8 @@ std::string HDR_Image::load_from(std::string file) {
stbi_image_free(data);
for(size_t i = 0; i < pixels.size(); i++) {
pixels[i].make_linear();
if(!pixels[i].valid()) pixels[i] = {};
pixels[i] = pixels[i].to_linear();
}
}
......@@ -177,7 +177,7 @@ void HDR_Image::tonemap_to(std::vector<unsigned char>& data, float e) const {
float b = 1.0f - std::exp(-sample.b * exposure);
Spectrum out(r, g, b);
out.make_srgb();
out = out.to_srgb();
size_t didx = 4 * (j * w + i);
data[didx] = (unsigned char)std::round(out.r * 255.0f);
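For reference, the exact per-channel sRGB transfer functions (standard IEC 61966-2-1 constants) that a full to_linear/to_srgb conversion uses, as opposed to a simple power-curve approximation:

// sRGB-encoded value s in [0, 1] -> linear intensity:
float srgb_to_linear(float s) {
    return s <= 0.04045f ? s / 12.92f : std::pow((s + 0.055f) / 1.055f, 2.4f);
}
// Linear intensity l in [0, 1] -> sRGB-encoded value:
float linear_to_srgb(float l) {
    return l <= 0.0031308f ? 12.92f * l : 1.055f * std::pow(l, 1.0f / 2.4f) - 0.055f;
}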
......
......@@ -16,7 +16,8 @@ float unit() {
}
int integer(int min, int max) {
return (int)lerp((float)min, (float)max, unit());
// Uniformly sample an integer in [min, max).
std::uniform_int_distribution<int> d(min, max - 1);
return d(rng);
}
bool coin_flip(float p) {
......