Ray tracing is the gold standard of photorealistic rendering. Unlike rasterization, which projects triangles onto the screen, ray tracing simulates the physical behavior of light by casting rays from the camera into the scene and tracking how they bounce, reflect, refract, and scatter off surfaces. In this post, we'll build a complete Whitted-style ray tracer inside a single GLSL fragment shader — no textures, no external data, just math and light.
The scene features multiple spheres with distinct materials — perfect mirrors, refractive glass, diffuse matte, and brushed metallic — all sitting on a reflective checkered ground plane. Rays bounce up to 4 times, producing recursive reflections (spheres reflecting in spheres reflecting in spheres), lens-like refraction and distortion through the glass sphere, and partial shadows where light passes through glass. (True caustics and soft shadows are beyond a Whitted tracer — more on that at the end.) This is the classic "Ray Tracing in One Weekend" aesthetic, rendered in real time.
This is a fully self-contained WebGL1 fragment shader. Paste it into any ShaderToy-style host that provides iResolution and iTime uniforms, and it will render immediately.
precision mediump float;
uniform vec2 iResolution;
uniform float iTime;
// ============================================================
// WHITTED-STYLE RAY TRACER
// Features: mirror, glass, diffuse, metallic spheres
// reflective ground plane, shadows, sky gradient
// up to 4 ray bounces
// ============================================================
#define MAX_BOUNCES 4
#define NUM_SPHERES 5
#define INF 1e10
#define EPSILON 0.002
#define PI 3.14159265
// --- Material IDs ---
#define MAT_DIFFUSE 0
#define MAT_MIRROR 1
#define MAT_GLASS 2
#define MAT_METAL 3
#define MAT_GROUND 4
// ========================
// DATA STRUCTURES
// ========================
// A sphere primitive plus its surface description.
struct Sphere {
vec3 center; // world-space center
float radius;
int material; // one of the MAT_* ids above
vec3 color; // albedo for diffuse, tint for mirror/metal
};
// Result of intersecting a ray with the scene.
struct HitRecord {
float t; // ray parameter of the nearest hit (INF when nothing was hit)
vec3 pos; // world-space hit point
vec3 normal; // unit normal, flipped to face the incoming ray (see traceScene)
int material; // MAT_* id of the surface, or -1 for a miss
vec3 color; // surface color sampled at the hit point
bool frontFace; // true if the ray struck the surface from outside
};
struct Ray {
vec3 origin;
vec3 dir; // assumed normalized by all intersection code below
};
// ========================
// SCENE DEFINITION
// ========================
// Spheres are set up each frame so we can animate them
Sphere spheres[NUM_SPHERES];
// Populate the global sphere array. Rebuilt every frame so the glass
// sphere's bobbing animation can track iTime.
void buildScene() {
  float phase = iTime * 0.4;
  // Large mirror sphere (left)
  spheres[0] = Sphere(vec3(-2.0, 1.0, -1.0), 1.0, MAT_MIRROR, vec3(0.95, 0.95, 0.98));
  // Glass sphere (center) — bobs gently up and down
  spheres[1] = Sphere(vec3(0.0, 1.0 + 0.15 * sin(phase * 2.0), 0.5), 1.0, MAT_GLASS, vec3(1.0, 1.0, 1.0));
  // Diffuse red sphere (right)
  spheres[2] = Sphere(vec3(2.2, 0.7, -0.5), 0.7, MAT_DIFFUSE, vec3(0.85, 0.12, 0.1));
  // Metallic gold sphere (front left)
  spheres[3] = Sphere(vec3(-0.8, 0.45, 1.8), 0.45, MAT_METAL, vec3(0.95, 0.75, 0.25));
  // Small diffuse blue sphere (front right)
  spheres[4] = Sphere(vec3(1.5, 0.35, 1.5), 0.35, MAT_DIFFUSE, vec3(0.15, 0.3, 0.85));
}
// ========================
// RAY-SPHERE INTERSECTION
// ========================
// Analytic solution of ray-sphere intersection using the quadratic formula.
// Returns distance t or -1.0 if no hit.
// Analytic ray/sphere intersection (quadratic formula, half-b form).
// Returns the nearest t beyond EPSILON, or -1.0 on a miss.
// `front` reports whether the ray hit the outside (true) or started
// inside the sphere and hit the far wall (false).
float intersectSphere(Ray ray, Sphere sph, out bool front) {
  vec3 toOrigin = ray.origin - sph.center;
  float halfB = dot(toOrigin, ray.dir);
  float c = dot(toOrigin, toOrigin) - sph.radius * sph.radius;
  float disc = halfB * halfB - c;
  front = true;
  if (disc < 0.0) return -1.0;
  float root = sqrt(disc);
  float tNear = -halfB - root;
  if (tNear > EPSILON) return tNear;
  float tFar = -halfB + root;
  if (tFar > EPSILON) {
    front = false; // origin is inside the sphere
    return tFar;
  }
  return -1.0;
}
// ========================
// RAY-PLANE INTERSECTION
// ========================
// Infinite ground plane at y = 0
// Intersect against the infinite ground plane y = 0.
// Returns t, or -1.0 when the ray is parallel or the hit is behind/too close.
float intersectPlane(Ray ray) {
  if (abs(ray.dir.y) < 1e-6) return -1.0; // ray runs parallel to the plane
  float t = -ray.origin.y / ray.dir.y;
  if (t > EPSILON) return t;
  return -1.0;
}
// ========================
// SCENE INTERSECTION
// ========================
// Find the closest hit among all spheres and the ground plane.
// A miss is reported as material == -1 with t left at INF.
HitRecord traceScene(Ray ray) {
  HitRecord best;
  best.t = INF;
  best.material = -1;
  // Spheres: keep whichever hit is nearest so far.
  for (int i = 0; i < NUM_SPHERES; i++) {
    bool entering;
    float t = intersectSphere(ray, spheres[i], entering);
    if (t <= 0.0 || t >= best.t) continue;
    best.t = t;
    best.pos = ray.origin + t * ray.dir;
    vec3 outward = normalize(best.pos - spheres[i].center);
    // Flip the normal toward the ray for back-face (inside-glass) hits.
    best.normal = entering ? outward : -outward;
    best.material = spheres[i].material;
    best.color = spheres[i].color;
    best.frontFace = entering;
  }
  // Ground plane with a 1x1 checkerboard pattern.
  float tPlane = intersectPlane(ray);
  if (tPlane > 0.0 && tPlane < best.t) {
    best.t = tPlane;
    best.pos = ray.origin + tPlane * ray.dir;
    best.normal = vec3(0.0, 1.0, 0.0);
    best.material = MAT_GROUND;
    best.frontFace = true;
    float tile = mod(floor(best.pos.x) + floor(best.pos.z), 2.0);
    best.color = mix(vec3(0.8, 0.8, 0.8), vec3(0.2, 0.2, 0.25), tile);
  }
  return best;
}
// ========================
// SKY COLOR
// ========================
// Procedural sky: vertical gradient plus a two-lobe sun glow.
vec3 skyColor(vec3 dir) {
  // Map dir.y from [-1,1] to [0,1] and blend warm horizon -> blue zenith.
  float height = 0.5 * (dir.y + 1.0);
  vec3 sky = mix(vec3(1.0, 0.85, 0.6), vec3(0.25, 0.45, 0.9), height);
  // Sun: a tight bright core plus a wider warm halo around it.
  vec3 sunDir = normalize(vec3(1.0, 0.6, -0.5));
  float towardSun = max(dot(dir, sunDir), 0.0);
  sky += 2.0 * pow(towardSun, 256.0) * vec3(1.0, 0.9, 0.7);
  sky += 0.3 * pow(towardSun, 32.0) * vec3(1.0, 0.7, 0.3);
  return sky;
}
// ========================
// SHADOW TEST
// ========================
// Visibility of the (directional) light from `pos`, in [0,1].
// 1.0 = fully lit, 0.0 = fully shadowed, 0.45 = shadowed only by glass.
//
// Fix: the previous version returned on the FIRST sphere (in array order)
// that blocked the ray, so a glass occluder listed before an opaque one
// produced a partial shadow even where the opaque sphere fully blocks the
// light. We now consider every occluder: any opaque hit wins outright,
// and glass hits attenuate the remaining visibility.
float traceShadow(vec3 pos, vec3 lightDir) {
  Ray shadowRay;
  // Offset along the light direction to avoid self-intersection (acne).
  shadowRay.origin = pos + EPSILON * lightDir;
  shadowRay.dir = lightDir;
  float visibility = 1.0;
  for (int i = 0; i < NUM_SPHERES; i++) {
    bool front;
    float t = intersectSphere(shadowRay, spheres[i], front);
    if (t > 0.0 && t < 100.0) {
      if (spheres[i].material == MAT_GLASS) {
        // Glass transmits part of the light — partial shadow.
        visibility = min(visibility, 0.45);
      } else {
        return 0.0; // opaque occluder: fully in shadow, nothing can undo this
      }
    }
  }
  return visibility;
}
// ========================
// FRESNEL (Schlick approximation)
// ========================
// Schlick's approximation to the Fresnel reflectance.
// cosTheta: cosine of the incidence angle; ior: index of refraction.
// Returns F0 + (1 - F0) * (1 - cosTheta)^5, where F0 is the reflectance
// at normal incidence for an air/medium interface.
float fresnelSchlick(float cosTheta, float ior) {
  float f0 = (ior - 1.0) / (ior + 1.0); // sign squares away; same as (1-ior)/(1+ior)
  f0 *= f0;
  float m = 1.0 - cosTheta;
  float m2 = m * m;
  return f0 + (1.0 - f0) * m2 * m2 * m; // explicit quintic instead of pow()
}
// ========================
// LIGHTING
// ========================
// Local illumination at a hit point: ambient + shadowed Lambert diffuse
// + Blinn-Phong specular + an unshadowed fill light from the far side.
// viewDir is the incoming ray direction (pointing INTO the surface).
vec3 shade(HitRecord hit, vec3 viewDir) {
  // Key light matches the sun direction used by skyColor.
  vec3 keyDir = normalize(vec3(1.0, 0.6, -0.5));
  vec3 keyColor = vec3(1.0, 0.95, 0.85);
  vec3 ambient = vec3(0.12, 0.15, 0.22);
  float visibility = traceShadow(hit.pos, keyDir);
  // Lambertian term.
  float lambert = max(dot(hit.normal, keyDir), 0.0);
  // Blinn-Phong half-vector: L + V where V = -viewDir points back to camera.
  vec3 halfVec = normalize(keyDir - viewDir);
  float highlight = pow(max(dot(hit.normal, halfVec), 0.0), 64.0);
  // Subtle fill keeps shadowed areas from going completely black.
  float fill = 0.15 * max(dot(hit.normal, normalize(vec3(-0.5, 0.3, 0.7))), 0.0);
  vec3 result = hit.color * (ambient + keyColor * lambert * visibility + fill);
  result += 0.5 * keyColor * highlight * visibility;
  return result;
}
// ========================
// MAIN RAY TRACE LOOP
// ========================
// Iterative (unrolled-recursion) Whitted tracer. `throughput` tracks the
// fraction of energy the current ray still carries; each bounce either
// terminates the walk (sky or diffuse hit) or attenuates throughput and
// continues with a reflected/refracted ray. If MAX_BOUNCES runs out
// mid-reflection, the remaining energy is dropped — deep mirror recursion
// fades to black, as noted in the article text below.
vec3 trace(Ray ray) {
vec3 finalColor = vec3(0.0);
vec3 throughput = vec3(1.0);
for (int bounce = 0; bounce < MAX_BOUNCES; bounce++) {
HitRecord hit = traceScene(ray);
// Miss — hit the sky
if (hit.material == -1) {
finalColor += throughput * skyColor(ray.dir);
break;
}
// --- Diffuse material ---
if (hit.material == MAT_DIFFUSE) {
finalColor += throughput * shade(hit, ray.dir);
break; // diffuse terminates the ray
}
// --- Ground (reflective checkerboard) ---
// Splits energy: 75% shaded locally now, 25% (tinted) into the bounce.
if (hit.material == MAT_GROUND) {
vec3 groundCol = shade(hit, ray.dir);
float groundReflectivity = 0.25;
finalColor += throughput * groundCol * (1.0 - groundReflectivity);
throughput *= groundReflectivity * hit.color;
ray.origin = hit.pos + hit.normal * EPSILON; // offset prevents self-intersection
ray.dir = reflect(ray.dir, hit.normal);
continue;
}
// --- Perfect mirror ---
if (hit.material == MAT_MIRROR) {
// Tint the reflection slightly
throughput *= hit.color;
ray.origin = hit.pos + hit.normal * EPSILON;
ray.dir = reflect(ray.dir, hit.normal);
// Add a faint specular highlight directly
// NOTE: ray.dir is already the reflected direction here, so this is an
// artistic approximation rather than textbook Blinn-Phong.
vec3 lightDir = normalize(vec3(1.0, 0.6, -0.5));
vec3 H = normalize(lightDir - ray.dir);
float spec = pow(max(dot(hit.normal, H), 0.0), 128.0);
finalColor += throughput * spec * 0.3;
continue;
}
// --- Glass (refractive) ---
if (hit.material == MAT_GLASS) {
float ior = 1.5; // crown glass
// eta = n_from / n_to: entering glass uses 1/ior, exiting uses ior.
float eta = hit.frontFace ? (1.0 / ior) : ior;
vec3 N = hit.normal;
float cosI = -dot(ray.dir, N);
// traceScene already flips the normal toward the ray, so cosI should be
// positive here; this guard is purely defensive.
if (cosI < 0.0) { cosI = -cosI; N = -N; }
// Fresnel determines reflect vs refract ratio
float fresnel = fresnelSchlick(cosI, ior);
// Refraction via Snell's law: sin^2(theta_t) = eta^2 * (1 - cos^2(theta_i))
float sinT2 = eta * eta * (1.0 - cosI * cosI);
// Total internal reflection check
if (sinT2 > 1.0) {
// Total internal reflection
ray.origin = hit.pos + N * EPSILON;
ray.dir = reflect(ray.dir, N);
throughput *= vec3(0.98, 0.99, 1.0);
} else {
// Use Fresnel to decide reflect or refract
// Deterministic choice based on fresnel for consistent look
// (a path tracer would instead sample the choice probabilistically).
if (fresnel > 0.5) {
// Reflect
ray.origin = hit.pos + N * EPSILON;
ray.dir = reflect(ray.dir, N);
throughput *= vec3(0.98, 0.99, 1.0);
} else {
// Refract
float cosT = sqrt(1.0 - sinT2);
ray.dir = eta * ray.dir + (eta * cosI - cosT) * N;
ray.dir = normalize(ray.dir);
// Offset BELOW the surface — the refracted ray continues inside.
ray.origin = hit.pos - N * EPSILON;
// Slight absorption for color tint
throughput *= vec3(0.95, 0.97, 1.0);
}
}
continue;
}
// --- Metallic (fuzzy reflection) ---
// Like the ground: part shaded locally, part bounced with the metal tint.
if (hit.material == MAT_METAL) {
vec3 metalCol = shade(hit, ray.dir);
float metalReflectivity = 0.6;
finalColor += throughput * metalCol * (1.0 - metalReflectivity);
throughput *= hit.color * metalReflectivity;
ray.origin = hit.pos + hit.normal * EPSILON;
// Slightly perturbed reflection for brushed metal look
vec3 refl = reflect(ray.dir, hit.normal);
// Deterministic fuzz using surface position
// (position-hashed sines stand in for a random offset so the image stays
// stable from frame to frame).
float fuzz = 0.05;
vec3 fuzzOffset = fuzz * vec3(
sin(hit.pos.x * 173.7 + hit.pos.z * 71.1),
sin(hit.pos.y * 239.3 + hit.pos.x * 43.7),
sin(hit.pos.z * 157.9 + hit.pos.y * 97.3)
);
ray.dir = normalize(refl + fuzzOffset);
continue;
}
}
return finalColor;
}
// ========================
// CAMERA SETUP
// ========================
// Build the primary camera ray for a screen position `uv`
// (centered, aspect-corrected coordinates — see main).
Ray getCameraRay(vec2 uv) {
  // Camera orbits the scene center at fixed height and radius.
  float angle = iTime * 0.2;
  vec3 camPos = vec3(6.5 * sin(angle), 2.8, 6.5 * cos(angle));
  vec3 target = vec3(0.0, 0.6, 0.0);
  // Orthonormal look-at basis.
  vec3 forward = normalize(target - camPos);
  vec3 right = normalize(cross(forward, vec3(0.0, 1.0, 0.0)));
  vec3 up = cross(right, forward);
  Ray ray;
  ray.origin = camPos;
  // The 1.2 factor acts as a focal length: larger = narrower FOV.
  ray.dir = normalize(forward * 1.2 + right * uv.x + up * uv.y);
  return ray;
}
// ========================
// TONEMAPPING & GAMMA
// ========================
// Narkowicz's rational curve-fit to the ACES filmic tonemapper.
// Maps HDR radiance to [0,1] with a film-like shoulder and toe.
vec3 ACESFilm(vec3 x) {
  vec3 numer = x * (2.51 * x + 0.03);
  vec3 denom = x * (2.43 * x + 0.59) + 0.14;
  return clamp(numer / denom, 0.0, 1.0);
}
void main() {
  // Centered, aspect-corrected screen coordinates.
  vec2 uv = (gl_FragCoord.xy - 0.5 * iResolution.xy) / iResolution.y;
  buildScene();
  // 2x2 supersampling: average four rays jittered by half a pixel.
  float aa = 0.5 / iResolution.y;
  vec3 col = trace(getCameraRay(uv + vec2(-aa, -aa)))
           + trace(getCameraRay(uv + vec2( aa, -aa)))
           + trace(getCameraRay(uv + vec2(-aa,  aa)))
           + trace(getCameraRay(uv + vec2( aa,  aa)));
  col *= 0.25;
  // HDR -> display: filmic tonemap, then gamma-2.2 encode.
  col = ACESFilm(col);
  col = pow(col, vec3(1.0 / 2.2));
  gl_FragColor = vec4(col, 1.0);
}
At the highest level, ray tracing answers one question per pixel: "What color is the light arriving from this direction?" The camera emits a ray through each pixel into the scene. When the ray strikes an object, we compute the local illumination (diffuse, specular, shadows) and then spawn new rays depending on the surface material — a reflection ray for mirrors, a refracted ray for glass, or nothing further for purely diffuse surfaces. This recursive process is known as Whitted-style ray tracing, named after Turner Whitted's seminal 1980 paper.
Our shader unrolls this recursion into an iterative loop with an accumulator (throughput) that tracks how much energy remains after each bounce. When the ray finally hits the sky or a diffuse surface, the loop terminates and the accumulated color is returned.
Unlike ray marching (which steps along the ray testing a distance field), analytical ray tracing solves the intersection equation directly. For a sphere centered at C with radius r, a point on the ray is P = O + tD. Substituting into the sphere equation |P - C|² = r² gives us a quadratic in t:
// Derivation of ray-sphere intersection:
//
// |O + tD - C|^2 = r^2
// Let oc = O - C
// |oc + tD|^2 = r^2
// dot(oc + tD, oc + tD) = r^2
// t^2*dot(D,D) + 2t*dot(oc,D) + dot(oc,oc) - r^2 = 0
//
// Since D is normalized, dot(D,D) = 1:
// t^2 + 2bt + c = 0 where b = dot(oc,D), c = dot(oc,oc) - r^2
//
// Discriminant: disc = b^2 - c
// disc < 0 → ray misses the sphere
// disc == 0 → ray grazes (tangent)
// disc > 0 → two intersections at t = -b ± sqrt(disc)
// Minimal article version: returns the nearer root only.
float intersectSphere(Ray ray, vec3 center, float radius) {
vec3 oc = ray.origin - center;
float b = dot(oc, ray.dir); // "half-b" — the factors of 2 cancel in the formula
float c = dot(oc, oc) - radius * radius;
float disc = b * b - c;
if (disc < 0.0) return -1.0;
// NOTE: this can be negative when the ray origin is inside the sphere;
// the full tracer above handles that case with the second root.
return -b - sqrt(disc); // nearest hit
}
This is exact — no stepping artifacts, no missed thin geometry. The trade-off is that we need a dedicated intersection function for every primitive type (sphere, plane, triangle, etc.), whereas ray marching can render arbitrary implicit surfaces defined by a single distance function.
When a ray hits a mirror surface, it bounces according to the law of reflection: the angle of incidence equals the angle of reflection, both measured from the surface normal. GLSL gives us this for free with reflect(), but understanding the math is valuable:
// Reflection formula:
// R = D - 2 * dot(D, N) * N
//
// Where:
// D = incoming ray direction (pointing INTO the surface)
// N = surface normal (pointing OUT)
// R = reflected direction (pointing AWAY from the surface)
//
// Geometrically, we decompose D into components parallel and
// perpendicular to N, then flip the perpendicular component.
vec3 reflectRay(vec3 D, vec3 N) {
// Assumes N is unit length; the result has the same magnitude as D.
return D - 2.0 * dot(D, N) * N;
}
// After reflection, we offset the new ray origin slightly along
// the normal to prevent self-intersection (shadow acne):
// newOrigin = hitPoint + normal * EPSILON;
In our shader, the mirror sphere accumulates its tint color into the throughput and continues the loop. After 4 bounces of mirror-to-mirror reflections, the recursion terminates, which is why deeply nested reflections fade to black — a classic visual signature of bounded ray tracing.
The glass sphere is the showpiece of any Whitted ray tracer. When light enters a denser medium, it bends toward the normal; when it exits, it bends away. This is governed by Snell's Law: n1 sin(θi) = n2 sin(θt).
// Refraction using Snell's law:
//
// Given:
// eta = n1/n2 (ratio of refractive indices)
// I = incident ray direction (normalized, pointing into surface)
// N = surface normal (pointing outward)
//
// cos(theta_i) = -dot(I, N)
// sin^2(theta_t) = eta^2 * (1 - cos^2(theta_i))
//
// If sin^2(theta_t) > 1.0, total internal reflection occurs
// (no refracted ray exists — all light reflects).
//
// Otherwise, the refracted direction is:
// T = eta * I + (eta * cos_i - cos_t) * N
// where cos_t = sqrt(1 - sin^2(theta_t))
vec3 refractRay(vec3 I, vec3 N, float eta) {
float cosI = -dot(I, N);
float sin2T = eta * eta * (1.0 - cosI * cosI);
// Beyond the critical angle no transmitted ray exists — fall back to reflection.
if (sin2T > 1.0) return reflect(I, N); // TIR
float cosT = sqrt(1.0 - sin2T);
return eta * I + (eta * cosI - cosT) * N;
}
Glass at typical index of refraction (1.5 for crown glass) produces that characteristic lensing effect — the background behind the sphere appears inverted and distorted. Our shader uses an IOR of 1.5 and tints refracted rays slightly blue-white to simulate the subtle color absorption of real glass.
Real glass doesn't just refract — it also reflects. The balance between reflection and refraction depends on the viewing angle. At near-normal incidence (looking straight at the surface), most light passes through. At grazing angles, the surface becomes almost perfectly mirror-like. This is the Fresnel effect, and we approximate it with Schlick's formula:
// Schlick's approximation to the Fresnel equations:
//
// F(theta) = F0 + (1 - F0) * (1 - cos(theta))^5
//
// Where F0 is the reflectance at normal incidence:
// F0 = ((n1 - n2) / (n1 + n2))^2
//
// For glass (n=1.5): F0 = ((1.0 - 1.5)/(1.0 + 1.5))^2 = 0.04
// This means at head-on viewing, only 4% of light reflects.
// At grazing angles, reflection approaches 100%.
float fresnelSchlick(float cosTheta, float ior) {
// r0: reflectance at normal incidence (0.04 for air -> glass at n = 1.5).
float r0 = (1.0 - ior) / (1.0 + ior);
r0 = r0 * r0;
return r0 + (1.0 - r0) * pow(1.0 - cosTheta, 5.0);
}
// In a full path tracer, you'd randomly choose reflect or refract
// weighted by the Fresnel term. In our deterministic Whitted tracer,
// we use a threshold (0.5) for a clean, artifact-free result.
The Fresnel effect is one of those subtle details that makes CG images look "right." Without it, glass spheres look flat and unconvincing. With it, you get that beautiful rim reflection that grounds the object in the scene.
If you've explored the SDF (Signed Distance Field) posts in this forum, you might wonder: why use analytical ray tracing when ray marching is so flexible? The answer comes down to trade-offs:
Analytical ray tracing computes exact intersections in closed form. It's perfect for scenes composed of geometric primitives — spheres, planes, triangles, cylinders. Intersection is O(1) per primitive per ray, and the resulting hits are mathematically exact with no stepping artifacts. This is what production renderers (Arnold, RenderMan, PBRT) use, with triangle meshes as the primary primitive and BVH acceleration structures to cull unnecessary tests.
Ray marching (sphere tracing) steps along the ray using a distance field to determine safe step sizes. It excels at rendering implicit surfaces, fractals, and procedural geometry that can't be expressed as simple primitives. The cost is proportional to the number of steps, and thin geometry can be missed if step sizes are too large.
For our classic sphere scene, analytical tracing is the natural choice — we get pixel-perfect intersections, no wasted steps, and the math is elegant. For a Mandelbulb fractal or smooth-blended organic shapes, ray marching would be far more practical.
Our Whitted-style tracer handles mirrors, glass, and sharp shadows beautifully, but it can't produce soft shadows, glossy reflections, color bleeding, or caustics. These effects require path tracing — the Monte Carlo extension of ray tracing where each bounce direction is randomly sampled from a probability distribution (BRDF). By averaging thousands of random paths per pixel, path tracing converges to the physically correct solution of the rendering equation.
This is exactly what offline renderers do. A single frame of a Pixar film might trace billions of paths. The noise from insufficient samples is managed through importance sampling, multiple importance sampling (MIS), and sophisticated denoising algorithms.
For decades, ray tracing was "too expensive for real time." That changed with NVIDIA's RTX architecture (2018), which introduced dedicated RT cores — fixed-function hardware that accelerates BVH traversal and ray-triangle intersection by orders of magnitude. Microsoft's DXR (DirectX Raytracing) and Vulkan's VK_KHR_ray_tracing extensions expose this hardware through standardized APIs.
Modern real-time ray tracing typically uses a hybrid approach:
1. Rasterize the primary visibility pass (G-buffer) — this is still faster than tracing primary rays for dense geometry.
2. Ray trace secondary effects — reflections, shadows, ambient occlusion, global illumination — where ray tracing's quality advantage is most visible.
3. Denoise the result — since real-time budgets only allow 1-4 rays per pixel for secondary effects, the raw output is extremely noisy. Temporal and spatial denoisers (NVIDIA's NRD, AMD's FidelityFX) reconstruct a clean image from this sparse sampling.
This pipeline delivers "ray traced quality" reflections, shadows, and GI at real-time frame rates — something that seemed impossible just a few years ago.
Our fragment shader tests every ray against every sphere — O(N) per ray. With 5 spheres, this is fine. With millions of triangles, it's catastrophic. The solution is a Bounding Volume Hierarchy (BVH): a tree of axis-aligned bounding boxes that lets you skip large portions of the scene. By testing a ray against a node's bounding box first, you can reject entire subtrees if the ray misses the box. This reduces intersection cost from O(N) to approximately O(log N), making scenes with millions of primitives tractable.
Building efficient BVHs is its own sub-field — Surface Area Heuristic (SAH) is the gold standard for choosing where to split, and techniques like SBVH (Spatial BVH) handle overlapping geometry. For GPU ray tracing, NVIDIA's RT cores implement a hardware BVH traversal unit that processes nodes at the rate of roughly one box test per clock cycle.
Real-time path tracing at 1 sample per pixel produces an image that looks like television static. Denoising is what makes it usable. Modern denoisers combine spatial filtering (bilateral, non-local means) with temporal accumulation (reusing information from previous frames) and AI-based reconstruction (NVIDIA DLSS, Intel XeSS). The key insight is that ray traced noise has known statistical properties — a denoiser trained on this noise distribution can reconstruct remarkably clean images from very sparse input.
The combination of hardware-accelerated ray tracing, hybrid rendering pipelines, and AI-powered denoising has brought us to a point where real-time path-traced global illumination is shipping in commercial games. What took Turner Whitted minutes per frame on a VAX in 1979 now runs at 60fps in your browser — and our little fragment shader is a direct descendant of that original vision.