Vertex shaders are the first programmable stage in the GPU rendering pipeline. They process each vertex individually, transforming positions from object space to clip space and passing data to the fragment shader.
This preview visualizes the kind of vertex displacement a wave shader produces — the height field is rendered as a lit surface with grid overlay:
precision mediump float;
uniform vec2 iResolution;
uniform float iTime;

// Height of the animated wave field at world coords (x, z).
// Shared by the shading sample and both gradient samples so the three
// copies of the formula can never drift apart (they were duplicated inline before).
float heightAt(float x, float z) {
    return sin(x + iTime * 2.0) * 0.3
         + cos(z + iTime * 1.5) * 0.2
         + sin(x * 0.5 + z * 0.7 + iTime) * 0.15;
}

void main() {
    vec2 uv = gl_FragCoord.xy / iResolution.xy;
    // Visualize vertex displacement as a height field
    float x = uv.x * 8.0;
    float z = uv.y * 6.0;
    float wave = heightAt(x, z);
    // Compute fake normal from forward differences of the height field
    float eps = 0.05;
    float wx = heightAt(x + eps, z);
    float wz = heightAt(x, z + eps);
    vec3 normal = normalize(vec3(wave - wx, eps * 2.0, wave - wz));
    // Lighting: Lambert diffuse plus a fixed-view specular highlight
    vec3 lightDir = normalize(vec3(0.5, 1.0, -0.3));
    float diff = max(dot(normal, lightDir), 0.0);
    float spec = pow(max(dot(reflect(-lightDir, normal), vec3(0.0, 0.0, -1.0)), 0.0), 32.0);
    // Color by height. wave spans roughly [-0.65, 0.65], so wave*2+0.5 can
    // leave [0,1]; clamp so mix() interpolates instead of extrapolating to
    // out-of-range (e.g. negative-channel) colors.
    float heightT = clamp(wave * 2.0 + 0.5, 0.0, 1.0);
    vec3 col = mix(vec3(0.1, 0.3, 0.8), vec3(0.3, 0.9, 0.5), heightT);
    col = col * (0.2 + diff * 0.7) + vec3(1.0) * spec * 0.4;
    // Grid lines
    float gridX = smoothstep(0.02, 0.0, abs(fract(x * 0.5) - 0.5));
    float gridZ = smoothstep(0.02, 0.0, abs(fract(z * 0.5) - 0.5));
    col = mix(col, vec3(0.5, 0.8, 1.0), (gridX + gridZ) * 0.3);
    gl_FragColor = vec4(col, 1.0);
}
Every vertex in your mesh passes through the vertex shader. At minimum, it must output a clip-space position via gl_Position. But it can also compute lighting normals, pass texture coordinates, and even deform geometry procedurally.
Here is a minimal vertex shader that transforms vertices and passes UV coordinates to the fragment stage:
#version 330 core
layout(location = 0) in vec3 aPos;
layout(location = 1) in vec2 aTexCoord;
layout(location = 2) in vec3 aNormal;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
uniform mat3 normalMatrix;
out vec2 TexCoord;
out vec3 FragNormal;
out vec3 FragPos;

// Transform each vertex into clip space and hand the fragment stage its
// world-space position, world-space normal, and UV coordinates.
void main()
{
    // Object space -> world space; lighting interpolants live in world space.
    vec4 positionWS = model * vec4(aPos, 1.0);
    TexCoord   = aTexCoord;
    FragPos    = positionWS.xyz;
    // normalMatrix (inverse-transpose of model) keeps normals correct
    // under non-uniform scale; renormalize after the transform.
    FragNormal = normalize(normalMatrix * aNormal);
    // World space -> clip space for the rasterizer.
    gl_Position = projection * view * positionWS;
}
You can animate vertices procedurally. This example creates a wave effect using a time uniform:
#version 330 core
layout(location = 0) in vec3 aPos;
layout(location = 1) in vec2 aTexCoord;
uniform mat4 mvp;
uniform float time;
out vec2 TexCoord;

// Procedurally animate the surface: two phase-shifted waves (driven by X
// and Z respectively) lift each vertex along Y before projection.
void main()
{
    TexCoord = aTexCoord;
    float lift = sin(aPos.x * 4.0 + time * 2.0) * 0.3
               + cos(aPos.z * 3.0 + time * 1.5) * 0.2;
    // Single combined model-view-projection transform of the displaced vertex.
    gl_Position = mvp * vec4(aPos.x, aPos.y + lift, aPos.z, 1.0);
}
This is fundamental to effects like water surfaces, cloth simulation, terrain LOD morphing, and procedural animation in games.
In the next post we will cover fragment shaders and how they receive the interpolated data from the vertex stage.