Chapter 3.4: Render Texture Techniques

Render textures are a core technique in modern graphics programming: they let us render a scene into a texture and then process it further. This tutorial takes a close look at how render textures work, how to implement them, and the application scenarios where they are used.

🎯 Learning Objectives

  • Understand how render textures and FBOs work
  • Master the render texture workflow in Cocos Creator (a minimal sketch follows this list)
  • Learn to build post-processing effects and screen-space techniques
  • Understand performance optimization strategies for render textures
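
Since Cocos Creator is one of the targets above, here is a minimal engine-level sketch before we drop down to raw WebGL: a camera renders into a RenderTexture, and a UI Sprite displays the result. The component and property names (RenderToTextureDemo, sourceCamera, targetSprite) are illustrative; the engine calls (RenderTexture.reset, Camera.targetTexture, SpriteFrame.texture) follow the Cocos Creator 3.x API, but verify them against your engine version.

import { _decorator, Component, Camera, RenderTexture, Sprite, SpriteFrame } from 'cc';
const { ccclass, property } = _decorator;

// Minimal sketch: render a camera into a RenderTexture and show it on a Sprite.
@ccclass('RenderToTextureDemo')
export class RenderToTextureDemo extends Component {
    @property(Camera)
    sourceCamera: Camera | null = null;   // camera whose output we capture

    @property(Sprite)
    targetSprite: Sprite | null = null;   // UI sprite that displays the result

    start() {
        const rt = new RenderTexture();
        rt.reset({ width: 512, height: 512 });          // allocate the off-screen target

        if (this.sourceCamera) {
            this.sourceCamera.targetTexture = rt;       // redirect the camera into the texture
        }

        if (this.targetSprite) {
            const sf = new SpriteFrame();
            sf.texture = rt;                            // wrap the RT so the UI can sample it
            this.targetSprite.spriteFrame = sf;
        }
    }
}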

🔧 Render Texture Fundamentals

FBO Basics

// Descriptors used by the frame buffer (FBO) manager
interface TextureFormat {
    internalFormat: number;
    format: number;
    type: number;
    name: string;
}

interface FrameBufferObject {
    fbo: WebGLFramebuffer;
    colorTextures: WebGLTexture[];
    depthTexture?: WebGLTexture;
    depthRenderbuffer?: WebGLRenderbuffer;
    width: number;
    height: number;
    format: TextureFormat;
    msaa: boolean;
    mipLevels: number;
}

// Frame buffer object (FBO) manager
class FrameBufferManager {
    private gl: WebGL2RenderingContext;
    private frameBuffers: Map<string, FrameBufferObject> = new Map();

    private static readonly FORMATS = {
        RGBA8: { internalFormat: WebGL2RenderingContext.RGBA8, format: WebGL2RenderingContext.RGBA, type: WebGL2RenderingContext.UNSIGNED_BYTE, name: 'RGBA8' },
        RGBA16F: { internalFormat: WebGL2RenderingContext.RGBA16F, format: WebGL2RenderingContext.RGBA, type: WebGL2RenderingContext.HALF_FLOAT, name: 'RGBA16F' },
        RGBA32F: { internalFormat: WebGL2RenderingContext.RGBA32F, format: WebGL2RenderingContext.RGBA, type: WebGL2RenderingContext.FLOAT, name: 'RGBA32F' },
        DEPTH24_STENCIL8: { internalFormat: WebGL2RenderingContext.DEPTH24_STENCIL8, format: WebGL2RenderingContext.DEPTH_STENCIL, type: WebGL2RenderingContext.UNSIGNED_INT_24_8, name: 'DEPTH24_STENCIL8' }
    };

    constructor(gl: WebGL2RenderingContext) {
        this.gl = gl;
    }

    public createFrameBuffer(
        name: string,
        width: number,
        height: number,
        config: {
            colorCount?: number;
            colorFormat?: keyof typeof FrameBufferManager.FORMATS;
            hasDepth?: boolean;
            depthFormat?: keyof typeof FrameBufferManager.FORMATS;
            msaa?: boolean;
            mipLevels?: number;
        } = {}
    ): FrameBufferObject {
        console.log(`🖼️ Creating framebuffer: ${name} (${width}x${height})`);

        const fbo = this.gl.createFramebuffer()!;
        this.gl.bindFramebuffer(this.gl.FRAMEBUFFER, fbo);

        const colorCount = config.colorCount || 1;
        const colorFormat = FrameBufferManager.FORMATS[config.colorFormat || 'RGBA8'];
        const colorTextures: WebGLTexture[] = [];

        // Create color textures
        for (let i = 0; i < colorCount; i++) {
            const colorTexture = this.createTexture2D(width, height, colorFormat, config.mipLevels || 1);
            this.gl.framebufferTexture2D(
                this.gl.FRAMEBUFFER,
                this.gl.COLOR_ATTACHMENT0 + i,
                this.gl.TEXTURE_2D,
                colorTexture,
                0
            );
            colorTextures.push(colorTexture);
        }

        // Configure the draw buffers
        const drawBuffers = Array.from({ length: colorCount }, (_, i) => this.gl.COLOR_ATTACHMENT0 + i);
        this.gl.drawBuffers(drawBuffers);

        // Create the depth attachment
        let depthTexture: WebGLTexture | undefined;
        let depthRenderbuffer: WebGLRenderbuffer | undefined;

        if (config.hasDepth) {
            if (config.depthFormat) {
                // Depth texture: sampleable later (e.g. for SSR or shadow techniques)
                const depthFormat = FrameBufferManager.FORMATS[config.depthFormat];
                depthTexture = this.createTexture2D(width, height, depthFormat, 1);
                this.gl.framebufferTexture2D(
                    this.gl.FRAMEBUFFER,
                    this.gl.DEPTH_STENCIL_ATTACHMENT,
                    this.gl.TEXTURE_2D,
                    depthTexture,
                    0
                );
            } else {
                // Depth renderbuffer: write-only, cheaper when depth is never sampled
                depthRenderbuffer = this.gl.createRenderbuffer()!;
                this.gl.bindRenderbuffer(this.gl.RENDERBUFFER, depthRenderbuffer);
                this.gl.renderbufferStorage(this.gl.RENDERBUFFER, this.gl.DEPTH24_STENCIL8, width, height);
                this.gl.framebufferRenderbuffer(
                    this.gl.FRAMEBUFFER,
                    this.gl.DEPTH_STENCIL_ATTACHMENT,
                    this.gl.RENDERBUFFER,
                    depthRenderbuffer
                );
            }
        }

        // Check completeness
        const status = this.gl.checkFramebufferStatus(this.gl.FRAMEBUFFER);
        if (status !== this.gl.FRAMEBUFFER_COMPLETE) {
            throw new Error(`Framebuffer incomplete: ${this.getFramebufferStatusString(status)}`);
        }

        this.gl.bindFramebuffer(this.gl.FRAMEBUFFER, null);

        const frameBuffer: FrameBufferObject = {
            fbo: fbo,
            colorTextures: colorTextures,
            depthTexture: depthTexture,
            depthRenderbuffer: depthRenderbuffer,
            width: width,
            height: height,
            format: colorFormat,
            msaa: config.msaa || false,
            mipLevels: config.mipLevels || 1
        };

        this.frameBuffers.set(name, frameBuffer);

        console.log(`✅ Framebuffer created: ${colorCount} color attachment(s), depth: ${!!config.hasDepth}`);
        return frameBuffer;
    }

    private createTexture2D(width: number, height: number, format: TextureFormat, mipLevels: number): WebGLTexture {
        const texture = this.gl.createTexture()!;
        this.gl.bindTexture(this.gl.TEXTURE_2D, texture);

        // Allocate immutable storage for all mip levels
        this.gl.texStorage2D(this.gl.TEXTURE_2D, mipLevels, format.internalFormat, width, height);

        // Filtering: depth formats are not filterable in WebGL2, so they fall back to NEAREST
        const isDepth = format.format === this.gl.DEPTH_STENCIL || format.format === this.gl.DEPTH_COMPONENT;
        if (isDepth) {
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.NEAREST);
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MAG_FILTER, this.gl.NEAREST);
        } else if (mipLevels > 1) {
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR_MIPMAP_LINEAR);
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MAG_FILTER, this.gl.LINEAR);
        } else {
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR);
            this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_MAG_FILTER, this.gl.LINEAR);
        }

        this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE);
        this.gl.texParameteri(this.gl.TEXTURE_2D, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE);

        return texture;
    }

    public getFrameBuffer(name: string): FrameBufferObject | undefined {
        return this.frameBuffers.get(name);
    }

    public bindFrameBuffer(name: string, clearColor?: [number, number, number, number]): boolean {
        const frameBuffer = this.frameBuffers.get(name);
        if (!frameBuffer) {
            console.error(`Framebuffer does not exist: ${name}`);
            return false;
        }

        this.gl.bindFramebuffer(this.gl.FRAMEBUFFER, frameBuffer.fbo);
        this.gl.viewport(0, 0, frameBuffer.width, frameBuffer.height);

        if (clearColor) {
            this.gl.clearColor(clearColor[0], clearColor[1], clearColor[2], clearColor[3]);
            this.gl.clear(this.gl.COLOR_BUFFER_BIT | this.gl.DEPTH_BUFFER_BIT);
        }

        return true;
    }

    private getFramebufferStatusString(status: number): string {
        switch (status) {
            case this.gl.FRAMEBUFFER_INCOMPLETE_ATTACHMENT: return 'INCOMPLETE_ATTACHMENT';
            case this.gl.FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: return 'INCOMPLETE_MISSING_ATTACHMENT';
            case this.gl.FRAMEBUFFER_INCOMPLETE_DIMENSIONS: return 'INCOMPLETE_DIMENSIONS';
            case this.gl.FRAMEBUFFER_UNSUPPORTED: return 'UNSUPPORTED';
            default: return `0x${status.toString(16)}`;
        }
    }
}
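
As a quick usage sketch of the manager above (names like scene_hdr and drawScene are illustrative, and rendering into RGBA16F requires the EXT_color_buffer_float extension):

// Usage sketch (assumes `canvas` is an HTMLCanvasElement).
const gl = canvas.getContext('webgl2')!;
gl.getExtension('EXT_color_buffer_float');   // needed to render into RGBA16F/RGBA32F targets

const fboManager = new FrameBufferManager(gl);

// An HDR scene buffer with a depth renderbuffer attached
fboManager.createFrameBuffer('scene_hdr', canvas.width, canvas.height, {
    colorFormat: 'RGBA16F',
    hasDepth: true
});

// Render the scene off-screen...
fboManager.bindFrameBuffer('scene_hdr', [0, 0, 0, 1]);
// drawScene(); // hypothetical scene-drawing call

// ...then return to the default framebuffer for post-processing / presentation
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, canvas.width, canvas.height);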

🎨 Post-Processing Effect System

Post-Processing Pipeline

// Post-processing base shaders
CCProgram fullscreen-vs %{
precision highp float;

in vec2 a_position; // fullscreen quad vertices in [-1, 1]

out vec2 v_uv;

void vert() {
v_uv = a_position * 0.5 + 0.5; // remap to [0, 1]
gl_Position = vec4(a_position, 0.0, 1.0);
}
}%

// Base post-processing fragment shader
CCProgram post-process-base-fs %{
precision highp float;

in vec2 v_uv;
layout(location = 0) out vec4 fragColor;

uniform sampler2D inputTexture;
uniform vec4 screenSize; // xy: size, zw: inv_size

// Basic sampling helpers
vec4 sampleInput(vec2 uv) {
return texture(inputTexture, uv);
}

vec4 sampleInputOffset(vec2 uv, vec2 offset) {
return texture(inputTexture, uv + offset * screenSize.zw);
}

void frag() {
fragColor = sampleInput(v_uv);
}
}%

// Gaussian blur post-process
CCProgram gaussian-blur-fs %{
precision highp float;

in vec2 v_uv;
layout(location = 0) out vec4 fragColor;

uniform sampler2D inputTexture;
uniform vec4 blurParams; // x: radius, y: direction(0=horizontal,1=vertical), z: strength, w: unused
uniform vec4 screenSize;

const float weights[5] = float[](0.227027, 0.1945946, 0.1216216, 0.0540541, 0.0162162);

void frag() {
vec2 texelSize = screenSize.zw;
vec2 direction = blurParams.y < 0.5 ? vec2(1.0, 0.0) : vec2(0.0, 1.0);

vec3 result = texture(inputTexture, v_uv).rgb * weights[0];

for(int i = 1; i < 5; ++i) {
vec2 offset = direction * float(i) * texelSize * blurParams.x;
result += texture(inputTexture, v_uv + offset).rgb * weights[i];
result += texture(inputTexture, v_uv - offset).rgb * weights[i];
}

fragColor = vec4(result, 1.0);
}
}%

// Bloom bright-pass extraction
CCProgram bloom-extract-fs %{
precision highp float;

in vec2 v_uv;
layout(location = 0) out vec4 fragColor;

uniform sampler2D inputTexture;
uniform vec4 bloomParams; // x: threshold, y: knee, z: intensity, w: unused

// Perceptual luminance
float luminance(vec3 color) {
return dot(color, vec3(0.2126, 0.7152, 0.0722));
}

vec3 thresholdFilter(vec3 color, float threshold, float knee) {
float brightness = luminance(color);
float softness = clamp(brightness - threshold + knee, 0.0, 2.0 * knee);
softness = (softness * softness) / (4.0 * knee + 1e-4);
float contribution = max(softness, brightness - threshold) / max(brightness, 1e-4);
return color * contribution;
}

void frag() {
vec3 color = texture(inputTexture, v_uv).rgb;
vec3 bloom = thresholdFilter(color, bloomParams.x, bloomParams.y);
fragColor = vec4(bloom * bloomParams.z, 1.0);
}
}%

// Tone mapping post-process
CCProgram tone-mapping-fs %{
precision highp float;

in vec2 v_uv;
layout(location = 0) out vec4 fragColor;

uniform sampler2D hdrTexture;
uniform sampler2D bloomTexture;
uniform vec4 toneMappingParams; // x: exposure, y: gamma, z: bloomStrength, w: type

// ACES filmic tone mapping
vec3 ACESFilm(vec3 x) {
float a = 2.51;
float b = 0.03;
float c = 2.43;
float d = 0.59;
float e = 0.14;
return clamp((x * (a * x + b)) / (x * (c * x + d) + e), 0.0, 1.0);
}

// Reinhard tone mapping
vec3 Reinhard(vec3 color) {
return color / (1.0 + color);
}

void frag() {
vec3 hdrColor = texture(hdrTexture, v_uv).rgb;
vec3 bloomColor = texture(bloomTexture, v_uv).rgb;

// Combine HDR color and bloom
hdrColor += bloomColor * toneMappingParams.z;

// Apply exposure
hdrColor *= toneMappingParams.x;

// Tone mapping
vec3 mapped;
if (toneMappingParams.w < 0.5) {
mapped = Reinhard(hdrColor);
} else {
mapped = ACESFilm(hdrColor);
}

// Gamma correction
mapped = pow(mapped, vec3(1.0 / toneMappingParams.y));

fragColor = vec4(mapped, 1.0);
}
}%
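
For reference, here is a hedged sketch of how the packed toneMappingParams uniform and the two samplers might be fed from TypeScript using plain WebGL2 calls; toneMapProgram and configureToneMapping are illustrative names, not part of the shader code above:

// Sketch: feeding the packed vec4 uniform of the tone-mapping pass with raw WebGL2 calls.
// `toneMapProgram` is assumed to be the compiled and linked program for tone-mapping-fs.
function configureToneMapping(
    gl: WebGL2RenderingContext,
    toneMapProgram: WebGLProgram,
    opts: { exposure: number; gamma: number; bloomStrength: number; useACES: boolean }
): void {
    gl.useProgram(toneMapProgram);

    // x: exposure, y: gamma, z: bloom strength, w: 0 = Reinhard, 1 = ACES (matches the shader)
    const loc = gl.getUniformLocation(toneMapProgram, 'toneMappingParams');
    gl.uniform4f(loc, opts.exposure, opts.gamma, opts.bloomStrength, opts.useACES ? 1.0 : 0.0);

    // Bind the HDR scene and bloom textures to texture units 0 and 1
    gl.uniform1i(gl.getUniformLocation(toneMapProgram, 'hdrTexture'), 0);
    gl.uniform1i(gl.getUniformLocation(toneMapProgram, 'bloomTexture'), 1);
}

// Example: neutral exposure, 2.2 gamma, moderate bloom, ACES curve
// configureToneMapping(gl, toneMapProgram, { exposure: 1.0, gamma: 2.2, bloomStrength: 0.6, useACES: true });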

Post-Processing System in TypeScript

// Minimal shape of the shader wrapper used throughout this tutorial
interface Shader {
    program: WebGLProgram;
}

// A single post-processing effect in the chain
interface PostEffect {
    name: string;
    shader: Shader;
    enabled: boolean;
    parameters: Map<string, any>;
    renderFunc?: (input: string, output: string) => void;
}

// Post-processing effect manager
class PostProcessingManager {
    private gl: WebGL2RenderingContext;
    private fboManager: FrameBufferManager;
    private effects: Map<string, PostEffect> = new Map();
    private effectChain: string[] = [];
    private tempBuffers: string[] = ['temp0', 'temp1'];

    constructor(gl: WebGL2RenderingContext, fboManager: FrameBufferManager) {
        this.gl = gl;
        this.fboManager = fboManager;
        this.initializeTempBuffers();
    }

    private initializeTempBuffers(): void {
        const width = this.gl.canvas.width;
        const height = this.gl.canvas.height;

        // Full-resolution ping-pong buffers
        this.fboManager.createFrameBuffer('temp0', width, height, {
            colorFormat: 'RGBA16F',
            hasDepth: false
        });

        this.fboManager.createFrameBuffer('temp1', width, height, {
            colorFormat: 'RGBA16F',
            hasDepth: false
        });

        // Lower-resolution buffers for bloom
        this.fboManager.createFrameBuffer('bloom_extract', width / 2, height / 2, {
            colorFormat: 'RGBA16F',
            hasDepth: false
        });

        this.fboManager.createFrameBuffer('bloom_blur_h', width / 4, height / 4, {
            colorFormat: 'RGBA16F',
            hasDepth: false
        });

        this.fboManager.createFrameBuffer('bloom_blur_v', width / 4, height / 4, {
            colorFormat: 'RGBA16F',
            hasDepth: false
        });
    }

    public addEffect(name: string, shader: Shader, parameters: {[key: string]: any} = {}): void {
        const effect: PostEffect = {
            name: name,
            shader: shader,
            enabled: true,
            parameters: new Map(Object.entries(parameters))
        };

        this.effects.set(name, effect);

        console.log(`✅ Added post-processing effect: ${name}`);
    }

    public setEffectChain(chain: string[]): void {
        this.effectChain = chain.filter(name => this.effects.has(name));
        console.log(`🔗 Post-processing chain set: ${this.effectChain.join(' -> ')}`);
    }

    public setEffectParameter(effectName: string, paramName: string, value: any): void {
        const effect = this.effects.get(effectName);
        if (effect) {
            effect.parameters.set(paramName, value);
        }
    }

    public processFrame(inputTexture: WebGLTexture): void {
        if (this.effectChain.length === 0) {
            this.copyToScreen(inputTexture);
            return;
        }

        let currentInput = 'input';
        let currentOutput = 'temp0';
        let tempIndex = 0;

        // The input texture acts as the read source for the first effect.
        // This is simplified here; a real pipeline needs more careful texture bookkeeping.
        for (let i = 0; i < this.effectChain.length; i++) {
            const effectName = this.effectChain[i];
            const effect = this.effects.get(effectName);

            if (!effect || !effect.enabled) continue;

            // Pick the output target
            if (i === this.effectChain.length - 1) {
                currentOutput = 'screen'; // the last effect writes to the screen
            } else {
                currentOutput = this.tempBuffers[tempIndex % 2];
            }

            // Run the effect
            this.executeEffect(effect, currentInput, currentOutput);

            // This pass's output becomes the next pass's input
            currentInput = this.tempBuffers[tempIndex % 2];
            tempIndex++;
        }
    }

    private executeEffect(effect: PostEffect, inputBuffer: string, outputBuffer: string): void {
        console.log(`🎨 Running post-process: ${effect.name} (${inputBuffer} -> ${outputBuffer})`);

        // Effects with custom multi-pass logic handle rendering themselves
        if (effect.renderFunc) {
            effect.renderFunc(inputBuffer, outputBuffer);
            return;
        }

        // Bind the output target
        if (outputBuffer === 'screen') {
            this.gl.bindFramebuffer(this.gl.FRAMEBUFFER, null);
            this.gl.viewport(0, 0, this.gl.canvas.width, this.gl.canvas.height);
        } else {
            this.fboManager.bindFrameBuffer(outputBuffer);
        }

        // Use the effect's shader program
        this.gl.useProgram(effect.shader.program);

        // Upload effect parameters
        this.setEffectUniforms(effect);

        // Bind the input texture
        // (look up the texture behind inputBuffer and bind it to a texture unit here)

        // Draw the fullscreen quad
        this.renderFullscreenQuad();
    }

    public setupBloomEffect(): void {
        // Bloom uses a custom multi-pass render function instead of the single-pass path
        const bloomEffect: PostEffect = {
            name: 'bloom',
            shader: this.getBloomShader(),
            enabled: true,
            parameters: new Map([
                ['threshold', 1.0],
                ['knee', 0.5],
                ['intensity', 1.0],
                ['radius', 2.0]
            ]),
            renderFunc: (input: string, output: string) => {
                this.renderBloom(input, output);
            }
        };

        this.effects.set('bloom', bloomEffect);
    }

    private renderBloom(input: string, output: string): void {
        console.log(`🌟 Rendering bloom`);

        // 1. Bright-pass extraction
        this.fboManager.bindFrameBuffer('bloom_extract');
        this.gl.useProgram(this.getBloomExtractShader().program);
        // (upload threshold / knee / intensity parameters)
        this.renderFullscreenQuad();

        // 2. Downsample and horizontal blur
        this.fboManager.bindFrameBuffer('bloom_blur_h');
        this.gl.useProgram(this.getGaussianBlurShader().program);
        // (upload horizontal blur parameters)
        this.renderFullscreenQuad();

        // 3. Vertical blur
        this.fboManager.bindFrameBuffer('bloom_blur_v');
        // (upload vertical blur parameters)
        this.renderFullscreenQuad();

        // 4. Composite into the output target
        if (output === 'screen') {
            this.gl.bindFramebuffer(this.gl.FRAMEBUFFER, null);
        } else {
            this.fboManager.bindFrameBuffer(output);
        }

        this.gl.useProgram(this.getBloomCompositeShader().program);
        // (bind the original scene texture and the blurred bloom texture)
        this.renderFullscreenQuad();
    }

    private renderFullscreenQuad(): void {
        // Vertex data for a fullscreen quad drawn as a triangle strip.
        // Rebuilding the VBO on every call keeps the example simple; in production
        // this buffer (or a VAO) should be created once and reused.
        const vertices = new Float32Array([
            -1, -1,
             1, -1,
            -1,  1,
             1,  1
        ]);

        const vbo = this.gl.createBuffer();
        this.gl.bindBuffer(this.gl.ARRAY_BUFFER, vbo);
        this.gl.bufferData(this.gl.ARRAY_BUFFER, vertices, this.gl.STATIC_DRAW);

        const posLocation = this.gl.getAttribLocation(this.gl.getParameter(this.gl.CURRENT_PROGRAM), 'a_position');
        this.gl.enableVertexAttribArray(posLocation);
        this.gl.vertexAttribPointer(posLocation, 2, this.gl.FLOAT, false, 0, 0);

        this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 4);

        this.gl.deleteBuffer(vbo);
    }
}
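
A usage sketch tying the pieces together (the shader wrapper toneMapShader is assumed to be compiled elsewhere into a { program } object, and 'scene_hdr' is the HDR scene buffer from the earlier sketch):

// Usage sketch for the manager above; the shader object names are illustrative.
const postFx = new PostProcessingManager(gl, fboManager);

postFx.addEffect('toneMapping', toneMapShader, {
    exposure: 1.0,
    gamma: 2.2,
    bloomStrength: 0.6,
    type: 1            // 0 = Reinhard, 1 = ACES (matches tone-mapping-fs)
});
postFx.setupBloomEffect();   // registers the multi-pass 'bloom' effect

// Effects run in chain order; the final effect writes to the default framebuffer.
postFx.setEffectChain(['bloom', 'toneMapping']);

// Per frame: render the scene into 'scene_hdr' first, then feed its color texture in.
const sceneRT = fboManager.getFrameBuffer('scene_hdr')!;
postFx.processFrame(sceneRT.colorTextures[0]);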

🖼️ Advanced Render Texture Applications

Screen-Space Reflections (SSR)

// Screen-space reflection shader
CCProgram ssr-fs %{
precision highp float;

in vec2 v_uv;
layout(location = 0) out vec4 fragColor;

uniform sampler2D colorTexture;
uniform sampler2D normalTexture;
uniform sampler2D depthTexture;
uniform sampler2D roughnessTexture;

uniform mat4 viewMatrix;
uniform mat4 projMatrix;
uniform mat4 invViewMatrix;
uniform mat4 invProjMatrix;
uniform vec4 ssrParams; // x: maxDistance, y: resolution, z: thickness, w: fadeDistance

// Reconstruct the view-space position from depth
vec3 reconstructViewPos(vec2 uv, float depth) {
vec4 clipPos = vec4(uv * 2.0 - 1.0, depth * 2.0 - 1.0, 1.0);
vec4 viewPos = invProjMatrix * clipPos;
return viewPos.xyz / viewPos.w;
}

// Ray marching along the reflection direction
vec2 rayMarch(vec3 rayStart, vec3 rayDir, float maxDistance) {
float stepSize = maxDistance / ssrParams.y; // resolution controls the step count
vec3 currentPos = rayStart;

for (float i = 0.0; i < ssrParams.y; i += 1.0) {
currentPos += rayDir * stepSize;

// Project to screen space
vec4 clipPos = projMatrix * vec4(currentPos, 1.0);
vec3 screenPos = clipPos.xyz / clipPos.w;
vec2 screenUV = screenPos.xy * 0.5 + 0.5;

// Bounds check
if (screenUV.x < 0.0 || screenUV.x > 1.0 || screenUV.y < 0.0 || screenUV.y > 1.0) {
break;
}

// Sample scene depth
float sceneDepth = texture(depthTexture, screenUV).r;
vec3 sceneViewPos = reconstructViewPos(screenUV, sceneDepth);

// Intersection test
float depthDiff = abs(currentPos.z - sceneViewPos.z);
if (depthDiff < ssrParams.z && currentPos.z > sceneViewPos.z) {
return screenUV;
}
}

return vec2(-1.0); // no hit found
}

void frag() {
vec4 normalData = texture(normalTexture, v_uv);
vec3 normal = normalData.rgb * 2.0 - 1.0;
float roughness = texture(roughnessTexture, v_uv).r;

// Skip reflections on very rough surfaces
if (roughness > 0.8) {
fragColor = vec4(0.0);
return;
}

float depth = texture(depthTexture, v_uv).r;
vec3 viewPos = reconstructViewPos(v_uv, depth);
vec3 viewDir = normalize(-viewPos);
vec3 reflectDir = reflect(-viewDir, normal);

// March the reflection ray
vec2 hitUV = rayMarch(viewPos, reflectDir, ssrParams.x);

if (hitUV.x >= 0.0) {
vec3 reflectionColor = texture(colorTexture, hitUV).rgb;

// Fade with screen-space travel distance
float fadeDistance = length(hitUV - v_uv) / ssrParams.w;
float fade = 1.0 - clamp(fadeDistance, 0.0, 1.0);

// Scale reflection strength by roughness
float reflectance = 1.0 - roughness;

fragColor = vec4(reflectionColor, fade * reflectance);
} else {
fragColor = vec4(0.0);
}
}
}%
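
The SSR pass above reads scene color, normal, depth, and roughness textures, which in practice come from a G-buffer rendered with multiple render targets (MRT). Below is a hedged sketch of how such a G-buffer could be created with the FrameBufferManager from earlier and wired into the SSR program; the attachment layout and the names gbuffer, ssr_result, and ssrProgram are assumptions for this tutorial:

// Sketch: a G-buffer for the SSR pass, built with the FrameBufferManager above.
// Assumed attachment layout (not a fixed engine convention):
//   color 0 = scene color, color 1 = encoded normals, color 2 = roughness / material data
const gbuffer = fboManager.createFrameBuffer('gbuffer', canvas.width, canvas.height, {
    colorCount: 3,
    colorFormat: 'RGBA16F',
    hasDepth: true,
    depthFormat: 'DEPTH24_STENCIL8'   // depth as a texture so the SSR pass can sample it
});

// After rendering the scene into the G-buffer, bind its outputs as SSR inputs:
fboManager.bindFrameBuffer('ssr_result');          // assumed RGBA16F target created earlier
gl.useProgram(ssrProgram);                         // compiled ssr-fs program (assumed)

const bind = (tex: WebGLTexture, unit: number, uniform: string) => {
    gl.activeTexture(gl.TEXTURE0 + unit);
    gl.bindTexture(gl.TEXTURE_2D, tex);
    gl.uniform1i(gl.getUniformLocation(ssrProgram, uniform), unit);
};
bind(gbuffer.colorTextures[0], 0, 'colorTexture');
bind(gbuffer.colorTextures[1], 1, 'normalTexture');
bind(gbuffer.depthTexture!,    2, 'depthTexture');
bind(gbuffer.colorTextures[2], 3, 'roughnessTexture');
// (also upload viewMatrix / projMatrix / invProjMatrix / ssrParams before drawing the quad)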

Real-Time Shadow Mapping

// A single shadow cascade: light matrices plus its view-frustum split
interface ShadowCascade {
    viewMatrix: mat4;
    projMatrix: mat4;
    splitDistance: number;
    shadowBounds: BoundingBox;
}

// Real-time cascaded shadow mapping system
class RealtimeShadowSystem {
    private gl: WebGL2RenderingContext;
    private fboManager: FrameBufferManager;
    private cascadedShadowMaps: WebGLTexture[] = [];
    private shadowMapSize: number = 2048;
    private cascadeCount: number = 4;

    constructor(gl: WebGL2RenderingContext, fboManager: FrameBufferManager) {
        this.gl = gl;
        this.fboManager = fboManager;
    }

    public setupCascadedShadowMaps(): void {
        console.log(`🌑 Setting up cascaded shadow maps (${this.cascadeCount} cascades)`);

        // A 2D array texture holds one depth layer per cascade
        const shadowMapArray = this.gl.createTexture()!;
        this.gl.bindTexture(this.gl.TEXTURE_2D_ARRAY, shadowMapArray);

        this.gl.texImage3D(
            this.gl.TEXTURE_2D_ARRAY,
            0,
            this.gl.DEPTH_COMPONENT24,
            this.shadowMapSize,
            this.shadowMapSize,
            this.cascadeCount,
            0,
            this.gl.DEPTH_COMPONENT,
            this.gl.UNSIGNED_INT,
            null
        );

        this.gl.texParameteri(this.gl.TEXTURE_2D_ARRAY, this.gl.TEXTURE_MIN_FILTER, this.gl.LINEAR);
        this.gl.texParameteri(this.gl.TEXTURE_2D_ARRAY, this.gl.TEXTURE_MAG_FILTER, this.gl.LINEAR);
        // Enable hardware depth comparison so LINEAR filtering gives 2x2 PCF when the map
        // is sampled with a shadow sampler in the lighting shader.
        this.gl.texParameteri(this.gl.TEXTURE_2D_ARRAY, this.gl.TEXTURE_COMPARE_MODE, this.gl.COMPARE_REF_TO_TEXTURE);
        this.gl.texParameteri(this.gl.TEXTURE_2D_ARRAY, this.gl.TEXTURE_COMPARE_FUNC, this.gl.LEQUAL);
        // WebGL2 has no CLAMP_TO_BORDER / TEXTURE_BORDER_COLOR, so clamp to edge here and
        // treat samples that fall outside a cascade as fully lit in the shader instead.
        this.gl.texParameteri(this.gl.TEXTURE_2D_ARRAY, this.gl.TEXTURE_WRAP_S, this.gl.CLAMP_TO_EDGE);
        this.gl.texParameteri(this.gl.TEXTURE_2D_ARRAY, this.gl.TEXTURE_WRAP_T, this.gl.CLAMP_TO_EDGE);

        this.cascadedShadowMaps.push(shadowMapArray);
    }

    public calculateCascadeSplits(camera: Camera, lightDir: vec3): ShadowCascade[] {
        const cascades: ShadowCascade[] = [];
        const nearPlane = camera.near;
        const farPlane = camera.far;

        // Compute the split distances (practical split scheme: blend uniform and logarithmic)
        const splitDistances: number[] = [];
        for (let i = 0; i < this.cascadeCount; i++) {
            const ratio = (i + 1) / this.cascadeCount;
            const logarithmic = nearPlane * Math.pow(farPlane / nearPlane, ratio);
            const uniform = nearPlane + (farPlane - nearPlane) * ratio;
            const splitDistance = uniform + (logarithmic - uniform) * 0.5; // 50/50 blend
            splitDistances.push(splitDistance);
        }

        let prevSplitDistance = nearPlane;

        for (let i = 0; i < this.cascadeCount; i++) {
            const cascade = this.calculateCascadeMatrices(
                camera,
                lightDir,
                prevSplitDistance,
                splitDistances[i]
            );
            cascades.push(cascade);
            prevSplitDistance = splitDistances[i];
        }

        return cascades;
    }

    private calculateCascadeMatrices(
        camera: Camera,
        lightDir: vec3,
        nearDist: number,
        farDist: number
    ): ShadowCascade {
        // Corners of this cascade's slice of the view frustum
        const frustumCorners = this.calculateFrustumCorners(camera, nearDist, farDist);

        // Bounding sphere of the slice
        const center = this.calculateCenter(frustumCorners);
        const radius = this.calculateRadius(frustumCorners, center);

        // Light view matrix, looking at the slice center from behind along the light direction
        const lightPos = vec3.subtract(vec3.create(), center, vec3.scale(vec3.create(), lightDir, radius * 2));
        const lightView = mat4.lookAt(mat4.create(), lightPos, center, [0, 1, 0]);

        // Orthographic projection sized to the bounding sphere
        const lightProj = mat4.ortho(mat4.create(), -radius, radius, -radius, radius, 0.1, radius * 4);

        return {
            viewMatrix: lightView,
            projMatrix: lightProj,
            splitDistance: farDist,
            shadowBounds: this.calculateShadowBounds(frustumCorners, lightView, lightProj)
        };
    }

    public renderShadowCascades(cascades: ShadowCascade[], renderList: RenderObject[]): void {
        console.log(`🌑 Rendering cascaded shadow maps`);

        const shadowFBO = this.fboManager.getFrameBuffer('cascaded_shadows')!;

        for (let i = 0; i < cascades.length; i++) {
            // Attach the i-th layer of the depth array as the render target
            this.gl.bindFramebuffer(this.gl.FRAMEBUFFER, shadowFBO.fbo);
            this.gl.framebufferTextureLayer(
                this.gl.FRAMEBUFFER,
                this.gl.DEPTH_ATTACHMENT,
                this.cascadedShadowMaps[0],
                0,
                i
            );

            this.gl.viewport(0, 0, this.shadowMapSize, this.shadowMapSize);
            this.gl.clear(this.gl.DEPTH_BUFFER_BIT);

            // Light view-projection matrix for this cascade
            const lightViewProj = mat4.multiply(mat4.create(), cascades[i].projMatrix, cascades[i].viewMatrix);

            // Render the shadow casters that overlap this cascade
            renderList.forEach(obj => {
                if (obj.castShadow && this.objectInCascade(obj, cascades[i])) {
                    this.renderObjectToShadowMap(obj, lightViewProj);
                }
            });
        }
    }
}
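
renderShadowCascades() looks up a framebuffer named 'cascaded_shadows' that the generic createFrameBuffer() path does not produce, since the shadow pass is depth-only. A minimal sketch of how that FBO could be built with raw WebGL2 calls follows; how it gets registered under that name in the manager is left as an assumption:

// Sketch: a depth-only framebuffer for the cascaded shadow pass.
// The depth layer itself is attached per cascade at render time via
// framebufferTextureLayer(), exactly as renderShadowCascades() does above.
function createCascadedShadowFBO(gl: WebGL2RenderingContext): WebGLFramebuffer {
    const fbo = gl.createFramebuffer()!;
    gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);

    // Explicitly disable color output for a depth-only pass
    gl.drawBuffers([gl.NONE]);
    gl.readBuffer(gl.NONE);

    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
    return fbo;
}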

📝 Chapter Summary

After working through this tutorial, you should have mastered:

  1. Render texture fundamentals: how FBOs and render textures work
  2. Post-processing systems: building a complete post-processing effect pipeline
  3. Advanced applications: SSR, shadow mapping, and other advanced render-texture techniques
  4. Performance optimization: memory management and optimization strategies for render textures

This concludes Chapter 3: Advanced Rendering Techniques! 🎮

🚀 Next Steps

Continue on to cross-platform compatibility techniques! 🌐