渲染引擎
渲染引擎是游戏引擎中最核心的组成部分之一,负责将游戏中的三维场景转换为屏幕上的二维图像。本章将深入探讨现代游戏渲染技术,从基础的三角形光栅化到高级的光照和后处理效果,并提供C/C++实现代码。
10.1 采用深度缓冲的三角形光栅化基础
10.1.1 图形渲染管线概述
现代图形渲染管线是将三维场景转换为屏幕图像的一系列处理阶段。一个典型的渲染管线包括以下几个阶段:
应用阶段:准备要渲染的几何体和场景数据
几何处理:顶点变换、光照计算和裁剪
光栅化:将几何图元转换为像素片段
像素处理:片段着色、纹理采样和混合
以下是渲染管线的高层次结构:
cpp
// 渲染管线的基础结构(为便于编译,先引入示例所需的标准头文件)
#include <cstdint>   // uint32_t
#include <cstring>   // memset
#include <cmath>     // fabs
#include <algorithm> // min / max
using std::max;
using std::min;

class RenderPipeline {
public:
// 初始化渲染管线
bool Initialize(int width, int height) {
screenWidth = width;
screenHeight = height;
// 分配深度缓冲区内存
depthBuffer = new float[width * height];
// 分配颜色缓冲区内存
colorBuffer = new uint32_t[width * height];
return true;
}
// 清空缓冲区
void ClearBuffers() {
// 清空颜色缓冲区(设为黑色)
memset(colorBuffer, 0, screenWidth * screenHeight * sizeof(uint32_t));
// 清空深度缓冲区(设为最大深度值)
for (int i = 0; i < screenWidth * screenHeight; i++) {
depthBuffer[i] = 1.0f;
}
}
// 渲染一帧
void RenderFrame(const Scene& scene, const Camera& camera) {
ClearBuffers();
// 计算视图矩阵和投影矩阵
Matrix4x4 viewMatrix = camera.GetViewMatrix();
Matrix4x4 projMatrix = camera.GetProjectionMatrix();
Matrix4x4 viewProjMatrix = Matrix4x4::Multiply(viewMatrix, projMatrix);
// 遍历场景中的所有对象
for (const auto& object : scene.GetObjects()) {
// 获取对象的网格和材质
const Mesh& mesh = object.GetMesh();
const Material& material = object.GetMaterial();
// 计算模型视图投影矩阵
Matrix4x4 modelMatrix = object.GetTransformMatrix();
Matrix4x4 mvpMatrix = Matrix4x4::Multiply(modelMatrix, viewProjMatrix);
// 处理网格中的所有三角形
for (size_t i = 0; i < mesh.indices.size(); i += 3) {
// 获取三角形的三个顶点
Vertex v0 = mesh.vertices[mesh.indices[i]];
Vertex v1 = mesh.vertices[mesh.indices[i+1]];
Vertex v2 = mesh.vertices[mesh.indices[i+2]];
// 应用顶点变换
Vector4 p0 = TransformVertex(v0.position, mvpMatrix);
Vector4 p1 = TransformVertex(v1.position, mvpMatrix);
Vector4 p2 = TransformVertex(v2.position, mvpMatrix);
// 执行裁剪(这里简化处理)
if (IsTriangleVisible(p0, p1, p2)) {
// 透视除法(归一化设备坐标)
Vector3 ndc0 = PerspectiveDivide(p0);
Vector3 ndc1 = PerspectiveDivide(p1);
Vector3 ndc2 = PerspectiveDivide(p2);
// 视口变换(屏幕坐标)
Vector2 screen0 = ViewportTransform(ndc0);
Vector2 screen1 = ViewportTransform(ndc1);
Vector2 screen2 = ViewportTransform(ndc2);
// 光栅化三角形
RasterizeTriangle(screen0, screen1, screen2,
ndc0.z, ndc1.z, ndc2.z,
v0, v1, v2, material);
}
}
}
// 呈现最终图像
PresentFrame();
}
// 释放资源
void Shutdown() {
delete[] depthBuffer;
delete[] colorBuffer;
}
private:
int screenWidth;
int screenHeight;
float* depthBuffer;
uint32_t* colorBuffer;
// 顶点变换
Vector4 TransformVertex(const Vector3& position, const Matrix4x4& mvpMatrix) {
Vector4 pos(position.x, position.y, position.z, 1.0f);
return Matrix4x4::Transform(pos, mvpMatrix);
}
// 检查三角形是否可见
bool IsTriangleVisible(const Vector4& v0, const Vector4& v1, const Vector4& v2) {
// 简化的视锥体裁剪检查
// 实际实现需要更复杂的裁剪算法
if (v0.w <= 0 || v1.w <= 0 || v2.w <= 0) {
return false;
}
// 背面剔除
Vector3 p0(v0.x / v0.w, v0.y / v0.w, v0.z / v0.w);
Vector3 p1(v1.x / v1.w, v1.y / v1.w, v1.z / v1.w);
Vector3 p2(v2.x / v2.w, v2.y / v2.w, v2.z / v2.w);
Vector3 e1 = Vector3::Subtract(p1, p0);
Vector3 e2 = Vector3::Subtract(p2, p0);
Vector3 normal = Vector3::Cross(e1, e2);
// 如果法线z分量为正,则三角形背向观察者
return normal.z <= 0;
}
// 透视除法
Vector3 PerspectiveDivide(const Vector4& clip) {
return Vector3(
clip.x / clip.w,
clip.y / clip.w,
clip.z / clip.w
);
}
// 视口变换
Vector2 ViewportTransform(const Vector3& ndc) {
return Vector2(
(ndc.x * 0.5f + 0.5f) * screenWidth,
(1.0f - (ndc.y * 0.5f + 0.5f)) * screenHeight
);
}
// 光栅化三角形
void RasterizeTriangle(
const Vector2& p0, const Vector2& p1, const Vector2& p2,
float z0, float z1, float z2,
const Vertex& v0, const Vertex& v1, const Vertex& v2,
const Material& material) {
// 计算三角形的包围盒
int minX = max(0, static_cast<int>(min(min(p0.x, p1.x), p2.x)));
int minY = max(0, static_cast<int>(min(min(p0.y, p1.y), p2.y)));
int maxX = min(screenWidth - 1, static_cast<int>(max(max(p0.x, p1.x), p2.x)));
int maxY = min(screenHeight - 1, static_cast<int>(max(max(p0.y, p1.y), p2.y)));
// 预计算三角形面积的两倍(用于重心坐标)
float area = EdgeFunction(p0, p1, p2);
if (fabs(area) < 0.0001f) {
return; // 面积过小,跳过
}
// 遍历包围盒中的所有像素
for (int y = minY; y <= maxY; y++) {
for (int x = minX; x <= maxX; x++) {
Vector2 pixel(x + 0.5f, y + 0.5f); // 像素中心
// 使用重心坐标判断像素是否在三角形内
float w0 = EdgeFunction(p1, p2, pixel);
float w1 = EdgeFunction(p2, p0, pixel);
float w2 = EdgeFunction(p0, p1, pixel);
// 归一化重心坐标
w0 /= area;
w1 /= area;
w2 /= area;
// 如果像素在三角形内
if (w0 >= 0 && w1 >= 0 && w2 >= 0) {
// 计算插值深度
float depth = w0 * z0 + w1 * z1 + w2 * z2;
// 执行深度测试
int index = y * screenWidth + x;
if (depth < depthBuffer[index]) {
// 深度测试通过,更新深度缓冲区
depthBuffer[index] = depth;
// 插值顶点属性(如UV、法线、颜色)
Vector2 uv = InterpolateUV(v0.texCoord, v1.texCoord, v2.texCoord, w0, w1, w2);
Vector3 normal = InterpolateNormal(v0.normal, v1.normal, v2.normal, w0, w1, w2);
// 计算像素颜色
Color color = ShadePixel(uv, normal, material);
// 写入颜色缓冲区
colorBuffer[index] = ColorToUint32(color);
}
}
}
}
}
// 边缘函数(用于重心坐标计算)
float EdgeFunction(const Vector2& a, const Vector2& b, const Vector2& c) {
return (c.x - a.x) * (b.y - a.y) - (c.y - a.y) * (b.x - a.x);
}
// 插值UV坐标
Vector2 InterpolateUV(const Vector2& uv0, const Vector2& uv1, const Vector2& uv2,
float w0, float w1, float w2) {
return Vector2(
w0 * uv0.x + w1 * uv1.x + w2 * uv2.x,
w0 * uv0.y + w1 * uv1.y + w2 * uv2.y
);
}
// 插值法线
Vector3 InterpolateNormal(const Vector3& n0, const Vector3& n1, const Vector3& n2,
float w0, float w1, float w2) {
Vector3 normal(
w0 * n0.x + w1 * n1.x + w2 * n2.x,
w0 * n0.y + w1 * n1.y + w2 * n2.y,
w0 * n0.z + w1 * n1.z + w2 * n2.z
);
return Vector3::Normalize(normal);
}
// 像素着色
Color ShadePixel(const Vector2& uv, const Vector3& normal, const Material& material) {
// 简单的着色模型
Color baseColor = material.diffuseMap ? material.diffuseMap->Sample(uv) : material.diffuseColor;
// 简化的光照计算
Vector3 lightDir = Vector3::Normalize(Vector3(0.5f, 0.5f, -1.0f));
float nDotL = max(0.0f, Vector3::Dot(normal, Vector3::Negate(lightDir)));
// 环境光+漫反射
float ambient = 0.2f;
float diffuse = nDotL;
Color finalColor;
finalColor.r = baseColor.r * (ambient + diffuse);
finalColor.g = baseColor.g * (ambient + diffuse);
finalColor.b = baseColor.b * (ambient + diffuse);
finalColor.a = baseColor.a;
return finalColor;
}
// 转换颜色为32位整数
uint32_t ColorToUint32(const Color& color) {
uint8_t r = static_cast<uint8_t>(min(1.0f, color.r) * 255);
uint8_t g = static_cast<uint8_t>(min(1.0f, color.g) * 255);
uint8_t b = static_cast<uint8_t>(min(1.0f, color.b) * 255);
uint8_t a = static_cast<uint8_t>(min(1.0f, color.a) * 255);
// 先显式转换为uint32_t再移位,避免整型提升后左移24位造成有符号溢出
return (static_cast<uint32_t>(a) << 24) | (static_cast<uint32_t>(r) << 16) |
(static_cast<uint32_t>(g) << 8) | static_cast<uint32_t>(b);
}
// 呈现帧缓冲
void PresentFrame() {
// 这里需要将颜色缓冲区显示到屏幕上
// 具体实现取决于使用的图形API(如OpenGL、DirectX等)
}
};
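在进入深度缓冲的细节之前,下面给出一个驱动上述 RenderPipeline 的最小用法示意。其中 Scene 与 Camera 假设已在别处构建,PumpPlatformEvents 是假想的平台事件处理函数,这里只说明调用顺序,并非完整实现:
cpp
// 最小用法示意:初始化 -> 每帧渲染 -> 退出时释放
void RunRenderLoop(Scene& scene, Camera& camera, bool (*PumpPlatformEvents)()) {
    RenderPipeline pipeline;
    if (!pipeline.Initialize(1280, 720)) {
        return; // 初始化失败直接退出
    }
    // PumpPlatformEvents 返回false表示平台请求退出
    while (PumpPlatformEvents()) {
        pipeline.RenderFrame(scene, camera);
    }
    pipeline.Shutdown();
}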
10.1.2 深度缓冲与Z-Fighting
深度缓冲是现代3D渲染的核心技术,它存储每个像素的深度值,用于确定哪些像素应该可见,哪些应该被遮挡。然而,当两个表面非常接近时,可能会出现Z-Fighting(深度冲突)现象。
以下是改进深度缓冲精度的一些技术:
cpp
// 深度比较函数枚举(放在 DepthBufferHelper 之前声明,供其成员函数返回)
enum class DepthCompareFunction {
Never,
Less,
Equal,
LessEqual,
Greater,
NotEqual,
GreaterEqual,
Always
};
// 处理深度冲突的辅助函数
class DepthBufferHelper {
public:
// 设置反转深度(提高远处物体的深度精度)
static void ConfigureReverseDepth(bool enable) {
useReverseDepth = enable;
}
// 为透视投影设置最优深度范围
static void SetOptimalDepthRange(float& nearPlane, float& farPlane) {
// 保持近平面尽可能远
nearPlane = max(nearPlane, 0.1f);
// 在反转深度时,可以使用更大的远平面距离
if (useReverseDepth) {
// 反转深度时,可以使用非常大的远平面值
farPlane = max(farPlane, 10000.0f);
} else {
// 标准深度时,远近平面比例影响精度
// 尝试保持比例在合理范围
float ratio = farPlane / nearPlane;
if (ratio > 1000.0f) {
farPlane = nearPlane * 1000.0f;
}
}
}
// 创建最优的投影矩阵
static Matrix4x4 CreateOptimalProjectionMatrix(float fov, float aspect, float nearPlane, float farPlane) {
if (useReverseDepth) {
// 反转深度的投影矩阵
return Matrix4x4::PerspectiveFovReverseZ(fov, aspect, nearPlane, farPlane);
} else {
// 标准投影矩阵
return Matrix4x4::PerspectiveFov(fov, aspect, nearPlane, farPlane);
}
}
// 获取深度比较函数
static DepthCompareFunction GetDepthCompareFunction() {
return useReverseDepth ? DepthCompareFunction::GreaterEqual : DepthCompareFunction::LessEqual;
}
// 获取清除深度值
static float GetClearDepthValue() {
return useReverseDepth ? 0.0f : 1.0f;
}
private:
static bool useReverseDepth; // 是否使用反转深度
};
bool DepthBufferHelper::useReverseDepth = false;
// 反转深度的投影矩阵计算
Matrix4x4 Matrix4x4::PerspectiveFovReverseZ(float fov, float aspect, float nearPlane, float farPlane) {
float yScale = 1.0f / tan(fov * 0.5f);
float xScale = yScale / aspect;
Matrix4x4 proj;
proj.m[0][0] = xScale;
proj.m[1][1] = yScale;
proj.m[2][2] = nearPlane / (nearPlane - farPlane); // 反转深度
proj.m[2][3] = 1.0f;
proj.m[3][2] = -(farPlane * nearPlane) / (nearPlane - farPlane);
proj.m[3][3] = 0.0f;
return proj;
}
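为了更直观地感受标准深度与反转深度在远处的精度差异,下面是一段可以独立编译的小实验。反转深度的公式与上文 PerspectiveFovReverseZ 的推导一致,标准深度采用常见的[0,1]深度范围约定,数值仅作示意:
cpp
#include <cstdio>

// 标准深度: 近平面映射到0,远平面映射到1
static float StandardDepth(float z, float n, float f) {
    return (f * (z - n)) / (z * (f - n));
}
// 反转深度: 近平面映射到1,远平面映射到0
static float ReverseDepth(float z, float n, float f) {
    return (n * (f - z)) / (z * (f - n));
}

int main() {
    const float n = 0.1f, f = 1000.0f;
    // 两个相距0.5米的远处表面
    const float zA = 900.0f, zB = 900.5f;
    std::printf("standard: %.9f vs %.9f\n", StandardDepth(zA, n, f), StandardDepth(zB, n, f));
    std::printf("reverse : %.9f vs %.9f\n", ReverseDepth(zA, n, f), ReverseDepth(zB, n, f));
    // 标准深度下两个值都紧贴1,float在1附近的精度不足以可靠区分它们,容易产生Z-Fighting;
    // 反转深度下两个值落在0附近,浮点数在0附近的精度更高,区分起来容易得多。
    return 0;
}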
10.1.3 透视正确的属性插值
为了正确渲染纹理和其他顶点属性,需要进行透视正确的插值:
cpp
// 透视正确的属性插值
class PerspectiveCorrectInterpolation {
public:
// 计算透视正确的重心坐标
static void CalculatePerspectiveCorrectBarycentrics(
const Vector4& v0, const Vector4& v1, const Vector4& v2,
const Vector2& pixel,
float& w0, float& w1, float& w2) {
// 计算屏幕空间的重心坐标
float area = EdgeFunction(
Vector2(v0.x, v0.y),
Vector2(v1.x, v1.y),
Vector2(v2.x, v2.y)
);
w0 = EdgeFunction(Vector2(v1.x, v1.y), Vector2(v2.x, v2.y), pixel) / area;
w1 = EdgeFunction(Vector2(v2.x, v2.y), Vector2(v0.x, v0.y), pixel) / area;
w2 = EdgeFunction(Vector2(v0.x, v0.y), Vector2(v1.x, v1.y), pixel) / area;
// 应用透视校正
float invW0 = 1.0f / v0.w;
float invW1 = 1.0f / v1.w;
float invW2 = 1.0f / v2.w;
float correctedW0 = w0 * invW0;
float correctedW1 = w1 * invW1;
float correctedW2 = w2 * invW2;
// 归一化
float sum = correctedW0 + correctedW1 + correctedW2;
correctedW0 /= sum;
correctedW1 /= sum;
correctedW2 /= sum;
// 返回校正后的权重
w0 = correctedW0;
w1 = correctedW1;
w2 = correctedW2;
}
// 透视正确地插值UV坐标
static Vector2 InterpolateUV(
const Vector2& uv0, const Vector2& uv1, const Vector2& uv2,
float w0, float w1, float w2) {
return Vector2(
w0 * uv0.x + w1 * uv1.x + w2 * uv2.x,
w0 * uv0.y + w1 * uv1.y + w2 * uv2.y
);
}
// 透视正确地插值颜色
static Color InterpolateColor(
const Color& c0, const Color& c1, const Color& c2,
float w0, float w1, float w2) {
Color result;
result.r = w0 * c0.r + w1 * c1.r + w2 * c2.r;
result.g = w0 * c0.g + w1 * c1.g + w2 * c2.g;
result.b = w0 * c0.b + w1 * c1.b + w2 * c2.b;
result.a = w0 * c0.a + w1 * c1.a + w2 * c2.a;
return result;
}
private:
// 边缘函数
static float EdgeFunction(const Vector2& a, const Vector2& b, const Vector2& c) {
return (b.x - a.x) * (c.y - a.y) - (b.y - a.y) * (c.x - a.x);
}
};
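下面的小例子演示如何在单个像素上使用这个工具类:先根据三个顶点求出透视校正后的权重,再用权重插值UV。这里假设三角形已通过裁剪、w分量均为正,且传入的Vector4中x/y为屏幕空间坐标、w为原始裁剪空间w,仅作用法示意:
cpp
// 在像素pixel处对UV做透视正确插值的用法示意
Vector2 SampleUVAtPixel(
    const Vector4& v0, const Vector4& v1, const Vector4& v2, // x/y为屏幕坐标,w为裁剪空间w
    const Vector2& uv0, const Vector2& uv1, const Vector2& uv2,
    const Vector2& pixel) {
    float w0, w1, w2;
    PerspectiveCorrectInterpolation::CalculatePerspectiveCorrectBarycentrics(
        v0, v1, v2, pixel, w0, w1, w2);
    return PerspectiveCorrectInterpolation::InterpolateUV(uv0, uv1, uv2, w0, w1, w2);
}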
10.1.4 顶点和片段着色器
现代图形渲染通常使用着色器程序来处理顶点和片段(像素)。以下是一个简化的着色器系统:
cpp
// 着色器输入/输出结构
struct VertexShaderInput {
Vector3 position;
Vector3 normal;
Vector2 texCoord;
Vector4 color;
};
struct VertexShaderOutput {
Vector4 position; // 齐次裁剪空间位置
Vector3 worldPos; // 世界空间位置
Vector3 normal; // 法线
Vector2 texCoord; // 纹理坐标
Vector4 color; // 顶点颜色
};
struct PixelShaderInput {
Vector3 worldPos; // 世界空间位置
Vector3 normal; // 法线
Vector2 texCoord; // 纹理坐标
Vector4 color; // 插值顶点颜色
};
struct PixelShaderOutput {
Vector4 color; // 输出颜色
};
// 着色器接口
class IVertexShader {
public:
virtual ~IVertexShader() {}
// 处理单个顶点
virtual VertexShaderOutput ProcessVertex(const VertexShaderInput& input) = 0;
// 设置常量缓冲区数据
virtual void SetConstants(const void* data, size_t size) = 0;
};
class IPixelShader {
public:
virtual ~IPixelShader() {}
// 处理单个像素
virtual PixelShaderOutput ProcessPixel(const PixelShaderInput& input) = 0;
// 设置常量缓冲区数据
virtual void SetConstants(const void* data, size_t size) = 0;
// 设置纹理
virtual void SetTexture(int slot, const Texture* texture) = 0;
};
// 标准着色器实现
class StandardVertexShader : public IVertexShader {
public:
struct Constants {
Matrix4x4 worldMatrix;
Matrix4x4 viewMatrix;
Matrix4x4 projectionMatrix;
};
StandardVertexShader() : constants{} {}
VertexShaderOutput ProcessVertex(const VertexShaderInput& input) override {
VertexShaderOutput output;
// 计算世界空间位置
Vector4 worldPos = Matrix4x4::Transform(
Vector4(input.position.x, input.position.y, input.position.z, 1.0f),
constants.worldMatrix
);
// 计算观察空间位置
Vector4 viewPos = Matrix4x4::Transform(worldPos, constants.viewMatrix);
// 计算裁剪空间位置
output.position = Matrix4x4::Transform(viewPos, constants.projectionMatrix);
// 变换法线到世界空间
Matrix4x4 normalMatrix = Matrix4x4::Transpose(Matrix4x4::Inverse(constants.worldMatrix));
output.normal = Matrix4x4::TransformNormal(input.normal, normalMatrix);
// 保存其他属性
output.worldPos = Vector3(worldPos.x, worldPos.y, worldPos.z);
output.texCoord = input.texCoord;
output.color = input.color;
return output;
}
void SetConstants(const void* data, size_t size) override {
if (size == sizeof(Constants)) {
memcpy(&constants, data, size);
}
}
private:
Constants constants;
};
class StandardPixelShader : public IPixelShader {
public:
struct Constants {
Vector3 ambientColor;
float ambientIntensity;
Vector3 lightDirection;
float padding;
Vector3 lightColor;
float lightIntensity;
Vector3 cameraPosition;
float specularPower;
};
StandardPixelShader() : constants{}, diffuseTexture(nullptr) {}
PixelShaderOutput ProcessPixel(const PixelShaderInput& input) override {
PixelShaderOutput output;
// 获取基础颜色
Vector4 baseColor = diffuseTexture
? diffuseTexture->Sample(input.texCoord)
: input.color;
// 计算环境光
Vector3 ambient = constants.ambientColor * constants.ambientIntensity;
// 计算漫反射光
Vector3 normal = Vector3::Normalize(input.normal);
Vector3 lightDir = Vector3::Normalize(constants.lightDirection);
float nDotL = max(0.0f, Vector3::Dot(normal, Vector3::Negate(lightDir)));
Vector3 diffuse = constants.lightColor * constants.lightIntensity * nDotL;
// 计算镜面反射光
Vector3 viewDir = Vector3::Normalize(Vector3::Subtract(constants.cameraPosition, input.worldPos));
Vector3 reflectDir = Vector3::Reflect(lightDir, normal);
float specFactor = pow(max(0.0f, Vector3::Dot(viewDir, reflectDir)), constants.specularPower);
Vector3 specular = constants.lightColor * constants.lightIntensity * specFactor;
// 组合最终颜色
Vector3 finalColor = Vector3::Multiply(
Vector3(baseColor.x, baseColor.y, baseColor.z),
Vector3::Add(ambient, Vector3::Add(diffuse, specular))
);
output.color = Vector4(finalColor.x, finalColor.y, finalColor.z, baseColor.w);
return output;
}
void SetConstants(const void* data, size_t size) override {
if (size == sizeof(Constants)) {
memcpy(&constants, data, size);
}
}
void SetTexture(int slot, const Texture* texture) override {
if (slot == 0) {
diffuseTexture = texture;
}
}
private:
Constants constants;
const Texture* diffuseTexture;
};
10.2 渲染管道
渲染管道是游戏引擎中将3D场景转换为2D图像的核心流程。现代渲染管道包含多个阶段,每个阶段处理特定的任务。
10.2.1 前向渲染 vs 延迟渲染
前向渲染和延迟渲染是两种主要的渲染路径。前向渲染在光栅化每个物体时直接计算全部光照,实现简单,天然支持透明物体和硬件MSAA,但每个像素都要遍历所有光源,光源数量一多开销就急剧上升;延迟渲染先把几何信息写入G-Buffer,再在屏幕空间统一计算光照,使光照开销与场景几何复杂度解耦,适合大量动态光源,代价是更高的显存与带宽占用,并且难以直接处理透明物体:
cpp
// 前向渲染管道
class ForwardRenderingPipeline {
public:
bool Initialize(int width, int height) {
screenWidth = width;
screenHeight = height;
// 创建深度缓冲区
depthBuffer = new float[width * height];
// 创建颜色缓冲区
colorBuffer = new uint32_t[width * height];
return true;
}
void Render(const Scene& scene, const Camera& camera) {
// 清空缓冲区
ClearBuffers();
// 获取视图和投影矩阵
Matrix4x4 viewMatrix = camera.GetViewMatrix();
Matrix4x4 projMatrix = camera.GetProjectionMatrix();
// 渲染所有物体
for (const auto& object : scene.GetObjects()) {
// 获取物体的网格和材质
const Mesh& mesh = object.GetMesh();
const Material& material = object.GetMaterial();
// 设置着色器
SetupShaders(object, camera, viewMatrix, projMatrix);
// 渲染物体
RenderObject(mesh);
}
// 应用后处理效果
ApplyPostProcessing();
// 显示最终图像
PresentFrame();
}
private:
int screenWidth;
int screenHeight;
float* depthBuffer;
uint32_t* colorBuffer;
void ClearBuffers() {
// 清空颜色缓冲区(设为黑色)
memset(colorBuffer, 0, screenWidth * screenHeight * sizeof(uint32_t));
// 清空深度缓冲区(设为最大深度值)
for (int i = 0; i < screenWidth * screenHeight; i++) {
depthBuffer[i] = 1.0f;
}
}
void SetupShaders(const Object& object, const Camera& camera,
const Matrix4x4& viewMatrix, const Matrix4x4& projMatrix) {
// 设置顶点着色器常量
StandardVertexShader::Constants vsConstants;
vsConstants.worldMatrix = object.GetTransformMatrix();
vsConstants.viewMatrix = viewMatrix;
vsConstants.projectionMatrix = projMatrix;
vertexShader.SetConstants(&vsConstants, sizeof(vsConstants));
// 设置像素着色器常量
StandardPixelShader::Constants psConstants;
psConstants.ambientColor = Vector3(0.2f, 0.2f, 0.2f);
psConstants.ambientIntensity = 0.1f;
psConstants.lightDirection = Vector3(0.5f, -0.5f, 0.5f);
psConstants.lightColor = Vector3(1.0f, 1.0f, 1.0f);
psConstants.lightIntensity = 1.0f;
psConstants.cameraPosition = camera.GetPosition();
psConstants.specularPower = 32.0f;
pixelShader.SetConstants(&psConstants, sizeof(psConstants));
// 设置纹理
if (object.GetMaterial().diffuseMap) {
pixelShader.SetTexture(0, object.GetMaterial().diffuseMap);
}
}
void RenderObject(const Mesh& mesh) {
// 处理所有顶点
std::vector<VertexShaderOutput> vertexOutput;
vertexOutput.reserve(mesh.vertices.size());
for (const auto& vertex : mesh.vertices) {
VertexShaderInput vsInput;
vsInput.position = vertex.position;
vsInput.normal = vertex.normal;
vsInput.texCoord = vertex.texCoord;
vsInput.color = vertex.color;
vertexOutput.push_back(vertexShader.ProcessVertex(vsInput));
}
// 处理所有三角形
for (size_t i = 0; i < mesh.indices.size(); i += 3) {
size_t idx0 = mesh.indices[i];
size_t idx1 = mesh.indices[i+1];
size_t idx2 = mesh.indices[i+2];
// 裁剪和光栅化三角形
RasterizeTriangle(vertexOutput[idx0], vertexOutput[idx1], vertexOutput[idx2]);
}
}
void RasterizeTriangle(const VertexShaderOutput& v0, const VertexShaderOutput& v1,
const VertexShaderOutput& v2) {
// 透视除法
Vector3 ndc0 = PerspectiveDivide(v0.position);
Vector3 ndc1 = PerspectiveDivide(v1.position);
Vector3 ndc2 = PerspectiveDivide(v2.position);
// 视口变换
Vector2 screen0 = ViewportTransform(ndc0);
Vector2 screen1 = ViewportTransform(ndc1);
Vector2 screen2 = ViewportTransform(ndc2);
// 计算三角形的包围盒
int minX = max(0, static_cast<int>(min(min(screen0.x, screen1.x), screen2.x)));
int minY = max(0, static_cast<int>(min(min(screen0.y, screen1.y), screen2.y)));
int maxX = min(screenWidth - 1, static_cast<int>(max(max(screen0.x, screen1.x), screen2.x)));
int maxY = min(screenHeight - 1, static_cast<int>(max(max(screen0.y, screen1.y), screen2.y)));
// 计算边缘函数的分母
float area = EdgeFunction(screen0, screen1, screen2);
if (fabs(area) < 0.0001f) {
return; // 面积太小,跳过
}
// 遍历包围盒中的所有像素
for (int y = minY; y <= maxY; y++) {
for (int x = minX; x <= maxX; x++) {
Vector2 pixel(x + 0.5f, y + 0.5f); // 像素中心
// 计算重心坐标
float w0 = EdgeFunction(screen1, screen2, pixel) / area;
float w1 = EdgeFunction(screen2, screen0, pixel) / area;
float w2 = EdgeFunction(screen0, screen1, pixel) / area;
// 如果像素在三角形内
if (w0 >= 0 && w1 >= 0 && w2 >= 0) {
// 透视校正插值
float correctedW0 = w0 / v0.position.w;
float correctedW1 = w1 / v1.position.w;
float correctedW2 = w2 / v2.position.w;
float sum = correctedW0 + correctedW1 + correctedW2;
correctedW0 /= sum;
correctedW1 /= sum;
correctedW2 /= sum;
// 计算插值深度(NDC中的z在屏幕空间内是线性的,直接用未校正的屏幕空间重心坐标插值)
float depth = w0 * ndc0.z + w1 * ndc1.z + w2 * ndc2.z;
// 深度测试
int index = y * screenWidth + x;
if (depth < depthBuffer[index]) {
// 更新深度缓冲区
depthBuffer[index] = depth;
// 插值其他顶点属性
PixelShaderInput psInput;
psInput.worldPos = InterpolateVector3(v0.worldPos, v1.worldPos, v2.worldPos,
correctedW0, correctedW1, correctedW2);
psInput.normal = InterpolateVector3(v0.normal, v1.normal, v2.normal,
correctedW0, correctedW1, correctedW2);
psInput.texCoord = InterpolateVector2(v0.texCoord, v1.texCoord, v2.texCoord,
correctedW0, correctedW1, correctedW2);
psInput.color = InterpolateVector4(v0.color, v1.color, v2.color,
correctedW0, correctedW1, correctedW2);
// 调用像素着色器
PixelShaderOutput psOutput = pixelShader.ProcessPixel(psInput);
// 写入颜色缓冲区
colorBuffer[index] = ColorToUint32(psOutput.color);
}
}
}
}
}
// 其他辅助函数...
StandardVertexShader vertexShader;
StandardPixelShader pixelShader;
};
// 延迟渲染管道
class DeferredRenderingPipeline {
public:
bool Initialize(int width, int height) {
screenWidth = width;
screenHeight = height;
// 创建G-Buffer
positionBuffer = new Vector3[width * height];
normalBuffer = new Vector3[width * height];
diffuseBuffer = new Vector4[width * height];
specularBuffer = new Vector4[width * height];
// 创建深度缓冲区
depthBuffer = new float[width * height];
// 创建颜色缓冲区
colorBuffer = new uint32_t[width * height];
return true;
}
void Render(const Scene& scene, const Camera& camera) {
// 清空所有缓冲区
ClearBuffers();
// 几何处理阶段 - 填充G-Buffer
GeometryPass(scene, camera);
// 光照处理阶段 - 使用G-Buffer计算光照
LightingPass(scene, camera);
// 应用后处理效果
ApplyPostProcessing();
// 显示最终图像
PresentFrame();
}
private:
int screenWidth;
int screenHeight;
Vector3* positionBuffer; // 世界空间位置
Vector3* normalBuffer; // 法线
Vector4* diffuseBuffer; // 漫反射颜色和alpha
Vector4* specularBuffer; // 高光颜色和强度
float* depthBuffer;
uint32_t* colorBuffer;
// 清空所有缓冲区
void ClearBuffers() {
// 清空G-Buffer
for (int i = 0; i < screenWidth * screenHeight; i++) {
positionBuffer[i] = Vector3(0, 0, 0);
normalBuffer[i] = Vector3(0, 0, 0);
diffuseBuffer[i] = Vector4(0, 0, 0, 0);
specularBuffer[i] = Vector4(0, 0, 0, 0);
}
// 清空深度缓冲区
for (int i = 0; i < screenWidth * screenHeight; i++) {
depthBuffer[i] = 1.0f;
}
// 清空颜色缓冲区
memset(colorBuffer, 0, screenWidth * screenHeight * sizeof(uint32_t));
}
// 几何处理阶段
void GeometryPass(const Scene& scene, const Camera& camera) {
// 获取视图和投影矩阵
Matrix4x4 viewMatrix = camera.GetViewMatrix();
Matrix4x4 projMatrix = camera.GetProjectionMatrix();
// 渲染所有物体到G-Buffer
for (const auto& object : scene.GetObjects()) {
const Mesh& mesh = object.GetMesh();
const Material& material = object.GetMaterial();
// 设置G-Buffer着色器
SetupGBufferShader(object, camera, viewMatrix, projMatrix);
// 渲染物体到G-Buffer
RenderObjectToGBuffer(mesh, material);
}
}
// 光照处理阶段
void LightingPass(const Scene& scene, const Camera& camera) {
// 遍历所有像素
for (int y = 0; y < screenHeight; y++) {
for (int x = 0; x < screenWidth; x++) {
int index = y * screenWidth + x;
// 获取G-Buffer数据
Vector3 position = positionBuffer[index];
Vector3 normal = normalBuffer[index];
Vector4 diffuse = diffuseBuffer[index];
Vector4 specular = specularBuffer[index];
// 如果没有有效数据(背景像素),跳过
if (normal.x == 0 && normal.y == 0 && normal.z == 0) {
continue;
}
// 计算环境光
Vector3 ambientColor(0.2f, 0.2f, 0.2f);
float ambientIntensity = 0.1f;
Vector3 ambient = Vector3::Multiply(ambientColor, ambientIntensity);
// 初始化累积光照
Vector3 lighting = ambient;
// 处理所有光源
for (const auto& light : scene.GetLights()) {
// 根据光源类型计算光照贡献
Vector3 lightContribution = CalculateLightContribution(
light, position, normal, diffuse, specular, camera.GetPosition());
// 累加光照贡献
lighting = Vector3::Add(lighting, lightContribution);
}
// 计算最终颜色
Vector3 finalColor = Vector3::Multiply(
Vector3(diffuse.x, diffuse.y, diffuse.z),
lighting
);
// 将结果写入颜色缓冲区
colorBuffer[index] = ColorToUint32(Vector4(finalColor.x, finalColor.y, finalColor.z, diffuse.w));
}
}
}
// 计算单个光源的贡献
Vector3 CalculateLightContribution(
const Light& light, const Vector3& position, const Vector3& normal,
const Vector4& diffuse, const Vector4& specular, const Vector3& cameraPosition) {
Vector3 contribution(0, 0, 0);
switch (light.GetType()) {
case LightType::Directional: {
// 计算方向光的贡献
Vector3 lightDir = Vector3::Normalize(light.GetDirection());
float nDotL = max(0.0f, Vector3::Dot(normal, Vector3::Negate(lightDir)));
// 漫反射
Vector3 diffuseLight = Vector3::Multiply(light.GetColor(), light.GetIntensity() * nDotL);
// 镜面反射
Vector3 viewDir = Vector3::Normalize(Vector3::Subtract(cameraPosition, position));
Vector3 reflectDir = Vector3::Reflect(lightDir, normal);
float specFactor = pow(max(0.0f, Vector3::Dot(viewDir, reflectDir)), specular.w * 100.0f);
Vector3 specularLight = Vector3::Multiply(light.GetColor(), light.GetIntensity() * specFactor);
// 组合贡献
contribution = Vector3::Add(diffuseLight,
Vector3::Multiply(Vector3(specular.x, specular.y, specular.z), specularLight));
break;
}
case LightType::Point: {
// 计算点光源的贡献
Vector3 lightVec = Vector3::Subtract(light.GetPosition(), position);
float distance = Vector3::Length(lightVec);
Vector3 lightDir = Vector3::Divide(lightVec, distance);
// 计算衰减
float attenuation = 1.0f / (1.0f + light.GetAttenuationLinear() * distance +
light.GetAttenuationQuadratic() * distance * distance);
float nDotL = max(0.0f, Vector3::Dot(normal, lightDir));
// 漫反射
Vector3 diffuseLight = Vector3::Multiply(light.GetColor(), light.GetIntensity() * nDotL * attenuation);
// 镜面反射
Vector3 viewDir = Vector3::Normalize(Vector3::Subtract(cameraPosition, position));
Vector3 reflectDir = Vector3::Reflect(Vector3::Negate(lightDir), normal);
float specFactor = pow(max(0.0f, Vector3::Dot(viewDir, reflectDir)), specular.w * 100.0f);
Vector3 specularLight = Vector3::Multiply(light.GetColor(), light.GetIntensity() * specFactor * attenuation);
// 组合贡献
contribution = Vector3::Add(diffuseLight,
Vector3::Multiply(Vector3(specular.x, specular.y, specular.z), specularLight));
break;
}
case LightType::Spot: {
// 计算聚光灯的贡献
Vector3 lightVec = Vector3::Subtract(light.GetPosition(), position);
float distance = Vector3::Length(lightVec);
Vector3 lightDir = Vector3::Divide(lightVec, distance);
// 计算锥形衰减
float spotEffect = Vector3::Dot(lightDir, Vector3::Normalize(Vector3::Negate(light.GetDirection())));
if (spotEffect > light.GetSpotCutoff()) {
// 在锥形内
spotEffect = pow(spotEffect, light.GetSpotExponent());
// 计算距离衰减
float attenuation = spotEffect / (1.0f + light.GetAttenuationLinear() * distance +
light.GetAttenuationQuadratic() * distance * distance);
float nDotL = max(0.0f, Vector3::Dot(normal, lightDir));
// 漫反射
Vector3 diffuseLight = Vector3::Multiply(light.GetColor(), light.GetIntensity() * nDotL * attenuation);
// 镜面反射
Vector3 viewDir = Vector3::Normalize(Vector3::Subtract(cameraPosition, position));
Vector3 reflectDir = Vector3::Reflect(Vector3::Negate(lightDir), normal);
float specFactor = pow(max(0.0f, Vector3::Dot(viewDir, reflectDir)), specular.w * 100.0f);
Vector3 specularLight = Vector3::Multiply(light.GetColor(), light.GetIntensity() * specFactor * attenuation);
// 组合贡献
contribution = Vector3::Add(diffuseLight,
Vector3::Multiply(Vector3(specular.x, specular.y, specular.z), specularLight));
}
break;
}
}
return contribution;
}
// 设置G-Buffer着色器
void SetupGBufferShader(const Object& object, const Camera& camera,
const Matrix4x4& viewMatrix, const Matrix4x4& projMatrix) {
// 类似于前向渲染中的设置,但使用专门的G-Buffer着色器
// ...
}
// 渲染物体到G-Buffer
void RenderObjectToGBuffer(const Mesh& mesh, const Material& material) {
// 类似于前向渲染,但输出到G-Buffer
// ...
}
// 其他辅助函数...
};
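上面的 SetupGBufferShader 与 RenderObjectToGBuffer 被省略了。作为补充,下面是一个假设性的示意,展示几何阶段光栅化出一个像素并通过深度测试后,可能如何把表面属性写入各个G-Buffer。命名与成员沿用上文 DeferredRenderingPipeline,光栅化与属性插值细节省略:
cpp
// 假设性示意:几何阶段在像素(x, y)处写入G-Buffer
// worldPos/normal/albedo/specPower 由顶点属性插值与材质采样得到
void WriteGBufferPixel(
    int x, int y, int screenWidth,
    float depth, float* depthBuffer,
    const Vector3& worldPos, const Vector3& normal,
    const Vector4& albedo, float specPower,
    Vector3* positionBuffer, Vector3* normalBuffer,
    Vector4* diffuseBuffer, Vector4* specularBuffer) {
    int index = y * screenWidth + x;
    if (depth >= depthBuffer[index]) {
        return; // 深度测试失败,保留更近的表面
    }
    depthBuffer[index] = depth;
    positionBuffer[index] = worldPos;                  // 世界空间位置
    normalBuffer[index] = Vector3::Normalize(normal);  // 法线
    diffuseBuffer[index] = albedo;                     // 漫反射颜色 + alpha
    // 高光颜色放在xyz,w存放归一化后的高光指数(光照阶段按 specular.w * 100 还原)
    specularBuffer[index] = Vector4(1.0f, 1.0f, 1.0f, specPower / 100.0f);
}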
10.2.2 材质系统
材质系统定义了物体的外观属性,包括颜色、纹理、反射率等:
cpp
// 材质基类
class Material {
public:
virtual ~Material() {}
// 设置着色器参数
virtual void SetShaderParameters(IVertexShader* vertexShader, IPixelShader* pixelShader) const = 0;
// 获取渲染队列(用于透明度排序)
virtual int GetRenderQueue() const = 0;
// 是否透明
virtual bool IsTransparent() const = 0;
};
// 标准材质
class StandardMaterial : public Material {
public:
StandardMaterial()
: diffuseColor(1.0f, 1.0f, 1.0f, 1.0f),
specularColor(1.0f, 1.0f, 1.0f, 1.0f),
shininess(32.0f),
diffuseMap(nullptr),
normalMap(nullptr),
specularMap(nullptr),
emissiveMap(nullptr),
emissiveColor(0.0f, 0.0f, 0.0f),
emissiveIntensity(0.0f),
renderQueue(1000),
transparent(false) {}
void SetShaderParameters(IVertexShader* vertexShader, IPixelShader* pixelShader) const override {
// 设置像素着色器常量
struct {
Vector4 diffuseColor;
Vector4 specularColor;
float shininess;
Vector3 emissiveColor;
float emissiveIntensity;
int hasDiffuseMap;
int hasNormalMap;
int hasSpecularMap;
int hasEmissiveMap;
} psConstants;
psConstants.diffuseColor = diffuseColor;
psConstants.specularColor = specularColor;
psConstants.shininess = shininess;
psConstants.emissiveColor = emissiveColor;
psConstants.emissiveIntensity = emissiveIntensity;
psConstants.hasDiffuseMap = diffuseMap ? 1 : 0;
psConstants.hasNormalMap = normalMap ? 1 : 0;
psConstants.hasSpecularMap = specularMap ? 1 : 0;
psConstants.hasEmissiveMap = emissiveMap ? 1 : 0;
pixelShader->SetConstants(&psConstants, sizeof(psConstants));
// 设置纹理
if (diffuseMap) pixelShader->SetTexture(0, diffuseMap);
if (normalMap) pixelShader->SetTexture(1, normalMap);
if (specularMap) pixelShader->SetTexture(2, specularMap);
if (emissiveMap) pixelShader->SetTexture(3, emissiveMap);
}
int GetRenderQueue() const override {
return renderQueue;
}
bool IsTransparent() const override {
return transparent;
}
// 设置漫反射颜色
void SetDiffuseColor(const Vector4& color) {
diffuseColor = color;
// 如果alpha小于1,标记为透明
transparent = diffuseColor.w < 0.99f;
// 透明物体使用更高的渲染队列
if (transparent) {
renderQueue = 2000;
}
}
// 设置漫反射纹理
void SetDiffuseMap(Texture* texture) {
diffuseMap = texture;
}
// 设置法线贴图
void SetNormalMap(Texture* texture) {
normalMap = texture;
}
// 设置高光贴图
void SetSpecularMap(Texture* texture) {
specularMap = texture;
}
// 设置自发光颜色和强度
void SetEmissive(const Vector3& color, float intensity) {
emissiveColor = color;
emissiveIntensity = intensity;
}
// 设置自发光贴图
void SetEmissiveMap(Texture* texture) {
emissiveMap = texture;
}
// 设置光泽度(影响高光大小)
void SetShininess(float value) {
shininess = value;
}
// 手动设置渲染队列
void SetRenderQueue(int queue) {
renderQueue = queue;
}
// 读取接口(供后文的光照模型等代码使用)
const Vector4& GetDiffuseColor() const { return diffuseColor; }
const Vector4& GetSpecularColor() const { return specularColor; }
float GetShininess() const { return shininess; }
Texture* GetDiffuseMap() const { return diffuseMap; }
Texture* GetNormalMap() const { return normalMap; }
Texture* GetSpecularMap() const { return specularMap; }
Texture* GetEmissiveMap() const { return emissiveMap; }
private:
Vector4 diffuseColor; // 漫反射颜色和alpha
Vector4 specularColor; // 高光颜色和强度
float shininess; // 光泽度
Vector3 emissiveColor; // 自发光颜色
float emissiveIntensity; // 自发光强度
Texture* diffuseMap; // 漫反射贴图
Texture* normalMap; // 法线贴图
Texture* specularMap; // 高光贴图
Texture* emissiveMap; // 自发光贴图
int renderQueue; // 渲染队列
bool transparent; // 是否透明
};
// 材质管理器
class MaterialManager {
public:
static MaterialManager& GetInstance() {
static MaterialManager instance;
return instance;
}
// 创建标准材质
StandardMaterial* CreateStandardMaterial(const std::string& name) {
if (materials_.find(name) != materials_.end()) {
return nullptr; // 已存在同名材质
}
StandardMaterial* material = new StandardMaterial();
materials_[name] = material;
return material;
}
// 获取材质
Material* GetMaterial(const std::string& name) {
auto it = materials_.find(name);
if (it != materials_.end()) {
return it->second;
}
return nullptr;
}
// 删除材质
void DeleteMaterial(const std::string& name) {
auto it = materials_.find(name);
if (it != materials_.end()) {
delete it->second;
materials_.erase(it);
}
}
// 释放所有材质
void Clear() {
for (auto& pair : materials_) {
delete pair.second;
}
materials_.clear();
}
private:
MaterialManager() {}
~MaterialManager() {
Clear();
}
std::unordered_map<std::string, Material*> materials_;
};
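材质管理器的典型用法如下(示意代码,假设纹理已通过后文的 TextureManager 加载完成):
cpp
// 创建并配置一个角色材质的示意
void SetupCharacterMaterial(Texture* diffuseTex, Texture* normalTex) {
    MaterialManager& mgr = MaterialManager::GetInstance();
    StandardMaterial* material = mgr.CreateStandardMaterial("character_body");
    if (!material) {
        return; // 已存在同名材质
    }
    material->SetDiffuseColor(Vector4(1.0f, 1.0f, 1.0f, 1.0f));
    material->SetDiffuseMap(diffuseTex);
    material->SetNormalMap(normalTex);
    material->SetShininess(64.0f);
    // 之后渲染时可通过名字取回: mgr.GetMaterial("character_body")
}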
10.2.3 纹理资源管理
纹理是渲染中的重要资源,需要有效地管理和使用:
cpp
// 纹理格式枚举
enum class TextureFormat {
R8, // 单通道8位
RG8, // 双通道8位
RGB8, // 三通道8位
RGBA8, // 四通道8位
R16F, // 单通道16位浮点
RG16F, // 双通道16位浮点
RGB16F, // 三通道16位浮点
RGBA16F, // 四通道16位浮点
R32F, // 单通道32位浮点
RG32F, // 双通道32位浮点
RGB32F, // 三通道32位浮点
RGBA32F, // 四通道32位浮点
DXT1, // DXT1压缩格式
DXT3, // DXT3压缩格式
DXT5, // DXT5压缩格式
Depth16, // 16位深度
Depth24, // 24位深度
Depth32, // 32位深度
Depth24Stencil8 // 24位深度,8位模板
};
// 纹理类
class Texture {
public:
Texture() : width(0), height(0), format(TextureFormat::RGBA8), data(nullptr),
mipLevels(1), isRenderTarget(false) {}
~Texture() {
if (data && !isRenderTarget) {
delete[] data;
}
}
// 创建空纹理
bool Create(int w, int h, TextureFormat fmt, int mips = 1) {
width = w;
height = h;
format = fmt;
mipLevels = mips;
// 计算纹理大小并分配内存
size_t dataSize = CalculateDataSize();
data = new uint8_t[dataSize];
return data != nullptr;
}
// 从内存数据创建纹理
bool CreateFromMemory(int w, int h, TextureFormat fmt, const void* srcData, int mips = 1) {
if (!Create(w, h, fmt, mips)) {
return false;
}
// 计算要复制的主图像大小
size_t mainImageSize = CalculateMipLevelSize(0);
// 复制主图像数据
memcpy(data, srcData, mainImageSize);
// 生成mipmap(如果需要)
if (mipLevels > 1) {
GenerateMipMaps();
}
return true;
}
// 从文件创建纹理
bool LoadFromFile(const std::string& filename) {
// 实际实现将调用图像加载库(如stb_image)
// 这里简化为示例代码
// 加载图像数据(实际实现会调用 stbi_load 等函数填充这些变量)
int w = 0, h = 0, channels = 0;
uint8_t* imageData = nullptr;
// imageData = stbi_load(filename.c_str(), &w, &h, &channels, 0);
if (!imageData) {
return false; // 未接入真正的图像加载库时直接返回失败,避免使用未初始化的数据
}
// 根据通道数确定格式
TextureFormat fmt;
switch (channels) {
case 1: fmt = TextureFormat::R8; break;
case 2: fmt = TextureFormat::RG8; break;
case 3: fmt = TextureFormat::RGB8; break;
case 4: fmt = TextureFormat::RGBA8; break;
default: return false;
}
// 创建纹理
bool result = CreateFromMemory(w, h, fmt, imageData);
// 释放加载的图像数据
// stbi_image_free(imageData);
return result;
}
// 采样纹理
Vector4 Sample(const Vector2& uv) const {
// 计算纹理坐标
float u = fmod(uv.x, 1.0f);
float v = fmod(uv.y, 1.0f);
if (u < 0) u += 1.0f;
if (v < 0) v += 1.0f;
// 计算像素坐标(乘以尺寸减1,避免u或v接近1时越过最后一行/列像素)
float x = u * (width - 1);
float y = v * (height - 1);
// 使用双线性插值
return SampleBilinear(x, y);
}
// 获取纹理属性
int GetWidth() const { return width; }
int GetHeight() const { return height; }
TextureFormat GetFormat() const { return format; }
int GetMipLevels() const { return mipLevels; }
const uint8_t* GetData() const { return data; }
uint8_t* GetData() { return data; }
// 获取特定mipmap级别的数据
uint8_t* GetMipLevelData(int level) {
if (level < 0 || level >= mipLevels) {
return nullptr;
}
uint8_t* result = data;
for (int i = 0; i < level; ++i) {
result += CalculateMipLevelSize(i);
}
return result;
}
// 设置为渲染目标
void SetAsRenderTarget(bool value) {
isRenderTarget = value;
}
private:
int width;
int height;
TextureFormat format;
uint8_t* data;
int mipLevels;
bool isRenderTarget;
// 计算整个纹理数据大小(包括所有mipmap)
size_t CalculateDataSize() const {
size_t total = 0;
for (int i = 0; i < mipLevels; ++i) {
total += CalculateMipLevelSize(i);
}
return total;
}
// 计算特定mipmap级别的大小
size_t CalculateMipLevelSize(int level) const {
int w = max(1, width >> level);
int h = max(1, height >> level);
switch (format) {
case TextureFormat::R8:
return w * h;
case TextureFormat::RG8:
return w * h * 2;
case TextureFormat::RGB8:
return w * h * 3;
case TextureFormat::RGBA8:
return w * h * 4;
case TextureFormat::R16F:
return w * h * 2;
case TextureFormat::RG16F:
return w * h * 4;
case TextureFormat::RGB16F:
return w * h * 6;
case TextureFormat::RGBA16F:
return w * h * 8;
case TextureFormat::R32F:
return w * h * 4;
case TextureFormat::RG32F:
return w * h * 8;
case TextureFormat::RGB32F:
return w * h * 12;
case TextureFormat::RGBA32F:
return w * h * 16;
// 压缩格式计算略复杂,这里简化
case TextureFormat::DXT1:
return max(8, w * h / 2);
case TextureFormat::DXT3:
case TextureFormat::DXT5:
return max(16, w * h);
case TextureFormat::Depth16:
return w * h * 2;
case TextureFormat::Depth24:
case TextureFormat::Depth32:
return w * h * 4;
case TextureFormat::Depth24Stencil8:
return w * h * 4;
default:
return 0;
}
}
// 生成mipmap
void GenerateMipMaps() {
// 简化版本,实际实现会更高效
for (int level = 1; level < mipLevels; ++level) {
int srcWidth = max(1, width >> (level - 1));
int srcHeight = max(1, height >> (level - 1));
int dstWidth = max(1, srcWidth / 2);
int dstHeight = max(1, srcHeight / 2);
uint8_t* srcData = GetMipLevelData(level - 1);
uint8_t* dstData = GetMipLevelData(level);
// 对每个目标像素,取源图像中2x2区域的平均值
for (int y = 0; y < dstHeight; ++y) {
for (int x = 0; x < dstWidth; ++x) {
// 计算源图像中的4个像素位置
int x0 = x * 2;
int y0 = y * 2;
int x1 = min(x0 + 1, srcWidth - 1);
int y1 = min(y0 + 1, srcHeight - 1);
// 实际的像素混合逻辑取决于纹理格式
// 这里使用一个简化的RGBA8示例
if (format == TextureFormat::RGBA8) {
int pixelSize = 4;
// 获取4个源像素
uint8_t* p00 = srcData + (y0 * srcWidth + x0) * pixelSize;
uint8_t* p01 = srcData + (y0 * srcWidth + x1) * pixelSize;
uint8_t* p10 = srcData + (y1 * srcWidth + x0) * pixelSize;
uint8_t* p11 = srcData + (y1 * srcWidth + x1) * pixelSize;
// 计算平均值
uint8_t* dst = dstData + (y * dstWidth + x) * pixelSize;
for (int i = 0; i < pixelSize; ++i) {
dst[i] = (p00[i] + p01[i] + p10[i] + p11[i]) / 4;
}
}
// 其他格式的处理...
}
}
}
}
// 双线性插值采样
Vector4 SampleBilinear(float x, float y) const {
// 计算整数和小数部分
int x0 = static_cast<int>(x);
int y0 = static_cast<int>(y);
int x1 = min(x0 + 1, width - 1);
int y1 = min(y0 + 1, height - 1);
float fx = x - x0;
float fy = y - y0;
// 获取四个最近的像素
Vector4 c00 = GetPixel(x0, y0);
Vector4 c01 = GetPixel(x0, y1);
Vector4 c10 = GetPixel(x1, y0);
Vector4 c11 = GetPixel(x1, y1);
// 沿x轴插值
Vector4 c0 = Vector4::Lerp(c00, c10, fx);
Vector4 c1 = Vector4::Lerp(c01, c11, fx);
// 沿y轴插值
return Vector4::Lerp(c0, c1, fy);
}
// 获取像素颜色
Vector4 GetPixel(int x, int y) const {
if (x < 0 || x >= width || y < 0 || y >= height) {
return Vector4(0, 0, 0, 0);
}
int index = y * width + x;
switch (format) {
case TextureFormat::R8: {
uint8_t r = data[index];
return Vector4(r / 255.0f, 0, 0, 1);
}
case TextureFormat::RG8: {
uint8_t r = data[index * 2];
uint8_t g = data[index * 2 + 1];
return Vector4(r / 255.0f, g / 255.0f, 0, 1);
}
case TextureFormat::RGB8: {
uint8_t r = data[index * 3];
uint8_t g = data[index * 3 + 1];
uint8_t b = data[index * 3 + 2];
return Vector4(r / 255.0f, g / 255.0f, b / 255.0f, 1);
}
case TextureFormat::RGBA8: {
uint8_t r = data[index * 4];
uint8_t g = data[index * 4 + 1];
uint8_t b = data[index * 4 + 2];
uint8_t a = data[index * 4 + 3];
return Vector4(r / 255.0f, g / 255.0f, b / 255.0f, a / 255.0f);
}
// 其他格式的处理...
default:
return Vector4(0, 0, 0, 0);
}
}
};
// 纹理管理器
class TextureManager {
public:
static TextureManager& GetInstance() {
static TextureManager instance;
return instance;
}
// 加载纹理
Texture* LoadTexture(const std::string& filename) {
// 检查是否已加载
auto it = textures_.find(filename);
if (it != textures_.end()) {
return it->second;
}
// 创建新纹理
Texture* texture = new Texture();
if (!texture->LoadFromFile(filename)) {
delete texture;
return nullptr;
}
// 添加到管理器
textures_[filename] = texture;
return texture;
}
// 创建空纹理
Texture* CreateTexture(const std::string& name, int width, int height, TextureFormat format) {
// 检查是否已存在
auto it = textures_.find(name);
if (it != textures_.end()) {
return nullptr;
}
// 创建新纹理
Texture* texture = new Texture();
if (!texture->Create(width, height, format)) {
delete texture;
return nullptr;
}
// 添加到管理器
textures_[name] = texture;
return texture;
}
// 获取纹理
Texture* GetTexture(const std::string& name) {
auto it = textures_.find(name);
if (it != textures_.end()) {
return it->second;
}
return nullptr;
}
// 释放纹理
void ReleaseTexture(const std::string& name) {
auto it = textures_.find(name);
if (it != textures_.end()) {
delete it->second;
textures_.erase(it);
}
}
// 释放所有纹理
void ReleaseAll() {
for (auto& pair : textures_) {
delete pair.second;
}
textures_.clear();
}
private:
TextureManager() {}
~TextureManager() {
ReleaseAll();
}
std::unordered_map<std::string, Texture*> textures_;
};
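纹理管理器与采样接口的配合用法示意如下(假设文件路径有效,且 LoadFromFile 已接入实际的图像加载库):
cpp
// 加载纹理并在CPU侧采样一个UV坐标的示意
Vector4 LoadAndSampleTexture(const std::string& filename, const Vector2& uv) {
    Texture* texture = TextureManager::GetInstance().LoadTexture(filename);
    if (!texture) {
        return Vector4(1.0f, 0.0f, 1.0f, 1.0f); // 加载失败时返回醒目的品红色
    }
    return texture->Sample(uv); // 内部使用双线性插值
}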
10.3 高级光照及全局光照
10.3.1 光照模型
现代游戏引擎支持多种光照模型,从简单的Phong模型到复杂的基于物理的渲染(PBR):
cpp
// 光照模型接口
class ILightingModel {
public:
virtual ~ILightingModel() {}
// 计算光照
virtual Vector3 CalculateLighting(
const Vector3& position, // 世界空间位置
const Vector3& normal, // 法线
const Vector3& viewDir, // 视线方向
const Material& material, // 材质
const Light& light, // 光源
const Vector2& texCoord // 纹理坐标
) const = 0;
};
// Phong光照模型
class PhongLightingModel : public ILightingModel {
public:
Vector3 CalculateLighting(
const Vector3& position,
const Vector3& normal,
const Vector3& viewDir,
const Material& material,
const Light& light,
const Vector2& texCoord
) const override {
const StandardMaterial& stdMaterial = static_cast<const StandardMaterial&>(material);
// 获取材质属性
Vector4 diffuseColor = stdMaterial.GetDiffuseColor();
if (stdMaterial.GetDiffuseMap()) {
diffuseColor = stdMaterial.GetDiffuseMap()->Sample(texCoord);
}
Vector4 specularColor = stdMaterial.GetSpecularColor();
if (stdMaterial.GetSpecularMap()) {
specularColor = stdMaterial.GetSpecularMap()->Sample(texCoord);
}
float shininess = stdMaterial.GetShininess();
// 计算光照方向和衰减
Vector3 lightDir;
float attenuation = 1.0f;
if (light.GetType() == LightType::Directional) {
// 平行光
lightDir = Vector3::Normalize(Vector3::Negate(light.GetDirection()));
} else {
// 点光源或聚光灯
Vector3 lightVec = Vector3::Subtract(light.GetPosition(), position);
float distance = Vector3::Length(lightVec);
lightDir = Vector3::Normalize(lightVec);
// 距离衰减
attenuation = 1.0f / (1.0f + light.GetAttenuationLinear() * distance +
light.GetAttenuationQuadratic() * distance * distance);
// 聚光灯锥形衰减
if (light.GetType() == LightType::Spot) {
float spotEffect = Vector3::Dot(lightDir, Vector3::Normalize(Vector3::Negate(light.GetDirection())));
if (spotEffect < light.GetSpotCutoff()) {
attenuation = 0.0f;
} else {
spotEffect = pow(spotEffect, light.GetSpotExponent());
attenuation *= spotEffect;
}
}
}
// 漫反射光照
float nDotL = max(0.0f, Vector3::Dot(normal, lightDir));
Vector3 diffuse = Vector3::Multiply(
Vector3(diffuseColor.x, diffuseColor.y, diffuseColor.z),
Vector3::Multiply(light.GetColor(), light.GetIntensity() * nDotL * attenuation)
);
// 镜面反射光照
Vector3 reflectDir = Vector3::Reflect(Vector3::Negate(lightDir), normal);
float specFactor = pow(max(0.0f, Vector3::Dot(viewDir, reflectDir)), shininess);
Vector3 specular = Vector3::Multiply(
Vector3(specularColor.x, specularColor.y, specularColor.z),
Vector3::Multiply(light.GetColor(), light.GetIntensity() * specFactor * attenuation)
);
// 返回光照结果(漫反射+镜面反射)
return Vector3::Add(diffuse, specular);
}
};
// 基于物理的渲染(PBR)光照模型
class PBRLightingModel : public ILightingModel {
public:
Vector3 CalculateLighting(
const Vector3& position,
const Vector3& normal,
const Vector3& viewDir,
const Material& material,
const Light& light,
const Vector2& texCoord
) const override {
const PBRMaterial& pbrMaterial = static_cast<const PBRMaterial&>(material);
// 获取PBR材质参数
Vector3 albedo = pbrMaterial.GetAlbedo(texCoord);
float metallic = pbrMaterial.GetMetallic(texCoord);
float roughness = pbrMaterial.GetRoughness(texCoord);
float ao = pbrMaterial.GetAO(texCoord);
// 计算光照方向和衰减
Vector3 lightDir;
float attenuation = 1.0f;
if (light.GetType() == LightType::Directional) {
lightDir = Vector3::Normalize(Vector3::Negate(light.GetDirection()));
} else {
Vector3 lightVec = Vector3::Subtract(light.GetPosition(), position);
float distance = Vector3::Length(lightVec);
lightDir = Vector3::Normalize(lightVec);
attenuation = 1.0f / (1.0f + light.GetAttenuationLinear() * distance +
light.GetAttenuationQuadratic() * distance * distance);
if (light.GetType() == LightType::Spot) {
float spotEffect = Vector3::Dot(lightDir, Vector3::Normalize(Vector3::Negate(light.GetDirection())));
if (spotEffect < light.GetSpotCutoff()) {
attenuation = 0.0f;
} else {
spotEffect = pow(spotEffect, light.GetSpotExponent());
attenuation *= spotEffect;
}
}
}
// 计算半角向量
Vector3 halfVector = Vector3::Normalize(Vector3::Add(lightDir, viewDir));
// 基本光照项
float nDotL = max(0.0f, Vector3::Dot(normal, lightDir));
// PBR光照计算
Vector3 F0 = Vector3(0.04f, 0.04f, 0.04f);
F0 = Vector3::Lerp(F0, albedo, metallic);
// Cook-Torrance BRDF
Vector3 F = FresnelSchlick(max(Vector3::Dot(halfVector, viewDir), 0.0f), F0);
float NDF = DistributionGGX(normal, halfVector, roughness);
float G = GeometrySmith(normal, viewDir, lightDir, roughness);
Vector3 numerator = Vector3::Multiply(F, NDF * G); // 标量 NDF*G 与向量 F 相乘
float denominator = 4.0f * max(Vector3::Dot(normal, viewDir), 0.0f) * max(Vector3::Dot(normal, lightDir), 0.0f) + 0.001f;
Vector3 specular = Vector3::Divide(numerator, denominator);
// kS是菲涅尔反射率,kD是漫反射率
Vector3 kS = F;
Vector3 kD = Vector3::Subtract(Vector3(1.0f, 1.0f, 1.0f), kS);
kD = Vector3::Multiply(kD, 1.0f - metallic);
// 组合直接光照结果
Vector3 radiance = Vector3::Multiply(light.GetColor(), light.GetIntensity() * attenuation);
Vector3 Lo = Vector3::Multiply(
Vector3::Add(Vector3::Multiply(kD, Vector3::Divide(albedo, Vector3(3.14159f, 3.14159f, 3.14159f))), specular),
Vector3::Multiply(radiance, nDotL)
);
return Lo;
}
private:
// PBR辅助函数
// 菲涅尔方程
Vector3 FresnelSchlick(float cosTheta, const Vector3& F0) const {
return Vector3::Add(F0, Vector3::Multiply(Vector3::Subtract(Vector3(1.0f, 1.0f, 1.0f), F0), pow(1.0f - cosTheta, 5.0f)));
}
// GGX法线分布函数
float DistributionGGX(const Vector3& N, const Vector3& H, float roughness) const {
float a = roughness * roughness;
float a2 = a * a;
float NdotH = max(Vector3::Dot(N, H), 0.0f);
float NdotH2 = NdotH * NdotH;
float nom = a2;
float denom = (NdotH2 * (a2 - 1.0f) + 1.0f);
denom = 3.14159f * denom * denom;
return nom / max(denom, 0.001f);
}
// Smith几何阴影函数
float GeometrySchlickGGX(float NdotV, float roughness) const {
float r = (roughness + 1.0f);
float k = (r * r) / 8.0f;
float nom = NdotV;
float denom = NdotV * (1.0f - k) + k;
return nom / max(denom, 0.001f);
}
float GeometrySmith(const Vector3& N, const Vector3& V, const Vector3& L, float roughness) const {
float NdotV = max(Vector3::Dot(N, V), 0.0f);
float NdotL = max(Vector3::Dot(N, L), 0.0f);
float ggx2 = GeometrySchlickGGX(NdotV, roughness);
float ggx1 = GeometrySchlickGGX(NdotL, roughness);
return ggx1 * ggx2;
}
};
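光照模型接口的意义在于渲染器可以在不改动光照循环的情况下切换着色模型。下面是一个累加多光源贡献的示意,材质与光源列表假设来自上文的场景结构,环境光部分在此省略:
cpp
// 用统一接口累加所有光源对某个着色点的直接光照
Vector3 AccumulateDirectLighting(
    const ILightingModel& model,
    const std::vector<Light>& lights,
    const Vector3& position, const Vector3& normal, const Vector3& viewDir,
    const Material& material, const Vector2& texCoord) {
    Vector3 result(0.0f, 0.0f, 0.0f);
    for (const Light& light : lights) {
        result = Vector3::Add(result,
            model.CalculateLighting(position, normal, viewDir, material, light, texCoord));
    }
    return result;
}
// 切换着色模型时只需要换一个实现:
// PhongLightingModel phong;  PBRLightingModel pbr;
// Vector3 c = AccumulateDirectLighting(pbr, scene.GetLights(), p, n, v, mat, uv);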
10.3.2 全局光照
全局光照技术模拟光线在场景中的复杂交互,包括间接光照、反射和环境光遮蔽:
cpp
// 环境光照类
class AmbientLighting {
public:
// 设置环境色
void SetAmbientColor(const Vector3& color) {
ambientColor = color;
}
// 设置环境光强度
void SetAmbientIntensity(float intensity) {
ambientIntensity = intensity;
}
// 获取环境光贡献
Vector3 GetAmbientContribution(const Vector3& baseColor) const {
return Vector3::Multiply(Vector3::Multiply(ambientColor, baseColor), ambientIntensity);
}
private:
Vector3 ambientColor = Vector3(1.0f, 1.0f, 1.0f);
float ambientIntensity = 0.1f;
};
// 环境光遮蔽(AO)
class AmbientOcclusion {
public:
// 计算基于屏幕空间的环境光遮蔽(SSAO)
void ComputeSSAO(
RenderTarget* target,
const Texture* positionTexture,
const Texture* normalTexture,
const Matrix4x4& projectionMatrix,
float radius,
float bias,
int kernelSize) {
// 设置着色器参数
ssaoShader.SetPositionTexture(positionTexture);
ssaoShader.SetNormalTexture(normalTexture);
ssaoShader.SetProjectionMatrix(projectionMatrix);
ssaoShader.SetRadius(radius);
ssaoShader.SetBias(bias);
ssaoShader.SetKernelSize(kernelSize);
// 生成随机采样核心
GenerateSampleKernel(kernelSize);
// 生成随机旋转纹理
GenerateNoiseTexture();
ssaoShader.SetNoiseTexture(noiseTexture);
// 渲染SSAO到目标纹理
ssaoShader.Begin();
renderer.SetRenderTarget(target);
renderer.Clear(Color(1.0f, 1.0f, 1.0f, 1.0f));
renderer.DrawFullscreenQuad();
ssaoShader.End();
// 模糊SSAO结果以减少噪点
BlurSSAO(target);
}
private:
SSAOShader ssaoShader;
Texture* noiseTexture;
std::vector<Vector3> sampleKernel;
// 生成采样核心
void GenerateSampleKernel(int kernelSize) {
sampleKernel.resize(kernelSize);
for (int i = 0; i < kernelSize; ++i) {
// 在半球内生成随机采样点
Vector3 sample(
Random::Range(-1.0f, 1.0f),
Random::Range(-1.0f, 1.0f),
Random::Range(0.0f, 1.0f)
);
sample = Vector3::Normalize(sample);
sample = Vector3::Multiply(sample, Random::Range(0.0f, 1.0f));
// 使样本更靠近原点
float scale = static_cast<float>(i) / kernelSize;
scale = Lerp(0.1f, 1.0f, scale * scale);
sample = Vector3::Multiply(sample, scale);
sampleKernel[i] = sample;
}
// 设置到着色器
ssaoShader.SetSampleKernel(sampleKernel.data(), kernelSize);
}
// 生成噪声纹理
void GenerateNoiseTexture() {
const int noiseSize = 4;
std::vector<Vector3> noise(noiseSize * noiseSize);
for (int i = 0; i < noise.size(); ++i) {
// 生成XY平面上的随机向量
noise[i] = Vector3(
Random::Range(-1.0f, 1.0f),
Random::Range(-1.0f, 1.0f),
0.0f
);
noise[i] = Vector3::Normalize(noise[i]);
}
// 创建噪声纹理
noiseTexture = new Texture();
noiseTexture->Create(noiseSize, noiseSize, TextureFormat::RGB16F);
noiseTexture->SetData(noise.data(), noise.size() * sizeof(Vector3));
}
// 模糊SSAO结果
void BlurSSAO(RenderTarget* target) {
// 使用简单的高斯模糊处理SSAO结果
gaussianBlurShader.SetInputTexture(target->GetColorTexture());
gaussianBlurShader.SetBlurRadius(2.0f);
// 水平模糊
gaussianBlurShader.SetDirection(Vector2(1.0f, 0.0f));
gaussianBlurShader.Begin();
renderer.SetRenderTarget(tempTarget);
renderer.DrawFullscreenQuad();
gaussianBlurShader.End();
// 垂直模糊
gaussianBlurShader.SetInputTexture(tempTarget->GetColorTexture());
gaussianBlurShader.SetDirection(Vector2(0.0f, 1.0f));
gaussianBlurShader.Begin();
renderer.SetRenderTarget(target);
renderer.DrawFullscreenQuad();
gaussianBlurShader.End();
}
GaussianBlurShader gaussianBlurShader;
RenderTarget* tempTarget;
};
// 图像基照明(IBL)
class ImageBasedLighting {
public:
// 设置环境贴图
void SetEnvironmentMap(Texture* envMap) {
environmentMap = envMap;
// 预计算辐照度图和预滤波环境贴图
if (environmentMap) {
PrecomputeIrradianceMap();
PrecomputePrefilterMap();
PrecomputeBRDFLUT();
}
}
// 应用IBL
Vector3 ApplyIBL(
const Vector3& position,
const Vector3& normal,
const Vector3& viewDir,
float roughness,
float metallic,
const Vector3& albedo,
float ao) const {
// 计算反射方向
Vector3 reflectDir = Vector3::Reflect(Vector3::Negate(viewDir), normal);
// 漫反射IBL
Vector3 irradiance = SampleIrradianceMap(normal);
Vector3 diffuse = Vector3::Multiply(irradiance, albedo);
// 菲涅尔项(组合镜面反射IBL时需要)
Vector3 F0 = Vector3(0.04f, 0.04f, 0.04f);
F0 = Vector3::Lerp(F0, albedo, metallic);
float cosTheta = max(Vector3::Dot(normal, viewDir), 0.0f);
Vector3 F = Vector3::Add(F0, Vector3::Multiply(Vector3::Subtract(Vector3(1.0f, 1.0f, 1.0f), F0), pow(1.0f - cosTheta, 5.0f)));
// 镜面反射IBL(split-sum近似: prefilteredColor * (F * brdf.x + brdf.y))
Vector3 prefilteredColor = SamplePrefilterMap(reflectDir, roughness);
Vector2 brdf = SampleBRDFLUT(cosTheta, roughness);
Vector3 specular = Vector3::Multiply(prefilteredColor,
Vector3::Add(Vector3::Multiply(F, brdf.x), Vector3(brdf.y, brdf.y, brdf.y)));
// 组合漫反射和镜面反射IBL
Vector3 kS = F;
Vector3 kD = Vector3::Subtract(Vector3(1.0f, 1.0f, 1.0f), kS);
kD = Vector3::Multiply(kD, 1.0f - metallic);
Vector3 ambient = Vector3::Add(Vector3::Multiply(kD, diffuse), specular);
ambient = Vector3::Multiply(ambient, ao);
return ambient;
}
private:
Texture* environmentMap;
Texture* irradianceMap;
Texture* prefilterMap;
Texture* brdfLUT;
// 预计算辐照度图(用于漫反射IBL)
void PrecomputeIrradianceMap() {
// 创建辐照度图
irradianceMap = new Texture();
irradianceMap->Create(32, 32, TextureFormat::RGB16F, 1, TextureType::Cubemap);
// 设置渲染目标为辐照度图的每个面
for (int face = 0; face < 6; ++face) {
renderer.SetRenderTarget(irradianceMap, face);
renderer.Clear(Color(0, 0, 0, 0));
// 设置着色器
irradianceShader.SetEnvironmentMap(environmentMap);
irradianceShader.SetFace(face);
// 渲染
irradianceShader.Begin();
renderer.DrawFullscreenQuad();
irradianceShader.End();
}
}
// 预计算预滤波环境贴图(用于镜面反射IBL)
void PrecomputePrefilterMap() {
// 创建预滤波图
prefilterMap = new Texture();
prefilterMap->Create(128, 128, TextureFormat::RGB16F, 5, TextureType::Cubemap);
// 为每个粗糙度级别预计算
for (int mip = 0; mip < 5; ++mip) {
// 根据mip级别计算粗糙度
float roughness = static_cast<float>(mip) / 4.0f;
// 渲染立方体贴图的每个面
for (int face = 0; face < 6; ++face) {
renderer.SetRenderTarget(prefilterMap, face, mip);
renderer.Clear(Color(0, 0, 0, 0));
// 设置着色器
prefilterShader.SetEnvironmentMap(environmentMap);
prefilterShader.SetFace(face);
prefilterShader.SetRoughness(roughness);
// 渲染
prefilterShader.Begin();
renderer.DrawFullscreenQuad();
prefilterShader.End();
}
}
}
// 预计算BRDF查找表
void PrecomputeBRDFLUT() {
// 创建BRDF查找表
brdfLUT = new Texture();
brdfLUT->Create(512, 512, TextureFormat::RG16F);
// 渲染BRDF LUT
renderer.SetRenderTarget(brdfLUT);
renderer.Clear(Color(0, 0, 0, 0));
// 设置着色器
brdfShader.Begin();
renderer.DrawFullscreenQuad();
brdfShader.End();
}
// 采样辐照度图
Vector3 SampleIrradianceMap(const Vector3& direction) const {
if (!irradianceMap) return Vector3(0, 0, 0);
return irradianceMap->SampleCube(direction);
}
// 采样预滤波环境贴图
Vector3 SamplePrefilterMap(const Vector3& direction, float roughness) const {
if (!prefilterMap) return Vector3(0, 0, 0);
// 计算mip级别
float mipLevel = roughness * 4.0f;
return prefilterMap->SampleCubeLod(direction, mipLevel);
}
// 采样BRDF查找表
Vector2 SampleBRDFLUT(float NdotV, float roughness) const {
if (!brdfLUT) return Vector2(0, 0);
// 采样BRDF LUT
return brdfLUT->Sample(Vector2(NdotV, roughness)).xy();
}
IBLIrradianceShader irradianceShader;
IBLPrefilterShader prefilterShader;
IBLBRDFShader brdfShader;
};
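IBL通常作为环境(间接)光照与直接光照相加。下面的示意展示了PBR直接光照与IBL环境项的组合方式,假设各参数已从材质或G-Buffer中取得,仅说明组合位置:
cpp
// 组合直接光照与IBL环境光照的示意
Vector3 ShadePBRPixel(
    const PBRLightingModel& model, const ImageBasedLighting& ibl,
    const std::vector<Light>& lights,
    const Vector3& position, const Vector3& normal, const Vector3& viewDir,
    const PBRMaterial& material, const Vector2& texCoord) {
    // 累加所有光源的直接光照
    Vector3 direct(0.0f, 0.0f, 0.0f);
    for (const Light& light : lights) {
        direct = Vector3::Add(direct,
            model.CalculateLighting(position, normal, viewDir, material, light, texCoord));
    }
    // IBL提供的环境(间接)光照
    Vector3 ambient = ibl.ApplyIBL(
        position, normal, viewDir,
        material.GetRoughness(texCoord), material.GetMetallic(texCoord),
        material.GetAlbedo(texCoord), material.GetAO(texCoord));
    return Vector3::Add(direct, ambient);
}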
10.3.3 阴影技术
阴影是增强场景真实感的重要元素,现代游戏引擎通常支持多种阴影技术:
cpp
// 阴影类型枚举
enum class ShadowType {
None, // 无阴影
ShadowMap, // 基本阴影映射
PCF, // 百分比接近滤波阴影
VSM, // 方差阴影映射
PCSS // 百分比接近软阴影
};
// 阴影映射器
class ShadowMapper {
public:
ShadowMapper() : shadowType(ShadowType::PCF), shadowMapSize(1024), shadowBias(0.005f), softness(1.0f) {}
// 创建阴影贴图
bool Initialize() {
// 创建点光源阴影贴图
pointShadowMap = new Texture();
if (!pointShadowMap->Create(shadowMapSize, shadowMapSize, TextureFormat::Depth24, 1, TextureType::Cubemap)) {
return false;
}
// 创建方向光阴影贴图
directionalShadowMap = new Texture();
if (!directionalShadowMap->Create(shadowMapSize, shadowMapSize, TextureFormat::Depth24)) {
return false;
}
// 创建VSM需要的额外纹理
if (shadowType == ShadowType::VSM) {
vsmShadowMap = new Texture();
if (!vsmShadowMap->Create(shadowMapSize, shadowMapSize, TextureFormat::RG32F)) {
return false;
}
}
return true;
}
// 渲染阴影贴图
void RenderShadowMaps(const Scene& scene) {
// 渲染所有灯光的阴影贴图
for (const auto& light : scene.GetLights()) {
if (!light.IsCastingShadow()) {
continue;
}
switch (light.GetType()) {
case LightType::Directional:
RenderDirectionalLightShadowMap(light, scene);
break;
case LightType::Point:
RenderPointLightShadowMap(light, scene);
break;
case LightType::Spot:
RenderSpotLightShadowMap(light, scene);
break;
}
}
// 如果使用VSM,需要进行额外处理
if (shadowType == ShadowType::VSM) {
ProcessVSM();
}
}
// 设置阴影类型
void SetShadowType(ShadowType type) {
shadowType = type;
}
// 设置阴影贴图大小
void SetShadowMapSize(int size) {
shadowMapSize = size;
}
// 设置阴影偏移
void SetShadowBias(float bias) {
shadowBias = bias;
}
// 设置阴影软度
void SetSoftness(float value) {
softness = value;
}
// 获取方向光阴影贴图
Texture* GetDirectionalShadowMap() const {
return (shadowType == ShadowType::VSM) ? vsmShadowMap : directionalShadowMap;
}
// 获取点光源阴影贴图
Texture* GetPointShadowMap() const {
return pointShadowMap;
}
// 获取阴影类型
ShadowType GetShadowType() const {
return shadowType;
}
// 获取阴影偏移
float GetShadowBias() const {
return shadowBias;
}
// 获取阴影软度
float GetSoftness() const {
return softness;
}
private:
ShadowType shadowType;
int shadowMapSize;
float shadowBias;
float softness;
Texture* directionalShadowMap;
Texture* pointShadowMap;
Texture* vsmShadowMap;
// 渲染方向光阴影贴图
void RenderDirectionalLightShadowMap(const Light& light, const Scene& scene) {
// 计算光源视图和投影矩阵
Matrix4x4 lightView = CalculateDirectionalLightViewMatrix(light, scene);
Matrix4x4 lightProj = CalculateDirectionalLightProjectionMatrix(light, scene);
// 设置渲染目标为阴影贴图
renderer.SetRenderTarget(directionalShadowMap);
renderer.Clear(1.0f);
// 设置阴影着色器
shadowShader.SetViewMatrix(lightView);
shadowShader.SetProjectionMatrix(lightProj);
// 渲染场景到阴影贴图
for (const auto& object : scene.GetObjects()) {
if (!object.IsCastingShadow()) {
continue;
}
// 设置模型矩阵
shadowShader.SetModelMatrix(object.GetTransformMatrix());
// 渲染对象
shadowShader.Begin();
renderer.DrawMesh(object.GetMesh());
shadowShader.End();
}
// 如果使用VSM,将深度转换为均值和方差
if (shadowType == ShadowType::VSM) {
ConvertToVSM(directionalShadowMap, vsmShadowMap);
}
}
// 渲染点光源阴影贴图
void RenderPointLightShadowMap(const Light& light, const Scene& scene) {
// 对立方体贴图的每个面渲染阴影
for (int face = 0; face < 6; ++face) {
// 计算视图矩阵(看向立方体贴图的六个方向)
Matrix4x4 lightView = CalculatePointLightViewMatrix(light, face);
// 计算投影矩阵(90度视野的透视投影)
Matrix4x4 lightProj = Matrix4x4::PerspectiveFov(
90.0f * (3.14159f / 180.0f), 1.0f, 0.1f, light.GetRange());
// 设置渲染目标为立方体阴影贴图的当前面
renderer.SetRenderTarget(pointShadowMap, face);
renderer.Clear(1.0f);
// 设置点光源阴影着色器
pointShadowShader.SetViewMatrix(lightView);
pointShadowShader.SetProjectionMatrix(lightProj);
pointShadowShader.SetLightPosition(light.GetPosition());
pointShadowShader.SetLightRange(light.GetRange());
// 渲染场景到阴影贴图
for (const auto& object : scene.GetObjects()) {
if (!object.IsCastingShadow()) {
continue;
}
// 设置模型矩阵
pointShadowShader.SetModelMatrix(object.GetTransformMatrix());
// 渲染对象
pointShadowShader.Begin();
renderer.DrawMesh(object.GetMesh());
pointShadowShader.End();
}
}
}
// 渲染聚光灯阴影贴图
void RenderSpotLightShadowMap(const Light& light, const Scene& scene) {
// 计算光源视图矩阵
Matrix4x4 lightView = CalculateSpotLightViewMatrix(light);
// 计算光源投影矩阵
float fov = acos(light.GetSpotCutoff()) * 2.0f;
Matrix4x4 lightProj = Matrix4x4::PerspectiveFov(fov, 1.0f, 0.1f, light.GetRange());
// 设置渲染目标为阴影贴图
renderer.SetRenderTarget(directionalShadowMap);
renderer.Clear(1.0f);
// 设置阴影着色器
shadowShader.SetViewMatrix(lightView);
shadowShader.SetProjectionMatrix(lightProj);
// 渲染场景到阴影贴图
for (const auto& object : scene.GetObjects()) {
if (!object.IsCastingShadow()) {
continue;
}
// 设置模型矩阵
shadowShader.SetModelMatrix(object.GetTransformMatrix());
// 渲染对象
shadowShader.Begin();
renderer.DrawMesh(object.GetMesh());
shadowShader.End();
}
// 如果使用VSM,将深度转换为均值和方差
if (shadowType == ShadowType::VSM) {
ConvertToVSM(directionalShadowMap, vsmShadowMap);
}
}
// 处理方差阴影贴图
void ProcessVSM() {
// 模糊VSM贴图以减少锯齿和自阴影问题
gaussianBlurShader.SetInputTexture(vsmShadowMap);
gaussianBlurShader.SetBlurRadius(softness);
// 水平模糊
gaussianBlurShader.SetDirection(Vector2(1.0f, 0.0f));
gaussianBlurShader.Begin();
renderer.SetRenderTarget(tempTarget);
renderer.DrawFullscreenQuad();
gaussianBlurShader.End();
// 垂直模糊
gaussianBlurShader.SetInputTexture(tempTarget->GetColorTexture());
gaussianBlurShader.SetDirection(Vector2(0.0f, 1.0f));
gaussianBlurShader.Begin();
renderer.SetRenderTarget(vsmShadowMap);
renderer.DrawFullscreenQuad();
gaussianBlurShader.End();
}
// 将深度贴图转换为VSM格式(均值和方差)
void ConvertToVSM(Texture* depthTexture, Texture* vsmTexture) {
vsmConvertShader.SetDepthTexture(depthTexture);
vsmConvertShader.Begin();
renderer.SetRenderTarget(vsmTexture);
renderer.DrawFullscreenQuad();
vsmConvertShader.End();
}
// 计算方向光的视图矩阵
Matrix4x4 CalculateDirectionalLightViewMatrix(const Light& light, const Scene& scene) {
// 计算场景包围盒
BoundingBox sceneBounds = scene.CalculateBounds();
// 计算光源方向和位置
Vector3 lightDir = Vector3::Normalize(light.GetDirection());
Vector3 lightPos = Vector3::Subtract(sceneBounds.center, Vector3::Multiply(lightDir, sceneBounds.extents.length() * 2.0f));
// 创建视图矩阵
return Matrix4x4::LookAt(lightPos, sceneBounds.center, Vector3(0, 1, 0));
}
// 计算方向光的投影矩阵
Matrix4x4 CalculateDirectionalLightProjectionMatrix(const Light& light, const Scene& scene) {
// 计算场景包围盒
BoundingBox sceneBounds = scene.CalculateBounds();
// 计算正交投影的大小
float size = sceneBounds.extents.length() * 2.0f;
// 创建正交投影矩阵
return Matrix4x4::Orthographic(-size, size, -size, size, 0.1f, size * 2.0f);
}
// 计算点光源的视图矩阵
Matrix4x4 CalculatePointLightViewMatrix(const Light& light, int face) {
Vector3 lightPos = light.GetPosition();
Vector3 upDir(0, -1, 0);   // 先给出默认值,避免face越界时使用未初始化的变量
Vector3 lookDir(1, 0, 0);
// 确定六个面的观察方向
switch (face) {
case 0: // 正X
lookDir = Vector3(1, 0, 0);
upDir = Vector3(0, -1, 0);
break;
case 1: // 负X
lookDir = Vector3(-1, 0, 0);
upDir = Vector3(0, -1, 0);
break;
case 2: // 正Y
lookDir = Vector3(0, 1, 0);
upDir = Vector3(0, 0, 1);
break;
case 3: // 负Y
lookDir = Vector3(0, -1, 0);
upDir = Vector3(0, 0, -1);
break;
case 4: // 正Z
lookDir = Vector3(0, 0, 1);
upDir = Vector3(0, -1, 0);
break;
case 5: // 负Z
lookDir = Vector3(0, 0, -1);
upDir = Vector3(0, -1, 0);
break;
}
return Matrix4x4::LookAt(lightPos, Vector3::Add(lightPos, lookDir), upDir);
}
// 计算聚光灯的视图矩阵
Matrix4x4 CalculateSpotLightViewMatrix(const Light& light) {
Vector3 lightPos = light.GetPosition();
Vector3 lightDir = Vector3::Normalize(light.GetDirection());
Vector3 target = Vector3::Add(lightPos, lightDir);
// 计算上方向(假设世界up为(0,1,0))
Vector3 up = Vector3(0, 1, 0);
if (fabs(Vector3::Dot(lightDir, up)) > 0.99f) {
up = Vector3(0, 0, 1); // 如果光线方向接近上方向,使用另一个上方向
}
return Matrix4x4::LookAt(lightPos, target, up);
}
// 阴影渲染着色器
ShadowShader shadowShader;
PointShadowShader pointShadowShader;
GaussianBlurShader gaussianBlurShader;
VSMConvertShader vsmConvertShader;
RenderTarget* tempTarget;
};
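上面的 ConvertToVSM 会把深度贴图转换成(深度均值、深度平方均值)两个矩,ProcessVSM 再对其做高斯模糊。在光照阶段采样阴影时,VSM 利用切比雪夫不等式由这两个矩估算片元被照亮的概率上界。下面是采样端这一步数学的最小示意(假设已从模糊后的 VSM 贴图中取得两个矩;函数与参数命名仅为说明,并非上文类的实际接口):
cpp
// 方差阴影贴图(VSM)采样端的可见性估算示意
#include <algorithm>

// meanDepth = E[d](深度均值), meanDepthSq = E[d^2](深度平方均值)
// receiverDepth 为当前片元在光源空间中的深度
float VSMShadowFactor(float meanDepth, float meanDepthSq,
                      float receiverDepth, float minVariance = 1e-5f)
{
    // 片元位于遮挡物之前,必然可见
    if (receiverDepth <= meanDepth) {
        return 1.0f;
    }
    // 方差 = E[d^2] - (E[d])^2,加下限避免数值问题
    float variance = std::max(meanDepthSq - meanDepth * meanDepth, minVariance);
    // 切比雪夫不等式给出的可见性上界: p_max = sigma^2 / (sigma^2 + (t - mu)^2)
    float d = receiverDepth - meanDepth;
    float pMax = variance / (variance + d * d);
    return std::clamp(pMax, 0.0f, 1.0f);
}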
10.4 视觉效果和覆盖层
10.4.1 后处理效果
后处理效果是在场景渲染完成后、对整幅图像统一应用的处理,可以显著提升画面质量:
cpp
// 后处理效果接口
class IPostProcessEffect {
public:
virtual ~IPostProcessEffect() {}
// 应用效果
virtual void Apply(Texture* source, RenderTarget* destination) = 0;
// 启用/禁用效果
virtual void SetEnabled(bool enabled) = 0;
// 检查效果是否启用
virtual bool IsEnabled() const = 0;
};
// 后处理管理器
class PostProcessManager {
public:
PostProcessManager() : enabled(true) {}
// 初始化
bool Initialize(int width, int height) {
screenWidth = width;
screenHeight = height;
// 创建渲染目标
mainTarget = new RenderTarget();
if (!mainTarget->Create(width, height, TextureFormat::RGBA16F)) {
return false;
}
tempTarget1 = new RenderTarget();
if (!tempTarget1->Create(width, height, TextureFormat::RGBA16F)) {
return false;
}
tempTarget2 = new RenderTarget();
if (!tempTarget2->Create(width, height, TextureFormat::RGBA16F)) {
return false;
}
return true;
}
// 添加效果
void AddEffect(IPostProcessEffect* effect) {
effects.push_back(effect);
}
// 应用所有后处理效果
void ApplyEffects(Texture* source, RenderTarget* destination) {
if (!enabled || effects.empty()) {
// 如果禁用或没有效果,直接复制
CopyTexture(source, destination);
return;
}
// 复制源图像到主目标
CopyTexture(source, mainTarget);
// 应用所有启用的效果
RenderTarget* currentSource = mainTarget;
RenderTarget* currentDestination = tempTarget1;
for (size_t i = 0; i < effects.size(); ++i) {
if (!effects[i]->IsEnabled()) {
continue;
}
effects[i]->Apply(currentSource->GetColorTexture(), currentDestination);
// 交换源和目标(ping-pong):本次的输出作为下一个效果的输入
currentSource = currentDestination;
currentDestination = (currentDestination == tempTarget1) ? tempTarget2 : tempTarget1;
}
// 复制最终结果到目标
CopyTexture(currentSource->GetColorTexture(), destination);
}
// 启用/禁用所有后处理
void SetEnabled(bool enable) {
enabled = enable;
}
// 获取主渲染目标
RenderTarget* GetMainTarget() const {
return mainTarget;
}
private:
bool enabled;
int screenWidth;
int screenHeight;
std::vector<IPostProcessEffect*> effects;
RenderTarget* mainTarget;
RenderTarget* tempTarget1;
RenderTarget* tempTarget2;
// 复制纹理
void CopyTexture(Texture* source, RenderTarget* destination) {
copyShader.SetSourceTexture(source);
copyShader.Begin();
renderer.SetRenderTarget(destination);
renderer.DrawFullscreenQuad();
copyShader.End();
}
CopyShader copyShader;
};
// 色调映射效果
class ToneMapping : public IPostProcessEffect {
public:
enum class ToneMappingMethod {
Linear,
Reinhard,
ACES,
Uncharted2
};
ToneMapping() : enabled(true), method(ToneMappingMethod::ACES), exposure(1.0f) {}
void Apply(Texture* source, RenderTarget* destination) override {
// 设置着色器参数
toneMappingShader.SetSourceTexture(source);
toneMappingShader.SetMethod(static_cast<int>(method));
toneMappingShader.SetExposure(exposure);
// 应用效果
toneMappingShader.Begin();
renderer.SetRenderTarget(destination);
renderer.DrawFullscreenQuad();
toneMappingShader.End();
}
void SetEnabled(bool enable) override {
enabled = enable;
}
bool IsEnabled() const override {
return enabled;
}
// 设置色调映射方法
void SetMethod(ToneMappingMethod method) {
this->method = method;
}
// 设置曝光度
void SetExposure(float exposure) {
this->exposure = exposure;
}
private:
bool enabled;
ToneMappingMethod method;
float exposure;
ToneMappingShader toneMappingShader;
};
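// 上面的 ToneMappingMethod 枚举列出了几种常见曲线,实际的映射计算位于 ToneMappingShader 中(本章未展示其着色器代码)。
// 下面以标量形式给出这些曲线的公式示意(最小示例,输入为已乘过曝光的线性亮度值;仅供参考,并非着色器代码本身)。
#include <algorithm>

inline float ToneMapReinhard(float x) {
    return x / (1.0f + x);                        // Reinhard: L / (1 + L)
}

inline float ToneMapACES(float x) {               // Narkowicz 对 ACES 曲线的拟合
    const float a = 2.51f, b = 0.03f, c = 2.43f, d = 0.59f, e = 0.14f;
    return std::clamp((x * (a * x + b)) / (x * (c * x + d) + e), 0.0f, 1.0f);
}

inline float Uncharted2Curve(float x) {           // Hable 的 Uncharted 2 曲线
    const float A = 0.15f, B = 0.50f, C = 0.10f, D = 0.20f, E = 0.02f, F = 0.30f;
    return ((x * (A * x + C * B) + D * E) / (x * (A * x + B) + D * F)) - E / F;
}

inline float ToneMapUncharted2(float x, float exposureBias = 2.0f, float whitePoint = 11.2f) {
    return Uncharted2Curve(exposureBias * x) / Uncharted2Curve(whitePoint);   // 以白点归一化
}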
// 泛光效果
class Bloom : public IPostProcessEffect {
public:
Bloom() : enabled(true), threshold(0.8f), intensity(0.5f) {}
bool Initialize(int width, int height) {
// 创建降采样目标
for (int i = 0; i < 5; ++i) {
int w = width >> (i + 1);
int h = height >> (i + 1);
downsampleTargets[i] = new RenderTarget();
if (!downsampleTargets[i]->Create(w, h, TextureFormat::RGBA16F)) {
return false;
}
upsampleTargets[i] = new RenderTarget();
if (!upsampleTargets[i]->Create(w, h, TextureFormat::RGBA16F)) {
return false;
}
}
// 创建阈值目标
thresholdTarget = new RenderTarget();
if (!thresholdTarget->Create(width, height, TextureFormat::RGBA16F)) {
return false;
}
return true;
}
void Apply(Texture* source, RenderTarget* destination) override {
// 1. 亮度阈值提取
thresholdShader.SetSourceTexture(source);
thresholdShader.SetThreshold(threshold);
thresholdShader.Begin();
renderer.SetRenderTarget(thresholdTarget);
renderer.DrawFullscreenQuad();
thresholdShader.End();
// 2. 降采样
Texture* currentSource = thresholdTarget->GetColorTexture();
for (int i = 0; i < 5; ++i) {
downsampleShader.SetSourceTexture(currentSource);
downsampleShader.Begin();
renderer.SetRenderTarget(downsampleTargets[i]);
renderer.DrawFullscreenQuad();
downsampleShader.End();
currentSource = downsampleTargets[i]->GetColorTexture();
}
// 3. 升采样并混合
currentSource = downsampleTargets[4]->GetColorTexture();
for (int i = 3; i >= 0; --i) {
upsampleShader.SetSourceTexture(currentSource);
upsampleShader.SetBloomTexture(downsampleTargets[i]->GetColorTexture());
upsampleShader.Begin();
renderer.SetRenderTarget(upsampleTargets[i]);
renderer.DrawFullscreenQuad();
upsampleShader.End();
currentSource = upsampleTargets[i]->GetColorTexture();
}
// 4. 合并原始图像和泛光
blendShader.SetSourceTexture(source);
blendShader.SetBloomTexture(currentSource);
blendShader.SetBloomIntensity(intensity);
blendShader.Begin();
renderer.SetRenderTarget(destination);
renderer.DrawFullscreenQuad();
blendShader.End();
}
void SetEnabled(bool enable) override {
enabled = enable;
}
bool IsEnabled() const override {
return enabled;
}
// 设置亮度阈值
void SetThreshold(float value) {
threshold = value;
}
// 设置泛光强度
void SetIntensity(float value) {
intensity = value;
}
private:
bool enabled;
float threshold;
float intensity;
RenderTarget* thresholdTarget;
RenderTarget* downsampleTargets[5];
RenderTarget* upsampleTargets[5];
ThresholdShader thresholdShader;
DownsampleShader downsampleShader;
UpsampleShader upsampleShader;
BlendShader blendShader;
};
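// 上面第1步的"亮度阈值提取"由 ThresholdShader 完成,其核心计算大致如下
// (最小示意:结构体与函数名仅为说明,并非上文类的实际接口)。
#include <algorithm>

struct Color3 { float r, g, b; };

Color3 ExtractBrightPart(const Color3& c, float threshold)
{
    // 按 Rec.709 系数计算亮度
    float luminance = 0.2126f * c.r + 0.7152f * c.g + 0.0722f * c.b;
    // 低于阈值的像素不产生泛光;高于阈值的部分按比例保留原始色彩
    float excess = std::max(luminance - threshold, 0.0f);
    float scale = excess / std::max(luminance, 1e-4f);
    return { c.r * scale, c.g * scale, c.b * scale };
}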
// 屏幕空间反射效果
class ScreenSpaceReflection : public IPostProcessEffect {
public:
ScreenSpaceReflection() : enabled(true), maxSteps(64), stepSize(0.1f), maxDistance(50.0f), fadeStart(0.8f) {}
void Apply(Texture* source, RenderTarget* destination) override {
// 设置着色器参数
ssrShader.SetColorTexture(source);
ssrShader.SetPositionTexture(gBuffer->GetPositionTexture());
ssrShader.SetNormalTexture(gBuffer->GetNormalTexture());
ssrShader.SetDepthTexture(gBuffer->GetDepthTexture());
ssrShader.SetMaxSteps(maxSteps);
ssrShader.SetStepSize(stepSize);
ssrShader.SetMaxDistance(maxDistance);
ssrShader.SetFadeStart(fadeStart);
ssrShader.SetViewMatrix(viewMatrix);
ssrShader.SetProjectionMatrix(projectionMatrix);
// 应用效果
ssrShader.Begin();
renderer.SetRenderTarget(destination);
renderer.DrawFullscreenQuad();
ssrShader.End();
}
void SetEnabled(bool enable) override {
enabled = enable;
}
bool IsEnabled() const override {
return enabled;
}
// 设置G-Buffer
void SetGBuffer(GBuffer* buffer) {
gBuffer = buffer;
}
// 设置视图和投影矩阵
void SetMatrices(const Matrix4x4& view, const Matrix4x4& proj) {
viewMatrix = view;
projectionMatrix = proj;
}
// 设置光线步进参数
void SetRayParameters(int steps, float size, float distance) {
maxSteps = steps;
stepSize = size;
maxDistance = distance;
}
// 设置边缘淡出开始距离
void SetFadeStart(float value) {
fadeStart = value;
}
private:
bool enabled;
int maxSteps;
float stepSize;
float maxDistance;
float fadeStart;
Matrix4x4 viewMatrix;
Matrix4x4 projectionMatrix;
GBuffer* gBuffer;
SSRShader ssrShader;
};
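// SSRShader 的核心是一次屏幕空间光线步进:从着色点沿反射方向前进,逐步与深度缓冲比较,
// 直到光线"穿入"几何体即视为命中。下面是该循环的最小示意(省略了厚度测试、二分细化与Hi-Z加速;
// projectToScreen / sampleSceneDepth 为假设的回调,Vec2 / Vec3 为示意类型,并非上文引擎的实际接口)。
#include <functional>

struct Vec2 { float x, y; };
struct Vec3 { float x, y, z; };

bool TraceScreenSpaceRay(const Vec3& originVS, const Vec3& reflectDirVS,
                         int maxSteps, float stepSize, float maxDistance,
                         const std::function<Vec2(const Vec3&)>& projectToScreen,   // 观察空间 -> 屏幕UV
                         const std::function<float(const Vec2&)>& sampleSceneDepth, // 采样深度缓冲(观察空间深度)
                         Vec3& hitPointVS)
{
    Vec3 p = originVS;
    float traveled = 0.0f;
    for (int i = 0; i < maxSteps && traveled < maxDistance; ++i) {
        // 沿反射方向前进一步
        p.x += reflectDirVS.x * stepSize;
        p.y += reflectDirVS.y * stepSize;
        p.z += reflectDirVS.z * stepSize;
        traveled += stepSize;
        // 将当前点投影到屏幕并与场景深度比较(假设观察空间深度沿视线方向增大)
        float sceneDepth = sampleSceneDepth(projectToScreen(p));
        if (p.z >= sceneDepth) {
            hitPointVS = p;     // 光线进入几何体之后:命中
            return true;
        }
    }
    return false;               // 未命中,通常回退到环境贴图并按 fadeStart 做边缘淡出
}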
// 景深效果
class DepthOfField : public IPostProcessEffect {
public:
DepthOfField() : enabled(true), focalDistance(10.0f), focalRange(5.0f), maxBlur(1.0f) {}
void Apply(Texture* source, RenderTarget* destination) override {
// 创建景深遮罩
dofShader.SetSourceTexture(source);
dofShader.SetDepthTexture(depthTexture);
dofShader.SetFocalDistance(focalDistance);
dofShader.SetFocalRange(focalRange);
dofShader.SetMaxBlur(maxBlur);
// 首先创建景深遮罩
dofShader.Begin();
renderer.SetRenderTarget(cocTarget); // Circle of Confusion (CoC)
renderer.DrawFullscreenQuad();
dofShader.End();
// 对背景进行模糊
blurShader.SetSourceTexture(source);
blurShader.SetCoCTexture(cocTarget->GetColorTexture());
// 水平模糊
blurShader.SetDirection(Vector2(1.0f, 0.0f));
blurShader.Begin();
renderer.SetRenderTarget(tempTarget1);
renderer.DrawFullscreenQuad();
blurShader.End();
// 垂直模糊
blurShader.SetSourceTexture(tempTarget1->GetColorTexture());
blurShader.SetDirection(Vector2(0.0f, 1.0f));
blurShader.Begin();
renderer.SetRenderTarget(tempTarget2);
renderer.DrawFullscreenQuad();
blurShader.End();
// 合并原始图像和模糊图像
dofCompositeShader.SetSourceTexture(source);
dofCompositeShader.SetBlurTexture(tempTarget2->GetColorTexture());
dofCompositeShader.SetCoCTexture(cocTarget->GetColorTexture());
dofCompositeShader.Begin();
renderer.SetRenderTarget(destination);
renderer.DrawFullscreenQuad();
dofCompositeShader.End();
}
void SetEnabled(bool enable) override {
enabled = enable;
}
bool IsEnabled() const override {
return enabled;
}
// 设置深度纹理
void SetDepthTexture(Texture* texture) {
depthTexture = texture;
}
// 设置焦距
void SetFocalDistance(float distance) {
focalDistance = distance;
}
// 设置焦点范围
void SetFocalRange(float range) {
focalRange = range;
}
// 设置最大模糊程度
void SetMaxBlur(float blur) {
maxBlur = blur;
}
private:
bool enabled;
float focalDistance;
float focalRange;
float maxBlur;
Texture* depthTexture;
RenderTarget* cocTarget;   // 弥散圆(CoC)目标;与下面两个临时目标一样,需在初始化阶段创建(创建逻辑此处未展示)
RenderTarget* tempTarget1;
RenderTarget* tempTarget2;
DOFShader dofShader;
DOFBlurShader blurShader;
DOFCompositeShader dofCompositeShader;
};
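DOFShader 生成的弥散圆(CoC)贴图决定了每个像素的模糊程度:离焦点越远,模糊越强。下面是这一步深度到模糊量映射的最小示意(参数与上文 DepthOfField 的 focalDistance、focalRange、maxBlur 对应;函数命名仅为说明,并非实际着色器接口):
cpp
// 由场景深度计算弥散圆(CoC)大小的示意
#include <algorithm>
#include <cmath>

float ComputeCircleOfConfusion(float sceneDepth, float focalDistance,
                               float focalRange, float maxBlur)
{
    // 与焦点的距离超出焦点范围后,模糊程度线性增大并截断到 maxBlur
    float coc = std::fabs(sceneDepth - focalDistance) / std::max(focalRange, 1e-4f);
    return std::min(coc, 1.0f) * maxBlur;
}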
10.4.2 高动态范围渲染
高动态范围(HDR)渲染在浮点格式的缓冲区(如RGBA16F)中保留超出[0,1]的亮度值,最后再通过色调映射压缩到显示设备可呈现的范围,使场景的明暗层次更接近真实:
cpp
// HDR渲染管理器
class HDRRenderer {
public:
HDRRenderer() : initialized(false), exposure(1.0f), useAutoExposure(false), finalTarget(nullptr) {}
// 初始化HDR渲染器
bool Initialize(int width, int height) {
screenWidth = width;
screenHeight = height;
// 创建HDR渲染目标
hdrTarget = new RenderTarget();
if (!hdrTarget->Create(width, height, TextureFormat::RGBA16F)) {
return false;
}
// 创建亮度提取目标
luminanceTarget = new RenderTarget();
if (!luminanceTarget->Create(width, height, TextureFormat::R16F)) {
return false;
}
// 创建降采样亮度目标(用于自动曝光)
int w = width;
int h = height;
for (int i = 0; i < 6; ++i) {
w = std::max(1, w / 2);
h = std::max(1, h / 2);
downsampleTargets[i] = new RenderTarget();
if (!downsampleTargets[i]->Create(w, h, TextureFormat::R16F)) {
return false;
}
}
// 创建自动曝光采样目标
autoExposureTarget = new RenderTarget();
if (!autoExposureTarget->Create(1, 1, TextureFormat::R16F)) {
return false;
}
// 创建后处理管理器
postProcess = new PostProcessManager();
if (!postProcess->Initialize(width, height)) {
return false;
}
// 添加色调映射效果
toneMapping = new ToneMapping();
postProcess->AddEffect(toneMapping);
initialized = true;
return true;
}
// 开始HDR渲染
void BeginHDRRendering() {
if (!initialized) return;
// 设置HDR渲染目标
renderer.SetRenderTarget(hdrTarget);
renderer.Clear(Color(0, 0, 0, 0));
}
// 结束HDR渲染并应用色调映射
void EndHDRRendering() {
if (!initialized) return;
// 计算场景亮度
if (useAutoExposure) {
CalculateSceneLuminance();
}
// 应用色调映射和其他后处理效果
toneMapping->SetExposure(exposure);
postProcess->ApplyEffects(hdrTarget->GetColorTexture(), finalTarget);
}
// 启用/禁用自动曝光
void SetAutoExposure(bool enable) {
useAutoExposure = enable;
}
// 手动设置曝光值
void SetExposure(float value) {
exposure = value;
}
// 获取HDR渲染目标
RenderTarget* GetHDRTarget() const {
return hdrTarget;
}
// 获取后处理管理器
PostProcessManager* GetPostProcessManager() const {
return postProcess;
}
private:
bool initialized;
int screenWidth;
int screenHeight;
float exposure;
bool useAutoExposure;
RenderTarget* hdrTarget;
RenderTarget* luminanceTarget;
RenderTarget* downsampleTargets[6];
RenderTarget* autoExposureTarget;
RenderTarget* finalTarget;   // 最终输出目标(如交换链后缓冲),需由外部设置,此处未展示其赋值逻辑
ToneMapping* toneMapping;
PostProcessManager* postProcess;
// 计算场景亮度(用于自动曝光)
void CalculateSceneLuminance() {
// 提取亮度
luminanceShader.SetSourceTexture(hdrTarget->GetColorTexture());
luminanceShader.Begin();
renderer.SetRenderTarget(luminanceTarget);
renderer.DrawFullscreenQuad();
luminanceShader.End();
// 逐步降采样以计算平均亮度
Texture* currentSource = luminanceTarget->GetColorTexture();
for (int i = 0; i < 6; ++i) {
downsampleShader.SetSourceTexture(currentSource);
downsampleShader.Begin();
renderer.SetRenderTarget(downsampleTargets[i]);
renderer.DrawFullscreenQuad();
downsampleShader.End();
currentSource = downsampleTargets[i]->GetColorTexture();
}
// 最终降采样到1x1纹理,获取平均亮度
downsampleShader.SetSourceTexture(currentSource);
downsampleShader.Begin();
renderer.SetRenderTarget(autoExposureTarget);
renderer.DrawFullscreenQuad();
downsampleShader.End();
// 使用平均亮度调整曝光值
// 注意:从GPU纹理回读到CPU会造成同步等待,实际引擎通常延迟一帧读取或完全在GPU上完成曝光计算
float avgLuminance = autoExposureTarget->GetColorTexture()->Sample(Vector2(0.5f, 0.5f)).x;
// 将亮度映射到曝光值(例如使用简单的对数映射)
float targetExposure = 0.5f / (avgLuminance + 0.001f);
// 平滑过渡到新的曝光值
exposure = Lerp(exposure, targetExposure, 0.05f);
// 限制曝光范围
exposure = Clamp(exposure, 0.1f, 10.0f);
}
// 辅助函数
float Lerp(float a, float b, float t) {
return a + t * (b - a);
}
float Clamp(float value, float min, float max) {
return (value < min) ? min : ((value > max) ? max : value);
}
LuminanceShader luminanceShader;
DownsampleShader downsampleShader;
};
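下面给出 HDRRenderer 的一个典型调用流程示意(最小示例):Scene、Camera 与全局 renderer 沿用本章前文的约定,RenderScene 为假设的场景绘制函数,仅用于说明调用顺序:
cpp
// 假设的每帧HDR渲染流程
void RenderScene(const Scene& scene, const Camera& camera);   // 假设:执行本章前文的几何与光照渲染

void RenderFrameHDR(HDRRenderer& hdr, const Scene& scene, const Camera& camera)
{
    // 1. 把场景渲染到 RGBA16F 的 HDR 目标
    hdr.BeginHDRRendering();
    RenderScene(scene, camera);
    // 2. 结束 HDR 渲染:计算自动曝光(若启用)、执行色调映射与其余后处理
    hdr.SetAutoExposure(true);
    hdr.EndHDRRendering();
}

// 初始化阶段还可以向后处理链中追加其他效果,例如泛光:
//   Bloom* bloom = new Bloom();
//   bloom->Initialize(width, height);
//   hdr.GetPostProcessManager()->AddEffect(bloom);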
10.4.3 体积光和大气散射
体积光和大气散射效果可以显著提升场景的深度感和真实感:
cpp
// 体积光渲染器
class VolumetricLightRenderer {
public:
VolumetricLightRenderer() : enabled(true), density(0.05f), scatteringCoeff(0.03f), numSamples(64), jitter(true) {}
// 初始化
bool Initialize(int width, int height) {
// 创建体积光渲染目标(一般使用较低的分辨率)
int volumeWidth = width / 2;
int volumeHeight = height / 2;
volumeTarget = new RenderTarget();
if (!volumeTarget->Create(volumeWidth, volumeHeight, TextureFormat::RGBA16F)) {
return false;
}
// 创建模糊目标
blurTarget = new RenderTarget();
if (!blurTarget->Create(volumeWidth, volumeHeight, TextureFormat::RGBA16F)) {
return false;
}
return true;
}
// 渲染体积光
void RenderVolumetricLight(const Scene& scene, const Camera& camera, Texture* depthTexture, RenderTarget* destination) {
if (!enabled) {
// 如果禁用,跳过体积光渲染
return;
}
// 获取视图和投影矩阵
Matrix4x4 viewMatrix = camera.GetViewMatrix();
Matrix4x4 projMatrix = camera.GetProjectionMatrix();
Matrix4x4 viewProjMatrix = Matrix4x4::Multiply(viewMatrix, projMatrix);
Matrix4x4 invViewProjMatrix = Matrix4x4::Inverse(viewProjMatrix);
// 设置着色器参数
volumetricShader.SetDepthTexture(depthTexture);
volumetricShader.SetInvViewProjMatrix(invViewProjMatrix);
volumetricShader.SetCameraPosition(camera.GetPosition());
volumetricShader.SetDensity(density);
volumetricShader.SetScatteringCoefficient(scatteringCoeff);
volumetricShader.SetNumSamples(numSamples);
volumetricShader.SetJitter(jitter);
// 设置场景中的光源
int lightCount = 0;
for (const auto& light : scene.GetLights()) {
if (lightCount >= MAX_LIGHTS) break;
volumetricShader.SetLightPosition(lightCount, light.GetPosition());
volumetricShader.SetLightDirection(lightCount, light.GetDirection());
volumetricShader.SetLightColor(lightCount, light.GetColor());
volumetricShader.SetLightIntensity(lightCount, light.GetIntensity());
volumetricShader.SetLightType(lightCount, static_cast<int>(light.GetType()));
volumetricShader.SetLightRange(lightCount, light.GetRange());
volumetricShader.SetLightSpotParams(lightCount, light.GetSpotCutoff(), light.GetSpotExponent());
// 设置阴影贴图
if (light.IsCastingShadow()) {
Texture* shadowMap = nullptr;
Matrix4x4 lightViewProj;
switch (light.GetType()) {
case LightType::Directional:
shadowMap = shadowMapper->GetDirectionalShadowMap();
lightViewProj = light.GetShadowViewProjMatrix();
break;
case LightType::Point:
shadowMap = shadowMapper->GetPointShadowMap();
// 点光源使用特殊的阴影采样方法,不需要VP矩阵
break;
case LightType::Spot:
shadowMap = shadowMapper->GetDirectionalShadowMap(); // 聚光灯复用与方向光相同的2D阴影贴图格式
lightViewProj = light.GetShadowViewProjMatrix();
break;
}
if (shadowMap) {
volumetricShader.SetShadowMap(lightCount, shadowMap);
volumetricShader.SetLightViewProjMatrix(lightCount, lightViewProj);
volumetricShader.SetLightCastShadow(lightCount, true);
}
} else {
volumetricShader.SetLightCastShadow(lightCount, false);
}
lightCount++;
}
volumetricShader.SetLightCount(lightCount);
// 渲染体积光
volumetricShader.Begin();
renderer.SetRenderTarget(volumeTarget);
renderer.Clear(Color(0, 0, 0, 0));
renderer.DrawFullscreenQuad();
volumetricShader.End();
// 模糊体积光以减少噪点
BlurVolumetricLight();
// 合并体积光和场景
// 注意:同一目标同时作为采样纹理和渲染目标在多数图形API中是未定义行为,实际实现应先复制到中间缓冲
blendShader.SetSourceTexture(destination->GetColorTexture());
blendShader.SetVolumetricTexture(blurTarget->GetColorTexture());
blendShader.Begin();
renderer.SetRenderTarget(destination);
renderer.DrawFullscreenQuad();
blendShader.End();
}
// 设置阴影映射器
void SetShadowMapper(ShadowMapper* mapper) {
shadowMapper = mapper;
}
// 启用/禁用体积光
void SetEnabled(bool enable) {
enabled = enable;
}
// 设置体积光参数
void SetDensity(float value) {
density = value;
}
void SetScatteringCoefficient(float value) {
scatteringCoeff = value;
}
void SetSampleCount(int count) {
numSamples = count;
}
void SetJitter(bool enable) {
jitter = enable;
}
private:
bool enabled;
float density;
float scatteringCoeff;
int numSamples;
bool jitter;
RenderTarget* volumeTarget;
RenderTarget* blurTarget;
ShadowMapper* shadowMapper;
static const int MAX_LIGHTS = 8;
// 模糊体积光
void BlurVolumetricLight() {
// 使用双边滤波以保留体积光边缘
bilateralBlurShader.SetInputTexture(volumeTarget->GetColorTexture());
bilateralBlurShader.SetBlurRadius(2.0f);
bilateralBlurShader.SetSharpness(20.0f);
// 水平模糊
bilateralBlurShader.SetDirection(Vector2(1.0f, 0.0f));
bilateralBlurShader.Begin();
renderer.SetRenderTarget(blurTarget);
renderer.DrawFullscreenQuad();
bilateralBlurShader.End();
// 垂直模糊
bilateralBlurShader.SetInputTexture(blurTarget->GetColorTexture());
bilateralBlurShader.SetDirection(Vector2(0.0f, 1.0f));
bilateralBlurShader.Begin();
renderer.SetRenderTarget(volumeTarget);
renderer.DrawFullscreenQuad();
bilateralBlurShader.End();
// 交换目标,使最终结果在blurTarget中
std::swap(volumeTarget, blurTarget);
}
VolumetricLightShader volumetricShader;
BilateralBlurShader bilateralBlurShader;
VolumetricBlendShader blendShader;
};
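// VolumetricLightShader 的核心思想:沿视线从相机向着色点按 numSamples 步进,在每个采样点把
// "阴影可见性 × 相位函数 × 散射系数"累加为内散射量,同时让透射率按 Beer-Lambert 规律衰减。
// 下面是单条视线、单个方向光情形的最小示意(shadowVisibility 假设为已采样好的每步可见性;并非上文着色器的实际代码)。
#include <cmath>

// Henyey-Greenstein 相位函数:g 控制前向/后向散射的偏向程度
float PhaseHG(float cosTheta, float g)
{
    float g2 = g * g;
    float denom = 1.0f + g2 - 2.0f * g * cosTheta;
    return (1.0f - g2) / (4.0f * 3.14159265f * std::pow(denom, 1.5f));
}

float MarchVolumetricLight(float rayLength, int numSamples,
                           float density, float scatteringCoeff,
                           float cosThetaViewLight, float g,
                           const float* shadowVisibility)
{
    float stepLen = rayLength / numSamples;
    float transmittance = 1.0f;   // 相机到当前采样点的透射率
    float inscattered = 0.0f;
    for (int i = 0; i < numSamples; ++i) {
        // 当前步的散射贡献 = 可见性 × 相位函数 × 散射系数 × 步长
        float scatter = shadowVisibility[i] * PhaseHG(cosThetaViewLight, g)
                      * scatteringCoeff * stepLen;
        inscattered += transmittance * scatter;
        // 介质的吸收/外散射使透射率按指数衰减(Beer-Lambert)
        transmittance *= std::exp(-density * stepLen);
    }
    return inscattered;
}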
// 大气散射渲染器
class AtmosphericScatteringRenderer {
public:
AtmosphericScatteringRenderer()
: enabled(true), rayleighScattering(Vector3(5.8e-6f, 13.5e-6f, 33.1e-6f)),
mieScattering(Vector3(21e-6f)), ozoneAbsorption(Vector3(2.04e-5f, 4.97e-5f, 1.95e-6f)),
atmosphereHeight(80000.0f), mieG(0.76f), sunIntensity(20.0f) {}
// 初始化
bool Initialize(int width, int height) {
// 创建大气散射渲染目标
skyTarget = new RenderTarget();
if (!skyTarget->Create(width, height, TextureFormat::RGBA16F)) {
return false;
}
// 预计算大气散射查找表
return PrecomputeAtmosphereLUT();
}
// 渲染天空和大气散射
void RenderAtmosphere(const Camera& camera, const Vector3& sunDirection, RenderTarget* destination) {
if (!enabled) {
return;
}
// 设置着色器参数
atmosphereShader.SetCameraPosition(camera.GetPosition());
atmosphereShader.SetCameraViewProj(Matrix4x4::Multiply(camera.GetViewMatrix(), camera.GetProjectionMatrix()));
atmosphereShader.SetInvViewProj(Matrix4x4::Inverse(Matrix4x4::Multiply(camera.GetViewMatrix(), camera.GetProjectionMatrix())));
atmosphereShader.SetSunDirection(sunDirection);
atmosphereShader.SetRayleighScattering(rayleighScattering);
atmosphereShader.SetMieScattering(mieScattering);
atmosphereShader.SetOzoneAbsorption(ozoneAbsorption);
atmosphereShader.SetAtmosphereHeight(atmosphereHeight);
atmosphereShader.SetMieG(mieG);
atmosphereShader.SetSunIntensity(sunIntensity);
atmosphereShader.SetTransmittanceLUT(transmittanceLUT);
atmosphereShader.SetScatteringLUT(scatteringLUT);
// 渲染天空
atmosphereShader.Begin();
renderer.SetRenderTarget(skyTarget);
renderer.DrawFullscreenQuad();
atmosphereShader.End();
// 合并天空和场景
compositeShader.SetSceneTexture(destination->GetColorTexture());
compositeShader.SetSkyTexture(skyTarget->GetColorTexture());
compositeShader.SetDepthTexture(depthTexture);
compositeShader.Begin();
renderer.SetRenderTarget(destination);
renderer.DrawFullscreenQuad();
compositeShader.End();
}
// 设置深度纹理
void SetDepthTexture(Texture* texture) {
depthTexture = texture;
}
// 启用/禁用大气散射
void SetEnabled(bool enable) {
enabled = enable;
}
// 设置瑞利散射系数
void SetRayleighScattering(const Vector3& value) {
rayleighScattering = value;
}
// 设置米氏散射系数
void SetMieScattering(const Vector3& value) {
mieScattering = value;
}
// 设置臭氧吸收系数
void SetOzoneAbsorption(const Vector3& value) {
ozoneAbsorption = value;
}
// 设置大气高度
void SetAtmosphereHeight(float height) {
atmosphereHeight = height;
}
// 设置米氏相位函数参数
void SetMieG(float value) {
mieG = value;
}
// 设置太阳强度
void SetSunIntensity(float value) {
sunIntensity = value;
}
private:
bool enabled;
Vector3 rayleighScattering;
Vector3 mieScattering;
Vector3 ozoneAbsorption;
float atmosphereHeight;
float mieG;
float sunIntensity;
RenderTarget* skyTarget;
Texture* depthTexture;
Texture* transmittanceLUT;
Texture* scatteringLUT;
// 预计算大气散射查找表
bool PrecomputeAtmosphereLUT() {
// 创建透射率LUT纹理
transmittanceLUT = new Texture();
if (!transmittanceLUT->Create(256, 64, TextureFormat::RGBA16F)) {
return false;
}
// 创建散射LUT纹理
scatteringLUT = new Texture();
if (!scatteringLUT->Create(32, 128, TextureFormat::RGBA16F, 1, TextureType::Texture3D, 32)) {
return false;
}
// 预计算透射率LUT
transmittanceLUTShader.SetRayleighScattering(rayleighScattering);
transmittanceLUTShader.SetMieScattering(mieScattering);
transmittanceLUTShader.SetOzoneAbsorption(ozoneAbsorption);
transmittanceLUTShader.SetAtmosphereHeight(atmosphereHeight);
transmittanceLUTShader.Begin();
renderer.SetRenderTarget(transmittanceLUT);
renderer.DrawFullscreenQuad();
transmittanceLUTShader.End();
// 预计算散射LUT
scatteringLUTShader.SetRayleighScattering(rayleighScattering);
scatteringLUTShader.SetMieScattering(mieScattering);
scatteringLUTShader.SetOzoneAbsorption(ozoneAbsorption);
scatteringLUTShader.SetAtmosphereHeight(atmosphereHeight);
scatteringLUTShader.SetMieG(mieG);
scatteringLUTShader.SetTransmittanceLUT(transmittanceLUT);
// 对每一个Z切片进行渲染
for (int z = 0; z < 32; ++z) {
scatteringLUTShader.SetZSlice(static_cast<float>(z) / 31.0f);
scatteringLUTShader.Begin();
renderer.SetRenderTarget(scatteringLUT, 0, 0, z);
renderer.DrawFullscreenQuad();
scatteringLUTShader.End();
}
return true;
}
AtmosphereShader atmosphereShader;
TransmittanceLUTShader transmittanceLUTShader;
ScatteringLUTShader scatteringLUTShader;
AtmosphereCompositeShader compositeShader;
};
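AtmosphereShader 在计算散射时需要分别评估空气分子(瑞利散射)和气溶胶(米氏散射)的相位函数。下面给出这两个相位函数的公式示意(最小示例,g 即上文的 mieG;实际计算通常在着色器中完成,此处仅展示公式本身):
cpp
// 大气散射中的两种相位函数
#include <cmath>

constexpr float kPi = 3.14159265f;

// 瑞利相位函数:描述空气分子对不同散射角的能量分布
float PhaseRayleigh(float cosTheta)
{
    return 3.0f / (16.0f * kPi) * (1.0f + cosTheta * cosTheta);
}

// 米氏相位函数(Cornette-Shanks 近似):g 越接近1,前向散射越强
float PhaseMie(float cosTheta, float g)
{
    float g2 = g * g;
    float num = (1.0f - g2) * (1.0f + cosTheta * cosTheta);
    float denom = (2.0f + g2) * std::pow(1.0f + g2 - 2.0f * g * cosTheta, 1.5f);
    return 3.0f / (8.0f * kPi) * num / denom;
}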
10.5 延伸阅读
对于想要深入了解现代渲染技术的开发者,以下是一些推荐的延伸阅读资源:
实时渲染第四版 (Real-Time Rendering, 4th Edition) – 这本书涵盖了现代GPU渲染技术的几乎所有方面。
物理渲染 (Physically Based Rendering: From Theory to Implementation) – 深入探讨基于物理的渲染理论。
游戏引擎架构 (Game Engine Architecture) – 详细介绍了游戏引擎的各个组件,包括渲染引擎。
着色器技术 (Shader X系列) – 这个系列包含了许多高级着色器技术和渲染算法。
OpenGL和DirectX编程指南 – 掌握底层图形API对于理解渲染管线至关重要。
SIGGRAPH论文集 – 每年的SIGGRAPH会议都会发布最新的图形学研究成果。
GPU Gems和GPU Pro系列 – 这些书籍收集了业界专家分享的实用渲染技术。
进阶渲染技术概览
以下是一些值得深入研究的现代渲染技术:
基于物理的渲染(PBR) – 使用物理原理模拟光线行为,创建更逼真的材质。
全局光照 – 包括光线追踪、路径追踪、辐射度和光子映射等技术。
实时光线追踪 – 随着硬件支持的增加,实时光线追踪正变得越来越可行。
屏幕空间技术 – 如SSAO、SSR和SSGI等在屏幕空间工作的近似算法。
体积渲染 – 用于云、烟雾和雾气等效果的体积技术。
程序化生成和纹理合成 – 动态创建和组合纹理以减少内存使用。
高级阴影技术 – 如级联阴影映射、VSSM和光线追踪阴影。
时域技术 – 如TAA、时域超采样和动态模糊。
GPU粒子系统 – 使用计算着色器实现大规模粒子效果。
程序化几何体和曲面细分 – 在GPU上动态生成几何细节。
这些技术代表了现代渲染的前沿,不断推动游戏和实时应用的视觉质量向前发展。通过深入研究这些领域,开发者可以创建更具吸引力和沉浸感的视觉体验。