Crates demonstration program (Chapter IX content)

For personal learning use only — please do not reprint. Thank you!

Crates demonstration program (Chapter IX content)

9.9, crates demo

In this section we will show the key steps for adding a texture to the crate cube.

9.9.1, specifies the texture coordinates

The following code shows how to generate texture coordinates for the cube so that a texture image can be mapped onto each face of the cube.

// Vertex structure: position, normal, tangent, and uv texture coordinates.
// The texture coordinates (TexC) are what's new in this chapter; they map a
// point of a 2D texture image onto each vertex of the mesh.
struct Vertex
    {
        Vertex(){}
        Vertex(
            const DirectX::XMFLOAT3& p, 
            const DirectX::XMFLOAT3& n, 
            const DirectX::XMFLOAT3& t, 
            const DirectX::XMFLOAT2& uv) :
            Position(p), 
            Normal(n), 
            TangentU(t), 
            TexC(uv){}
        Vertex(
             //position
            float px, float py, float pz, 
             //normal vector
            float nx, float ny, float nz,
             //tangent
            float tx, float ty, float tz,
             //texture coordinates
            float u, float v) : 
             Position(px,py,pz), 
             Normal(nx,ny,nz),
            TangentU(tx, ty, tz), 
             TexC(u,v){}

        DirectX::XMFLOAT3 Position;
        DirectX::XMFLOAT3 Normal;
        DirectX::XMFLOAT3 TangentU;
        DirectX::XMFLOAT2 TexC;
    };
// Builds an axis-aligned box of the given dimensions centered at the origin,
// with per-face normals, tangents, and texture coordinates. Each face gets
// its own 4 vertices (24 total) so that uv coordinates and normals can differ
// per face. (This excerpt is truncated; only the first four faces are shown.)
GeometryGenerator::MeshData GeometryGenerator::CreateBox(float width, float height, float depth, uint32 numSubdivisions)
{
    MeshData meshData;

    //
    // Create the vertices.
    //

    Vertex v[24];

    // Half-extents: the box is centered at the origin.
    float w2 = 0.5f*width;
    float h2 = 0.5f*height;
    float d2 = 0.5f*depth;
    
    // Fill in the front face vertex data.
    // Arguments per vertex: position (x,y,z), normal (x,y,z), tangent (x,y,z), uv (u,v).
    v[0] = Vertex(-w2, -h2, -d2, 0.0f, 0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f);
    v[1] = Vertex(-w2, +h2, -d2, 0.0f, 0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f);
    v[2] = Vertex(+w2, +h2, -d2, 0.0f, 0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f);
    v[3] = Vertex(+w2, -h2, -d2, 0.0f, 0.0f, -1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f);

    // Fill in the back face vertex data.
    v[4] = Vertex(-w2, -h2, +d2, 0.0f, 0.0f, 1.0f, -1.0f, 0.0f, 0.0f, 1.0f, 1.0f);
    v[5] = Vertex(+w2, -h2, +d2, 0.0f, 0.0f, 1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f);
    v[6] = Vertex(+w2, +h2, +d2, 0.0f, 0.0f, 1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 0.0f);
    v[7] = Vertex(-w2, +h2, +d2, 0.0f, 0.0f, 1.0f, -1.0f, 0.0f, 0.0f, 1.0f, 0.0f);

    // Fill in the top face vertex data.
    v[8]  = Vertex(-w2, +h2, -d2, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f);
    v[9]  = Vertex(-w2, +h2, +d2, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f);
    v[10] = Vertex(+w2, +h2, +d2, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f);
    v[11] = Vertex(+w2, +h2, -d2, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f);

    // Fill in the bottom face vertex data.
    v[12] = Vertex(-w2, -h2, -d2, 0.0f, -1.0f, 0.0f, -1.0f, 0.0f, 0.0f, 1.0f, 1.0f);
    v[13] = Vertex(+w2, -h2, -d2, 0.0f, -1.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f);
    v[14] = Vertex(+w2, -h2, +d2, 0.0f, -1.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 0.0f);
    v[15] = Vertex(-w2, -h2, +d2, 0.0f, -1.0f, 0.0f, -1.0f, 0.0f, 0.0f, 1.0f, 0.0f);
    // ... (remaining faces, index data, and subdivision code omitted in this excerpt)
    ……

9.9.2, create texture

Next we will create a texture from a .dds file during the initialization phase.

// Helper structure that groups together texture-related data.
struct Texture
{
    // Unique texture name used for lookups.
    std::string Name;
    // Path of the .dds file on disk.
    std::wstring Filename;

    // The GPU resource holding the texture data.
    Microsoft::WRL::ComPtr<ID3D12Resource> Resource = nullptr;
    // Intermediate upload heap used to copy the texture data to the GPU.
    Microsoft::WRL::ComPtr<ID3D12Resource> UploadHeap = nullptr;
};

// Table of all loaded textures, keyed by their unique name.
std::unordered_map<std::string, std::unique_ptr<Texture>> mTextures;

// Loads the wooden-crate texture from its .dds file and stores it in mTextures.
// The data is uploaded to the GPU through mCommandList, so the command list
// must be recording when this is called.
void CrateApp::LoadTextures()
{
    const std::string name = "woodCrateTex";

    // Guard against loading the same texture twice: skip the (expensive)
    // disk read and GPU upload if it is already in the texture table.
    if (mTextures.find(name) != mTextures.end())
        return;

    auto woodCrateTex = std::make_unique<Texture>();
    woodCrateTex->Name = name;
    woodCrateTex->Filename = L"../../Textures/WoodCrate01.dds";
    ThrowIfFailed(DirectX::CreateDDSTextureFromFile12(md3dDevice.Get(),
        mCommandList.Get(), woodCrateTex->Filename.c_str(),
        woodCrateTex->Resource, woodCrateTex->UploadHeap));
 
    mTextures[woodCrateTex->Name] = std::move(woodCrateTex);
}

We store each texture in an unordered map (std::unordered_map) keyed by its unique name, and later look textures up by that name. In a real project, however, every time we load a texture we should first check whether it already exists in the map, to prevent the same texture from being loaded multiple times.

9.9.3, set the texture

Once the texture has been created and its SRV (shader resource view) exists in a descriptor heap, we only need to set the texture as a root signature parameter and bind it to the rendering pipeline via the root signature, so that the shader programs can use it.

// Get the shader resource view (SRV) of the texture: start at the beginning of
// the SRV descriptor heap and offset to this render item's diffuse texture.
CD3DX12_GPU_DESCRIPTOR_HANDLE tex(mSrvDescriptorHeap->GetGPUDescriptorHandleForHeapStart());
tex.Offset(ri->Mat->DiffuseSrvHeapIndex, mCbvSrvDescriptorSize);
……

// Bind the texture to root parameter 0; the root parameter specifies which
// shader register slot the texture is bound to.
cmdList->SetGraphicsRootDescriptorTable(0,tex);

9.9.4, update the code HLSL

//Default.hlsli file
// Number of light sources of each type.
#ifndef NUM_DIR_LIGHTS
#define NUM_DIR_LIGHTS 3
#endif

#ifndef NUM_POINT_LIGHTS
#define NUM_POINT_LIGHTS 0
#endif

#ifndef NUM_SPOT_LIGHTS
#define NUM_SPOT_LIGHTS 0
#endif

// Contains the structures and functions needed for lighting.
// (If this header were included before the light-count macros above, the
// counts would default to 0 and the lights would have no effect.)
#include "LightingUtil.hlsli"

Texture2D gDiffuseMap : register(t0);
SamplerState gsamLinear : register(s0);


// Constant data that varies per object.
cbuffer cbPerObject : register(b0)
{
    float4x4 gWorld;
    float4x4 gTexTransform;
};

// Miscellaneous constant data needed over the course of a rendering pass.
cbuffer cbPass : register(b1)
{
    float4x4 gView;
    float4x4 gInvView;
    float4x4 gProj;
    float4x4 gInvProj;
    float4x4 gViewProj;
    float4x4 gInvViewProj;
    float3 gEyePosW;
    float cbPerObjectPad1;
    float2 gRenderTargetSize;
    float2 gInvRenderTargetSize;
    float gNearZ;
    float gFarZ;
    float gTotalTime;
    float gDeltaTime;
    float4 gAmbientLight;

    // gLights[0, NUM_DIR_LIGHTS) are the directional lights;
    // gLights[NUM_DIR_LIGHTS, NUM_DIR_LIGHTS + NUM_POINT_LIGHTS) are the point lights;
    // gLights[NUM_DIR_LIGHTS + NUM_POINT_LIGHTS, NUM_DIR_LIGHTS + NUM_POINT_LIGHTS +
    // NUM_SPOT_LIGHTS) are the spot lights.
    Light gLights[MaxLights];
};

// Constant data that differs per material.
cbuffer cbMaterial : register(b2)
{
    float4 gDiffuseAlbedo;
    float3 gFresnelR0;
    float gRoughness;
    float4x4 gMatTransform;
};

struct VertexIn
{
    // Vertex position in local (object) space.
    float3 PosL : POSITION;
    // Vertex normal in local space.
    float3 NormalL : NORMAL;
    // Vertex uv texture coordinates.
    float2 TexC : TEXCOORD;
};

struct VertexOut
{
    // Vertex position in homogeneous clip space.
    float4 PosH : SV_POSITION;
    // Vertex position in world space.
    float3 PosW : POSITION;
    // Vertex normal in world space.
    float3 NormalW : NORMAL;
    // Vertex uv texture coordinates.
    float2 TexC : TEXCOORD;
};
//VS.hlsl file
#include "Default.hlsli"

// Vertex shader: transforms the incoming vertex into world and clip space and
// applies the object and material texture transforms to its uv coordinates.
VertexOut VS(VertexIn vin)
{
    VertexOut vout = (VertexOut) 0.0f;

    // Transform the texture coordinates; these vertex attributes are output
    // so they can be interpolated across the triangle.
    float4 uv = mul(float4(vin.TexC, 0.0f, 1.0f), gTexTransform);
    vout.TexC = mul(uv, gMatTransform).xy;

    // Local space -> world space.
    float4 worldPos = mul(float4(vin.PosL, 1.0f), gWorld);
    vout.PosW = worldPos.xyz;

    // Assumes uniform scaling here; otherwise the normal must be transformed
    // by the inverse-transpose of the world matrix instead.
    vout.NormalW = mul(vin.NormalL, (float3x3) gWorld);

    // World space -> homogeneous clip space.
    vout.PosH = mul(worldPos, gViewProj);

    return vout;
}
//PS.hlsl file
#include "Default.hlsli"


// Pixel shader: samples the diffuse texture and combines ambient and direct
// lighting to produce the final pixel color.
float4 PS(VertexOut pin) : SV_Target
{
    // Interpolating normals can denormalize them, so renormalize.
    pin.NormalW = normalize(pin.NormalW);

    // Fetch the diffuse albedo from the texture map and modulate it with the
    // material's diffuse albedo constant.
    float4 texDiffuse = gDiffuseMap.Sample(gsamLinear, pin.TexC) * gDiffuseAlbedo;

    // Vector from the surface point to the eye.
    float3 toEyeW = normalize(gEyePosW - pin.PosW);

    // Indirect (ambient) lighting term.
    float4 ambient = gAmbientLight * texDiffuse;

    // Direct lighting term.
    const float shininess = 1.0f - gRoughness;
    Material mat = { texDiffuse, gFresnelR0, shininess };
    float3 shadowFactor = 1.0f;
    float4 directLight = ComputeLighting(gLights, mat, pin.PosW,
        pin.NormalW, toEyeW, shadowFactor);

    float4 litColor = ambient + directLight;

    // Common convention to take alpha from diffuse material.
    litColor.a = texDiffuse.a;

    return litColor;
}

9.9.5, the demonstration effect

The complete source code can be found by searching on GitHub.

9.10, texture transform

We have not yet discussed the constant buffer variables gTexTransform and gMatTransform; these two variables are used in the vertex shader to transform the input texture coordinates.

// Output vertex attributes so they are interpolated across the triangle.
float4 texC = mul(float4(vin.TexC,0.0f,1.0f),gTexTransform);
// Fix: the output member is named TexC (capital C), not Texc — it must match
// the VertexOut declaration and the vertex shader listing above.
vout.TexC = mul(texC,gMatTransform).xy;

A 2D texture coordinate represents a point in the texture plane; like any other 2D point, we can scale, translate, and rotate texture coordinates to change which texels are sampled. The following are some example uses of the texture transform:

Examples

- Tiling a brick texture across a wall: if the wall's vertex texture coordinates currently span the range [0, 1], we can scale the texture coordinates by 4 so they span [0, 4]; the brick texture will then be repeated 4x4 times across the wall.
- Clouds drifting across the sky: by translating the texture coordinates as a function of time, we can achieve the effect of dynamically drifting clouds.
- A rotation can sometimes be used to achieve certain particle-like effects.

In the "Crate" demo we do not modify the input texture coordinates at all, but the demo program in the next section does make use of the texture transform.

Note: since we use a 4x4 matrix to transform the 2D texture coordinates, after the multiplication we must cast the result back down to a 2D vector.

Here we use two separate texture transform matrices — gTexTransform and gMatTransform — because one is a material-level texture transform (for dynamic materials such as water), while the other is a texture transform tied to the object's own properties.

Guess you like

Origin www.cnblogs.com/yaya12138/p/12233414.html