I'm currently making a simple DirectX engine to brush up on my graphics programming knowledge, but I've been frustratingly stuck for a while now. I'm trying to render a simple triangle with a moving camera (only translation for now), but the triangle distorts whenever I move the camera. The same happens whenever I move the triangle, except then the distortion goes in the opposite direction, of course.
This is how the triangle looks without camera movement:
This is how it looks when I move the camera to the right:
And this is how it looks when I move the camera up:
My first thought was that the WorldViewProjection matrix was off somehow, but I don't really see any problem with it.
void Renderer::CreateViewProjectionMatrix()
{
using namespace DirectX;
// Create world matrix
const XMMATRIX translation = XMMatrixTranslation(0.f, 0.f, 0.f);
const XMMATRIX rotation = XMMatrixRotationRollPitchYaw(0.f, 0.f, 0.f); // In radians
const XMMATRIX scale = XMMatrixScaling(1.f, 1.f, 1.f);
const XMMATRIX worldMatrix = scale * rotation * translation;
XMStoreFloat4x4(&m_VertexConstantBuffer.worldMatrix, worldMatrix);
// Create view matrix
const XMFLOAT3 cameraForward{ 0.f, 0.f, 1.f };
const XMFLOAT3 cameraUp{ 0.f, 1.f, 0.f };
const XMVECTOR worldPos = XMLoadFloat3(&m_CameraPos);
const XMVECTOR worldForward = XMLoadFloat3(&cameraForward);
const XMVECTOR worldUp = XMLoadFloat3(&cameraUp);
const XMMATRIX viewMatrix = XMMatrixLookToLH(worldPos, worldForward, worldUp);
// Create projection matrix
const float aspectRatioX = static_cast<float>(m_BackBufferDescription.Width) / m_BackBufferDescription.Height;
const float FOV{ 45.f };
const float nearZ{ 0.1f };
const float farZ{ 100.f };
const XMMATRIX projectionMatrix = XMMatrixPerspectiveFovLH(XMConvertToRadians(FOV), aspectRatioX, nearZ, farZ);
// Store WVP matrix
const XMMATRIX WVPMatrix = worldMatrix * viewMatrix * projectionMatrix;
XMStoreFloat4x4(&m_VertexConstantBuffer.worldViewProjection, WVPMatrix);
}
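As a sanity check I've been thinking of pushing one of the triangle's vertices through the matrix on the CPU to see where it ends up; something along these lines (just a sketch, this isn't in the repo):
using namespace DirectX;
// Would go right after WVPMatrix is computed above:
// run the triangle's top vertex through the WVP matrix
const XMVECTOR localPos = XMVectorSet(0.0f, 0.3f, 0.0f, 1.0f);
const XMVECTOR clipPos = XMVector4Transform(localPos, WVPMatrix);
XMFLOAT4 result;
XMStoreFloat4(&result, clipPos);
// After the perspective divide, result.x / result.w and result.y / result.w
// should stay within [-1, 1] if the vertex is on screen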
Here's how the triangle gets created in case you need it:
HRESULT Renderer::CreateTriangle()
{
// Create triangle geometry
const BaseVertexInput triangleVertices[] =
{
{ DirectX::XMFLOAT3{ -0.5f,-0.5f, 0.0f }, DirectX::XMFLOAT3{}, DirectX::XMFLOAT2{} },
{ DirectX::XMFLOAT3{ 0.5f,-0.5f, 0.0f }, DirectX::XMFLOAT3{}, DirectX::XMFLOAT2{} },
{ DirectX::XMFLOAT3{ 0.0f, 0.3f, 0.0f }, DirectX::XMFLOAT3{}, DirectX::XMFLOAT2{} }
};
// Create vertexBuffer
const CD3D11_BUFFER_DESC vertexDescription{ sizeof(triangleVertices), D3D11_BIND_VERTEX_BUFFER};
D3D11_SUBRESOURCE_DATA vertexData;
ZeroMemory(&vertexData, sizeof(D3D11_SUBRESOURCE_DATA)); // Zero-initialize the struct before filling it in
vertexData.pSysMem = triangleVertices; // Initialization data
vertexData.SysMemPitch = 0; // Distance from beginning line of texture to the next line (only for 2D & 3D texture)
vertexData.SysMemSlicePitch = 0; // Distance from beginning of one depth level to the next (only for 3D texture)
HRESULT result = m_pDevice->CreateBuffer
(
&vertexDescription,
&vertexData,
m_pVertexBuffer.GetAddressOf()
);
if (FAILED(result))
{
Logger::Log(L"ERROR - Failed to create a vertexBuffer");
return result;
}
// Create indexBuffer
const unsigned short triangleIndices[]
{
0,2,1
};
m_IndexCount = ARRAYSIZE(triangleIndices);
const CD3D11_BUFFER_DESC indexDescription{ sizeof(triangleIndices), D3D11_BIND_INDEX_BUFFER };
D3D11_SUBRESOURCE_DATA indexData;
ZeroMemory(&indexData, sizeof(D3D11_SUBRESOURCE_DATA));
indexData.pSysMem = triangleIndices;
indexData.SysMemPitch = 0;
indexData.SysMemSlicePitch = 0;
result = m_pDevice->CreateBuffer
(
&indexDescription,
&indexData,
m_pIndexBuffer.GetAddressOf()
);
if (FAILED(result))
{
Logger::Log(L"ERROR - Failed to create an indexBuffer");
return result;
}
// Creation success
m_SuccesfullCreation = true;
return result;
}
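For context, the buffers then get bound and drawn with the usual D3D11 calls, roughly like this (paraphrased, the exact code is in the repo; m_pContext here stands for the immediate device context):
const UINT stride = sizeof(BaseVertexInput);
const UINT offset = 0;
m_pContext->IASetVertexBuffers(0, 1, m_pVertexBuffer.GetAddressOf(), &stride, &offset);
m_pContext->IASetIndexBuffer(m_pIndexBuffer.Get(), DXGI_FORMAT_R16_UINT, 0); // 16-bit indices to match unsigned short
m_pContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
m_pContext->DrawIndexed(m_IndexCount, 0, 0);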
I've looked into the WVP matrix, my vertex shader, the creation of the vertex buffer, ...
I tried the Transpose(RightHandMatrix(...)) approach from the Microsoft guide, but then I just don't see any triangle at all.
Besides that I don't really have any clue where to look anymore. Definitely ask if there's more code I can/need to provide.
EDIT:
Here's my complete vertex shader as well:
cbuffer CB_World : register(b0) // Register for GPU access (b. for constant buffers)
{
matrix g_WorldViewProjection; // World to projection space
matrix g_World; // World space
};
struct VS_INPUT
{
float3 position : POSITION;
float3 normal : NORMAL;
float2 uv : TEXCOORD0;
};
struct VS_OUTPUT
{
float4 position : SV_POSITION; // System value
float3 normal : NORMAL;
float2 uv : TEXCOORD0;
};
VS_OUTPUT VSMain(VS_INPUT input)
{
VS_OUTPUT output;
output.position = mul(float4(input.position, 1.0), g_WorldViewProjection);
output.normal = normalize(mul(input.normal, (float3x3) g_World));
output.uv = input.uv;
return output;
}
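And the matching constant buffer struct on the C++ side looks roughly like this (the member names are taken from the XMStoreFloat4x4 calls above; the actual type name and declaration are in the repo):
struct VertexConstantBuffer
{
// Field order has to match CB_World in the shader
DirectX::XMFLOAT4X4 worldViewProjection; // g_WorldViewProjection
DirectX::XMFLOAT4X4 worldMatrix; // g_World
};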
GitHub: https://github.com/RenzoDepoortere/DistortedRender_AidProject
I've downloaded and built the project, and the problem seems to be a combination of two things:
1. XMMatrixLookAtLH being used instead of XMMatrixLookToLH. They treat the second argument differently. Passing 0.f, 0.f, 1.f as the second argument to XMMatrixLookToLH means that when the camera position (passed as the first argument) moves, the view direction is kept parallel to the z-axis. Passing 0.f, 0.f, 1.f as the second argument to XMMatrixLookAtLH means that when the camera position moves, the view direction changes to keep pointing at 0.f, 0.f, 1.f (that is, the camera rotates).
2. The WVP matrix has to be transposed before it is stored, since HLSL expects column-major matrices by default:
const XMMATRIX WVPMatrix = XMMatrixTranspose(worldMatrix * viewMatrix * projectionMatrix);
Result with XMMatrixLookToLH after the camera is moved to the right a bit:
Result with XMMatrixLookAtLH after the camera is moved to the right a bit, which also causes it to rotate to keep looking at the same 0.f, 0.f, 1.f point:
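Put together, a minimal sketch of the matrix store with both changes (the rest of CreateViewProjectionMatrix stays as posted; transposing the world matrix as well is my addition, since the shader multiplies with g_World the same way):
const XMMATRIX viewMatrix = XMMatrixLookToLH(worldPos, worldForward, worldUp); // translation only, no rotation toward a target
const XMMATRIX WVPMatrix = worldMatrix * viewMatrix * projectionMatrix;
XMStoreFloat4x4(&m_VertexConstantBuffer.worldViewProjection, XMMatrixTranspose(WVPMatrix));
XMStoreFloat4x4(&m_VertexConstantBuffer.worldMatrix, XMMatrixTranspose(worldMatrix));
Alternatively, the matrices in CB_World could be declared row_major in HLSL, in which case no transpose is needed on the CPU side.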