Tags: c++, directx, directx-11, direct3d11

Why does Map() fail on a default-usage texture despite UnifiedMemoryArchitecture and MapOnDefaultTextures?


The docs suggest that default-usage textures can be mapped on UMA architectures, such as Intel integrated graphics, with Direct3D 11.3.

I tried to achieve this, but Map() always fails with E_INVALIDARG.

I am quite new to C++ and DirectX, but below is what I believe to be a minimal test case. Please don't hesitate to point out any stupidity I am committing.

I am running this on a notebook with Windows 10 1809 and an Intel Skylake i5-6300U with HD Graphics 520.

#include "pch.h"
#include <iostream>
#include <dxgi1_6.h>
#include <d3d.h>
#include <d3d11_4.h>
#include <assert.h>

int main()
{
    HRESULT res = S_OK;
    ID3D11Device *Dev = nullptr;
    ID3D11DeviceContext *Ctx = nullptr;
    D3D_FEATURE_LEVEL Fl;
    D3D_FEATURE_LEVEL fls[1] = { D3D_FEATURE_LEVEL_11_1 };

    res = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, D3D11_CREATE_DEVICE_DEBUG | D3D11_CREATE_DEVICE_BGRA_SUPPORT, fls, 1, D3D11_SDK_VERSION, &Dev, &Fl, &Ctx);
    assert(res == S_OK);
    assert(Fl == D3D_FEATURE_LEVEL_11_1);

    ID3D11Device5 *Dev5 = nullptr;
    res = Dev->QueryInterface<ID3D11Device5>(&Dev5);
    assert(res == S_OK);
    Dev->Release();
    Dev = nullptr;

    ID3D11DeviceContext4 *Ctx4;
    res = Ctx->QueryInterface<ID3D11DeviceContext4>(&Ctx4);
    assert(res == S_OK);
    Ctx->Release();
    Ctx = nullptr;

    D3D11_FEATURE_DATA_D3D11_OPTIONS2 opts2;
    res = Dev5->CheckFeatureSupport(D3D11_FEATURE_D3D11_OPTIONS2, &opts2, sizeof(opts2));
    assert(res == S_OK);
    assert(opts2.MapOnDefaultTextures);
    assert(opts2.UnifiedMemoryArchitecture);

    D3D11_TEXTURE2D_DESC1 texDesc = { 0 };
    texDesc.ArraySize = 1;
    texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_UNORDERED_ACCESS;
    texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
    texDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
    texDesc.Height = 256;
    texDesc.Width = 256;
    texDesc.MipLevels = 1;
    texDesc.MiscFlags = 0;
    texDesc.SampleDesc.Count = 1;
    texDesc.SampleDesc.Quality = 0;
    texDesc.TextureLayout = D3D11_TEXTURE_LAYOUT_UNDEFINED;
    texDesc.Usage = D3D11_USAGE_DEFAULT;

    byte mem[256 * 256 * 4];
    ZeroMemory(mem, 256 * 256 * 4);

    D3D11_SUBRESOURCE_DATA data = { 0 };
    data.pSysMem = mem;
    data.SysMemPitch = 256 * 4;

    ID3D11Texture2D1 *tex2d;
    res = Dev5->CreateTexture2D1(&texDesc, &data, &tex2d);
    assert(res == S_OK);

    D3D11_MAPPED_SUBRESOURCE map = { 0 };
    // I believe at least one of these should succeed, but all fail
    res = Ctx4->Map(tex2d, 0, D3D11_MAP_READ, 0, &map);
    //res = Ctx4->Map(tex2d, 0, D3D11_MAP_WRITE, 0, &map);
    //res = Ctx4->Map(tex2d, 0, D3D11_MAP_READ_WRITE, 0, &map);
    assert(res == S_OK); // E_INVALIDARG
}

I believe the Map() call should succeed, but it fails with E_INVALIDARG.

EDIT: I tried D3D11_TEXTURE_LAYOUT_ROW_MAJOR and D3D11_TEXTURE_LAYOUT_64K_STANDARD_SWIZZLE too, but then CreateTexture2D1() fails with E_INVALIDARG. Maybe my hardware doesn't support those modes?
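
For what it's worth, the same OPTIONS2 structure queried in the test case also has a StandardSwizzle member, which reports whether the 64K standard swizzle pattern is supported at all. A quick check like the sketch below (reusing Dev5 from above; opts2Check is just a local name I picked) would at least show whether the D3D11_TEXTURE_LAYOUT_64K_STANDARD_SWIZZLE failure is simply missing driver support; that is my assumption, not something the docs state for this exact case.

// Sketch only: report whether the driver claims support for the 64K
// standard swizzle layout. A FALSE here would be my guess as to why
// D3D11_TEXTURE_LAYOUT_64K_STANDARD_SWIZZLE is rejected at creation time.
D3D11_FEATURE_DATA_D3D11_OPTIONS2 opts2Check = {};
if (SUCCEEDED(Dev5->CheckFeatureSupport(D3D11_FEATURE_D3D11_OPTIONS2, &opts2Check, sizeof(opts2Check))))
{
    std::cout << "StandardSwizzle: " << (opts2Check.StandardSwizzle ? "TRUE" : "FALSE") << std::endl;
}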


Solution

  • I think the issue is the restriction described in the documentation:

    It is illegal to set CPU access flags on default textures without also setting TextureLayout to a value other than D3D11_TEXTURE_LAYOUT_UNDEFINED.

    Your description sets D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE but leaves TextureLayout at D3D11_TEXTURE_LAYOUT_UNDEFINED, which is exactly that combination and would explain the E_INVALIDARG from Map().
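
    Something along the lines of the sketch below is what the quoted rule asks for. It is only a sketch: it reuses your exact description and merely sets an explicit layout, and whether CreateTexture2D1() then accepts it still depends on the driver (you report E_INVALIDARG with both non-UNDEFINED layouts on HD Graphics 520, so layout support on that hardware is a separate question).

    // Sketch: the same texture description, but with an explicit layout so
    // that CPU access flags are legal on a DEFAULT-usage texture per the
    // quoted rule. Driver support for the chosen layout is not guaranteed.
    D3D11_TEXTURE2D_DESC1 texDesc = { 0 };
    texDesc.Width = 256;
    texDesc.Height = 256;
    texDesc.MipLevels = 1;
    texDesc.ArraySize = 1;
    texDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
    texDesc.SampleDesc.Count = 1;
    texDesc.Usage = D3D11_USAGE_DEFAULT;
    texDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_UNORDERED_ACCESS;
    texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
    texDesc.TextureLayout = D3D11_TEXTURE_LAYOUT_ROW_MAJOR; // anything but UNDEFINED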