2014-04-01 22 views
0

我試圖場景渲染到兩個紋理(左,右)與Oculus Rift以使用。當我將渲染目標設置爲2D紋理渲染視圖並調用DrawIndexed()時,它將渲染到後臺緩衝區而不是紋理。我正在使用Visual Studio,並且已經在其上運行了圖形診斷。在DrawIndexed()事件中,它顯示渲染目標是紋理,但像素歷史記錄不顯示事件。如果我不清除後緩衝器,場景會顯示在屏幕上。D3D11 DrawIndexed()被繪製到錯誤的渲染目標

在下面的代碼,所述RenderLeft()函數應呈現綠色背景設置爲左渲染紋理渲染目標的平面的圖像。然後RenderRight()應該採用由RenderLeft()渲染的紋理,並將其渲染到平面,然後將其輸出到後臺緩衝區。 (注意:這不是正常的設置,只是爲了看看紋理是否被渲染)

在最終的輸出中,屏幕左側沒有任何東西,在右側應該是黑色背景上的綠色矩形內的源圖像。

相反,我得到這樣的:http://i.imgur.com/dHX5Ed3.png?1

RenderLeft被渲染到後臺緩衝區,即使渲染目標是紋理,所以再由RenderRight使用質地只是用來清除它的顏色。

這是我目前使用的代碼。我想我已經包括了所有相關的東西。

// this is the function used to render a single frame 
void Direct3D::RenderFrame() 
{ 

CreateTransforms(); //this creates matFinalLeft and matFinalRight, which is (world matrix)*(view matrix)*(projection matrix) with the proper offsets for a stereoscopic view. 

setVertices(); //this sets the vertex and index buffers. 

setMainShaders(); // this sets the shaders used to render the 3D scene 

RenderLeft(pTextureLeftRenderView, matFinalLeft, viewportLeft, true); //this renders an image to a plane on a green background. It SHOULD render to a texture. 

RenderRight(backbuffer, matFinalRight, viewportRight, false);//this renders the render target from RenderLeft to the plane and renders to the back buffer. 

swapchain->Present(0, 0); //output back buffer to screen. 
} 

這部分應該呈現一個矩形紋理與一個圖像在渲染紋理的左側。

//Render the scene to the left side of a texture 
void Direct3D::RenderLeft(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget){ 

devcon->OMSetRenderTargets(1, &RenderTarget, zbuffer); 

devcon->RSSetViewports(1, &viewport); 

// update shader resources 
devcon->UpdateSubresource(pCBufferPrimaryShader, 0, 0, &matFinal, 0, 0); 
devcon->PSSetShaderResources(0, 1, &pTextureLeftResourceView); 

// clear the depth buffer and render target texture 
devcon->ClearDepthStencilView(zbuffer, D3D11_CLEAR_DEPTH, 1.0f, 0); 
if (clearRenderTarget){ 
    devcon->ClearRenderTargetView(RenderTarget, D3DXCOLOR(0.0f, 1.0f, 0.0f, 1.0f)); 
} 

// render to texture on left side (oculus) or full texture 
devcon->DrawIndexed(6, 0, 0); 
} 

這部分應該渲染一個矩形,紋理從RenderLeft()到後臺緩衝區。

//Render the scene to the right side of the back buffer 
void Direct3D::RenderRight(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget){ 

//render to texture 
devcon->OMSetRenderTargets(1, &RenderTarget, zbuffer); 

devcon->RSSetViewports(1, &viewport); 

// update shader resources 
devcon->UpdateSubresource(pCBufferPrimaryShader, 0, 0, &matFinal, 0, 0); 
devcon->PSSetShaderResources(0, 1, &pRenderTextureLeftResourceView); 


// clear the depth buffer and render target texture 
devcon->ClearDepthStencilView(zbuffer, D3D11_CLEAR_DEPTH, 1.0f, 0); 
if (clearRenderTarget){ 
    devcon->ClearRenderTargetView(RenderTarget, D3DXCOLOR(0.0f, 0.0f, 1.0f, 1.0f)); 
} 

// render to texture on left side (oculus) or full texture 
devcon->DrawIndexed(6, 0, 0); 
} 

最後,創建了各種意見和視口

void Direct3D::InitD3D(HWND hWnd) 
{ 
// create a struct to hold information about the swap chain 
DXGI_SWAP_CHAIN_DESC scd; 

// clear out the struct for use 
ZeroMemory(&scd, sizeof(DXGI_SWAP_CHAIN_DESC)); 

// fill the swap chain description struct 
scd.BufferCount = 1;         // one back buffer 
scd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;  // use 32-bit color 
scd.BufferDesc.Width = screen_width; 
scd.BufferDesc.Height = screen_height; 
scd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;  // how swap chain is to be used 
scd.OutputWindow = hWnd;        // the window to be used 
scd.SampleDesc.Count = 4;        // how many multisamples 
scd.Windowed = TRUE;         // windowed/full-screen mode 
scd.Flags = DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH; 

// create a device, device context and swap chain using the information in the scd struct 
D3D11CreateDeviceAndSwapChain(NULL, 
    D3D_DRIVER_TYPE_HARDWARE, 
    NULL, 
    NULL, 
    NULL, 
    NULL, 
    D3D11_SDK_VERSION, 
    &scd, 
    &swapchain, 
    &dev, 
    NULL, 
    &devcon); 

// create the depth buffer texture 
D3D11_TEXTURE2D_DESC texd; 
ZeroMemory(&texd, sizeof(texd)); 

texd.Width = screen_width; 
texd.Height = screen_height; 
texd.ArraySize = 1; 
texd.MipLevels = 1; 
texd.SampleDesc.Count = 4; 
texd.Format = DXGI_FORMAT_D32_FLOAT; 
texd.BindFlags = D3D11_BIND_DEPTH_STENCIL; 

ID3D11Texture2D *pDepthBuffer; 
dev->CreateTexture2D(&texd, NULL, &pDepthBuffer); 

// create the depth buffer 
D3D11_DEPTH_STENCIL_VIEW_DESC dsvd; 
ZeroMemory(&dsvd, sizeof(dsvd)); 

dsvd.Format = DXGI_FORMAT_D32_FLOAT; 
dsvd.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DMS; 

dev->CreateDepthStencilView(pDepthBuffer, &dsvd, &zbuffer); 
pDepthBuffer->Release(); 

// get the address of the back buffer 
ID3D11Texture2D *pBackBuffer; 

swapchain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&pBackBuffer); 

// use the back buffer address to create the render target 
dev->CreateRenderTargetView(pBackBuffer, NULL, &backbuffer); 
pBackBuffer->Release(); 

//create intermediate render textures 
ID3D11Texture2D *pRenderTextureLeft; 

D3D11_TEXTURE2D_DESC textureDesc; 
D3D11_RENDER_TARGET_VIEW_DESC renderTargetViewDesc; 
D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc; 

ZeroMemory(&textureDesc, sizeof(textureDesc)); 


textureDesc.Width = screen_width; 
textureDesc.Height = screen_height; 
if (oculus){ 
    textureDesc.Width = (UINT)((FLOAT)textureDesc.Width * oculus->renderScale); 
    textureDesc.Height = (UINT)((FLOAT)textureDesc.Height *oculus->renderScale); 
} 
textureDesc.MipLevels = 1; 
textureDesc.ArraySize = 1; 
textureDesc.Format = DXGI_FORMAT_R32G32B32A32_FLOAT; 
textureDesc.SampleDesc.Count = 1; 
textureDesc.Usage = D3D11_USAGE_DEFAULT; 
textureDesc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE; 
textureDesc.CPUAccessFlags = 0; 
textureDesc.MiscFlags = 0; 

dev->CreateTexture2D(&textureDesc, NULL, &pRenderTextureLeft); 

renderTargetViewDesc.Format = textureDesc.Format; 
renderTargetViewDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; 
renderTargetViewDesc.Texture2D.MipSlice = 0; 

dev->CreateRenderTargetView(pRenderTextureLeft, &renderTargetViewDesc, &pTextureLeftRenderView); 

shaderResourceViewDesc.Format = textureDesc.Format; 
shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D; 
shaderResourceViewDesc.Texture2D.MostDetailedMip = 0; 
shaderResourceViewDesc.Texture2D.MipLevels = 1; 

dev->CreateShaderResourceView(pRenderTextureLeft, &shaderResourceViewDesc, &pRenderTextureLeftResourceView); 

ID3D11Texture2D *pRenderTextureRight; 

dev->CreateTexture2D(&textureDesc, NULL, &pRenderTextureRight); 

dev->CreateRenderTargetView(pRenderTextureRight, &renderTargetViewDesc, &pTextureRightRenderView); 

dev->CreateShaderResourceView(pRenderTextureRight, &shaderResourceViewDesc, &pRenderTextureRightResourceView); 

/*if (oculus){ 
    pOculusOutputDevice = oculus->searchForOculusDisplay(oculus->hmd.DisplayDeviceName); 
    swapchain->SetFullscreenState(TRUE, pOculusOutputDevice); 
}*/ 

// Set the viewport 

ZeroMemory(&viewportLeft, sizeof(D3D11_VIEWPORT)); 
ZeroMemory(&viewportRight, sizeof(D3D11_VIEWPORT)); 
ZeroMemory(&viewportCenter, sizeof(D3D11_VIEWPORT)); 

viewportCenter.TopLeftX = 0.0f; 
viewportCenter.TopLeftY = 0.0f; 
if (oculus){ 
    viewportCenter.Width = (FLOAT)screen_width*oculus->renderScale; 
    viewportCenter.Height = (FLOAT)screen_height*oculus->renderScale; 
} 
else{ 
    viewportCenter.Width = (FLOAT)screen_width; 
    viewportCenter.Height = (FLOAT)screen_height; 
} 
viewportCenter.MinDepth = 0.0f; 
viewportCenter.MaxDepth = 1.0f; 

if (dual_mode){ 
    viewportLeft.TopLeftX = 0.0f; 
    viewportLeft.TopLeftY = 0.0f; 
    viewportLeft.Width = (FLOAT)screen_width/2.0f; 
    viewportLeft.Height = (FLOAT)screen_height; 
    viewportLeft.MinDepth = 0.0f; 
    viewportLeft.MaxDepth = 1.0f; 

    viewportRight.TopLeftX = (FLOAT)screen_width/2.0f; 
    viewportRight.TopLeftY = 0.0f; 
    viewportRight.Width = (FLOAT)screen_width/2.0f; 
    viewportRight.Height = (FLOAT)screen_height; 
    viewportRight.MinDepth = 0.0f; 
    viewportRight.MaxDepth = 1.0f; 
} 

devcon->RSSetViewports(1, &viewportCenter); 

InitPipeline(); 
InitGraphics(); 
} 

每請求的代碼,這裏是一些代碼:

我在內的整個Direct3D的類的頭,所以你可以看到什麼是和不是成員變量。

#pragma once 

#include "Oculus.h" 
#include <OVR.h> 
#include "Camera.h" 

#include <d3d11.h> 
#include <D3DX11.h> 
#include <D3DX10.h> 

#pragma comment (lib, "d3d11.lib") 
#pragma comment (lib, "d3dx11.lib") 
#pragma comment (lib, "d3dx10.lib") 

class Direct3D 
{ 
public: 
struct VERTEX{ FLOAT X, Y, Z; D3DXCOLOR Color; FLOAT U, V; }; 
struct DISTORTION{ 
    FLOAT LensCenter[2]; 
    FLOAT ScreenCenter[2]; 
    FLOAT Scale[2]; 
    FLOAT ScaleIn[2]; 
    FLOAT HmdWarpParam[4]; 
}; 

IDXGISwapChain *swapchain;    // the pointer to the swap chain interface 
ID3D11Device *dev;      // the pointer to our Direct3D device interface 
ID3D11DeviceContext *devcon;   // the pointer to our Direct3D device context 
ID3D11RenderTargetView *backbuffer; 
IDXGIOutput* pOculusOutputDevice; 
ID3D11VertexShader *pVS_Primary; // the vertex shader 
ID3D11PixelShader *pPS_Primary;  // the pixel shader 
ID3D11VertexShader *pVS_Distortion; 
ID3D11PixelShader *pPS_Distortion;  // the pixel shader 
ID3D11Buffer *pVBuffer;  //vertec buffer 
ID3D11Buffer *pIBuffer; 
ID3D11InputLayout *pLayout_Primary; 
ID3D11InputLayout *pLayout_Distortion; 
D3D11_VIEWPORT viewportLeft; 
D3D11_VIEWPORT viewportRight; 
D3D11_VIEWPORT viewportCenter; 
ID3D11Buffer *pCBufferPrimaryShader; 
ID3D11Buffer *pCBufferDistortionShader; 
ID3D11DepthStencilView *zbuffer;  // the pointer to our depth buffer 
ID3D11ShaderResourceView *pTextureLeftResourceView; // the pointer to the texture 
ID3D11ShaderResourceView *pTextureRightResourceView; 
ID3D11ShaderResourceView *pRenderTextureLeftResourceView; 
ID3D11ShaderResourceView *pRenderTextureRightResourceView; 
ID3D11RenderTargetView *pTextureLeftRenderView; 
ID3D11RenderTargetView *pTextureRightRenderView; 
D3DXMATRIX matFinalLeft; 
D3DXMATRIX matFinalRight; 

Camera cameraLeft, cameraRight; 

int screen_width; 
int screen_height; 

bool dual_mode; 

Oculus* oculus; 

Direct3D(Oculus* oculus); 
Direct3D(); 
~Direct3D(); 

void InitD3D(HWND hWnd);  // sets up and initializes Direct3D 
void CleanD3D(void);   // closes Direct3D and releases memory 
void RenderFrame(); 
void InitPipeline(); 
void InitGraphics(); 
void RenderLeft(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget); 
void RenderRight(ID3D11RenderTargetView *RenderTarget, D3DXMATRIX matFinal, D3D11_VIEWPORT viewport, bool clearRenderTarget); 
void DistortionCorrection(ID3D11RenderTargetView *RenderTarget); 
void CreateTransforms(); 
void setVertices(); 
void setMainShaders(); 
void OVRMatrix4fToD3DXMatrix(OVR::Matrix4f& source, D3DXMATRIX& dest); 
}; 

這裏是初始化圖像的紋理(現在他們加載相同的圖像以兩個不同質地的代碼。它最終將成爲3D圖像的兩面。只是當我搞清楚如何訪問第二圖像文件中)

文件名已經#defined作爲圖像文件的名稱,我顯示

void Direct3D::InitGraphics() 
{ 


D3DX11CreateShaderResourceViewFromFile(dev,  // the Direct3D device 
    FILENAME, // load Wood.png in the local folder 
    NULL,   // no additional information 
    NULL,   // no multithreading 
    &pTextureLeftResourceView,  // address of the shader-resource-view 
    NULL);   // no multithreading 

D3DX11CreateShaderResourceViewFromFile(dev,  // the Direct3D device 
    FILENAME, // load Wood.png in the local folder 
    NULL,   // no additional information 
    NULL,   // no multithreading 
    &pTextureRightResourceView,  // address of the shader-resource-view 
    NULL);   // no multithreading 

// get image size for rectangle mesh size 
D3DX11_IMAGE_INFO info; 
D3DX11GetImageInfoFromFile(FILENAME, NULL, &info, NULL); 

FLOAT textureWidth = info.Width*0.001f; 
FLOAT textureHeight = info.Height*0.001f; 

// create vertices to represent the corners of the cube 
VERTEX OurVertices[] = 
{ 
    { -textureWidth, -textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 1.0f, 1.0f }, 
    { textureWidth, -textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 0.0f, 1.0f }, 
    { -textureWidth, textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 1.0f, 0.0f }, 
    { textureWidth, textureHeight, 2.0f, D3DXCOLOR(1.0f, 1.0f, 1.0f, 1.0f), 0.0f, 0.0f } 
}; 

// create the vertex buffer 
D3D11_BUFFER_DESC bd; 
ZeroMemory(&bd, sizeof(bd)); 

bd.Usage = D3D11_USAGE_DYNAMIC; 
bd.ByteWidth = sizeof(VERTEX)* 4; 
bd.BindFlags = D3D11_BIND_VERTEX_BUFFER; 
bd.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; 

dev->CreateBuffer(&bd, NULL, &pVBuffer); 

// copy the vertices into the buffer 
D3D11_MAPPED_SUBRESOURCE ms; 
devcon->Map(pVBuffer, NULL, D3D11_MAP_WRITE_DISCARD, NULL, &ms); // map the buffer 
memcpy(ms.pData, OurVertices, sizeof(OurVertices));     // copy the data 
devcon->Unmap(pVBuffer, NULL); 


// create the index buffer out of DWORDs 
DWORD OurIndices[] = 
{ 
    0, 1, 2, // side 1 
    2, 1, 3, 
}; 

// create the index buffer 
bd.Usage = D3D11_USAGE_DYNAMIC; 
bd.ByteWidth = sizeof(DWORD)* 6; 
bd.BindFlags = D3D11_BIND_INDEX_BUFFER; 
bd.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; 
bd.MiscFlags = 0; 

dev->CreateBuffer(&bd, NULL, &pIBuffer); 

devcon->Map(pIBuffer, NULL, D3D11_MAP_WRITE_DISCARD, NULL, &ms); // map the buffer 
memcpy(ms.pData, OurIndices, sizeof(OurIndices));     // copy the data 
devcon->Unmap(pIBuffer, NULL); 
} 

以防萬一你需要它,這裏的初始化渲染管道。

void Direct3D::InitPipeline() 
{ 
// compile the shaders 
ID3D10Blob *VS_Primary, *PS_Primary, *VS_Distortion, *PS_Distortion; 
D3DX11CompileFromFile("vs_primary.hlsl", 0, 0, "VShader", "vs_5_0", 0, 0, 0, &VS_Primary, 0, 0); 
D3DX11CompileFromFile("ps_primary.hlsl", 0, 0, "PShader", "ps_5_0", 0, 0, 0, &PS_Primary, 0, 0); 
D3DX11CompileFromFile("vs_distortion.hlsl", 0, 0, "VShader", "vs_5_0", 0, 0, 0, &VS_Distortion, 0, 0); 
D3DX11CompileFromFile("ps_distortion.hlsl", 0, 0, "main", "ps_5_0", 0, 0, 0, &PS_Distortion, 0, 0); 

// create the shader objects 
dev->CreateVertexShader(VS_Primary->GetBufferPointer(), VS_Primary->GetBufferSize(), NULL, &pVS_Primary); 
dev->CreatePixelShader(PS_Primary->GetBufferPointer(), PS_Primary->GetBufferSize(), NULL, &pPS_Primary); 
dev->CreateVertexShader(VS_Distortion->GetBufferPointer(), VS_Distortion->GetBufferSize(), NULL, &pVS_Distortion); 
dev->CreatePixelShader(PS_Distortion->GetBufferPointer(), PS_Distortion->GetBufferSize(), NULL, &pPS_Distortion); 

// set the shader objects 
devcon->VSSetShader(pVS_Primary, 0, 0); 
devcon->PSSetShader(pPS_Primary, 0, 0); 

// create the input element object 
D3D11_INPUT_ELEMENT_DESC ied[] = 
{ 
    { "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 }, 
    { "COLOR", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }, 
    { "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 28, D3D11_INPUT_PER_VERTEX_DATA, 0 }, 
}; 

// use the input element descriptions to create the input layout 
dev->CreateInputLayout(ied, 3, VS_Primary->GetBufferPointer(), VS_Primary->GetBufferSize(), &pLayout_Primary); 
devcon->IASetInputLayout(pLayout_Primary); 

dev->CreateInputLayout(ied, 3, VS_Distortion->GetBufferPointer(), VS_Distortion->GetBufferSize(), &pLayout_Distortion); 
devcon->IASetInputLayout(pLayout_Distortion); 
// create the constant buffer 

D3D11_BUFFER_DESC bd; 
ZeroMemory(&bd, sizeof(bd)); 

bd.Usage = D3D11_USAGE_DEFAULT; 
bd.ByteWidth = 64; 
bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER; 

dev->CreateBuffer(&bd, NULL, &pCBufferPrimaryShader); 

devcon->VSSetConstantBuffers(0, 1, &pCBufferPrimaryShader); 

ZeroMemory(&bd, sizeof(bd)); 

bd.Usage = D3D11_USAGE_DEFAULT; 
bd.ByteWidth = 48; 
bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER; 
dev->CreateBuffer(&bd, NULL, &pCBufferDistortionShader); 
} 

的Pixel Shader:

Texture2D Texture; 
SamplerState ss; 

float4 PShader(float4 color : COLOR, float2 texcoord : TEXCOORD0) : SV_TARGET 
{ 
return color * Texture.Sample(ss, texcoord); 
} 

頂點着色器:

cbuffer ConstantBuffer 
{ 
float4x4 matFinal; 
} 

struct VOut 
{ 
float4 color : COLOR; 
float2 texcoord : TEXCOORD0; 
float4 position : SV_POSITION; 
}; 

VOut VShader(float4 position : POSITION, float4 color : COLOR, float2 texcoord : TEXCOORD0) 
{ 
VOut output; 

output.position = mul(matFinal, position); 
output.color = color; 
output.texcoord = texcoord; 

return output; 
} 

回答

0

從下面的代碼,我沒有看到你是如何從RenderLeft()來RenderRight通過紋理() 。你只需將backbuffer傳遞給RenderRight()即可。

RenderLeft(pTextureLeftRenderView, matFinalLeft, viewportLeft, true); 
RenderRight(backbuffer, matFinalRight, viewportRight, false); 

所以結果是渲染到左視口的紋理和右視口只顯示backbuffer的顏色(綠色)。

+0

我想我沒有那麼清楚。 – tokomonster

+0

pRenderTextureLeftResourceView是鏈接到與pTextureLeftRenderView相同的紋理的資源視圖。它們都是Direct3D類的成員變量,所以它們不必傳遞給函數。我只添加了參數,因此當我想測試某些內容時,更改事情會更容易。 RenderLeft應呈現給pTextureLeftRenderView,然後在RenderRight中設置相同紋理的着色器資源。 RenderRight沒有使用錯誤的紋理。問題是,當後臺緩衝區不是渲染目標時,RenderLeft正在渲染到後臺緩衝區。 – tokomonster

+0

請問您能展示更多代碼嗎?特別是加載和繪製狗紋理的代碼。 – zdd