
I currently have a texturing problem with a mesh that is being generated using the marching tetrahedra algorithm. The code lives in an .fx HLSL file. The best way to describe the problem is that the texture appears to move with the position of the camera. For example, if the camera moves left or right, the texture moves left or right as well. The same thing happens when panning the camera left and right. (Isosurface texture problem)

It's a strange effect and hard to describe, so I've put together some images with the texture's AddressU and AddressV set to Clamp, which makes the problem easier to show.

http://i.imgur.com/JbyVZ.png

http://i.imgur.com/nDkB1.png

As you can see, moving the camera to the right also moves the texture to the right.

I'm probably missing something completely obvious, like multiplying by a certain matrix (which I've tried). Any help would be greatly appreciated.

Here is my pixel shader code.

float4 DiffusePS(SurfaceVertex IN) : SV_Target 
{ 
    float4 AmbientColor = float4(0.2, 0.2, 0.2, 1); 
    float AmbientIntensity = 0.2; 

    float4 Kd = 0.5; 
    float4 diffuseLight = 0.5; 

    float4 Pos = GetWorldSpacePos(IN.Pos); 
    float3 N = normalize(IN.N); 
    float3 L1 = normalize(LightPos1 - Pos.xyz); 
    float3 L2 = normalize(LightPos2 - Pos.xyz); 
    float3 L3 = normalize(LightPos3 - Pos.xyz); 

    float NdotL1 = max(0, dot(N, L1)); 
    float NdotL2 = max(0, dot(N, L2)); 
    float NdotL3 = max(0, dot(N, L3)); 

    float3 I = normalize(Pos.xyz); 
    float3 V = normalize(-Pos.xyz); 

    float4 vDiff = diffuseLight * Kd * NdotL1; 
    float4 vDiff2 = diffuseLight * Kd * NdotL2; 
    float4 vDiff3 = diffuseLight * Kd * NdotL3; 

    float3 Color = vDiff + vDiff2 + vDiff3; 
    float4 derp = rockTexture.Sample(RockSampler, IN.tex.xy); 

    return lerp(derp, float4(Color, 1), 0.5); 
}

Thanks for any help.

Edit:

#define MAX_METABALLS 400 
#define IOR    2.5 

#define PI 3.1415 

Buffer<float4> SampleDataBuffer; 

struct SampleData 
{ 
    float4 Pos : SV_Position; 
    float4 Field : TEXCOORD0; // Gradient in .xyz, value in .w 
}; 

struct SurfaceVertex 
{ 
    float4 Pos : SV_Position; 
    float3 N : NORMAL; 
    float2 tex : TEXCOORD; 
}; 

cbuffer constants 
{ 
    float R0Constant = ((1.0 - (1.0/IOR)) * (1.0 - (1.0/IOR)))/((1.0 + (1.0/IOR)) * (1.0 + (1.0/IOR))); 
    float R0Inv = 1.0 - ((1.0 - (1.0/IOR)) * (1.0 - (1.0/IOR)))/((1.0 + (1.0/IOR)) * (1.0 + (1.0/IOR))); 
}; 


cbuffer cb0 : register(b0) 
{ 
    row_major float4x4 ProjInv; 
    row_major float3x3 ViewIT; 
    row_major float4x4 WorldViewProj; 
    row_major float4x4 World; 

    uint NumMetaballs; 
    float4 Metaballs[MAX_METABALLS]; // .xyz -> metaball center, .w -> metaball squared radius 

    float3 ViewportOrg; 
    float3 ViewportSizeInv; 

    float3 LightPos1;  // view-space light position 1 
    float3 LightPos2;  // view-space light position 2 
    float3 LightPos3;  // view-space light position 3 


}; 

Texture2D rockTexture; 

SamplerState RockSampler 
{ 
    Filter = MIN_MAG_MIP_LINEAR; 
    AddressU = Wrap; 
    AddressV = Wrap; 
}; 



float4 GetWorldSpacePos(float4 WindowPos) 
{ 
    float4 ClipPos; 
    ClipPos.x = (2 * ((WindowPos.x - ViewportOrg.x) * ViewportSizeInv.x) - 1); 
    ClipPos.y = (-2 * ((WindowPos.y - ViewportOrg.y) * ViewportSizeInv.y) + 1); 
    ClipPos.z = ((WindowPos.z - ViewportOrg.z) * ViewportSizeInv.z); 
    ClipPos.w = 1; 

    float4 Pos; 
    Pos = mul(ClipPos, ProjInv); // backtransform clipspace position to get viewspace position 
    Pos.xyz /= Pos.w;    // re-normalize 


    return Pos; 
} 


// Metaball function 
// Returns metaball function value in .w and its gradient in .xyz 
float4 Metaball(float3 Pos, float3 Center, float RadiusSq) 
{ 
    float4 o; 

    float3 d = Pos - Center; 
    float DistSq = dot(d, d); 
    float InvDistSq = 1/DistSq; 

    o.xyz = -2 * RadiusSq * InvDistSq * InvDistSq * d; 
    o.w = RadiusSq * InvDistSq; 

    return o; 
} 


SamplerState TriLinearSampler 
{ 
    Filter = MIN_MAG_MIP_LINEAR; 
    AddressU = WRAP; 
    AddressV = WRAP; 
}; 


// Vertex shader calculates field contributions at each grid vertex 
SampleData SampleFieldVS(float3 Pos : POSITION) 
{ 
    SampleData o; 

    float3 WorldPos = mul(float4(Pos, 1), World).xyz; 

    // Sum up contributions from all metaballs 

    o.Field = 0; 

    for (uint i = 0; i<NumMetaballs; i++) 
    { 
     //o.Field += WorldPos.y; 
     o.Field += Metaball(WorldPos, Metaballs[i].xyz, Metaballs[i].w); 


    } 
    // Transform position and normals 


    o.Pos = mul(float4(Pos.xyz, 1), WorldViewProj); 
    o.Field.xyz = -normalize(mul(o.Field.xyz, ViewIT)); // we want normals in view space 


    // Generate in-out flags 

    return o; 
} 



SampleData PassThroughVS(SampleData IN) 
{ 
    SampleData OUT; 
    OUT = IN; 
    return OUT; 
} 

// Estimate where isosurface intersects grid edge with endpoints v0, v1 
SurfaceVertex CalcIntersection(SampleData v0, SampleData v1) 
{ 
    SurfaceVertex o; 

    // We're taking special care to generate bit-exact results regardless of traversal (v0,v1) or (v1, v0) 

    float t = (2.0 - (v0.Field.w + v1.Field.w))/(v1.Field.w - v0.Field.w); 

    o.Pos = 0.5 * (t * (v1.Pos - v0.Pos) + (v1.Pos + v0.Pos)); 
    o.N = 0.5 * (t * (v1.Field.xyz - v0.Field.xyz) + (v1.Field.xyz + v0.Field.xyz));  

    float4 worldPos = mul(World, o.Pos); 
    o.tex = worldPos.xy; 

    return o; 
} 

// This struct stores vertex indices of up to 4 edges from the input tetrahedron. The GS code below 
// uses these indices to index into the input vertex set for interpolation along those edges. 
// It basically encodes topology for the output triangle strip (of up to 2 triangles). 
struct TetrahedronIndices 
{ 
    uint4 e0; 
    uint4 e1; 
}; 

[MaxVertexCount(4)] 
void TessellateTetrahedraGS(lineadj SampleData In[4], inout TriangleStream<SurfaceVertex> Stream) 
{ 
    // construct index for this tetrahedron 
    uint index = (uint(In[0].Field.w > 1) << 3) | (uint(In[1].Field.w > 1) << 2) | (uint(In[2].Field.w > 1) << 1) | uint(In[3].Field.w > 1); 


    // don't bother if all vertices out or all vertices in 
    if (index > 0 && index < 15) 
    { 
     uint4 e0 = EdgeTableGS[index].e0; 
     uint4 e1 = EdgeTableGS[index].e1; 

     // Emit a triangle 
     Stream.Append(CalcIntersection(In[e0.x], In[e0.y])); 
     Stream.Append(CalcIntersection(In[e0.z], In[e0.w])); 
     Stream.Append(CalcIntersection(In[e1.x], In[e1.y])); 

     // Emit additional triangle, if necessary 
     if (e1.z != 0) { 
      Stream.Append(CalcIntersection(In[e1.z], In[e1.w])); 
     } 

    } 
} 

TextureCube EnvMap; 

float FresnelApprox(float3 I, float3 N) 
{ 
    return R0Constant + R0Inv * pow(1.0 - dot(I, N), 5.0); 
} 

float4 ShadeSurfacePS(SurfaceVertex IN) : SV_Target 
{ 
    float4 Pos = GetWorldSpacePos(IN.Pos); 

    float3 N = normalize(IN.N); 
    float3 L1 = normalize(LightPos1 - Pos.xyz); 
    float3 L2 = normalize(LightPos2 - Pos.xyz); 
    float3 L3 = normalize(LightPos3 - Pos.xyz); 
    float3 I = normalize(Pos.xyz); 

    float3 R = reflect(I, N); 

    float4 Reflected = EnvMap.Sample(TriLinearSampler, mul(ViewIT, R)); 

    float NdotL1 = max(0, dot(N, L1)); 
    float NdotL2 = max(0, dot(N, L2)); 
    float NdotL3 = max(0, dot(N, L3)); 

    float3 Color = NdotL1 * float3(1, 1, 1) + pow(max(dot(R, L1), 0), 32) 
        + NdotL2 * float3(0.65, 0.6, 0.45) + pow(max(dot(R, L2), 0), 32) 
        + NdotL3 * float3(0.7, 0.7, 0.8) + pow(max(dot(R, L3), 0), 32); 

    return lerp(EnvMap.Sample(TriLinearSampler, mul(ViewIT, R)), float4(Color, 1), FresnelApprox(I, N) * 0.05); 

} 

float4 SimplePS(SurfaceVertex IN, uniform float4 color) : SV_Target 
{ 
    return color; 
} 

float4 DiffusePS(SurfaceVertex IN) : SV_Target 
{ 

    float4 AmbientColor = float4(0.2, 0.2, 0.2, 1); 
    float AmbientIntensity = 0.2; 

    float4 Kd = 0.5; 
    float4 diffuseLight = 0.5; 

    float4 Pos = GetWorldSpacePos(IN.Pos); 
    float3 N = normalize(IN.N); 
    float3 L1 = normalize(LightPos1 - Pos.xyz); 
    float3 L2 = normalize(LightPos2 - Pos.xyz); 
    float3 L3 = normalize(LightPos3 - Pos.xyz); 

    float NdotL1 = max(0, dot(N, L1)); 
    float NdotL2 = max(0, dot(N, L2)); 
    float NdotL3 = max(0, dot(N, L3)); 

    float3 I = normalize(Pos.xyz); 
    float3 V = normalize(-Pos.xyz); 

    float4 vDiff = diffuseLight * Kd * NdotL1; 
    float4 vDiff2 = diffuseLight * Kd * NdotL2; 
    float4 vDiff3 = diffuseLight * Kd * NdotL3; 

    float3 Color = vDiff + vDiff2 + vDiff3; 
    float4 derp = rockTexture.Sample(RockSampler, IN.tex.xy); 

    return lerp(derp, float4(Color, 1), 0.5); 
    //return lerp(NoiseTexture.Sample(NoiseSampler, IN.tex), float4(Color, 1), FresnelApprox(V, N) * 0.05); 

    //return saturate(vDiff+vDiff2+vDiff3 + AmbientColor * AmbientIntensity); 


} 

DepthStencilState EnableDepthDSS 
{ 
    DepthEnable = true; 
    DepthWriteMask = 1; 
}; 

RasterizerState WireFrameRS 
{ 
    MultiSampleEnable = True; 
    CullMode = None; 
    FillMode = WireFrame; 
}; 

RasterizerState SolidRS 
{ 
    MultiSampleEnable = True; 
    CullMode = None; 
    FillMode = Solid; 
}; 


technique10 MarchingTetrahedraWireFrame 
{ 
    pass P0 
    { 
     SetRasterizerState(WireFrameRS); 
     SetDepthStencilState(EnableDepthDSS, 0); 

     SetVertexShader(CompileShader(vs_4_0, SampleFieldVS())); 
     SetGeometryShader(CompileShader(gs_4_0, TessellateTetrahedraGS())); 
     SetPixelShader(CompileShader(ps_4_0, SimplePS(float4(0.7, 0.7, 0.7, 1)))); 
    } 
} 

// Tessellate isosurface in a single pass 
technique10 MarchingTetrahedraSinglePassGS 
{ 
    pass P0 
    { 
     SetRasterizerState(SolidRS); 
     SetDepthStencilState(EnableDepthDSS, 0); 

     SetVertexShader(CompileShader(vs_4_0, SampleFieldVS())); 
     SetGeometryShader(CompileShader(gs_4_0, TessellateTetrahedraGS())); 
     SetPixelShader(CompileShader(ps_4_0, DiffusePS())); 
    } 
} 

// Tessellate isosurface in two passes, streaming out VS results in-between 
GeometryShader StreamOutGS = ConstructGSWithSO(CompileShader(vs_4_0, PassThroughVS()), "SV_Position.xyzw; TEXCOORD0.xyzw"); 

technique10 MarchingTetrahedraMultiPassGS 
{ 
    pass P0 
    { 
     SetVertexShader(CompileShader(vs_4_0, SampleFieldVS())); 
     SetGeometryShader(StreamOutGS); 
     SetPixelShader(NULL); 
    } 

    pass P1 
    { 

     SetRasterizerState(SolidRS); 
     SetDepthStencilState(EnableDepthDSS, 0); 

     SetVertexShader(CompileShader (vs_4_0, PassThroughVS())); 
     SetGeometryShader(CompileShader(gs_4_0, TessellateTetrahedraGS())); 
     SetPixelShader(CompileShader(ps_4_0, DiffusePS())); 
    } 
} 

Please show the rest of the .fx file. – CarlJohnson 2012-04-17 23:20:45


I've added the rest of the code. – programmersblock 2012-04-18 15:37:55


I see you're texturing it with some kind of square texture. Texturing with a box-projected texture or a volume texture basically works quite well, provided you can come up with a formula that is interesting and varied enough; it's a bit harder to write, though, and takes a whole day. – 2013-12-04 00:04:19
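
For readers unfamiliar with the box-projection idea mentioned in that comment, here is a minimal triplanar-style sketch of the sampling step, assuming the rockTexture and RockSampler already declared in the file and a world-space position and normal as inputs (the function name and parameters are just illustrative):

float4 SampleTriplanar(float3 worldPos, float3 worldN) 
{ 
    // Blend weights from the world-space normal, sharpened so the dominant axis wins 
    float3 w = pow(abs(worldN), 4.0); 
    w /= (w.x + w.y + w.z); 

    // Project the same 2D texture along each world axis 
    float4 cx = rockTexture.Sample(RockSampler, worldPos.yz); 
    float4 cy = rockTexture.Sample(RockSampler, worldPos.xz); 
    float4 cz = rockTexture.Sample(RockSampler, worldPos.xy); 

    // Blend the three planar projections 
    return cx * w.x + cy * w.y + cz * w.z; 
} 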

Answer


The texture coordinates are currently produced, in the rest of the .fx file, by multiplying the vertex position by the World matrix:

CalcIntersection() 
.... 
    float4 worldPos = mul(World, o.Pos); 
    o.tex = worldPos.xy; 
.... 

So for camera panning to change the result, one of those inputs must be changing. My guess is that it's the World matrix (i.e. the camera pan is actually implemented by moving the object), so try switching to a texture-coordinate generation method that doesn't depend on the World matrix. For example:

CalcIntersection() 
... 
    o.tex = o.Pos.xy; 
... 


Edit: Since that didn't work, it must be the position itself that is being modified by the camera pan, and looking in SampleFieldVS() there is one line that appears to do exactly that:

o.Pos = mul(float4(Pos.xyz, 1), WorldViewProj); 

So what you need to do is save the position before it gets modified. You have to store it in SampleData, so add this at the end of the SampleData struct:

float2 tex1 : TEXCOORD1; 

Then, at the end of SampleFieldVS(), add:

o.tex1 = mul(float4(Pos.xyz, 0), World).xy; 

Finally, delete the lines 'float4 worldPos = mul(World, o.Pos); o.tex = worldPos.xy;' at the end of CalcIntersection() and replace them with:

o.tex = 0.5 * (t * (v1.tex1 - v0.tex1) + (v1.tex1 + v0.tex1)); 
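
For clarity, here is a minimal sketch of the affected parts with all three edits applied (assuming the rest of the posted .fx file stays the same; tex1 is just the name used above):

struct SampleData 
{ 
    float4 Pos : SV_Position; 
    float4 Field : TEXCOORD0; // Gradient in .xyz, value in .w 
    float2 tex1 : TEXCOORD1; // world-space coordinates saved before projection 
}; 

SampleFieldVS() 
.... 
    o.Pos = mul(float4(Pos.xyz, 1), WorldViewProj); 
    o.Field.xyz = -normalize(mul(o.Field.xyz, ViewIT)); 
    o.tex1 = mul(float4(Pos.xyz, 0), World).xy; // save world-space XY for texturing 
.... 

CalcIntersection() 
.... 
    // interpolate the saved coordinates instead of deriving them from the projected o.Pos 
    o.tex = 0.5 * (t * (v1.tex1 - v0.tex1) + (v1.tex1 + v0.tex1)); 
.... 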

Thanks, but unfortunately that didn't fix it. I should also add that rotating the camera view left and right also moves the texture's position. It's as if the texture is always centered on the screen, if that makes sense. I'm not passing any texture coordinates from the .cpp file, I generate them purely in the shader; is that correct? – programmersblock 2012-04-19 00:01:05


OK, I've edited the answer. – CarlJohnson 2012-04-19 08:26:05


Thank you very much, that has fixed the single-pass version. However, the multi-pass version is still not correct; is that because of StreamOutGS? – programmersblock 2012-04-19 21:08:11
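
One thing worth checking for the multi-pass path: the stream-out declaration for StreamOutGS in the posted file only captures SV_Position and TEXCOORD0, so a TEXCOORD1 member added to SampleData would be dropped between pass P0 and pass P1. A minimal sketch of the extended declaration, assuming the struct change suggested in the answer, might be:

GeometryShader StreamOutGS = ConstructGSWithSO(CompileShader(vs_4_0, PassThroughVS()), 
    "SV_Position.xyzw; TEXCOORD0.xyzw; TEXCOORD1.xy"); 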