Introduction to my galaxy engine 2: Depth of field

Following the "DOF via Simulation of Circle of Confusion" technique from ShaderX2 (see also http://amdstaging.wpengine.com/wordpress/media/2012/10/Scheuermann_DepthOfField.pdf), I added a depth-of-field module to the engine. The basic idea is to size the blur kernel according to an object's distance from the focal plane, so objects far from the focal plane get blurred.

Video: http://my.ku6.com/watch?v=c-wjiz_TdM2S7MkC_MNwpw..#

1. First, render the scene's colors into a render target. The shading formula is up to you; this is what I use:

output.Color = saturate( dot(input.Norm, normalize(SunLightDir)) * vLightColor + diffuse + vAmbiantColor );

2. Then store the scene depth and the blur factor in the R and G channels of a second render target.

The blur factor is computed from the depth as follows:

float blur = saturate(abs(input.Depth - focalDist) / focalRange);

Here input.Depth is the fragment's depth, focalDist is the depth of the focal plane, and focalRange is the focus range; computed this way, blur always falls in the range 0 to 1. For example, with focalDist = 10 and focalRange = 5, a fragment at depth 12 gets blur = |12 - 10| / 5 = 0.4.

output.Depth = float4(depth, blur, 0, 1);

The blur pass then uses these two values to do the actual blurring.

First, build a screen-sized quad with four vertices and pass it to the vertex shader as the canvas.
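The post doesn't show the quad's vertex data, but since VS_DOF (see the full listing below) passes the position through untransformed, the four vertices must already be in clip space, i.e. x and y in [-1, 1]. As an alternative sketch (my own, not the engine's actual code), the same full-screen quad can also be generated in the vertex shader from SV_VertexID, with no vertex buffer at all:

// Sketch only: emit the full-screen quad from SV_VertexID (draw 4 vertices as a
// triangle strip). This replaces the four-vertex buffer; it is not the original code.
PS_DOF_INPUT VS_DOF_FromVertexID(uint id : SV_VertexID)
{
    PS_DOF_INPUT output = (PS_DOF_INPUT)0;

    // id 0..3 -> (0,0) (1,0) (0,1) (1,1) in UV space
    float2 uv = float2(id & 1, id >> 1);

    // Map UVs to clip space: x in [-1,1], y in [1,-1] (D3D texture origin is top-left)
    output.Pos = float4(uv.x * 2.0f - 1.0f, 1.0f - uv.y * 2.0f, 0.5f, 1.0f);
    output.Tex = uv;

    return output;
}

This variant outputs the UVs directly, so the 1 - v flip done in the original VS_DOF would not be needed here.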

Fetch the scene color from the first texture:

float4 colorSum = g_SceneTexture.Sample( g_samClamp, input.Tex );

Then fetch the depth and blur values from the second texture:

float2 centerDepthBlur = g_DepthTexture.Sample( g_samClamp,  input.Tex );

The blur value determines the kernel size (for example, blur = 0.5 with g_MaxCoC = 3 gives sizeCoC = 1.5):

float sizeCoC = centerDepthBlur.y * g_MaxCoC;

Here is the key part; note that in the full shader totalContribution is initialized to 1.0f to account for the center sample:

for (int i = 0; i < 12; i++) // the kernel uses 12 sample taps
{
    // Compute tap coordinates
    float2 tapCoord = input.Tex + g_FilterTaps[i] * sizeCoC;

    // Fetch the color at the tap position
    float4 tapColor = g_SceneTexture.Sample(g_samClamp, tapCoord);
    // Fetch the depth and blur factor at the tap position
    float2 tapDepthBlur = g_DepthTexture.Sample(g_samClamp, tapCoord);

    // Compute tap contribution: taps nearer than the center pixel are weighted by
    // their own blur factor (0 when in focus), which keeps in-focus objects in front
    // of the focal plane from leaking their color onto objects behind it; taps
    // farther away than the center pixel get full weight
    float tapContribution = (tapDepthBlur.x > centerDepthBlur.x) ? 1.0f : tapDepthBlur.y;

    // Accumulate color and contribution
    colorSum += tapColor * tapContribution;
    totalContribution += tapContribution;
}

// Normalize to get proper luminance
finalColor = colorSum / totalContribution;
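One thing the loop assumes is the contents of g_FilterTaps: twelve 2D offsets set by the application, typically a Poisson-disk pattern pre-scaled by the texel size so that sizeCoC effectively works in pixels. As a rough sketch (the values below are a commonly used 12-tap Poisson disk from DirectX post-processing samples, not necessarily the engine's own taps), they could look like this:

// Illustrative 12-tap Poisson-disk offsets on the unit disk; scale by the texel
// size (1/width, 1/height) before assigning them to g_FilterTaps.
static const float2 kPoissonDisk12[12] =
{
    float2(-0.326212f, -0.405810f), float2(-0.840144f, -0.073580f),
    float2(-0.695914f,  0.457137f), float2(-0.203345f,  0.620716f),
    float2( 0.962340f, -0.194983f), float2( 0.473434f, -0.480026f),
    float2( 0.519456f,  0.767022f), float2( 0.185461f, -0.893124f),
    float2( 0.507431f,  0.064425f), float2( 0.896420f,  0.412458f),
    float2(-0.321940f, -0.932615f), float2(-0.791559f, -0.597705f),
};

With texel-scaled taps and g_MaxCoC = 3, the sampling radius tops out at roughly three pixels for fully blurred fragments.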

Below are screenshots: the scene without DOF, the depth map, the blur-factor map, and the scene with DOF.

The complete shader code follows:

SamplerState g_samWrap
{
    Filter = MIN_MAG_MIP_LINEAR;
    AddressU = Wrap;
    AddressV = Wrap;
};

SamplerState g_samClamp
{
    Filter = MIN_MAG_MIP_LINEAR;
    AddressU = Clamp;
    AddressV = Clamp;
};

BlendState NoBlending
{
    AlphaToCoverageEnable = FALSE;
    BlendEnable[0] = FALSE;
};

BlendState AlphaBlendingOn
{
    BlendEnable[0] = TRUE;
    SrcBlend = SRC_ALPHA;
    DestBlend = INV_SRC_ALPHA;
};

matrix World;
matrix View;
matrix Projection;
matrix LightView;
float3 SunLightDir;

float4 vLightColor = float4(0.3f, 0.3f, 0.3f, 1.0f);
float4 vAmbiantColor = float4(0.1f, 0.1f, 0.1f, 1.0f);

float focalDist = 1.0;
float focalRange = 1.0;

Texture2D g_SceneTexture;
Texture2D g_DepthTexture;
Texture2D g_ModelTexture;

float2 g_FilterTaps[12];

float g_MaxCoC = 3;

struct VS_MODEL_INPUT
{
    float4 Pos     : POSITION;
    float2 Tex     : TEXCOORD0;
    float3 Norm    : NORMAL;
};

struct PS_MODEL_INPUT
{
    float4 Pos     : SV_POSITION;
    float2 Tex     : TEXCOORD0;
    float3 Norm    : TEXCOORD1;
    float  Depth   : TEXCOORD2;
};

struct PS_MODEL_OUTPUT
{
    float4 Color   : SV_Target1;
    float4 Depth   : SV_Target0;
};

struct VS_DOF_INPUT
{
    float4 Pos     : POSITION;
    float2 Tex     : TEXCOORD0;
};

struct PS_DOF_INPUT
{
    float4 Pos     : SV_POSITION;
    float2 Tex     : TEXCOORD0;
};

PS_MODEL_INPUT VS_SCENE(VS_MODEL_INPUT input)
{
    PS_MODEL_INPUT output = (PS_MODEL_INPUT)0;
    output.Pos = input.Pos;
    output.Pos.w = 1;

    output.Pos = mul( output.Pos, World );
    output.Pos = mul( output.Pos, View );
    output.Pos = mul( output.Pos, Projection );
    output.Depth = output.Pos.z;
    output.Pos = output.Pos / output.Pos.w;

    output.Norm = mul( input.Norm, World );
    output.Norm = normalize( output.Norm );

    output.Tex = input.Tex;

    return output;
}

PS_MODEL_OUTPUT PS_SCENE( PS_MODEL_INPUT input ) : SV_Target
{
    PS_MODEL_OUTPUT output = (PS_MODEL_OUTPUT)0;

    float4 diffuse = g_ModelTexture.Sample( g_samClamp, input.Tex );
    output.Color = saturate( dot(input.Norm, normalize(SunLightDir)) * vLightColor + diffuse + vAmbiantColor );

    float blur = saturate(abs(input.Depth - focalDist) / focalRange);
    float depth = input.Pos.z;
    output.Depth = float4(depth, blur, 0, 1);

    return output;
}

PS_DOF_INPUT VS_DOF(VS_DOF_INPUT input)
{
    PS_DOF_INPUT output = (PS_DOF_INPUT)0;
    output.Pos = input.Pos;

    output.Tex = float2( input.Tex.x, 1 - input.Tex.y );

    return output;
}

float4 PS_DOF(PS_DOF_INPUT input) : SV_Target
{
    float4 finalColor = 0;

    float4 colorSum = g_SceneTexture.Sample( g_samClamp, input.Tex );

    float2 centerDepthBlur = g_DepthTexture.Sample( g_samClamp, input.Tex );

    //return centerDepthBlur.y;

    float sizeCoC = centerDepthBlur.y * g_MaxCoC;//blur * maxCoC

    //return sizeCoC;
    float totalContribution = 1.0f;

    for (int i = 0; i < 12; i++)
    {
        // Compute tap coordinates
        float2 tapCoord = input.Tex + g_FilterTaps[i] * sizeCoC;

        // Fetch tap sample
        float4 tapColor = g_SceneTexture.Sample(g_samClamp, tapCoord);
        float2 tapDepthBlur = g_DepthTexture.Sample(g_samClamp, tapCoord);

        // Compute tap contribution
        //for obj in front, use tapDepthBlur.y, and if obj is in focus, tapDepthBlur.y is 0, so no leak of front obj color; for back obj, use 1.0f
        float tapContribution = (tapDepthBlur.x > centerDepthBlur.x) ? 1.0f : tapDepthBlur.y;

        // Accumulate color and contribution
        colorSum += tapColor * tapContribution;
        totalContribution += tapContribution;
    }

    // Normalize to get proper luminance
    finalColor = colorSum / totalContribution;

    return finalColor;
}

technique10 DoF
{
    pass p0
    {
        SetVertexShader( CompileShader( vs_4_0, VS_SCENE() ) );
        SetGeometryShader( NULL );
        SetPixelShader( CompileShader( ps_4_0, PS_SCENE() ) );

        SetBlendState( NoBlending, float4( 0.0f, 0.0f, 0.0f, 0.0f ), 0xFFFFFFFF );
    }

    pass p1
    {
        SetVertexShader( CompileShader( vs_4_0, VS_DOF() ) );
        SetGeometryShader( NULL );
        SetPixelShader( CompileShader( ps_4_0, PS_DOF() ) );

        SetBlendState( NoBlending, float4( 0.0f, 0.0f, 0.0f, 0.0f ), 0xFFFFFFFF );
    }
}
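Putting it together: each frame, pass p0 renders the scene's models into the two render targets (color, and depth/blur), those targets are then bound as g_SceneTexture and g_DepthTexture, and pass p1 draws the full-screen quad to the back buffer to produce the final depth-of-field image.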