  • Unity Shader Basics (4): Reconstructing World Position from the Depth Texture

    Post-processing image effects often need the world position of the objects on screen, but an Image Effect by itself does not carry that information: screen-space post-processing actually renders a quad that exactly fills the screen with a specific material (its four corners correspond to the four corners of the near clip plane). This article walks through several ways to reconstruct world positions inside an Image Effect shader. The problem is also covered in 《Shader入门精要》; here I focus more on the points that confused me personally.

    The other two related articles in this series:
    Unity Shader Basics (3): Getting the Depth Texture
    Unity Shader Basics (2): Image Effect

    1. Inverse View-Projection Matrix

    Although an Image Effect has no real vertex information, each fragment does carry texture UV coordinates and can sample a depth value. From the UVs, the NDC x and y coordinates follow directly:

    \[ x_{ndc} = 2 \cdot uv.x - 1, \qquad y_{ndc} = 2 \cdot uv.y - 1 \]

    Adding the depth sampled from the depth texture (or the depth-normals texture) gives the NDC z coordinate (with D3D-style clip space the sampled depth is already the NDC z in [0,1]; OpenGL-style NDC z lies in [-1,1]), which is enough to recover the world position:

    \[ P_{world} = M_{view}^{-1} \cdot M_{projection}^{-1} \cdot P_{ndc} \]

    For details, see the Unity Answers thread Reconstructing world pos from depth, as well as GPU Gems 3, Chapter 27: Motion Blur as a Post-Processing Effect.
    Pixel shader code (the inverse matrix has to be passed in from C#):


    fixed4 frag(uoutput o) : COLOR
    {
        fixed4 col = tex2D(_MainTex, o.uv);
        float depth = UNITY_SAMPLE_DEPTH(tex2D(_CameraDepthTexture, o.uv_depth));
        // NOTE: using the raw depth as NDC z matches D3D's [0,1] convention;
        // on OpenGL-style platforms NDC z would be depth * 2 - 1.
        float4 ndcPos = float4(o.uv.x * 2 - 1, o.uv.y * 2 - 1, depth, 1);

        // _Matrix_vp_inverse is passed in from C#:
        // Matrix4x4 temp = mCam.projectionMatrix * mCam.worldToCameraMatrix;
        // temp = temp.inverse;
        // mMat.SetMatrix("_Matrix_vp_inverse", temp);
        float4 worldHPos = mul(_Matrix_vp_inverse, ndcPos);
        float4 worldPos  = worldHPos / worldHPos.w;   // perspective divide

        // visualize the position as a color
        float dis = length(worldPos.xyz);
        float3 worldPos2 = worldPos.xyz / dis;
        worldPos2 = worldPos2 * 0.5 + 0.5;
        return fixed4(worldPos2, 1);
    }

    (Figure: reconstructed world position visualized as a color)
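
    For completeness, the C# side of this method might look like the minimal sketch below. This is an illustration only: the class name is made up, mMat/mCam follow the naming used in the shader comments above, and the camera must be asked to generate _CameraDepthTexture:

    using UnityEngine;

    [RequireComponent(typeof(Camera))]
    public class WorldPosFromDepth : MonoBehaviour   // hypothetical name
    {
        public Material mMat;      // material using the shader above
        private Camera mCam;

        void OnEnable()
        {
            mCam = GetComponent<Camera>();
            // without this flag, _CameraDepthTexture is never generated
            mCam.depthTextureMode |= DepthTextureMode.Depth;
        }

        void OnRenderImage(RenderTexture src, RenderTexture dest)
        {
            // (P * V)^-1, rebuilt every frame because the camera moves
            Matrix4x4 temp = mCam.projectionMatrix * mCam.worldToCameraMatrix;
            mMat.SetMatrix("_Matrix_vp_inverse", temp.inverse);
            Graphics.Blit(src, dest, mMat);
        }
    }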

    2. Far Clip Plane Interpolation

    Two points to understand first:

    1. An Image Effect is one form of post-processing: roughly, the color buffer output is used as a texture, and a screen-sized quad (whose four corners are the four corners of the near clip plane) is rendered with a specific material.
    2. Data output by the vertex shader is interpolated during rasterization on its way to the pixel shader, so the four rays toward the far clip plane corners arrive at the pixel shader already interpolated, as in the figure:
      (Figure: per-pixel interpolation of the four far-plane corner rays)
      Based on these two points: interpolating the rays that start at the camera origin and pass through the far clip plane corners gives a view ray for every pixel; once the depth is known, the pixel's world position follows.

    Take one of these rays:
    (Figure: similar triangles along a single camera ray)
    From the proportions in the figure:

    \[ \frac{YellowL}{YellowL + GreenL} = \frac{|\vec{BlackV}|}{|\vec{BlackV} + \vec{BlueV}|} \]

    Since \(YellowL + GreenL = 1\), it follows that:

    \[ \vec{BlackV} = YellowL \cdot (\vec{BlackV} + \vec{BlueV}) \]

    where \(YellowL\) is the [0,1] depth value from the depth map (Linear01Depth), and \(\vec{BlackV} + \vec{BlueV}\) is the interpolated far clip plane corner vector \(\vec{interpolatedRay}\).

    \[ wPos = camWPos + \vec{BlackV} = camWPos + YellowL \cdot \vec{interpolatedRay} \]
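
    As a quick sanity check with made-up numbers: if a pixel's interpolated far-plane ray is \((0, 0, 1000)\) in world space (far plane 1000 units away) and Linear01Depth returns 0.5 for it, the reconstructed point lies halfway along that ray:

    \[ wPos = camWPos + 0.5 \cdot (0, 0, 1000) = camWPos + (0, 0, 500) \]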

    In the implementation, the vertex shader computes the corner rays and the pixel shader reads the interpolated result:

    struct uoutput
    {
        float4 pos : SV_POSITION;
        half2 uv : TEXCOORD0;
        float4 uv_depth : TEXCOORD1;
        float4 interpolatedRay : TEXCOORD2;
        float3 cameraToFarPlane : TEXCOORD3;
    };
    
    uoutput far_ray_vert(uinput i)
    {
        uoutput o;
        o.pos = mul(UNITY_MATRIX_MVP, i.pos);
        o.uv = i.uv;
        o.uv_depth.xy = o.uv;
    #if UNITY_UV_STARTS_AT_TOP
        if (_MainTex_TexelSize.y < 0)
            o.uv_depth.y = 1 - o.uv_depth.y;
    #endif

        // Compute the vector from the camera to this corner of the far clip plane.
        // Clip space x and y coords
        float2 clipXY = o.pos.xy / o.pos.w;

        // Position of the far plane in clip space
        float4 farPlaneClip = float4(clipXY, 1, 1);

        // Account for a flipped projection (_ProjectionParams.x is 1 or -1)
        farPlaneClip *= float4(1, _ProjectionParams.x, 1, 1);

        // Homogeneous world position on the far plane
        float4 farPlaneWorld4 = mul(_ClipToWorld, farPlaneClip);

        // World position on the far plane (perspective divide)
        float3 farPlaneWorld = farPlaneWorld4.xyz / farPlaneWorld4.w;

        // Vector from the camera to the far plane
        o.cameraToFarPlane = farPlaneWorld - _WorldSpaceCameraPos;

        return o;
    }
    
    
    fixed4 far_ray_frag(uoutput o) : COLOR
    {
        float linearDepth = Linear01Depth(SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, o.uv_depth));

        float3 worldPos = _WorldSpaceCameraPos + linearDepth * o.cameraToFarPlane;

        // visualize the position as a color
        float dis = length(worldPos.xyz);
        float3 worldPos2 = worldPos.xyz / dis;
        worldPos2 = worldPos2 * 0.5 + 0.5;
        return fixed4(worldPos2, 1);
    }
    

    C# code (clip to world is the inverse of P times V, i.e. \(V^{-1}P^{-1}\)):

    mMat.SetMatrix("_ClipToWorld", (mCam.projectionMatrix * mCam.worldToCameraMatrix).inverse);
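
    Because _ClipToWorld depends on the camera's current pose, it should be refreshed every frame before the blit, for example (a sketch reusing the hypothetical mCam/mMat fields from the script in section 1):

    void OnRenderImage(RenderTexture src, RenderTexture dest)
    {
        // world = V^-1 * P^-1 * clip, so invert (P * V) as a whole
        mMat.SetMatrix("_ClipToWorld", (mCam.projectionMatrix * mCam.worldToCameraMatrix).inverse);
        Graphics.Blit(src, dest, mMat);
    }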
    

    (Figure: reconstruction result)

    3. Near Clip Plane Ray Interpolation

    The principle is similar to the above; for the actual derivation, see 《Shader入门精要》 or the reference linked from the original post. The test code can be downloaded from the link at the end of this article.

    Compute the vectors from the camera to the four corners of the near clip plane:

    Matrix4x4 GetFrustumCorners()
    {
        Matrix4x4 frustumCorners = Matrix4x4.identity;
        Camera camera = mCam;
        Transform cameraTransform = mCam.gameObject.transform;

        float fov = camera.fieldOfView;
        float near = camera.nearClipPlane;
        float aspect = camera.aspect;

        float halfHeight = near * Mathf.Tan(fov * 0.5f * Mathf.Deg2Rad);
        Vector3 toRight = cameraTransform.right * halfHeight * aspect;
        Vector3 toTop = cameraTransform.up * halfHeight;

        // Scale each corner ray so that (ray * eye-space depth) spans the
        // full distance to the point; see the derivation in 《Shader入门精要》.
        Vector3 topLeft = cameraTransform.forward * near + toTop - toRight;
        float scale = topLeft.magnitude / near;

        topLeft.Normalize();
        topLeft *= scale;

        Vector3 topRight = cameraTransform.forward * near + toRight + toTop;
        topRight.Normalize();
        topRight *= scale;

        Vector3 bottomLeft = cameraTransform.forward * near - toTop - toRight;
        bottomLeft.Normalize();
        bottomLeft *= scale;

        Vector3 bottomRight = cameraTransform.forward * near + toRight - toTop;
        bottomRight.Normalize();
        bottomRight *= scale;

        // Row order must match the index selection in the vertex shader below
        frustumCorners.SetRow(0, bottomLeft);
        frustumCorners.SetRow(1, bottomRight);
        frustumCorners.SetRow(2, topRight);
        frustumCorners.SetRow(3, topLeft);

        return frustumCorners;
    }
    // set on the material:
    mMat.SetMatrix("_FrustumCornersWS", GetFrustumCorners());
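
    In a full effect script this runs once per frame, with the depth texture enabled as in section 1 (again a sketch with the hypothetical mCam/mMat fields):

    void OnRenderImage(RenderTexture src, RenderTexture dest)
    {
        mMat.SetMatrix("_FrustumCornersWS", GetFrustumCorners());
        Graphics.Blit(src, dest, mMat);
    }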
    

    Shader

    struct uinput
    {
        float4 pos : POSITION;
        half2 uv : TEXCOORD0;
    };

    struct uoutput
    {
        float4 pos : SV_POSITION;
        half2 uv : TEXCOORD0;
        float4 uv_depth : TEXCOORD1;
        float4 interpolatedRay : TEXCOORD2;
        float4 cameraToFarPlane : TEXCOORD3;
    };
                
    uoutput near_ray_vert(uinput i)
    {
        uoutput o;
        o.pos = mul(UNITY_MATRIX_MVP, i.pos);
        o.uv = i.uv;
        o.uv_depth.xy = o.uv;
    #if UNITY_UV_STARTS_AT_TOP
        if (_MainTex_TexelSize.y < 0)
            o.uv_depth.y = 1 - o.uv_depth.y;
    #endif

        // Pick the corner ray for this vertex by its uv quadrant;
        // the indices match the rows set in GetFrustumCorners()
        int index = 0;
        if (i.uv.x < 0.5 && i.uv.y < 0.5)
        {
            index = 0;      // bottom left
        }
        else if (i.uv.x > 0.5 && i.uv.y < 0.5)
        {
            index = 1;      // bottom right
        }
        else if (i.uv.x > 0.5 && i.uv.y > 0.5)
        {
            index = 2;      // top right
        }
        else
        {
            index = 3;      // top left
        }
        o.interpolatedRay = _FrustumCornersWS[index];
        return o;
    }
    fixed4 near_ray_frag(uoutput o) : COLOR
    {
        float linearDepth = LinearEyeDepth(SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, o.uv_depth));
        float3 worldPos = _WorldSpaceCameraPos + linearDepth * o.interpolatedRay.xyz;

        // visualize the position as a color
        float dis = length(worldPos.xyz);
        float3 worldPos2 = worldPos.xyz / dis;
        worldPos2 = worldPos2 * 0.5 + 0.5;
        return fixed4(worldPos2, 1);
    }
    

    (Figure: reconstruction result)

    4. Summary

    The derivations above draw on 《Shader入门精要》 and Jim's blog; Unity's built-in effects (Global Fog and Motion Blur) use similar techniques. I still don't fully understand the interpolation in Global Fog, though: it uses pos.z as the ray index, and I haven't figured out why that z can serve as the index.

    Test source code for this article: http://pan.baidu.com/s/1c2rHVf6

  • Original post: https://www.cnblogs.com/zsb517/p/6665053.html