  • (Repost) Creating Bump Textures

    【1】What Is Bump Mapping
    Bump mapping is a process that takes a texture and uses the information stored inside it to light a surface, making the surface appear to have more detail than it actually has. It works by using normal values stored in a texture image, rather than the surface's own normals, when the geometry is lit. By varying those normals (pixels) in the image, you can add visible detail to an object; for example, bump mapping can be used to create cracks in concrete or the scales on a reptile's skin. The difference between texture mapping and bump mapping is that a texture is used to shade (color) a surface, while a bump map is used to light it.
    【Note】
    Do not confuse the normal-mapping and bump-mapping techniques. Although both use an image called a "bump map" or "normal map", they differ in how that image is created. Bump mapping uses an image created from a grayscale (black-to-white) 2D texture, sometimes called a "height map". Normal mapping uses an image created from the detail of a high-resolution model (a character, an object, and so on), saved into a texture so that a low-resolution model using it looks like, or close to, the high-resolution model. Both topics are beyond the scope of this book, but note that normal mapping is a kind of bump mapping used to make low-resolution models look like high-resolution ones, whereas bump mapping takes any 2D image and creates normals from it.
    With bump mapping, an image is used to extract data for lighting, instead of simply rendering the surface with a texture.
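    To make the idea concrete, here is a small CPU-side sketch (not part of the original text) of what "lighting with a normal fetched from the texture" means: the RGB value of a normal-map pixel is decoded back into a unit vector and used in a standard Lambert (N dot L) term in place of the surface normal. The names and the [0, 255] encoding are the usual convention, not anything specific to this demo.

    #include <cmath>

    struct Vec3 { float x, y, z; };

    // Decode one normal-map texel: each channel stores a component
    // mapped from [-1, 1] to [0, 255].
    Vec3 DecodeNormal(unsigned char r, unsigned char g, unsigned char b)
    {
        Vec3 n = { r / 127.5f - 1.0f, g / 127.5f - 1.0f, b / 127.5f - 1.0f };
        float len = std::sqrt(n.x * n.x + n.y * n.y + n.z * n.z);
        if(len > 0.0f) { n.x /= len; n.y /= len; n.z /= len; }
        return n;
    }

    // Lambert diffuse term using the per-pixel normal instead of the surface
    // normal; lightDir must be normalized and point toward the light.
    float Lambert(const Vec3 &n, const Vec3 &lightDir)
    {
        float d = n.x * lightDir.x + n.y * lightDir.y + n.z * lightDir.z;
        return d > 0.0f ? d : 0.0f;
    }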
    【2】Creating a Bump Map
    A bump map is sometimes also referred to as a normal map, and several tools can create one. First, you can use software such as Adobe Photoshop CS, where a plug-in can convert an image into a bump map. Second, you can use the Direct3D function D3DXComputeNormalMap(). Third, you can compute the normal map in your own code; a rough sketch of this option follows this paragraph. For the first method, check whether such a plug-in is available for your preferred image editor, for example Adobe Photoshop CS with NVIDIA's normal-map plug-in tools.
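    The sketch below (not from the original text) assumes the height map is already available as an array of 8-bit grayscale values; the function name and the strength value are made up for illustration. Each normal is built from central differences of the neighboring heights and packed back into RGB, and strength plays a role similar to the Amplitude parameter of D3DXComputeNormalMap() described below.

    #include <vector>
    #include <cmath>
    #include <algorithm>

    // Sketch: build an RGB normal map (3 bytes per pixel) from an 8-bit height map.
    std::vector<unsigned char> BuildNormalMap(const std::vector<unsigned char> &height,
                                              int width, int hgt, float strength)
    {
        std::vector<unsigned char> result(width * hgt * 3);

        for(int y = 0; y < hgt; y++)
        {
            for(int x = 0; x < width; x++)
            {
                // Clamp neighbor coordinates at the edges of the image.
                int xl = std::max(x - 1, 0), xr = std::min(x + 1, width - 1);
                int yu = std::max(y - 1, 0), yd = std::min(y + 1, hgt - 1);

                // Central differences of the height give the slope in u and v.
                float du = (height[y * width + xr] - height[y * width + xl]) / 255.0f;
                float dv = (height[yd * width + x] - height[yu * width + x]) / 255.0f;

                // A surface with slope (du, dv) has normal (-du, -dv, 1), scaled by strength.
                float nx = -du * strength, ny = -dv * strength, nz = 1.0f;
                float len = std::sqrt(nx * nx + ny * ny + nz * nz);
                nx /= len; ny /= len; nz /= len;

                // Pack [-1, 1] back into [0, 255] RGB.
                unsigned char *out = &result[(y * width + x) * 3];
                out[0] = (unsigned char)((nx * 0.5f + 0.5f) * 255.0f);
                out[1] = (unsigned char)((ny * 0.5f + 0.5f) * 255.0f);
                out[2] = (unsigned char)((nz * 0.5f + 0.5f) * 255.0f);
            }
        }

        return result;
    }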
    Creating One with Direct3D
    HRESULT WINAPI D3DXComputeNormalMap(
    LPDIRECT3DTEXTURE9 pTexture,
    LPDIRECT3DTEXTURE9 pSrcTexture,
    CONST PALETTEENTRY * pSrcPalette,
    DWORD Flags,
    DWORD Channel,
    FLOAT Amplitude
    );   
    The D3DXComputeNormalMap() prototype is quite simple. The pTexture parameter is the Direct3D texture object that receives the normal map. The pSrcTexture parameter is the original image you want converted into a normal map. The pSrcPalette parameter is the palette of the source texture; you will probably never need a palette, but it is worth knowing the option exists. The Flags and Channel parameters take the constants listed in the two tables below, and Amplitude controls how strongly the values in the source image are amplified into the generated normals.

         Flag values that can be passed to the Flags parameter of D3DXComputeNormalMap()

    D3DX_NORMALMAP_MIRROR_U  Pixels sampled off the edge of the texture along the u axis are mirrored rather than wrapped
    D3DX_NORMALMAP_MIRROR_V  Pixels sampled off the edge of the texture along the v axis are mirrored rather than wrapped
    D3DX_NORMALMAP_MIRROR  Mirror along both the u and v axes
    D3DX_NORMALMAP_INVERTSIGN  Invert the direction of each computed normal
    D3DX_NORMALMAP_COMPUTE_OCCLUSION  Compute a per-pixel occlusion term and store it in the alpha channel

           Values for the Channel parameter of D3DXComputeNormalMap()

    D3DX_CHANNEL_RED  Use the red channel when computing the normal map
    D3DX_CHANNEL_GREEN  Use the green channel when computing the normal map
    D3DX_CHANNEL_BLUE  Use the blue channel when computing the normal map
    D3DX_CHANNEL_ALPHA  Use the alpha channel when computing the normal map
    D3DX_CHANNEL_LUMINANCE  Use the luminance computed from the red, green, and blue channels
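    The flags can be combined with a bitwise OR and paired with exactly one channel constant. As a hedged example (reusing the texture names from the sample program below; the flag and amplitude values are chosen only for illustration), a call might look like this:

    // Mirror at the edges, write the occlusion term into alpha, and build
    // the normals from the luminance of the source image.
    if(FAILED(D3DXComputeNormalMap(g_NormalMap, g_Texture, NULL,
        D3DX_NORMALMAP_MIRROR | D3DX_NORMALMAP_COMPUTE_OCCLUSION,
        D3DX_CHANNEL_LUMINANCE, 2.0f)))
    {
        // Handle the error (the sample below simply returns false).
    }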


    【3】Sample Program
    /*
    Demo Name:  D3D Normal Maps
    Author:  Allen Sherrod
    Chapter:  Ch 4
    */


    #include<d3d9.h>
    #include<d3dx9.h>

    #define WINDOW_CLASS    "UGPDX"
    #define WINDOW_NAME     "Creating D3D Normal Maps"
    #define WINDOW_WIDTH    640
    #define WINDOW_HEIGHT   480

    // Function Prototypes...
    bool InitializeD3D(HWND hWnd, bool fullscreen);
    bool InitializeObjects();
    void RenderScene();
    void Shutdown();


    // Direct3D object and device.
    LPDIRECT3D9 g_D3D = NULL;
    LPDIRECT3DDEVICE9 g_D3DDevice = NULL;

    // Matrices.
    D3DXMATRIX g_projection;
    D3DXMATRIX g_ViewMatrix;

    // Vertex buffer to hold the geometry.
    LPDIRECT3DVERTEXBUFFER9 g_VertexBuffer = NULL;

    // Holds a texture image.
    LPDIRECT3DTEXTURE9 g_Texture = NULL, g_NormalMap = NULL;

    // A structure for our custom vertex type
    struct stD3DVertex
    {
    float x, y, z;
    unsigned long color;
    float tu, tv;
    };

    // Our custom FVF, which describes our custom vertex structure
    #define D3DFVF_VERTEX (D3DFVF_XYZ | D3DFVF_DIFFUSE | D3DFVF_TEX1)


    LRESULT WINAPI MsgProc(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam)
    {
    switch(msg)
    {
    case WM_DESTROY:
    PostQuitMessage(0);
    return 0;
    break;

    case WM_KEYUP:
    if(wParam == VK_ESCAPE) PostQuitMessage(0);
    break;
    }

    return DefWindowProc(hWnd, msg, wParam, lParam);
    }


    int WINAPI WinMain(HINSTANCE hInst, HINSTANCE prevhInst, LPSTR cmdLine, int show)
    {
    // Register the window class
    WNDCLASSEX wc = { sizeof(WNDCLASSEX), CS_CLASSDC, MsgProc, 0L, 0L,
    GetModuleHandle(NULL), NULL, NULL, NULL, NULL,
    WINDOW_CLASS, NULL };
    RegisterClassEx(&wc);

    // Create the application's window
    HWND hWnd = CreateWindow(WINDOW_CLASS, WINDOW_NAME, WS_OVERLAPPEDWINDOW,
    100, 100, WINDOW_WIDTH, WINDOW_HEIGHT,
    GetDesktopWindow(), NULL, wc.hInstance, NULL);

    // Initialize Direct3D
    if(InitializeD3D(hWnd, false))
    {
    // Show the window
    ShowWindow(hWnd, SW_SHOWDEFAULT);
    UpdateWindow(hWnd);

    // Enter the message loop
    MSG msg;
    ZeroMemory(&msg, sizeof(msg));

    while(msg.message != WM_QUIT)
    {
    if(PeekMessage(&msg, NULL, 0U, 0U, PM_REMOVE))
    {
    TranslateMessage(&msg);
    DispatchMessage(&msg);
    }
    else
    RenderScene();
    }
    }

    // Release any and all resources.
    Shutdown();

    // Unregister our window.
    UnregisterClass(WINDOW_CLASS, wc.hInstance);
    return 0;
    }


    bool InitializeD3D(HWND hWnd, bool fullscreen)
    {
    D3DDISPLAYMODE displayMode;

    // Create the D3D object.
    g_D3D = Direct3DCreate9(D3D_SDK_VERSION);
    if(g_D3D == NULL) return false;

    // Get the desktop display mode.
    if(FAILED(g_D3D->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &displayMode)))
    return false;

    // Set up the structure used to create the D3DDevice
    D3DPRESENT_PARAMETERS d3dpp;
    ZeroMemory(&d3dpp, sizeof(d3dpp));

    if(fullscreen)
    {
    d3dpp.Windowed = FALSE;
    d3dpp.BackBufferWidth = WINDOW_WIDTH;
    d3dpp.BackBufferHeight = WINDOW_HEIGHT;
    }
    else
    d3dpp.Windowed = TRUE;
    d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
    d3dpp.BackBufferFormat = displayMode.Format;

    // Create the D3DDevice
    if(FAILED(g_D3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hWnd,
    D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &g_D3DDevice)))
    {
    return false;
    }

    // Initialize any objects we will be displaying.
    if(!InitializeObjects()) return false;

    return true;
    }


    bool InitializeObjects()
    {
    // Fill in our structure to draw an object.
    // x, y, z, color, texture coords.
    stD3DVertex objData[] =
    {
    {-0.3f, -0.4f, 0.0f, D3DCOLOR_XRGB(255,255,255), 0.0f, 1.0f},
    {0.3f, -0.4f, 0.0f, D3DCOLOR_XRGB(255,255,255), 1.0f, 1.0f},
    {0.3f, 0.4f, 0.0f, D3DCOLOR_XRGB(255,255,255), 1.0f, 0.0f},

    {0.3f, 0.4f, 0.0f, D3DCOLOR_XRGB(255,255,255), 1.0f, 0.0f},
    {-0.3f, 0.4f, 0.0f, D3DCOLOR_XRGB(255,255,255), 0.0f, 0.0f},
    {-0.3f, -0.4f, 0.0f, D3DCOLOR_XRGB(255,255,255), 0.0f, 1.0f}
    };

    // Create the vertex buffer.
    if(FAILED(g_D3DDevice->CreateVertexBuffer(sizeof(objData), 0,
    D3DFVF_VERTEX, D3DPOOL_DEFAULT, &g_VertexBuffer, NULL))) return false;

    // Fill the vertex buffer.
    void *ptr;
    if(FAILED(g_VertexBuffer->Lock(0, sizeof(objData), (void**)&ptr, 0))) return false;
    memcpy(ptr, objData, sizeof(objData));
    g_VertexBuffer->Unlock();

    // Load the texture image from file.
    if(D3DXCreateTextureFromFile(g_D3DDevice, "heightMap.tga", &g_Texture) != D3D_OK)
    return false;

    D3DSURFACE_DESC desc;
    g_Texture->GetLevelDesc(0,&desc);

    // Create normal map texture the size of the original.
    if(D3DXCreateTexture(g_D3DDevice, desc.Width, desc.Height, 0, 0, D3DFMT_A8R8G8B8,
    D3DPOOL_MANAGED, &g_NormalMap) != D3D_OK) return false;


    // Compute the normal map.
    if(D3DXComputeNormalMap(g_NormalMap, g_Texture, 0, D3DX_NORMALMAP_MIRROR,
    D3DX_CHANNEL_GREEN, 10) != D3D_OK) return false;

    // Set the image states to get a good quality image.
    g_D3DDevice->SetSamplerState(0, D3DSAMP_MINFILTER, D3DTEXF_LINEAR);
    g_D3DDevice->SetSamplerState(0, D3DSAMP_MAGFILTER, D3DTEXF_LINEAR);



    // Set default rendering states.
    g_D3DDevice->SetRenderState(D3DRS_LIGHTING, FALSE);
    g_D3DDevice->SetRenderState(D3DRS_CULLMODE, D3DCULL_NONE);


    // Set the projection matrix.
    // D3DXMatrixPerspectiveFovLH expects the field of view in radians, and the
    // aspect ratio must use float division so it is not truncated to an integer.
    D3DXMatrixPerspectiveFovLH(&g_projection, D3DX_PI / 4.0f,
    (float)WINDOW_WIDTH / (float)WINDOW_HEIGHT, 0.1f, 1000.0f);
    g_D3DDevice->SetTransform(D3DTS_PROJECTION, &g_projection);

    // Define camera information.
    D3DXVECTOR3 cameraPos(0.0f, 0.0f, -1.0f);
    D3DXVECTOR3 lookAtPos(0.0f, 0.0f, 0.0f);
    D3DXVECTOR3 upDir(0.0f, 1.0f, 0.0f);

    // Build view matrix.
    D3DXMatrixLookAtLH(&g_ViewMatrix, &cameraPos, &lookAtPos, &upDir);

    return true;
    }


    void RenderScene()
    {
    // Clear the backbuffer.
    g_D3DDevice->Clear(0, NULL, D3DCLEAR_TARGET,
    D3DCOLOR_XRGB(0,0,0), 1.0f, 0);

    // Begin the scene.  Start rendering.
    g_D3DDevice->BeginScene();

    // Apply the view (camera).
    g_D3DDevice->SetTransform(D3DTS_VIEW, &g_ViewMatrix);

    // Draw square.
    g_D3DDevice->SetTexture(0, g_NormalMap);
    g_D3DDevice->SetStreamSource(0, g_VertexBuffer,
    0, sizeof(stD3DVertex));
    g_D3DDevice->SetFVF(D3DFVF_VERTEX);
    g_D3DDevice->DrawPrimitive(D3DPT_TRIANGLELIST, 0, 2);

    // End the scene.  Stop rendering.
    g_D3DDevice->EndScene();

    // Display the scene.
    g_D3DDevice->Present(NULL, NULL, NULL, NULL);
    }


    void Shutdown()
    {
    if(g_D3DDevice != NULL) g_D3DDevice->Release();
    g_D3DDevice = NULL;

    if(g_D3D != NULL) g_D3D->Release();
    g_D3D = NULL;

    if(g_VertexBuffer != NULL) g_VertexBuffer->Release();
    g_VertexBuffer = NULL;

    if(g_Texture != NULL) g_Texture->Release();
    g_Texture = NULL;

    if(g_NormalMap != NULL) g_NormalMap->Release();
    g_NormalMap = NULL;
    }
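    Note that the sample above only displays the generated normal map as an ordinary texture; nothing is actually lit with it yet. One classic way to use the map without shaders (a sketch only, not part of the original sample) is fixed-function DOT3 bump mapping: the light direction is packed into the texture factor the same way the normals are packed into the map, and the first texture stage computes the per-pixel dot product.

    // Sketch: pack a normalized tangent-space light direction into a D3DCOLOR,
    // biasing each component from [-1, 1] to [0, 255].
    DWORD PackLightDir(float x, float y, float z)
    {
        return D3DCOLOR_XRGB((BYTE)(127.5f * (x + 1.0f)),
                             (BYTE)(127.5f * (y + 1.0f)),
                             (BYTE)(127.5f * (z + 1.0f)));
    }

    // In RenderScene(), before DrawPrimitive():
    g_D3DDevice->SetTexture(0, g_NormalMap);
    g_D3DDevice->SetRenderState(D3DRS_TEXTUREFACTOR, PackLightDir(0.0f, 0.0f, 1.0f));
    g_D3DDevice->SetTextureStageState(0, D3DTSS_COLOROP,   D3DTOP_DOTPRODUCT3);
    g_D3DDevice->SetTextureStageState(0, D3DTSS_COLORARG1, D3DTA_TEXTURE);
    g_D3DDevice->SetTextureStageState(0, D3DTSS_COLORARG2, D3DTA_TFACTOR);

    A second stage set to D3DTOP_MODULATE against a diffuse color texture could then tint the lit result.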
  • Original article: https://www.cnblogs.com/lancidie/p/1869650.html