  • On HandViewer, the first NITE 2 sample program

        I think the best way to learn a new API is to study the samples it ships with. Based on my own reading, here is my take on the new NITE 2, starting with the HandViewer sample it provides. Import the provided project folder into VS2010, build and run it, and look at the result first:

         When a hand makes a "push forward" or a "wave side to side" gesture, its palm coordinates are acquired and tracked; other hands making the same gestures are recognized and tracked as well. Now for the code, starting with the main() function:

    #include "Viewer.h"
    
    int main(int argc, char** argv)
    {
        openni::Status rc = openni::STATUS_OK;
    
        // Call the SampleViewer constructor to initialize the sample
        SampleViewer sampleViewer("叶梅树的手跟踪");
    
        // Call SampleViewer::Init() to set up tracking
        rc = sampleViewer.Init(argc, argv);
        if (rc != openni::STATUS_OK)
        {
            return 1;
        }
        // Call Run() to enter the tracking loop
        sampleViewer.Run();
    }
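
        Note that Run() simply enters glutMainLoop() and never returns; all the per-frame work happens in the Display() callback, which is walked through below.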

        Now let's look at how the SampleViewer class is initialized, how it locates the palm, and how it tracks the hand in real time:

    /************************************************************************/
    /* Constructor: initialize the sample and create the m_pHandTracker     */
    /* object.                                                              */
    /************************************************************************/
    SampleViewer::SampleViewer(const char* strSampleName)
    {
        ms_self = this;
        strncpy(m_strSampleName, strSampleName, ONI_MAX_STR);
        m_pHandTracker = new nite::HandTracker;
    }
    ...
    openni::Status SampleViewer::Init(int argc, char **argv)
    {
        m_pTexMap = NULL;

        // Initialize OpenNI
        openni::OpenNI::initialize();

        // When only one sensor is attached, openni::ANY_DEVICE is used.
        const char* deviceUri = openni::ANY_DEVICE;
        for (int i = 1; i < argc-1; ++i)
        {
            if (strcmp(argv[i], "-device") == 0)
            {
                deviceUri = argv[i+1];
                break;
            }
        }

        // Open the sensor device
        openni::Status rc = m_device.open(deviceUri);
        if (rc != openni::STATUS_OK)
        {
            printf("Open Device failed:\n%s\n", openni::OpenNI::getExtendedError());
            return rc;
        }

        /************************************************************************/
        /* NiTE initialization actually calls the low-level NiteCAPI.h
           function niteInitialize():
           static Status initialize()
           {
               return (Status)niteInitialize();
           }                                                                    */
        /************************************************************************/
        nite::NiTE::initialize();

        /************************************************************************/
        /* m_pHandTracker->create(&m_device) starts tracking hand coordinates
           on the opened device. Its implementation (which calls the low-level
           NiteCAPI.h function niteInitializeHandTrackerByDevice()) is:
           Status create(openni::Device* pDevice = NULL)
           {
               if (pDevice == NULL)
               {
                   return (Status)niteInitializeHandTracker(&m_handTrackerHandle);
                   // Pick a device
               }
               return (Status)niteInitializeHandTrackerByDevice(pDevice, &m_handTrackerHandle);
           }                                                                    */
        /************************************************************************/
        if (m_pHandTracker->create(&m_device) != nite::STATUS_OK)
        {
            return openni::STATUS_ERROR;
        }

        /************************************************************************/
        /* Start gesture detection on the device. When a "wave" or a "push"
           gesture is made, the palm position is located and tracking begins.
           The available gesture types are exactly three:
           typedef enum
           {
               GESTURE_WAVE,
               GESTURE_CLICK,
               GESTURE_HAND_RAISE
           } GestureType;                                                       */
        /************************************************************************/
        m_pHandTracker->startGestureDetection(nite::GESTURE_WAVE);
        m_pHandTracker->startGestureDetection(nite::GESTURE_CLICK);

        return InitOpenGL(argc, argv);

    }
    openni::Status SampleViewer::Run()    //Does not return
    {
        // Enter the main loop; per-frame tracking happens in the Display() callback.
        glutMainLoop();

        return openni::STATUS_OK;
    }
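
        Taken together, Init() and Run() are all the setup there is. As a side note, here is a minimal console-only sketch of the same pipeline (my own sketch, not part of the sample): it assumes only the standard OpenNI.h/NiTE.h headers, trims error handling, and prints palm positions instead of drawing them:

    #include <cstdio>
    #include <OpenNI.h>
    #include <NiTE.h>

    int main()
    {
        // Initialize both runtimes: OpenNI drives the sensor, NiTE the tracking.
        openni::OpenNI::initialize();
        nite::NiTE::initialize();

        // create() with no argument lets NiTE pick a device
        // (the niteInitializeHandTracker() path shown above).
        nite::HandTracker tracker;
        if (tracker.create() != nite::STATUS_OK)
            return 1;

        tracker.startGestureDetection(nite::GESTURE_WAVE);
        tracker.startGestureDetection(nite::GESTURE_CLICK);

        nite::HandTrackerFrameRef frame;
        while (true)
        {
            if (tracker.readFrame(&frame) != nite::STATUS_OK)
                continue;

            // Promote every completed gesture to a tracked hand.
            const nite::Array<nite::GestureData>& gestures = frame.getGestures();
            for (int i = 0; i < gestures.getSize(); ++i)
            {
                if (gestures[i].isComplete())
                {
                    nite::HandId id;
                    tracker.startHandTracking(gestures[i].getCurrentPosition(), &id);
                }
            }

            // Print the palm position of every hand still being tracked.
            const nite::Array<nite::HandData>& hands = frame.getHands();
            for (int i = 0; i < hands.getSize(); ++i)
            {
                if (hands[i].isTracking())
                {
                    const nite::Point3f& p = hands[i].getPosition();
                    printf("hand %d: (%.1f, %.1f, %.1f)\n",
                           hands[i].getId(), p.x, p.y, p.z);
                }
            }
        }
    }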

        Now look at the first half of the Display() callback. It mainly reads the depth frame in which the hand is located and processes it for display.

    void SampleViewer::Display()
    {
        // The purpose of this class, per its documentation:
        /** Snapshot of the Hand Tracker algorithm. 
            It holds all the hands identified at this time, 
            as well as the detected gestures 
        */
        nite::HandTrackerFrameRef handFrame;
    
        // Holds the depth data
        openni::VideoFrameRef depthFrame;
    
        // The purpose and implementation of readFrame() are:
        /** Get the next snapshot of the algorithm */
        /*Status readFrame(HandTrackerFrameRef* pFrame)
        {
            NiteHandTrackerFrame *pNiteFrame = NULL;
            Status rc = (Status)niteReadHandTrackerFrame(m_handTrackerHandle, &pNiteFrame);
            pFrame->setReference(m_handTrackerHandle, pNiteFrame);
    
            return rc;
        }
        */
        nite::Status rc = m_pHandTracker->readFrame(&handFrame);
        if (rc != nite::STATUS_OK)
        {
            printf("GetNextData failed\n");
            return;
        }
        // Read the depth frame from the hand-tracker snapshot
        depthFrame = handFrame.getDepthFrame();
    
        if (m_pTexMap == NULL)
        {
            // Texture map init
            m_nTexMapX = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionX(), TEXTURE_SIZE);
            m_nTexMapY = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionY(), TEXTURE_SIZE);
            m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
        }
    
    
        glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
        glMatrixMode(GL_PROJECTION);
        glPushMatrix();
        glLoadIdentity();
        glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);
    
        if (depthFrame.isValid())
        {
            // Process the depth data from the acquired frame
            const openni::DepthPixel* pDepth = (const openni::DepthPixel*)depthFrame.getData();
            int width = depthFrame.getWidth();
            int height = depthFrame.getHeight();
            // Calculate the accumulative histogram (the yellow display...)
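            // The idea behind the next three loops: count how many valid
            // pixels sit at each depth value, turn the counts into a
            // cumulative distribution, then map each depth to a brightness
            // in [0, 255] so that nearer surfaces render brighter.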
            memset(m_pDepthHist, 0, MAX_DEPTH*sizeof(float));
            int restOfRow = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel) - width;
    
            unsigned int nNumberOfPoints = 0;
            for (int y = 0; y < height; ++y)
            {
                for (int x = 0; x < width; ++x, ++pDepth)
                {
                    if (*pDepth != 0)
                    {
                        m_pDepthHist[*pDepth]++;
                        nNumberOfPoints++;
                    }
                }
                pDepth += restOfRow;
            }
            for (int nIndex=1; nIndex<MAX_DEPTH; nIndex++)
            {
                m_pDepthHist[nIndex] += m_pDepthHist[nIndex-1];
            }
            if (nNumberOfPoints)
            {
                for (int nIndex=1; nIndex<MAX_DEPTH; nIndex++)
                {
                    m_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (m_pDepthHist[nIndex] / nNumberOfPoints)));
                }
            }
        }
    
        memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));
    
        float factor[3] = {1, 1, 1};
        // check if we need to draw depth frame to texture
        if (depthFrame.isValid() && g_drawDepth)
        {
            const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrame.getData();
            openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrame.getCropOriginY() * m_nTexMapX;
            int rowSize = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);
    
            for (int y = 0; y < depthFrame.getHeight(); ++y)
            {
                const openni::DepthPixel* pDepth = pDepthRow;
                openni::RGB888Pixel* pTex = pTexRow + depthFrame.getCropOriginX();
    
                for (int x = 0; x < depthFrame.getWidth(); ++x, ++pDepth, ++pTex)
                {
                    if (*pDepth != 0)
                    {
                        factor[0] = Colors[colorCount][0];
                        factor[1] = Colors[colorCount][1];
                        factor[2] = Colors[colorCount][2];
    
                        int nHistValue = m_pDepthHist[*pDepth];
                        pTex->r = nHistValue*factor[0];
                        pTex->g = nHistValue*factor[1];
                        pTex->b = nHistValue*factor[2];
    
                        factor[0] = factor[1] = factor[2] = 1;
                    }
                }
    
                pDepthRow += rowSize;
                pTexRow += m_nTexMapX;
            }
        }
    
        glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);
    
        // Display the OpenGL texture map
        glColor4f(1,1,1,1);
    
        glEnable(GL_TEXTURE_2D);
        glBegin(GL_QUADS);
    
        g_nXRes = depthFrame.getVideoMode().getResolutionX();
        g_nYRes = depthFrame.getVideoMode().getResolutionY();
    
        // upper left
        glTexCoord2f(0, 0);
        glVertex2f(0, 0);
        // upper right
        glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, 0);
        glVertex2f(GL_WIN_SIZE_X, 0);
        // bottom right
        glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, (float)g_nYRes/(float)m_nTexMapY);
        glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
        // bottom left
        glTexCoord2f(0, (float)g_nYRes/(float)m_nTexMapY);
        glVertex2f(0, GL_WIN_SIZE_Y);
    
        glEnd();
        glDisable(GL_TEXTURE_2D);
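
    To make the histogram mapping concrete: if, say, 30% of the valid depth pixels lie at depths up to some value d, a pixel at depth d is drawn with brightness roughly 256 * (1 - 0.30) ≈ 179, so the nearest surfaces (the tracked hand among them) come out brightest.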

    Once that processing is done, the palm coordinates are tracked:

    // From this frame snapshot, get all the gestures detected right now
        const nite::Array<nite::GestureData>& gestures = handFrame.getGestures();
        for (int i = 0; i < gestures.getSize(); ++i)
        {
            // If the gesture has completed successfully
            if (gestures[i].isComplete())
            {
                // Get the position where the gesture was performed
                const nite::Point3f& position = gestures[i].getCurrentPosition();
                printf("Gesture %d at (%f,%f,%f)\n", gestures[i].getType(), position.x, position.y, position.z);
    
                
                // nite::HandId newId ===>typedef short int HandId;
                nite::HandId newId;
                // Start tracking the palm at that position; a new hand ID is assigned.
                // The prototype is: NITE_API NiteStatus niteStartHandTracking(NiteHandTrackerHandle, const NitePoint3f*, NiteHandId* pNewHandId);
                m_pHandTracker->startHandTracking(gestures[i].getCurrentPosition(), &newId);
            }
        }
    
    // Get the hands being tracked.
        const nite::Array<nite::HandData>& hands= handFrame.getHands();
        for (int i = 0; i < hands.getSize(); ++i)
        {
            const nite::HandData& user = hands[i];
    
            if (!user.isTracking())
            {
                printf("Lost hand %d\n", user.getId());
                nite::HandId id = user.getId();
                HistoryBuffer<20>* pHistory = g_histories[id];
                g_histories.erase(g_histories.find(id));
                delete pHistory;
            }
            else
            {
                if (user.isNew())
                {
                    printf("Found hand %d\n", user.getId());
                    g_histories[user.getId()] = new HistoryBuffer<20>;
                }
                // Add to history
                HistoryBuffer<20>* pHistory = g_histories[user.getId()];
                pHistory->AddPoint(user.getPosition());
                // Draw history
                DrawHistory(m_pHandTracker, user.getId(), pHistory);
            }
        }
    
        if (g_drawFrameId)
        {
            DrawFrameId(handFrame.getFrameIndex());
        }
    
        // Swap the OpenGL display buffers
        glutSwapBuffers();
    
    }
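
    DrawHistory() itself is not shown here, but its essential step is converting each stored real-world position back into depth-map pixels before drawing, via the HandTracker::convertHandCoordinatesToDepth() wrapper around niteConvertHandCoordinatesToDepth(). A minimal sketch of that conversion; the window-scaling arithmetic is my own assumption, mirroring how the textured quad above is scaled:

    // Sketch: map one world-space point "pos" (e.g. from the history buffer)
    // to window coordinates. pHandTracker, g_nXRes/g_nYRes and
    // GL_WIN_SIZE_X/Y are the names used above; the scaling is assumed,
    // not copied from the sample.
    float depthX = 0.0f, depthY = 0.0f;
    pHandTracker->convertHandCoordinatesToDepth(pos.x, pos.y, pos.z, &depthX, &depthY);
    float winX = depthX * GL_WIN_SIZE_X / (float)g_nXRes;
    float winY = depthY * GL_WIN_SIZE_Y / (float)g_nYRes;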

     In summary, locating and tracking hand coordinates boils down to the following steps:
        1. Initialize OpenNI, NiTE, and so on;

        2. Create a new nite::HandTracker object;

        3. Call m_pHandTracker->create(&m_device);

        4. Start gesture detection (three gesture types in total can be detected), e.g. m_pHandTracker->startGestureDetection(nite::GESTURE_CLICK);

        5. For each depth snapshot, get the gestures that have just completed, then locate and start tracking each valid hand; the key code is:

    // From this frame snapshot, get all the gestures detected right now
        const nite::Array<nite::GestureData>& gestures = handFrame.getGestures();
        for (int i = 0; i < gestures.getSize(); ++i)
        {
            // If the gesture has completed successfully
            if (gestures[i].isComplete())
            {
                // Get the position where the gesture was performed
                const nite::Point3f& position = gestures[i].getCurrentPosition();
                printf("Gesture %d at (%f,%f,%f)\n", gestures[i].getType(), position.x, position.y, position.z);
    
                
                // nite::HandId newId ===>typedef short int HandId;
                nite::HandId newId;
                // Start tracking the palm at that position; a new hand ID is assigned.
                // The prototype is: NITE_API NiteStatus niteStartHandTracking(NiteHandTrackerHandle, const NitePoint3f*, NiteHandId* pNewHandId);
                m_pHandTracker->startHandTracking(gestures[i].getCurrentPosition(), &newId);
            }
        }

         6. Read the hands already confirmed for tracking and process them as you need; this is where your own work begins:
            

    // Get the hands being tracked.
        const nite::Array<nite::HandData>& hands= handFrame.getHands();
        for (int i = 0; i < hands.getSize(); ++i)
        {
            const nite::HandData& user = hands[i];
    
            if (!user.isTracking())
            {
                printf("Lost hand %d\n", user.getId());
                nite::HandId id = user.getId();
                HistoryBuffer<20>* pHistory = g_histories[id];
                g_histories.erase(g_histories.find(id));
                delete pHistory;
            }
            else
            {
                if (user.isNew())
                {
                    printf("Found hand %d\n", user.getId());
                    g_histories[user.getId()] = new HistoryBuffer<20>;
                }
                // Add to history
                HistoryBuffer<20>* pHistory = g_histories[user.getId()];
                pHistory->AddPoint(user.getPosition());
                // Draw history
                DrawHistory(m_pHandTracker, user.getId(), pHistory);
            }
        }
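
     One step the sample leaves to its destructor is teardown. Here is a minimal sketch of the symmetric shutdown sequence (a sketch only; it assumes the members shown above and the standard NiTE/OpenNI shutdown calls):

    // Stop detection and tracking, release the tracker, then shut the
    // runtimes down in reverse order of initialization.
    m_pHandTracker->stopGestureDetection(nite::GESTURE_WAVE);
    m_pHandTracker->stopGestureDetection(nite::GESTURE_CLICK);
    m_pHandTracker->destroy();
    delete m_pHandTracker;
    nite::NiTE::shutdown();
    m_device.close();
    openni::OpenNI::shutdown();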

         That covers the basic steps for tracking a hand. Now let's look at the hand-related functions that NITE 2's low-level NiteCAPI.h library exposes:

    // HandTracker
    // Initialize a tracker on the default device
    NITE_API NiteStatus niteInitializeHandTracker(NiteHandTrackerHandle*);
    // Initialize a tracker on a specific device
    NITE_API NiteStatus niteInitializeHandTrackerByDevice(void*, NiteHandTrackerHandle*);
    // Shut the tracker down
    NITE_API NiteStatus niteShutdownHandTracker(NiteHandTrackerHandle);
    // After a valid gesture, start tracking the new hand's movement at that position
    NITE_API NiteStatus niteStartHandTracking(NiteHandTrackerHandle, const NitePoint3f*, NiteHandId* pNewHandId);
    // Stop tracking the hand with the given ID
    NITE_API void niteStopHandTracking(NiteHandTrackerHandle, NiteHandId);
    // Stop tracking all hands
    NITE_API void niteStopAllHandTracking(NiteHandTrackerHandle);
    // Set/get the smoothing factor applied to hand positions
    NITE_API NiteStatus niteSetHandSmoothingFactor(NiteHandTrackerHandle, float);
    NITE_API NiteStatus niteGetHandSmoothingFactor(NiteHandTrackerHandle, float*);
    // Register and unregister callbacks
    NITE_API NiteStatus niteRegisterHandTrackerCallbacks(NiteHandTrackerHandle, NiteHandTrackerCallbacks*, void*);
    NITE_API void niteUnregisterHandTrackerCallbacks(NiteHandTrackerHandle, NiteHandTrackerCallbacks*);
    // Read a snapshot of the algorithm (depth frame, hands, gestures)
    NITE_API NiteStatus niteReadHandTrackerFrame(NiteHandTrackerHandle, NiteHandTrackerFrame**);
    // COM-style reference counting for frames
    NITE_API NiteStatus niteHandTrackerFrameAddRef(NiteHandTrackerHandle, NiteHandTrackerFrame*);
    NITE_API NiteStatus niteHandTrackerFrameRelease(NiteHandTrackerHandle, NiteHandTrackerFrame*);
    // Start detecting a gesture type
    NITE_API NiteStatus niteStartGestureDetection(NiteHandTrackerHandle, NiteGestureType);
    // Stop detecting one gesture type
    NITE_API void niteStopGestureDetection(NiteHandTrackerHandle, NiteGestureType);
    // Stop detecting all gesture types
    NITE_API void niteStopAllGestureDetection(NiteHandTrackerHandle);
    // Conversions between real-world joint/hand coordinates and depth-map coordinates
    NITE_API NiteStatus niteConvertJointCoordinatesToDepth(NiteUserTrackerHandle userTracker, float x, float y, float z, float* pX, float* pY);
    NITE_API NiteStatus niteConvertDepthCoordinatesToJoint(NiteUserTrackerHandle userTracker, int x, int y, int z, float* pX, float* pY);
    NITE_API NiteStatus niteConvertHandCoordinatesToDepth(NiteHandTrackerHandle handTracker, float x, float y, float z, float* pX, float* pY);
    NITE_API NiteStatus niteConvertDepthCoordinatesToHand(NiteHandTrackerHandle handTracker, int x, int y, int z, float* pX, float* pY);
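
     Most of these C entry points also have thin C++ wrappers on nite::HandTracker. As one hedged example, assuming the setSmoothingFactor()/getSmoothingFactor() wrappers in NiTE2.h (the 0.1f value is only an illustrative choice):

    // Trade responsiveness for stability of the reported palm position;
    // the exact semantics of the factor are NiTE-internal, so treat the
    // value as a tuning knob.
    m_pHandTracker->setSmoothingFactor(0.1f);
    printf("smoothing factor: %f\n", m_pHandTracker->getSmoothingFactor());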

         To summarize: the best way to understand these functions is to write some code and call them yourself; then you'll know exactly what each one does. NiTE really doesn't provide many hand-tracking functions, just these few, and learning to use them isn't hard. I find it at least far more intuitive than NITE 1, and much less complicated.
