KINECT + OpenCV: Skeleton-Based Action Recognition in Video

Environment: Kinect 1.7 + OpenCV 2.4 + VC2015

  • Use Kinect to capture skeleton data in 3D space and process it in batches
  • Compute per-joint motion vectors from frame differences and match them against local templates

Contents

  • KINECT + OpenCV: skeleton-based action recognition in video
  • Contents
    • Preface
    • Processing and matching the current batch of frames
      • How Kinect frames are processed
      • Cosine similarity against the templates
      • Matching with dynamic time warping
    • Recording and saving templates locally
      • Generating an XML file with OpenCV's FileStorage class

Preface

  • A week has passed since the previous post. This time we start recognizing and matching the video stream. The overall framework: Kinect reads video frames in batches, and each batch is matched against the template streams. Concretely, Kinect first reads NUM_OF_FRAME (120) frames of skeleton data, then differences consecutive frames to obtain the displacement vector of every joint, and those displacement vectors are matched against the templates.
  • As before, there are two programs: one processes the current video and matches it, the other records the templates and saves them locally.

1. Processing and matching the current batch of frames

#include <afxsock.h>
#include <opencv2/opencv.hpp>
#include <Windows.h>
#include <NuiApi.h>
using namespace std;
using namespace cv;

//---Parameters--------------------------------------------
#define NUM_OF_MODEL 3              // number of local templates
#define NUM_OF_SKELETONS 10         // number of joints used; seated mode only tracks 10 joints, skeleton indices 2-11
#define NUM_OF_FRAME 120            // number of video frames processed per batch
#define SHAKE_THRESHOLD_VALUE 0.1   // anti-jitter threshold: a joint's displacement only counts when it moves more than 0.1 m
#define INFERRED_THRESHOLD_VALUE 0.3 // when a joint is lost, Kinect infers its position; this threshold rejects inferred positions whose 3D coordinates jump more than 0.3 m

Mat modelskeleton[NUM_OF_MODEL];    // the templates go here

string int2type(int _intval)        // names of the template actions
{
    string time_str;
    switch (_intval)
    {
    case 1:
        time_str = "drinking";      // first action: drinking
        break;
    case 2:
        time_str = "writing";
        break;
    case 3:
        time_str = "on_phone";
        break;
    case 0:
        time_str = "no action";
        break;
    }
    return time_str;
}
string int2str(int _intval)
{
    string time_str;
    char c[10];
    sprintf_s(c, "%d", _intval);
    time_str = c;
    return time_str;
}
Mat setInput0(Mat now, int id) // read a template action from the local XML file and compute displacement vectors between consecutive frames; the result is stored into modelskeleton[NUM_OF_MODEL]
{
    cout << "done" << endl;
    Mat temp(NUM_OF_FRAME, NUM_OF_SKELETONS, CV_32FC3, Scalar(0, 0, 0));
    for (int i = 0;i < NUM_OF_FRAME;i++)
    {
        for (int j = 0;j < NUM_OF_SKELETONS; j++)
        {
            temp.ptr<float>(i)[j * 3] = now.ptr<float>(i + 1)[j * 3] - now.ptr<float>(i)[j * 3];
            temp.ptr<float>(i)[j * 3 + 1] = now.ptr<float>(i + 1)[j * 3 + 1] - now.ptr<float>(i)[j * 3 + 1];
            temp.ptr<float>(i)[j * 3 + 2] = now.ptr<float>(i + 1)[j * 3 + 2] - now.ptr<float>(i)[j * 3 + 2];
        }
    }
    return temp;
}

void drawSkeleton(Mat &image, CvPoint pointSet[], NUI_SKELETON_FRAME now, int userid) // draw the skeleton; inferred joints are marked in gray
{
    CvScalar color = cvScalar(255);
    // joint-index pairs that form the bones: torso, left/right arm, left/right leg
    static const NUI_SKELETON_POSITION_INDEX bones[][2] = {
        { NUI_SKELETON_POSITION_HEAD,            NUI_SKELETON_POSITION_SHOULDER_CENTER },
        { NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SPINE },
        { NUI_SKELETON_POSITION_SPINE,           NUI_SKELETON_POSITION_HIP_CENTER },
        { NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_LEFT },
        { NUI_SKELETON_POSITION_SHOULDER_LEFT,   NUI_SKELETON_POSITION_ELBOW_LEFT },
        { NUI_SKELETON_POSITION_ELBOW_LEFT,      NUI_SKELETON_POSITION_WRIST_LEFT },
        { NUI_SKELETON_POSITION_WRIST_LEFT,      NUI_SKELETON_POSITION_HAND_LEFT },
        { NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_RIGHT },
        { NUI_SKELETON_POSITION_SHOULDER_RIGHT,  NUI_SKELETON_POSITION_ELBOW_RIGHT },
        { NUI_SKELETON_POSITION_ELBOW_RIGHT,     NUI_SKELETON_POSITION_WRIST_RIGHT },
        { NUI_SKELETON_POSITION_WRIST_RIGHT,     NUI_SKELETON_POSITION_HAND_RIGHT },
        { NUI_SKELETON_POSITION_HIP_CENTER,      NUI_SKELETON_POSITION_HIP_LEFT },
        { NUI_SKELETON_POSITION_HIP_LEFT,        NUI_SKELETON_POSITION_KNEE_LEFT },
        { NUI_SKELETON_POSITION_KNEE_LEFT,       NUI_SKELETON_POSITION_ANKLE_LEFT },
        { NUI_SKELETON_POSITION_ANKLE_LEFT,      NUI_SKELETON_POSITION_FOOT_LEFT },
        { NUI_SKELETON_POSITION_HIP_CENTER,      NUI_SKELETON_POSITION_HIP_RIGHT },
        { NUI_SKELETON_POSITION_HIP_RIGHT,       NUI_SKELETON_POSITION_KNEE_RIGHT },
        { NUI_SKELETON_POSITION_KNEE_RIGHT,      NUI_SKELETON_POSITION_ANKLE_RIGHT },
        { NUI_SKELETON_POSITION_ANKLE_RIGHT,     NUI_SKELETON_POSITION_FOOT_RIGHT },
    };
    // draw a bone only when both endpoints were mapped to valid image coordinates
    for (size_t i = 0; i < sizeof(bones) / sizeof(bones[0]); i++)
    {
        const CvPoint &p1 = pointSet[bones[i][0]];
        const CvPoint &p2 = pointSet[bones[i][1]];
        if ((p1.x != 0 || p1.y != 0) && (p2.x != 0 || p2.y != 0))
            line(image, p1, p2, color, 2);
    }
    // mark each joint: gray if its position was inferred or not tracked, white if tracked
    for (int i = 0; i < NUI_SKELETON_POSITION_COUNT; i++)
    {
        if (now.SkeletonData[userid].SkeletonPositions[i].x)
        {
            if (now.SkeletonData[userid].eSkeletonPositionTrackingState[i] == NUI_SKELETON_POSITION_INFERRED ||
                now.SkeletonData[userid].eSkeletonPositionTrackingState[i] == NUI_SKELETON_POSITION_NOT_TRACKED)
                circle(image, pointSet[i], 5, Scalar(128), 2);
            else
                circle(image, pointSet[i], 5, Scalar(255), 2);
        }
    }
}

// the class that performs the batch matching
class Process {
private:
    Mat framess;
    Vector4 mark; // center position of the tracked user in the last frame of the batch
    int matching;
    float disvalsModel[NUM_OF_MODEL][NUM_OF_FRAME] = { 0 };
public:
    Process() :matching(0)
    {
        framess.create(NUM_OF_FRAME, NUM_OF_SKELETONS, CV_32FC3);
        mark = { 0 };
    }
    void classify(Mat &outmark) // convert the matched action id into text and draw it onto outmark
    {
        float fx, fy;
        NuiTransformSkeletonToDepthImage(mark, &fx, &fy);
        fx *= 2;
        fy *= 2;
        circle(outmark, cvPoint((int)fx,(int)fy),2,Scalar(64,255,255 ),3);
        const string str = "the pose is " + int2type(matching);
        putText(outmark, str, cvPoint((int)fx, (int)fy), CV_FONT_HERSHEY_COMPLEX,1,Scalar(64, 255, 255));
    }
    void shakeMask(NUI_SKELETON_FRAME past, NUI_SKELETON_FRAME &now, int userid)
    {
        for (int j = 3; j < NUM_OF_SKELETONS; j++)
        {
            // reject an inferred joint whose position jumps more than INFERRED_THRESHOLD_VALUE on any axis
            if (now.SkeletonData[userid].eSkeletonPositionTrackingState[j] == NUI_SKELETON_POSITION_INFERRED && (
                fabs(now.SkeletonData[userid].SkeletonPositions[j].x - past.SkeletonData[userid].SkeletonPositions[j].x) > INFERRED_THRESHOLD_VALUE ||
                fabs(now.SkeletonData[userid].SkeletonPositions[j].y - past.SkeletonData[userid].SkeletonPositions[j].y) > INFERRED_THRESHOLD_VALUE ||
                fabs(now.SkeletonData[userid].SkeletonPositions[j].z - past.SkeletonData[userid].SkeletonPositions[j].z) > INFERRED_THRESHOLD_VALUE))
            {
                now.SkeletonData[userid].SkeletonPositions[j].x = past.SkeletonData[userid].SkeletonPositions[j].x;
                now.SkeletonData[userid].SkeletonPositions[j].y = past.SkeletonData[userid].SkeletonPositions[j].y;
                now.SkeletonData[userid].SkeletonPositions[j].z = past.SkeletonData[userid].SkeletonPositions[j].z;
            }
            // suppress jitter: movement smaller than SHAKE_THRESHOLD_VALUE is treated as no movement
            if (fabs(now.SkeletonData[userid].SkeletonPositions[j].x - past.SkeletonData[userid].SkeletonPositions[j].x) < SHAKE_THRESHOLD_VALUE)
                now.SkeletonData[userid].SkeletonPositions[j].x = past.SkeletonData[userid].SkeletonPositions[j].x;
            if (fabs(now.SkeletonData[userid].SkeletonPositions[j].y - past.SkeletonData[userid].SkeletonPositions[j].y) < SHAKE_THRESHOLD_VALUE)
                now.SkeletonData[userid].SkeletonPositions[j].y = past.SkeletonData[userid].SkeletonPositions[j].y;
            if (fabs(now.SkeletonData[userid].SkeletonPositions[j].z - past.SkeletonData[userid].SkeletonPositions[j].z) < SHAKE_THRESHOLD_VALUE)
                now.SkeletonData[userid].SkeletonPositions[j].z = past.SkeletonData[userid].SkeletonPositions[j].z;
        }
    }
    void setInput(NUI_SKELETON_FRAME past, NUI_SKELETON_FRAME now, int counter, int userid) // difference consecutive frames to obtain the displacement vectors
    {
        if (counter == NUM_OF_FRAME-1)
        {
            mark.x = now.SkeletonData[userid].Position.x;
            mark.y = now.SkeletonData[userid].Position.y;
            mark.z = now.SkeletonData[userid].Position.z;
        }

        for (int j = 0;j < NUM_OF_SKELETONS;j++)
        {
            framess.ptr<float>(counter)[3 * j] = now.SkeletonData[userid].SkeletonPositions[j].x - past.SkeletonData[userid].SkeletonPositions[j].x;
            framess.ptr<float>(counter)[3 * j + 1] = now.SkeletonData[userid].SkeletonPositions[j].y - past.SkeletonData[userid].SkeletonPositions[j].y;
            framess.ptr<float>(counter)[3 * j + 2] = now.SkeletonData[userid].SkeletonPositions[j].z - past.SkeletonData[userid].SkeletonPositions[j].z;
        }

    }
    float rtwCal(int flag)
    {
        Mat rtwmat(NUM_OF_FRAME, NUM_OF_FRAME, CV_32FC1, Scalar(0));
        for (int i = 0; i < NUM_OF_FRAME; i++)
        {
            for (int j = 0; j < NUM_OF_FRAME; j++)
            {
                float rtwMat[3] = { 0 };                            // reset the accumulators for every frame pair
                for (int k = 0; k < (NUM_OF_SKELETONS * 3); k++)    // cosine between two multi-dimensional vectors
                {
                    rtwMat[0] += framess.ptr<float>(i)[k] * modelskeleton[flag].ptr<float>(j)[k];
                    rtwMat[1] += framess.ptr<float>(i)[k] * framess.ptr<float>(i)[k];
                    rtwMat[2] += modelskeleton[flag].ptr<float>(j)[k] * modelskeleton[flag].ptr<float>(j)[k];
                }
                float denom = sqrt(rtwMat[1]) * sqrt(rtwMat[2]);
                rtwmat.ptr<float>(i)[j] = denom > 0.f ? rtwMat[0] / denom : 0.f; // guard against zero-displacement frames
            }
        }

        float routeVal[NUM_OF_FRAME] = { 0 };
        for (int i = 0;i < NUM_OF_FRAME;i++)
        {
            for (int j = 0;j < NUM_OF_FRAME;j++)
            {
                routeVal[i] += rtwmat.ptr<float>(j)[(j + i) % NUM_OF_FRAME];
            }
        }
        float result = routeVal[0];
        for (int i = 0;i < NUM_OF_FRAME-1;i++)
        {
            if (result<routeVal[i+1])
                result = routeVal[i + 1];
        }
        return result;
    }
    void processBegin()
    {
        int result = 0;
        float rtwDistance[NUM_OF_MODEL] = { 0 };
        for (int i = 0;i < NUM_OF_MODEL;i++)
        {
            rtwDistance[i] = rtwCal(i);
            cout << "the distance between " + int2str(i+1) << "is" << rtwDistance[i] << endl;
        }
        float temp = rtwDistance[0];
        for (int i = 0;i < NUM_OF_MODEL - 1;i++)
        {
            if (temp < rtwDistance[i + 1])
            {
                temp = rtwDistance[i + 1];
                result = i + 1;
            }
        }
        matching = result+1;
        if (temp < 0.3)
            matching = 0;
        cout << "this time is type" << int2str(matching) << endl << endl;
    }
};

int main()
{
    Mat image(480, 640, CV_8UC3, Scalar(0, 0, 0));
    // initialize Kinect: skeleton tracking (seated mode) plus the color stream
    HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR);
    HANDLE skeletonEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    hr = NuiSkeletonTrackingEnable(skeletonEvent, NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT);
    // open the 640x480 color stream
    HANDLE ColorFrameStream = NULL;
    HANDLE NextFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, NextFrameEvent, &ColorFrameStream);
    if (FAILED(hr))
    {
        cout << "Could not open image stream video" << endl;
        return hr;
    }
    // frame buffers and the result overlay
    NUI_SKELETON_FRAME pastFrame = { 0 };
    NUI_SKELETON_FRAME nowFrame = { 0 };
    Mat mark(image.size(), CV_8UC3, Scalar(0, 0, 0)); // the matching result is drawn into mark and composited with the color image on screen
    int userid = 0;
    bool bFoundSkeleton = FALSE;
    // wait until Kinect tracks a user
    while (!bFoundSkeleton)
    {
        if (WaitForSingleObject(skeletonEvent, INFINITE) == 0)
        {
            hr = NuiSkeletonGetNextFrame(0, &nowFrame);
            for (int i = 0;i < NUI_SKELETON_COUNT;i++)
            {
                NUI_SKELETON_TRACKING_STATE trackingState = nowFrame.SkeletonData[i].eTrackingState;
                if (trackingState == NUI_SKELETON_TRACKED)
                {
                    bFoundSkeleton = TRUE;
                    userid = i;
                }
            }
        }
    }

    int idcounter = 0;
    // load the templates and convert them to displacement vectors
    FileStorage fds("monitor_file.xml", FileStorage::READ);
    for (int i = 0;i < NUM_OF_MODEL;i++)
    {
        modelskeleton[i].create(NUM_OF_FRAME, NUM_OF_SKELETONS, CV_32FC3);
        Mat temp1(NUM_OF_FRAME + 1, NUM_OF_SKELETONS, CV_32FC3);
        fds["mat_id" + int2str(idcounter)] >> temp1;
        modelskeleton[i] = setInput0(temp1, idcounter);
        idcounter++;
    }
    fds.release();
    while (1)
    {
        // process one batch of NUM_OF_FRAME frames
        int frameCounter = 0;
        Process process=Process();
        for (;userid < NUI_SKELETON_COUNT;userid++)
        {
            if (nowFrame.SkeletonData[userid].eTrackingState == NUI_SKELETON_TRACKED &&
                nowFrame.SkeletonData[userid].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER] != NUI_SKELETON_POSITION_NOT_TRACKED)
            {
                while (frameCounter < NUM_OF_FRAME)
                {
                    // display the color stream
                        const NUI_IMAGE_FRAME * pImageFrame = NULL;
                        if (WaitForSingleObject(skeletonEvent, INFINITE) == 0)
                        {
                            NuiImageStreamGetNextFrame(ColorFrameStream, 0, &pImageFrame);
                            INuiFrameTexture * pTexture = pImageFrame->pFrameTexture;
                            NUI_LOCKED_RECT LockedRect;
                            pTexture->LockRect(0, &LockedRect, NULL, 0);
                            for (int i = 0; i < image.rows; i++)
                            {
                                uchar *ptr = image.ptr<uchar>(i);
                                uchar *pBuffer = (uchar*)(LockedRect.pBits) + i * LockedRect.Pitch;
                                for (int j = 0; j < image.cols; j++)
                                {
                                    ptr[3 * j] = pBuffer[4 * j];
                                    ptr[3 * j + 1] = pBuffer[4 * j + 1];
                                    ptr[3 * j + 2] = pBuffer[4 * j + 2];
                                }
                            }
                            imshow("colorImage", image + mark);
                            pTexture->UnlockRect(0);
                            waitKey(33);
                            NuiImageStreamReleaseFrame(ColorFrameStream, pImageFrame);
                            // skeleton processing for this frame
                            pastFrame = nowFrame;
                            hr = NuiSkeletonGetNextFrame(0, &nowFrame);
                            NuiTransformSmooth(&nowFrame, NULL);

                            Mat skeletonimg(240, 320, CV_8UC1, Scalar(0));
                            Vector4 cpoint[20] = { 0 };
                            CvPoint cvp[20];
                            for (int j = 0; j < 20; j++)
                            {
                                cpoint[j].x = nowFrame.SkeletonData[userid].SkeletonPositions[j].x;
                                cpoint[j].y = nowFrame.SkeletonData[userid].SkeletonPositions[j].y;
                                cpoint[j].z = nowFrame.SkeletonData[userid].SkeletonPositions[j].z;
                            }
                            float fx, fy;
                            for (int i = 0; i < 20; i++)
                            {
                                NuiTransformSkeletonToDepthImage(cpoint[i], &fx, &fy);
                                cvp[i].x = (int)fx;
                                cvp[i].y = (int)fy;
                            }
                            drawSkeleton(skeletonimg, cvp, nowFrame, userid);
                            imshow("skeleton", skeletonimg);
                            process.setInput(pastFrame, nowFrame, frameCounter, userid);
                            frameCounter++;
                        }
                }
                process.processBegin();
                Mat mark1(image.size(), CV_8UC3, Scalar(0, 0, 0));
                process.classify(mark1);
                mark = mark1;
                imshow("colorImage", mark1);
                waitKey(33);
                break;
            }
        }
    }
    NuiShutdown();
    return 0;
}

How Kinect frames are processed

Each time Kinect reads a frame of skeleton data, jitter is suppressed and the frame is subtracted from the previous one, yielding the displacement vectors that occurred during this frame; these are stored in the Mat framess(NUM_OF_FRAME, NUM_OF_SKELETONS, CV_32FC3) matrix.
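
On de-jittering: besides the thresholding in shakeMask, the SDK's own filter can be tuned. The main loop calls NuiTransformSmooth(&nowFrame, NULL), which uses default parameters; the snippet below is a minimal sketch that passes explicit parameters instead, assuming the NUI_TRANSFORM_SMOOTH_PARAMETERS struct of the Kinect 1.x SDK (the values shown are illustrative, not tuned):

// a sketch: pass explicit smoothing parameters instead of NULL (values are illustrative)
NUI_TRANSFORM_SMOOTH_PARAMETERS smooth = { 0 };
smooth.fSmoothing          = 0.5f;  // filter strength, 0..1
smooth.fCorrection         = 0.5f;  // how quickly the filter corrects toward raw data
smooth.fPrediction         = 0.5f;  // how many frames ahead to predict
smooth.fJitterRadius       = 0.05f; // jitter clamp radius, in meters
smooth.fMaxDeviationRadius = 0.04f; // max distance the smoothed value may deviate from raw data, in meters
NuiTransformSmooth(&nowFrame, &smooth);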

Cosine similarity against the templates

Once NUM_OF_FRAME frames have been collected, i.e. once framess is full, the cosine of the angle between each frame's displacement vector in framess and each frame's displacement vector in a template is computed. The current program reads 120 frames per batch, so framess is a 120*10*3 structure, and after preprocessing each template is also 120*10*3. The cosine is taken row against row, i.e. each one is an operation between two 30-dimensional vectors. The resulting cosine values fill the 120*120 matrix used for dynamic time warping.
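
For reference, the per-row cosine can be pulled out as a standalone helper. A minimal sketch (rowCosine is a hypothetical name; dim would be NUM_OF_SKELETONS * 3 = 30 here):

#include <cmath>

// a minimal sketch: cosine of the angle between two dim-dimensional rows,
// e.g. one row of framess against one row of a template (dim = 30)
float rowCosine(const float *a, const float *b, int dim)
{
    float dot = 0.f, na = 0.f, nb = 0.f;
    for (int k = 0; k < dim; k++)
    {
        dot += a[k] * b[k];   // numerator: inner product
        na  += a[k] * a[k];   // squared norm of a
        nb  += b[k] * b[k];   // squared norm of b
    }
    float denom = sqrtf(na) * sqrtf(nb);
    return denom > 0.f ? dot / denom : 0.f;  // guard against all-zero frames
}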

Matching with dynamic time warping

Two recordings of the same action may have a phase offset between them, and there is no way to tell where the action in the incoming batch actually starts. So instead of searching for a starting frame, the matrix built for dynamic time warping is summed along each line of slope 1 (wrapping around at the edges), and the largest of these sums is taken as the match score. Note that this shortcut only fits the one-to-one-alignment case of dynamic time warping; it cannot stretch or compress time.
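
Concretely: if sim is the 120*120 cosine matrix, then for every assumed phase offset s the elements sim(j, (j+s) mod N) are summed, and the maximum over all s is the match score, which is exactly what the second half of rtwCal computes. A minimal standalone sketch (bestShiftScore is a hypothetical name):

#include <cfloat>               // FLT_MAX
#include <opencv2/opencv.hpp>

// a minimal sketch of the cyclic slope-1 sum; sim is the N x N cosine matrix (CV_32FC1)
float bestShiftScore(const cv::Mat &sim)
{
    int N = sim.rows;
    float best = -FLT_MAX;
    for (int s = 0; s < N; s++)        // s plays the role of the unknown phase offset
    {
        float score = 0.f;
        for (int j = 0; j < N; j++)    // walk the line of slope 1, wrapping around
            score += sim.ptr<float>(j)[(j + s) % N];
        if (score > best)
            best = score;
    }
    return best;
}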

2. Recording and saving templates locally

//@ a quick-and-dirty program written solely to record the template actions
#include <afxsock.h>
#include <opencv2/opencv.hpp>
#include <Windows.h>
#include <NuiApi.h>
using namespace std;
using namespace cv;

#define NUM_OF_SKELETONS 10
#define NUM_OF_FRAMES 121 // the file must hold one frame more than recognition uses: 121 absolute positions yield exactly 120 displacement vectors after differencing
void drawSkeleton(Mat &image, CvPoint pointSet[])
{
    CvScalar color = cvScalar(255);
    // same bone table as in the recognition program: torso, left/right arm, left/right leg
    static const NUI_SKELETON_POSITION_INDEX bones[][2] = {
        { NUI_SKELETON_POSITION_HEAD,            NUI_SKELETON_POSITION_SHOULDER_CENTER },
        { NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SPINE },
        { NUI_SKELETON_POSITION_SPINE,           NUI_SKELETON_POSITION_HIP_CENTER },
        { NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_LEFT },
        { NUI_SKELETON_POSITION_SHOULDER_LEFT,   NUI_SKELETON_POSITION_ELBOW_LEFT },
        { NUI_SKELETON_POSITION_ELBOW_LEFT,      NUI_SKELETON_POSITION_WRIST_LEFT },
        { NUI_SKELETON_POSITION_WRIST_LEFT,      NUI_SKELETON_POSITION_HAND_LEFT },
        { NUI_SKELETON_POSITION_SHOULDER_CENTER, NUI_SKELETON_POSITION_SHOULDER_RIGHT },
        { NUI_SKELETON_POSITION_SHOULDER_RIGHT,  NUI_SKELETON_POSITION_ELBOW_RIGHT },
        { NUI_SKELETON_POSITION_ELBOW_RIGHT,     NUI_SKELETON_POSITION_WRIST_RIGHT },
        { NUI_SKELETON_POSITION_WRIST_RIGHT,     NUI_SKELETON_POSITION_HAND_RIGHT },
        { NUI_SKELETON_POSITION_HIP_CENTER,      NUI_SKELETON_POSITION_HIP_LEFT },
        { NUI_SKELETON_POSITION_HIP_LEFT,        NUI_SKELETON_POSITION_KNEE_LEFT },
        { NUI_SKELETON_POSITION_KNEE_LEFT,       NUI_SKELETON_POSITION_ANKLE_LEFT },
        { NUI_SKELETON_POSITION_ANKLE_LEFT,      NUI_SKELETON_POSITION_FOOT_LEFT },
        { NUI_SKELETON_POSITION_HIP_CENTER,      NUI_SKELETON_POSITION_HIP_RIGHT },
        { NUI_SKELETON_POSITION_HIP_RIGHT,       NUI_SKELETON_POSITION_KNEE_RIGHT },
        { NUI_SKELETON_POSITION_KNEE_RIGHT,      NUI_SKELETON_POSITION_ANKLE_RIGHT },
        { NUI_SKELETON_POSITION_ANKLE_RIGHT,     NUI_SKELETON_POSITION_FOOT_RIGHT },
    };
    // draw a bone only when both endpoints were mapped to valid image coordinates
    for (size_t i = 0; i < sizeof(bones) / sizeof(bones[0]); i++)
    {
        const CvPoint &p1 = pointSet[bones[i][0]];
        const CvPoint &p2 = pointSet[bones[i][1]];
        if ((p1.x != 0 || p1.y != 0) && (p2.x != 0 || p2.y != 0))
            line(image, p1, p2, color, 2);
    }
}
string int2str(int _intval)
{
    string time_str;
    char c[10];
    sprintf_s(c, "%d", _intval);
    time_str = c;
    return time_str;
}

int main()
{
    Mat image(480, 640, CV_8UC3, Scalar(0, 0, 0));
    //
    HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR);
    HANDLE skeletonEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    hr = NuiSkeletonTrackingEnable(skeletonEvent, NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT);
    int userid = 0;
    if (FAILED(hr))
    {
        cout << "Could not open skeleton stream video" << endl;
        NuiShutdown();
        return hr;
    }
    //
    HANDLE ColorFrameStream = NULL;
    HANDLE NextFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, NextFrameEvent, &ColorFrameStream);
    if (FAILED(hr))
    {
        cout << "Could not open image stream video" << endl;
        return hr;
    }
    //
    NUI_SKELETON_FRAME nowFrame = { 0 };
    bool bFoundSkeleton = FALSE;
    bool getdata = FALSE;
    int counter = 0;
    Mat temp(NUM_OF_FRAMES, NUM_OF_SKELETONS, CV_32FC3,Scalar(0,0,0));
    // truncate the XML file once in WRITE mode; each template is appended later
    FileStorage fs("monitor_file.xml", FileStorage::WRITE);
    fs.release();
    // wait until Kinect tracks a user
    while (!bFoundSkeleton)
    {
        if (WaitForSingleObject(skeletonEvent, INFINITE) == 0)
        {
            hr = NuiSkeletonGetNextFrame(0, &nowFrame);
            for (int i=0;i < NUI_SKELETON_COUNT;i++)
            {
                NUI_SKELETON_TRACKING_STATE trackingState = nowFrame.SkeletonData[i].eTrackingState;
                if (trackingState == NUI_SKELETON_TRACKED)
                {
                    bFoundSkeleton = TRUE;
                    userid = i;
                }
            }
        }
    }
    imshow("colorImage", Mat(480, 640, CV_8UC3, Scalar(64, 255, 255)));
    waitKey(1000);
    while (!getdata)
    {
        int frameCounter = 0;
        for (;userid < NUI_SKELETON_COUNT;userid++)
        {
            if (nowFrame.SkeletonData[userid].eTrackingState == NUI_SKELETON_TRACKED &&
                nowFrame.SkeletonData[userid].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER] != NUI_SKELETON_POSITION_NOT_TRACKED)
            {
                while (frameCounter < NUM_OF_FRAMES)
                {
                    // grab and display a color frame
                    const NUI_IMAGE_FRAME * pImageFrame = NULL;
                    if (WaitForSingleObject(skeletonEvent, INFINITE) == 0)
                    {
                        NuiImageStreamGetNextFrame(ColorFrameStream, 0, &pImageFrame);
                        INuiFrameTexture * pTexture = pImageFrame->pFrameTexture;
                        NUI_LOCKED_RECT LockedRect;
                        pTexture->LockRect(0, &LockedRect, NULL, 0);
                        for (int i = 0; i < image.rows; i++)
                        {
                            uchar *ptr = image.ptr<uchar>(i);
                            uchar *pBuffer = (uchar*)(LockedRect.pBits) + i * LockedRect.Pitch;
                            for (int j = 0; j < image.cols; j++)
                            {
                                ptr[3 * j] = pBuffer[4 * j];
                                ptr[3 * j + 1] = pBuffer[4 * j + 1];
                                ptr[3 * j + 2] = pBuffer[4 * j + 2];
                            }
                        }
                        imshow("colorImage", image);
                        pTexture->UnlockRect(0);
                        NuiImageStreamReleaseFrame(ColorFrameStream, pImageFrame);
                        waitKey(33);

                        // skeleton processing for this frame
                        hr = NuiSkeletonGetNextFrame(0, &nowFrame);
                        NuiTransformSmooth(&nowFrame, NULL);
                        Mat skeletonimg(240, 320, CV_8UC1, Scalar(0)); // the skeleton, projected to depth-image coordinates, is drawn into skeletonimg
                        Vector4 cpoint[20] = { 0 };
                        CvPoint cvp[20];
                        for (int j = 0; j < 20; j++) // Kinect provides spatial coordinates for 20 joints in total
                        {
                            cpoint[j].x = nowFrame.SkeletonData[userid].SkeletonPositions[j].x;
                            cpoint[j].y = nowFrame.SkeletonData[userid].SkeletonPositions[j].y;
                            cpoint[j].z = nowFrame.SkeletonData[userid].SkeletonPositions[j].z;
                        }
                        float fx, fy;
                        for (int i = 0;i < 20;i++)
                        {
                            NuiTransformSkeletonToDepthImage(cpoint[i], &fx, &fy);
                            cvp[i].x = (int)fx;
                            cvp[i].y = (int)fy;
                        }
                        drawSkeleton(skeletonimg, cvp); // connect the joints into a skeleton with drawSkeleton
                        imshow("skeleton", skeletonimg);
                        if (FAILED(hr))
                        {
                            cout << "Could not get next skeleton stream " << endl;
                            return hr;
                        }
                        for (int j = 0;j < NUM_OF_SKELETONS;j++)
                        {
                            temp.ptr<float>(frameCounter)[3 * j]     = nowFrame.SkeletonData[userid].SkeletonPositions[j].x;
                            temp.ptr<float>(frameCounter)[3 * j + 1] = nowFrame.SkeletonData[userid].SkeletonPositions[j].y;
                            temp.ptr<float>(frameCounter)[3 * j + 2] = nowFrame.SkeletonData[userid].SkeletonPositions[j].z;
                            //cout << "model" + int2str(counter) << " frame" + int2str(frameCounter) << "data is: " << nowFrame.SkeletonData[userid].SkeletonPositions[j].x << nowFrame.SkeletonData[userid].SkeletonPositions[j].y << nowFrame.SkeletonData[userid].SkeletonPositions[j].z << endl;
                        }
                    }
                    frameCounter++;
                }
                FileStorage fs("monitor_file.xml", FileStorage::APPEND);
                fs << "mat_id" + int2str(counter) << temp;
                fs.release();
                counter++;
                break;
            }
        }
        if (counter == 3) // all three templates recorded
            getdata = TRUE;
        imshow("colorImage", Mat(480, 640, CV_8UC3, Scalar(255, 64, 255)));
        waitKey(1000);
    }
    NuiShutdown();
    return 0;
}

Generating an XML file with OpenCV's FileStorage class

Worth noting: if you open the file in WRITE mode and chain three Mats after a single key, e.g.:

fs << "mat_" << image1 << image2 << image3;

then on reading, the XML file opens fine but no data can be retrieved. So I first open the XML in WRITE mode outside the loop to truncate and initialize it, and then append each template inside the loop in APPEND mode.
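
For reference, a minimal standalone sketch of the pattern that does read back correctly, one distinct key per Mat (a toy example, not the monitor program itself):

#include <opencv2/opencv.hpp>
using namespace cv;

int main()
{
    Mat m0(2, 2, CV_32F, Scalar(1)), m1(2, 2, CV_32F, Scalar(2));

    FileStorage fs("monitor_file.xml", FileStorage::WRITE); // WRITE truncates the file
    fs << "mat_id0" << m0;   // one distinct key per Mat
    fs << "mat_id1" << m1;
    fs.release();

    Mat check;
    FileStorage fr("monitor_file.xml", FileStorage::READ);
    fr["mat_id1"] >> check;  // read back by key
    fr.release();
    return 0;
}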
