none
How do I get face positions in C++? RRS feed

  • Question

  • Hi! I'm trying to get face positions in C++.

    I saw the Face Tracking Library example on MSDN and made a face-tracking project in C++.

    It builds and runs, but it can't track a face... Could you give me some help?

     

    #include <opencv/cv.h>
    #include <opencv/highgui.h>
    #include <opencv/cxcore.h>
    //STL
    #include <stdio.h>
    #include <iostream>

    // window header
    #include <windows.h> 
    #include <NuiApi.h>// Microsoft Kinect SDK

    #include <FaceTrackLib.h>

    using namespace std;
    using namespace cv;

    #define COLOR_WIDTH 640
    #define COLOR_HEIGHT 480

    #define DEPTH_WIDTH 320
    #define DEPTH_HEIGHT 240

    CvPoint points[NUI_SKELETON_POSITION_COUNT];
    RGBQUAD rgb[320*240];

    vector<unsigned char> colorCameraFrameBuffer;
    vector<unsigned char> depthCameraFrameBuffer;
    FT_VECTOR3D m_hint3D[2];

    // Initialize the Kinect runtime with color, depth+player-index and
    // skeleton streams enabled. Keeps retrying until NuiInitialize succeeds.
    void InitializeKinect()
    {
        bool failedToConnect = false;

        do {
            HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR |
                                       NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX |
                                       NUI_INITIALIZE_FLAG_USES_SKELETON);

            // BUG FIX: the original set FailToConnect = false on failure, so
            // the do/while loop exited after a single failed attempt and the
            // program continued without a connected sensor.
            failedToConnect = FAILED(hr);
            if (failedToConnect)
            {
                system("cls");
                cout<<"\nFailed to Connect!\n\n";
            }
        } while (failedToConnect);
    } // Kinect connection / initialization

    // Convert one 16-bit Kinect depth sample to a grayscale RGBQUAD for
    // display (farther = brighter).
    RGBQUAD Nui_ShortToQuad_Depth(USHORT s)
    {
        // The low 3 bits of a depth+player-index sample hold the player
        // index; shift them off to recover the real depth value.
        USHORT realDepth = (s & 0xfff8) >> 3;

        // Scale to 0..255. realDepth can reach 0x1fff (13 bits), so clamp:
        // the original cast to BYTE without clamping and wrapped around.
        unsigned int gray = (unsigned int)realDepth * 255 / 0xfff;
        if (gray > 255) gray = 255;

        // Note: the original computed I = 255 - gray and then stored ~I,
        // which is just gray again; write it directly.
        RGBQUAD q;
        q.rgbRed = q.rgbGreen = q.rgbBlue = (BYTE)gray;
        q.rgbReserved = 0; // was left uninitialized before
        return q;
    }

    // Fetch the next color frame from the Kinect color stream, copy its
    // pixels into `buffer` (the buffer attached to the face tracker's color
    // image) and display the frame. Returns 0 on success, -1 on failure.
    int createRGBImage(HANDLE h, IplImage* Color, vector<unsigned char>& buffer)
    {
        const NUI_IMAGE_FRAME* pImageFrame = NULL; // holds one NUI frame

        // Wait up to 1000 ms for the next color frame.
        HRESULT hr = NuiImageStreamGetNextFrame(h, 1000, &pImageFrame);
        if (FAILED(hr))
        {
            cout<<"Create RGB Image Failed"; // fixed typo: was "FGB"
            return -1;
        }

        // Lock the frame's texture so its pixel buffer can be read.
        INuiFrameTexture* pTexture = pImageFrame->pFrameTexture;
        NUI_LOCKED_RECT LockedRect;
        pTexture->LockRect(0, &LockedRect, NULL, 0);

        // Resize the shared buffer if the frame size changed.
        if (LockedRect.size != buffer.size()) {
            buffer.resize(LockedRect.size);
        }

        if (LockedRect.Pitch != 0) {
            // Copy the pixels into the buffer the face tracker reads from.
            copy(LockedRect.pBits, LockedRect.pBits + LockedRect.size, buffer.begin());
            BYTE* pBuffer = (BYTE*)LockedRect.pBits; // RGB pixel buffer

            cvSetData(Color, pBuffer, LockedRect.Pitch);
            // BUG FIX: window name must match cvNamedWindow("ColorImage") in
            // main(); "Color Image" silently opened a second window.
            cvShowImage("ColorImage", Color);
        }

        pTexture->UnlockRect(0); // balance LockRect (was missing)
        NuiImageStreamReleaseFrame(h, pImageFrame); // release the frame
        return 0;
    }

    // Fetch the next depth frame, copy the raw 16-bit depth samples into
    // `buffer` (the buffer attached to the face tracker's depth image) and
    // display a grayscale visualization. Returns 0 on success, -1 on failure.
    int createDepthImage(HANDLE h, IplImage* Depth, vector<unsigned char>& buffer)
    {
        const NUI_IMAGE_FRAME* pImageFrame = NULL; // holds one NUI frame

        // Wait up to 1000 ms for the next depth frame.
        HRESULT hr = NuiImageStreamGetNextFrame(h, 1000, &pImageFrame);
        if (FAILED(hr)) {
            cout<<"Creating DepthImage is Failed"<<endl;
            return -1;
        }

        // Lock the frame's texture so its pixel buffer can be read.
        INuiFrameTexture* pTexture = pImageFrame->pFrameTexture;
        NUI_LOCKED_RECT LockedRect;
        pTexture->LockRect(0, &LockedRect, NULL, 0);

        // Resize the shared buffer if the frame size changed.
        if (LockedRect.size != buffer.size()) {
            buffer.resize(LockedRect.size);
        }

        if (LockedRect.Pitch != 0) {
            // Raw 16-bit depth samples go to the face-tracker buffer as-is.
            copy(LockedRect.pBits, LockedRect.pBits + LockedRect.size, buffer.begin());

            // Convert each depth sample to a gray RGBQUAD for display.
            const USHORT* pBufferRun = (const USHORT*)LockedRect.pBits;
            RGBQUAD* rgbrun = rgb; // global 320x240 display buffer
            for (int y = 0; y < DEPTH_HEIGHT; y++)
            {
                for (int x = 0; x < DEPTH_WIDTH; x++)
                {
                    *rgbrun++ = Nui_ShortToQuad_Depth(*pBufferRun++);
                }
            }

            cvSetData(Depth, (BYTE*)rgb, Depth->widthStep);
            cvShowImage("DepthImage", Depth);
        }

        pTexture->UnlockRect(0); // balance LockRect (was missing)
        NuiImageStreamReleaseFrame(h, pImageFrame);
        return 0;
    }

    void drawSkeleton(const NUI_SKELETON_DATA &position, IplImage* Skeleton)
    {
    for(int i=0; i<NUI_SKELETON_POSITION_COUNT; i++)
    {
    points[i] = SkeletonToScreen(position.SkeletonPositions[i]);
    }

    drawBone(position, NUI_SKELETON_POSITION_SHOULDER_RIGHT, NUI_SKELETON_POSITION_ELBOW_RIGHT, Skeleton);
    drawBone(position, NUI_SKELETON_POSITION_ELBOW_RIGHT, NUI_SKELETON_POSITION_WRIST_RIGHT, Skeleton);
    drawBone(position, NUI_SKELETON_POSITION_WRIST_RIGHT, NUI_SKELETON_POSITION_HAND_RIGHT, Skeleton);

    drawBone(position, NUI_SKELETON_POSITION_SHOULDER_LEFT, NUI_SKELETON_POSITION_ELBOW_LEFT, Skeleton);
    drawBone(position, NUI_SKELETON_POSITION_ELBOW_LEFT, NUI_SKELETON_POSITION_WRIST_LEFT, Skeleton);
    drawBone(position, NUI_SKELETON_POSITION_WRIST_LEFT, NUI_SKELETON_POSITION_HAND_LEFT, Skeleton);
    }

    // Grab the next skeleton frame and draw every fully tracked skeleton
    // onto the (cleared) Skeleton canvas.
    void createSkeleton(IplImage* Skeleton)
    {
        NUI_SKELETON_FRAME skeletonFrame = {0};

        // Clear the canvas in place. The original allocated a brand-new
        // image (whose pixels cvCreateImage leaves UNINITIALIZED) and
        // cvCopy'd it over, and also leaked that image on the early-return
        // path below.
        cvZero(Skeleton);

        HRESULT hr = NuiSkeletonGetNextFrame(0, &skeletonFrame);
        if (FAILED(hr))
        {
            return; // no skeleton frame available this tick
        }

        // Smooth joint positions with the SDK's default filter parameters.
        NuiTransformSmooth(&skeletonFrame, NULL);

        for (int i = 0; i < NUI_SKELETON_COUNT; i++)
        {
            if (skeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED)
            {
                drawSkeleton(skeletonFrame.SkeletonData[i], Skeleton);
            }
        }
        // Show once per frame (the original re-displayed inside the loop).
        cvShowImage("Skeleton Image", Skeleton);
    }

    // Map a skeleton-space point to pixel coordinates in a 640x480 image
    // using the SDK's skeleton-to-depth-image transform.
    CvPoint SkeletonToScreen(Vector4 skeletonPoint)
    {
        LONG screenX = 0;
        LONG screenY = 0;
        USHORT depthValue = 0;
        NuiTransformSkeletonToDepthImage(skeletonPoint, &screenX, &screenY,
                                         &depthValue, NUI_IMAGE_RESOLUTION_640x480);

        // The depth value is not needed here; only the 2D position is used.
        return cvPoint((int)screenX, (int)screenY);
    }

    // Draw a red line between joints j1 and j2, but only when BOTH joints
    // are fully tracked.
    void drawBone(const NUI_SKELETON_DATA &position, NUI_SKELETON_POSITION_INDEX j1, NUI_SKELETON_POSITION_INDEX j2, IplImage* Skeleton)
    {
        NUI_SKELETON_POSITION_TRACKING_STATE j1state = position.eSkeletonPositionTrackingState[j1];
        // BUG FIX: the original indexed with the literal 2 instead of j2,
        // so the second joint's tracking state was always read from joint
        // index 2 rather than the joint actually being drawn.
        NUI_SKELETON_POSITION_TRACKING_STATE j2state = position.eSkeletonPositionTrackingState[j2];

        if (j1state == NUI_SKELETON_POSITION_TRACKED && j2state == NUI_SKELETON_POSITION_TRACKED)
        {
            cvLine(Skeleton, points[j1], points[j2], CV_RGB(255, 0, 0), 3, 8, 0);
        }
    }




    #include <tchar.h>

    // Initialize the Kinect runtime (color, depth+player-index, skeleton).
    // Retries until NuiInitialize succeeds.
    void InitializeKinect()
    {
        bool failedToConnect = false;

        do {
            HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR |
                                       NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX |
                                       NUI_INITIALIZE_FLAG_USES_SKELETON);

            // BUG FIX: the original assigned false on failure, so the loop
            // never actually retried a failed connection.
            failedToConnect = FAILED(hr);
            if (failedToConnect)
            {
                system("cls");
                cout<<"\nFailed to Connect!\n\n";
            }
        } while (failedToConnect);
    }

    // Convert one 16-bit Kinect depth sample into a grayscale RGBQUAD
    // (farther = brighter).
    RGBQUAD Nui_ShortToQuad_Depth(USHORT s)
    {
        // Drop the 3 low player-index bits to recover the real depth.
        USHORT realDepth = (s & 0xfff8) >> 3;

        // Scale to 0..255 with clamping: realDepth can reach 0x1fff, and the
        // original's unclamped BYTE cast wrapped around for far pixels.
        unsigned int gray = (unsigned int)realDepth * 255 / 0xfff;
        if (gray > 255) gray = 255;

        RGBQUAD q;
        // (255 - x) followed by ~ in the original was a double inversion;
        // store the scaled value directly.
        q.rgbRed = q.rgbGreen = q.rgbBlue = (BYTE)gray;
        q.rgbReserved = 0; // previously uninitialized
        return q;
    }

    // Fetch the next color frame, copy its pixels into `buffer` (attached
    // to the face tracker's color image) and display it.
    // Returns 0 on success, -1 on failure.
    int createRGBImage(HANDLE h, IplImage* Color, vector<unsigned char>& buffer)
    {
        const NUI_IMAGE_FRAME* pImageFrame = NULL;

        // Wait up to 1000 ms for the next color frame.
        HRESULT hr = NuiImageStreamGetNextFrame(h, 1000, &pImageFrame);
        if (FAILED(hr))
        {
            cout<<"Create RGB Image Failed"; // fixed typo: was "FGB"
            return -1;
        }

        INuiFrameTexture* pTexture = pImageFrame->pFrameTexture;
        NUI_LOCKED_RECT LockedRect;
        pTexture->LockRect(0, &LockedRect, NULL, 0);

        // Keep the shared buffer sized to the incoming frame.
        if (LockedRect.size != buffer.size()) {
            buffer.resize(LockedRect.size);
        }

        if (LockedRect.Pitch != 0) {
            copy(LockedRect.pBits, LockedRect.pBits + LockedRect.size, buffer.begin());
            BYTE* pBuffer = (BYTE*)LockedRect.pBits;

            cvSetData(Color, pBuffer, LockedRect.Pitch);
            // BUG FIX: match cvNamedWindow("ColorImage") in main(); the old
            // name "Color Image" opened a separate, unnamed-window duplicate.
            cvShowImage("ColorImage", Color);
        }

        pTexture->UnlockRect(0); // balance LockRect (was missing)
        NuiImageStreamReleaseFrame(h, pImageFrame);
        return 0;
    }

    // Fetch the next depth frame, copy the raw 16-bit samples into `buffer`
    // (attached to the face tracker's depth image) and display a grayscale
    // visualization. Returns 0 on success, -1 on failure.
    int createDepthImage(HANDLE h, IplImage* Depth, vector<unsigned char>& buffer)
    {
        const NUI_IMAGE_FRAME* pImageFrame = NULL;

        // Wait up to 1000 ms for the next depth frame.
        HRESULT hr = NuiImageStreamGetNextFrame(h, 1000, &pImageFrame);
        if (FAILED(hr)) {
            cout<<"Creating DepthImage is Failed"<<endl;
            return -1;
        }

        INuiFrameTexture* pTexture = pImageFrame->pFrameTexture;
        NUI_LOCKED_RECT LockedRect;
        pTexture->LockRect(0, &LockedRect, NULL, 0);

        // Keep the shared buffer sized to the incoming frame.
        if (LockedRect.size != buffer.size()) {
            buffer.resize(LockedRect.size);
        }

        if (LockedRect.Pitch != 0) {
            // The face tracker consumes the raw 13.3 fixed-point samples.
            copy(LockedRect.pBits, LockedRect.pBits + LockedRect.size, buffer.begin());

            // Build the grayscale preview in the global rgb[] buffer.
            const USHORT* sampleRun = (const USHORT*)LockedRect.pBits;
            RGBQUAD* pixelRun = rgb;
            for (int y = 0; y < DEPTH_HEIGHT; y++)
            {
                for (int x = 0; x < DEPTH_WIDTH; x++)
                {
                    *pixelRun++ = Nui_ShortToQuad_Depth(*sampleRun++);
                }
            }

            cvSetData(Depth, (BYTE*)rgb, Depth->widthStep);
            cvShowImage("DepthImage", Depth);
        }

        pTexture->UnlockRect(0); // balance LockRect (was missing)
        NuiImageStreamReleaseFrame(h, pImageFrame);
        return 0;
    }

    void drawSkeleton(const NUI_SKELETON_DATA &position, IplImage* Skeleton)
    {
    for(int i=0; i<NUI_SKELETON_POSITION_COUNT; i++)
    {
    points[i] = SkeletonToScreen(position.SkeletonPositions[i]);
    }

    drawBone(position, NUI_SKELETON_POSITION_SHOULDER_RIGHT, NUI_SKELETON_POSITION_ELBOW_RIGHT, Skeleton);
    drawBone(position, NUI_SKELETON_POSITION_ELBOW_RIGHT, NUI_SKELETON_POSITION_WRIST_RIGHT, Skeleton);
    drawBone(position, NUI_SKELETON_POSITION_WRIST_RIGHT, NUI_SKELETON_POSITION_HAND_RIGHT, Skeleton);

    drawBone(position, NUI_SKELETON_POSITION_SHOULDER_LEFT, NUI_SKELETON_POSITION_ELBOW_LEFT, Skeleton);
    drawBone(position, NUI_SKELETON_POSITION_ELBOW_LEFT, NUI_SKELETON_POSITION_WRIST_LEFT, Skeleton);
    drawBone(position, NUI_SKELETON_POSITION_WRIST_LEFT, NUI_SKELETON_POSITION_HAND_LEFT, Skeleton);
    }

    // Grab the next skeleton frame and draw every tracked skeleton onto the
    // cleared Skeleton canvas.
    void createSkeleton(IplImage* Skeleton)
    {
        NUI_SKELETON_FRAME skeletonFrame = {0};

        // Clear in place instead of allocating a throwaway image: the
        // original's cvCreateImage pixels were uninitialized and the image
        // leaked whenever NuiSkeletonGetNextFrame failed (early return).
        cvZero(Skeleton);

        HRESULT hr = NuiSkeletonGetNextFrame(0, &skeletonFrame);
        if (FAILED(hr))
        {
            return; // no skeleton data available this tick
        }

        // Apply the SDK's default smoothing to the joint positions.
        NuiTransformSmooth(&skeletonFrame, NULL);

        for (int i = 0; i < NUI_SKELETON_COUNT; i++)
        {
            if (skeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED)
            {
                drawSkeleton(skeletonFrame.SkeletonData[i], Skeleton);
            }
        }
        // Display once per frame (previously re-shown inside the loop).
        cvShowImage("Skeleton Image", Skeleton);
    }

    // Translate a skeleton-space point into 640x480 pixel coordinates.
    CvPoint SkeletonToScreen(Vector4 skeletonPoint)
    {
        LONG px = 0;
        LONG py = 0;
        USHORT unusedDepth = 0;
        NuiTransformSkeletonToDepthImage(skeletonPoint, &px, &py,
                                         &unusedDepth, NUI_IMAGE_RESOLUTION_640x480);

        // Only the 2D coordinates are needed; the depth output is discarded.
        return cvPoint((int)px, (int)py);
    }

    // Draw a red line between joints j1 and j2 when both are tracked.
    void drawBone(const NUI_SKELETON_DATA &position, NUI_SKELETON_POSITION_INDEX j1, NUI_SKELETON_POSITION_INDEX j2, IplImage* Skeleton)
    {
        NUI_SKELETON_POSITION_TRACKING_STATE j1state = position.eSkeletonPositionTrackingState[j1];
        // BUG FIX: the literal index 2 is replaced with j2 — the original
        // always checked joint 2's state instead of the bone's end joint.
        NUI_SKELETON_POSITION_TRACKING_STATE j2state = position.eSkeletonPositionTrackingState[j2];

        if (j1state == NUI_SKELETON_POSITION_TRACKED && j2state == NUI_SKELETON_POSITION_TRACKED)
        {
            cvLine(Skeleton, points[j1], points[j2], CV_RGB(255, 0, 0), 3, 8, 0);
        }
    }


    int main()
    {
    InitializeKinect();

    HANDLE colorStreamHandle;

    HANDLE depthStreamHandle;

    HANDLE nextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    HANDLE nextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    HANDLE nextSkeletonFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    HRESULT hr, hr_color, hr_depth;

    IplImage* Color = cvCreateImage(cvSize(COLOR_WIDTH, COLOR_HEIGHT), IPL_DEPTH_8U, 4);
    IplImage* Depth = cvCreateImage(cvSize(DEPTH_WIDTH, DEPTH_HEIGHT), IPL_DEPTH_8U, 4);
    IplImage* Skeleton = cvCreateImage(cvSize(COLOR_WIDTH, COLOR_HEIGHT), IPL_DEPTH_8U, 4);

    cvNamedWindow("ColorImage", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("DepthImage", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("Skeleton Image", CV_WINDOW_AUTOSIZE);


    hr_color = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, nextColorFrameEvent, &colorStreamHandle);
    hr_depth = NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_320x240, 0, 2, nextDepthFrameEvent, &depthStreamHandle);
    if(FAILED(hr_color && hr_depth)) {
    cout<<"Could not open ImageStream"<<endl;
    return 0;
    }

    m_hint3D[0] = FT_VECTOR3D(0, 0, 0);
            m_hint3D[1] = FT_VECTOR3D(0, 0, 0);

    //FaceTrack
    // Create an instance of face tracker
    IFTFaceTracker* pFT = FTCreateFaceTracker();
    if(!pFT) {
    return 0; // Handle errors
    }

    FT_CAMERA_CONFIG myColorCameraConfig = {COLOR_WIDTH, COLOR_HEIGHT, NUI_CAMERA_COLOR_NOMINAL_FOCAL_LENGTH_IN_PIXELS};
    FT_CAMERA_CONFIG myDepthCameraConfig = {DEPTH_WIDTH, DEPTH_HEIGHT, NUI_CAMERA_DEPTH_NOMINAL_FOCAL_LENGTH_IN_PIXELS};
    //FT_CAMERA_CONFIG* pmyDepthCameraConfig = NULL;


    //FaceTracking
    HRESULT hr_FT;
    hr_FT = pFT->Initialize(&myColorCameraConfig, &myDepthCameraConfig, NULL, NULL);
    if( FAILED(hr_FT) ) {
    return 0;// Handle errors
    }

    // Create IFTResult to hold a face tracking result
    IFTResult* pFTResult = NULL;
    hr_FT = pFT->CreateFTResult(&pFTResult);
    if(FAILED(hr_FT))
    {
    return 0;// Handle errors
    }

    // prepare Image and SensorData for 640x480 RGB images
    IFTImage* pColorFrame = FTCreateImage();
    if(!pColorFrame) {
    return 0;// Handle errors
    }

    IFTImage* pDepthFrame = FTCreateImage();
    if(!pDepthFrame) {
    return 0;// Handle errors
    }
    ///////////////////////////////////////////////////////     


    colorCameraFrameBuffer.resize( COLOR_WIDTH * 4 * COLOR_HEIGHT );
    depthCameraFrameBuffer.resize( DEPTH_WIDTH * 2 * DEPTH_HEIGHT );

    //FaceTracking
    // Attach assumes that the camera code provided by the application
    // is filling the buffer cameraFrameBuffer

    pColorFrame->Attach(COLOR_WIDTH, COLOR_HEIGHT, &colorCameraFrameBuffer[0], FTIMAGEFORMAT_UINT8_B8G8R8X8, COLOR_WIDTH*4);
    pDepthFrame->Attach(DEPTH_WIDTH, DEPTH_HEIGHT, &depthCameraFrameBuffer[0], FTIMAGEFORMAT_UINT16_D13P3, DEPTH_WIDTH*2);


    FT_SENSOR_DATA sensorData;
    sensorData.pVideoFrame = pColorFrame;
    sensorData.pDepthFrame = pDepthFrame;
    sensorData.ZoomFactor = 1.0f;
    sensorData.ViewOffset = POINT();

    bool isTracked = false;
    ///////////////////////////////////////////////////////////

    hr = NuiSkeletonTrackingEnable(nextSkeletonFrameEvent, 0);
    if(FAILED(hr))
    {
    cout<<"Could not open SkeletonStream"<<endl;
    return hr;
    }

    while(1)
    {
    WaitForSingleObject(nextColorFrameEvent, 1000); 

    createRGBImage(colorStreamHandle, Color, colorCameraFrameBuffer);

    WaitForSingleObject(nextDepthFrameEvent, 1000);
    createDepthImage(depthStreamHandle, Depth, depthCameraFrameBuffer);

    WaitForSingleObject(nextSkeletonFrameEvent, 0);
    createSkeleton(Skeleton);

    //FaceTracking 
    // Check if we are already tracking a face
    if(!isTracked)
    {
    // Initiate face tracking. This call is more expensive and
    // searches the input image for a face.
    hr = pFT->StartTracking(&sensorData, NULL, m_hint3D, pFTResult);
    cout<<m_hint3D[0].x<<endl;
    if(SUCCEEDED(hr) && SUCCEEDED(pFTResult->GetStatus()))
    {
    cout<<"SUCCEEDED Tracking"<<endl;
    isTracked = true;
    }
    else
    {
    // Handle errors
    isTracked = false;
    }
    }
    else
    {
    // Continue tracking. It uses a previously known face position,
    // so it is an inexpensive call.
    hr = pFT->ContinueTracking(&sensorData, m_hint3D, pFTResult);

    // Do something with pFTResult.
    FLOAT scale;
    FLOAT rotationXYZ[3];
    FLOAT translationXYZ[3];
    pFTResult->Get3DPose(&scale, rotationXYZ, translationXYZ);
    cout<<rotationXYZ[0]<<" "<<rotationXYZ[1]<<" "<<rotationXYZ[2]<<endl;
    // Terminate on some criteria.

    if(FAILED(hr) || FAILED (pFTResult->GetStatus()))
    {
    isTracked = false;
    }
    }



    if(cvWaitKey(10) == 0x001b)
    {
    break;
    }
    }


    // Clean up.
    pDepthFrame->Release();
    pColorFrame->Release();
    pFTResult->Release();
    pFT->Release();

    cvReleaseImageHeader(&Depth);
    cvReleaseImageHeader(&Color);

    cvDestroyAllWindows();


    return 0;
    }



    • Edited by SMU_MS Monday, June 9, 2014 8:37 AM
    Monday, June 9, 2014 8:13 AM

All replies