The difference between NUI_HANDPOINTER_INFO / RawX and NUI_HANDPOINTER_INFO / X

  • Question

  • I ran my code and compared the values of NUI_HANDPOINTER_INFO / RawX and NUI_HANDPOINTER_INFO / X.

    The result I got is that RawX = X.

    So please tell me the difference between the two.
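
    For reference, the check boils down to the short loop below: a trimmed sketch of what processInteraction() in the full code does, where the field names come from NUI_HANDPOINTER_INFO in KinectInteraction.h and index 1 is treated as the right hand, as in the full code.

    // Trimmed sketch: grab one interaction frame and print RawX and X
    // for the right hand of every tracked user. Assumes sensor and
    // interactionStream have already been created and enabled, exactly
    // as in the full program below.
    NUI_INTERACTION_FRAME frame = { 0 };
    if( SUCCEEDED( interactionStream->GetNextFrame( 0, &frame ) ) ){
      for( int user = 0; user < NUI_SKELETON_COUNT; ++user ){
        const NUI_USER_INFO& userInfo = frame.UserInfos[user];
        if( userInfo.SkeletonTrackingId > 0 ){
          const NUI_HANDPOINTER_INFO& hand = userInfo.HandPointerInfos[1];
          printf( "RawX = %f, X = %f\n", hand.RawX, hand.X );
        }
      }
    }

    The full program is below.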

    #include <iostream>
    #include <Windows.h>
    #include <NuiApi.h>
    #include <KinectInteraction.h>
    #include <opencv2/opencv.hpp>
    #include <FaceTrackLib.h>
    #include <stdlib.h>

    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_core231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_highgui231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_calib3d231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_contrib231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_features2d231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_flann231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_gpu231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_haartraining_engined.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_imgproc231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_legacy231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_ml231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_objdetect231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_ts231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\opencv_video231d.lib")
    #pragma comment(lib,"C:\\opencv\\build\\x86\\vc10\\lib\\Kinect10.lib")
    // #pragma comment(lib,"C:\\FaceTrackLib.lib")
    #pragma comment(lib,"C:\\KinectInteraction170_32.lib")

    void drawJoint(IplImage *image, Vector4 position);

    bool doGripRelease = false;
    bool doGrip        = false;

    double m_scale = 1.0;

    INuiSensor* sensor;
    INuiInteractionStream* interactionStream;

    IplImage *m_Image;
    IplImage *m_scale_img;
    class interactionClient : public INuiInteractionClient
    {
        public:
            interactionClient(){}

            ~interactionClient(){}

            ULONG STDMETHODCALLTYPE AddRef(){
                return 2;
            }

            ULONG STDMETHODCALLTYPE Release(){
                return 1;
            }

            HRESULT STDMETHODCALLTYPE QueryInterface( REFIID riid, void **ppv ){
                // Minimal stub: always hand back this object without checking riid.
                *ppv = this;
                return S_OK;
            }

            HRESULT STDMETHODCALLTYPE GetInteractionInfoAtLocation( DWORD skeletonTrackingId, NUI_HAND_TYPE handType, FLOAT x, FLOAT y, _Out_ NUI_INTERACTION_INFO *pInteractionInfo ){
                if( pInteractionInfo ){
                    if( handType == NUI_HAND_TYPE_RIGHT ){
                        pInteractionInfo->IsGripTarget = true;
                        pInteractionInfo->IsPressTarget = true;
                        pInteractionInfo->PressTargetControlId = 1;
                    }
                    else if( handType == NUI_HAND_TYPE_LEFT ){
                        pInteractionInfo->IsGripTarget = true;
                        pInteractionInfo->IsPressTarget = true;
                        pInteractionInfo->PressTargetControlId = 2;
                    }    
                    else if( handType == NUI_HAND_TYPE_NONE ){
                        pInteractionInfo->PressTargetControlId = 0;
                    }    

                    pInteractionInfo->PressAttractionPointX = 0.f;
                    pInteractionInfo->PressAttractionPointY = 0.f;

                    return S_OK;
                }

                return E_POINTER;
            }
    };

    int processColor(HANDLE h)
    {
      NUI_IMAGE_FRAME colorImageFrame = { 0 };
      INuiFrameTexture* colorFrameTexture;
      NUI_LOCKED_RECT colorLockedRect;      

      HRESULT hr = S_OK;

      hr = sensor->NuiImageStreamGetNextFrame( h, 0, &colorImageFrame );
      if( FAILED( hr ) ){
        std::cerr << "Error : INuiSensor::NuiImageStreamGetNextFrame( Color )" << std::endl;
        return -1;
      }
      colorFrameTexture = colorImageFrame.pFrameTexture;
      hr = colorFrameTexture->LockRect( 0, &colorLockedRect, nullptr, 0 );
      if( FAILED( hr ) ){
        std::cerr << "Error : INuiFrameTexture::LockRect( Color )" << std::endl;
        return -1;
      }
      // Copy the BGRA color frame into the IplImage buffer.
      memcpy(m_Image->imageData, colorLockedRect.pBits, colorLockedRect.size);

      // cvShowImage("color_image0", m_Image);
      // cvReleaseImage(&image);

      if( cv::waitKey( 30 ) == VK_ESCAPE ){
        exit(1);
      }  
      colorFrameTexture->UnlockRect( 0 );
      sensor->NuiImageStreamReleaseFrame( h, &colorImageFrame );

      return 0;
    }

    int processSkeleton()
    {
      HRESULT hr;

      // Retrieve Skeleton
      NUI_SKELETON_FRAME skeletonFrame = { 0 };
      hr = sensor->NuiSkeletonGetNextFrame( 0, &skeletonFrame );
      if( FAILED( hr ) ){
        std::cout << "Error : INuiSensor::NuiSkeletonGetNextFrame" << std::endl;
        return -1;
      }
      for(int i = 0; i < NUI_SKELETON_COUNT; i++){
        NUI_SKELETON_DATA& skeletonData = skeletonFrame.SkeletonData[i];
        if(skeletonData.eTrackingState == NUI_SKELETON_TRACKED){
          for(int j = 0; j < NUI_SKELETON_POSITION_COUNT; ++j){
            if(skeletonData.eSkeletonPositionTrackingState[j] != NUI_SKELETON_POSITION_NOT_TRACKED){
              // if(j == 11) printf("skeleton_x = %lf\n", skeletonData.SkeletonPositions[j].x);
              drawJoint(m_Image, skeletonData.SkeletonPositions[j]);
            }
          }
        }
        else if(skeletonData.eTrackingState == NUI_SKELETON_POSITION_ONLY){
          drawJoint(m_Image, skeletonData.Position);
        }
      }
      // Retrieve Interaction
      Vector4 gravity = { 0 };
      sensor->NuiAccelerometerGetCurrentReading( &gravity );
      hr = interactionStream->ProcessSkeleton( NUI_SKELETON_COUNT, skeletonFrame.SkeletonData, &gravity, skeletonFrame.liTimeStamp );
      cvShowImage("color_image0", m_Image);    
      if( FAILED( hr ) ){
        std::cout << "Error : INuiInteractionStream::ProcessSkeleton" << std::endl;
        return -1;
      }

      return 0;
    }

    void drawJoint(IplImage *image, Vector4 position)
    {
      FLOAT depthX = 0.0, depthY = 0.0;
      ::NuiTransformSkeletonToDepthImage(position, &depthX, &depthY, NUI_IMAGE_RESOLUTION_640x480);
      LONG colorX = 0;
      LONG colorY = 0;
      sensor->NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution(NUI_IMAGE_RESOLUTION_640x480, NUI_IMAGE_RESOLUTION_640x480, 0, (LONG)depthX, (LONG)depthY, 0, &colorX, &colorY);
      cvCircle(image, cvPoint(colorX, colorY), 5, CV_RGB(0, 255, 0));
    }

    int processInteraction()
    {
      HRESULT hr;
      double scale = 0.0;
      NUI_INTERACTION_FRAME interactionFrame = { 0 };
      hr = interactionStream->GetNextFrame( 0, &interactionFrame );
      FLOAT right_press_extent;
      if( SUCCEEDED( hr ) ){

        for( int user = 0; user < NUI_SKELETON_COUNT; user++ ){
          NUI_USER_INFO userInfo = interactionFrame.UserInfos[user];
          if(userInfo.SkeletonTrackingId > 0){
            NUI_HAND_EVENT_TYPE left_event_type  = userInfo.HandPointerInfos[0].HandEventType;
            NUI_HAND_EVENT_TYPE right_event_type = userInfo.HandPointerInfos[1].HandEventType;
            // FLOAT X = userInfo.HandPointerInfos[1].X;
            DWORD right_state = userInfo.HandPointerInfos[1].State;

            switch(right_event_type){
            case NUI_HAND_EVENT_TYPE_GRIP:
              doGrip = true;
              doGripRelease = false;
              std::cout << "Grip" << std::endl;
              break;
            case NUI_HAND_EVENT_TYPE_GRIPRELEASE:
              doGrip = false;
              doGripRelease = true;
              std::cout << "GripRelease" << std::endl;
              break;
            case NUI_HAND_EVENT_TYPE_NONE:
              break;
            default:
              break;
            }
            if(doGrip){
              // Print both fields of the right hand pointer so RawX and X can be compared directly.
              printf("RawX = %f, X = %f\n", userInfo.HandPointerInfos[1].RawX, userInfo.HandPointerInfos[1].X);
            }
          }
        }
      }
      else{
        if( hr == E_POINTER ){
          std::cout << "Error : INuiInteractionStream::GetNextFrame( E_POINTER )" << std::endl;
        }
        else if( hr == E_NUI_FRAME_NO_DATA ){
          std::cout << "Error : INuiInteractionStream::GetNextFrame( E_NUI_FRAME_NO_DATA )" << std::endl;
        }
      }

      // printf("scale = %d\n", scale);

      return 0;
    }


    int processDepth(HANDLE h)
    {
      INuiFrameTexture* depthFrameTexture = 0;
      NUI_IMAGE_FRAME depthImageFrame = { 0 };
      // printf("pass_depth_process\n");
      // Retrieve Depth
      HRESULT hr = S_OK;
      hr = sensor->NuiImageStreamGetNextFrame( h, 0, &depthImageFrame );
      if( FAILED( hr ) ){
        std::cerr << "Error : INuiSensor::NuiImageStreamGetNextFrame( Depth )" << std::endl;
        return -1;
      }
      BOOL nearMode = TRUE;
      sensor->NuiImageFrameGetDepthImagePixelFrameTexture( h, &depthImageFrame, &nearMode, &depthFrameTexture );
      NUI_LOCKED_RECT depthLockedRect;
      hr = depthFrameTexture->LockRect( 0, &depthLockedRect, nullptr, 0 );
      // m_IplImage->SetBufferSize(depthLockedRect.size);

      CvSize cv_size;
      cv_size.width  = 1;
      cv_size.height = depthLockedRect.size;

      unsigned char *p_data;
      if(depthLockedRect.Pitch){
        // p_data = (unsigned char*)malloc(depthLockedRect.size);
        // memcpy(p_data, PBYTE(depthLockedRect.pBits), depthLockedRect.size);
      }

      if( FAILED( hr ) ){
        std::cerr << "Error : INuiFrameTexture::LockRect( Depth )" << std::endl;
        return -1;
      }

      hr = interactionStream->ProcessDepth(depthLockedRect.size, PBYTE(depthLockedRect.pBits), depthImageFrame.liTimeStamp);
      // hr = interactionStream->ProcessDepth( depthLockedRect.size, p_data, depthImageFrame.liTimeStamp );
      // free(p_data);
      if( FAILED( hr ) ){
        std::cout << "Error : INuiInteractionStream::ProcessDepth" << std::endl;
        return -1;
      }
      depthFrameTexture->UnlockRect( 0 );
      sensor->NuiImageStreamReleaseFrame( h, &depthImageFrame );

      return 0;
    }


    int main(int argc, char* argv[])
    {
        // Create Sensor

      HRESULT hr = S_OK;

        // m_IplImage = FTCreateImage();    

      CvSize hoge_size;
      hoge_size.width  = 640;
      hoge_size.height = 480;

      m_Image = cvCreateImage(hoge_size, IPL_DEPTH_8U, 4);  

        hr = NuiCreateSensorByIndex( 0, &sensor );
        if( FAILED( hr ) ){
            std::cerr << "Error : NuiCreateSensorByIndex" << std::endl;
            return -1;
        }  

        // Sensor Initialize
        hr = sensor->NuiInitialize( NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX |NUI_INITIALIZE_FLAG_USES_SKELETON );
        if( FAILED( hr ) ){
            std::cerr << "Error : INuiSensor::NuiInitialize" << std::endl;
            return -1;
        }

        // Color Stream Open
        HANDLE colorHandle = INVALID_HANDLE_VALUE;
        HANDLE colorEvent = CreateEvent( nullptr, true, false, nullptr );
        hr = sensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, colorEvent, &colorHandle );
        if( FAILED( hr ) ){
            std::cerr << "Error : INuiSensor::NuiImageStreamOpen( COLOR )" << std::endl;
            return -1;
        }

        // Depth Stream Open
        HANDLE depthHandle = INVALID_HANDLE_VALUE;
        HANDLE depthEvent = CreateEvent( nullptr, true, false, nullptr );
        hr = sensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_640x480, 0, 2, depthEvent, &depthHandle );
        if( FAILED( hr ) ){
            std::cerr << "Error : INuiSensor::NuiImageStreamOpen( DEPTH )" << std::endl;
            return -1;
        }

        // Enable Skeleton Tracking 
        HANDLE skeletonEvent = CreateEvent( nullptr, true, false, nullptr );
        hr = sensor->NuiSkeletonTrackingEnable( skeletonEvent, 0 );
        if( FAILED( hr ) ){
            std::cerr << "Error : INuiSensor::NuiSkeletonTrackingEnable" << std::endl;
            return -1;
        }
        // Create Interaction Stream and Enable    

        interactionClient* client = new interactionClient();
        hr = NuiCreateInteractionStream( sensor, client, &interactionStream );
        if( FAILED( hr ) ){
            std::cerr << "Error : NuiCreateInteractionStream" << std::endl;
            return -1;
        }

        HANDLE interactionEvent = CreateEvent( nullptr, true, false, nullptr );
        hr = interactionStream->Enable(interactionEvent);
        if( FAILED( hr ) )
        {
          std::cout<<"Could not open Interation stream video"<< std::endl;
            return -1;
        }

        HANDLE events[4] = { colorEvent, depthEvent, skeletonEvent, interactionEvent};

        // Main Loop
        // Loop forever; the program exits via exit(1) when ESC is pressed in processColor().
        while( 1 ){
          // Wait Update
          // ResetEvent( colorEvent );
          // ResetEvent( depthEvent );
          // ResetEvent( skeletonEvent );
          // ResetEvent( interactionEvent );

          // Retrieve Color
          if(WAIT_OBJECT_0 == WaitForSingleObject(colorEvent, 0)){
            processColor(colorHandle);
          }
          if(WAIT_OBJECT_0 == WaitForSingleObject(depthEvent, 0)){
            processDepth(depthHandle);
          }
          if(WAIT_OBJECT_0 == WaitForSingleObject(skeletonEvent, 0)){
            processSkeleton();
          }
          if(WAIT_OBJECT_0 == WaitForSingleObject(interactionEvent, 0)){
            DWORD skeletonTrackingId;
            NUI_HAND_TYPE handType;
            FLOAT x, y;
            NUI_INTERACTION_INFO pInteractionInfo;
            // client->GetInteractionInfoAtLocation(skeletonTrackingId, &handType, &x, &y, &pInteractionInfo);
            processInteraction();
          }
        }

        // Shutdown
        sensor->NuiShutdown();
        sensor->NuiSkeletonTrackingDisable();
        interactionStream->Disable();

        CloseHandle( colorEvent );
        CloseHandle( depthEvent );
        CloseHandle( skeletonEvent );
        CloseHandle( interactionEvent );    
        CloseHandle( colorHandle );
        CloseHandle( depthHandle );

        cvReleaseImage(&m_Image);

        delete client;

        cv::destroyAllWindows();

        return 0;
    }




                                        
    Wednesday, October 9, 2013 10:49 AM