none
Beginner: Hand tracking issue+sensor.stop doesn't work RRS feed

  • Question

  • Hi,

    I watched Ray Chambers tutorials and I used one of his source code files that he posted in his blog. I'm trying to understand the code and try to modify it to learn. I realized that his code was written using the SDK beta. I modified the code to run in the SDK v1.5. I have two problems from my limited knowledge (and I may have more than those two):

    1- The sensor doesn't track my hand exactly. It tracks a point under my hand, which let me rise my hand as high as possible to reach the point.

    2- After terminating the application, 'sensor.stop();' doesn't work!!! The infrared of Kinect remains working.

    This is the code:

    using System;
    using System.Linq;
    using System.Windows;
    using System.Windows.Controls;
    using System.Windows.Media;
    using System.Media;
    using System.Windows.Shapes;
    using System.Windows.Media.Imaging;
    using Coding4Fun.Kinect.Wpf.Controls;
    using Microsoft.Kinect;
    using Microsoft.Kinect.Toolkit;
    using System.IO;
    using Coding4Fun.Kinect.WinForm;
    using Coding4Fun.Kinect.Wpf;
    using System.Windows.Threading;
    
    namespace KinectingTheDotsUserControl
    {
        public partial class MainWindow : Window
        {
    
         //   KinectSensor sensor;
    
            KinectSensor sensor = KinectSensor.KinectSensors[0];
            Skeleton[] skeletons;
            private static double _topBoundary;
            private static double _bottomBoundary;
            private static double _leftBoundary;
            private static double _rightBoundary;
            private static double _itemLeft;
            private static double _itemTop;
    
          public MainWindow()
            {
    
                //Runtime runtime = new Runtime();
    
                //Runtime runtime;
    
                
    
    
                InitializeComponent();
    
             //   Loaded += new RoutedEventHandler(MainWindow_Loaded);
           //     Unloaded += new RoutedEventHandler(Window_Closing_1);
    
                Song1.Click +=new RoutedEventHandler(Song1_Click);
                this.Loaded += new RoutedEventHandler(MainWindow_Loaded);
           //     runtime.VideoFrameReady += runtime_VideoFrameReady;
            //    runtime.SkeletonFrameReady += runtime_SkeletonFrameReady;
    
            }
    
    
            void Song1_Click(object sender, RoutedEventArgs e)
            {
    
                    SoundPlayer correct = new SoundPlayer("tada.wav");
                    correct.Play();
    
            }
          
           
            private static void CheckButton(HoverButton button, Ellipse thumbStick)
            {
    
    
                if (IsItemMidpointInContainer(button, thumbStick))
                {
                    button.Hovering();
                }
                else
                {
                    button.Release();
                }
            }
    
           
    
            public static bool IsItemMidpointInContainer(FrameworkElement container, FrameworkElement target)
            {
                FindValues(container, target);
    
                if (_itemTop < _topBoundary || _bottomBoundary < _itemTop)
                {
                    //Midpoint of target is outside of top or bottom
                    return false;
                }
    
                if (_itemLeft < _leftBoundary || _rightBoundary < _itemLeft)
                {
                    //Midpoint of target is outside of left or right
                    return false;
                }
    
                return true;
            }
    
    
            private static void FindValues(FrameworkElement container, FrameworkElement target)
            {
                var containerTopLeft = container.PointToScreen(new Point());
                var itemTopLeft = target.PointToScreen(new Point());
    
                _topBoundary = containerTopLeft.Y;
                _bottomBoundary = _topBoundary + container.ActualHeight;
                _leftBoundary = containerTopLeft.X;
                _rightBoundary = _leftBoundary + container.ActualWidth;
    
               // use midpoint of item (width or height divided by 2)
                _itemLeft = itemTopLeft.X + (target.ActualWidth / 2);
                _itemTop = itemTopLeft.Y + (target.ActualHeight / 2);
            }
    
    
            void sensor_SkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
            {
               using (var skeletonFrame = e.OpenSkeletonFrame())
               {
             //   SkeletonFrame skeletonSet = e.OpenSkeletonFrame();
             //   Skeleton[] skeletons;
                   if (skeletonFrame == null)
                        return;
                   if (skeletons == null ||
                        skeletons.Length != skeletonFrame.SkeletonArrayLength)
                    {
                        skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    }
                   skeletonFrame.CopySkeletonDataTo(skeletons);
                Skeleton closestSkeleton = (from s in skeletons
                                                where s.TrackingState == SkeletonTrackingState.Tracked &&
                                                      s.Joints[JointType.Head].TrackingState == JointTrackingState.Tracked
                                                select s).OrderBy(s => s.Joints[JointType.Head].Position.Z)
                                                        .FirstOrDefault();
    
                if (closestSkeleton == null)
                    return;
    
    
    
              //  SetEllipsePosition(ellipseHead, head, false);
              //  SetEllipsePosition(ellipseLeftHand, leftHand, isBackGestureActive);
                SetEllipsePosition(RightHand, closestSkeleton.Joints[JointType.HandRight]);
    
               // ProcessForwardBackGesture(head, rightHand, leftHand);
               }
    
    
    
                CheckButton(Song1, RightHand);
            }
    
    
    
            private void SetEllipsePosition(Ellipse ellipse, Joint joint)
            {
           //     Microsoft.Research.Kinect.Nui vector = new Microsoft.Research.Kinect.Nui.Vector();
                Microsoft.Kinect.SkeletonPoint vector = new Microsoft.Kinect.SkeletonPoint();
                vector.X = ScaleVector(800, joint.Position.X);
                vector.Y = ScaleVector(600, -joint.Position.Y);
                vector.Z = joint.Position.Z;
    
                Joint updatedJoint = new Joint();
             //   updatedJoint.JointType = joint.JointType;
                updatedJoint.TrackingState = JointTrackingState.Tracked;
                updatedJoint.Position = vector;
    
                Canvas.SetLeft(ellipse, updatedJoint.Position.X);
                Canvas.SetTop(ellipse, updatedJoint.Position.Y);
            }
    
            private float ScaleVector(int length, float position)
            {
                float value = (((((float)length) / 1f) / 2f) * position) + (length / 2);
                if (value > length)
                {
                    return (float)length;
                }
                if (value < 0f)
                {
                    return 0f;
                }
                return value;
            }
    
            private void Window_Closing_1(object sender, System.ComponentModel.CancelEventArgs e)
            {
                sensor.Stop();
            }
    
            void MainWindow_Loaded(object sender, RoutedEventArgs e)
            {
                //Since only a color video stream is needed, RuntimeOptions.UseColor is used.
                sensor.Start();
             //   sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
             //   sensor.ColorFrameReady += new EventHandler<ColorImageFrameReadyEventArgs>(sensor_ColorFrameReady);
    
            //    sensor.DepthStream.Enable(DepthImageFormat.Resolution320x240Fps30);
                sensor.SkeletonStream.Enable();
                sensor.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(sensor_SkeletonFrameReady);
    
                sensor.ElevationAngle = 0;
                //You can adjust the resolution here.
             //   runtime.VideoStream.Open(ImageStreamType.Video, 2, ImageResolution.Resolution640x480, ImageType.Color);
            }
    
            void runtime_VideoFrameReady(object sender, SkeletonFrameReadyEventArgs e)
            {
            //    PlanarImage image = e.ImageFrame.Image;
    
                SkeletonFrame frame = e.OpenSkeletonFrame();
                Skeleton[] framesData = new Skeleton[frame.SkeletonArrayLength];
                frame.CopySkeletonDataTo(framesData);
                foreach (Skeleton skltn in framesData)
                {
                    if (skltn.TrackingState == SkeletonTrackingState.Tracked)
                    {
                        Joint right = skltn.Joints[JointType.HandLeft];
                        Joint left = skltn.Joints[JointType.HandRight];
                    }
                }
            }
    
    
        }
    }
    

    Can you help me please with an explanation, because telling where is the door to a blind person will not help, he/she needs the directions to move forward.

    Regards,
    Wednesday, February 27, 2013 4:46 AM

All replies

  • Hi Alihht, Looking at your code yeah it's not aligned with the new versions of the SDK, can I suggest you look at the channel9 tutorials, they are really good and easy to follow with the samples available for download also.

    They can be found at: http://channel9.msdn.com/Series/KinectQuickstart

    Also, I notice you mention SDK 1.5, any reason you did not download the latest 1.6 version, since this version has all the coordinate mapping methods that you can use to align Skelton space to both depth and colour space, this would solve your alignment problem.

    Hope it helps some. George

    Wednesday, February 27, 2013 8:58 AM