Kinect Development: Depth Information (Part 2)

Reposted from (with thanks to): http://www.cnblogs.com/yangecnu/archive/2012/04/05/KinectSDK_Depth_Image_Processing_Part2.html


1. Simple Depth Image Processing

The Kinect depth value tops out at 4096mm, and a value of 0 usually means the depth could not be determined, so 0 values should generally be filtered out. Microsoft recommends working with values between 1220mm (4') and 3810mm (12.5'). Before doing any other depth image processing, the depth data should be thresholded down to this 1220mm-3810mm range.
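
A minimal sketch of that filtering step. The helper name FilterDepthRange is my own; the samples later in this post do the same check inline on a short[] filled by DepthImageFrame.CopyPixelDataTo:

private short[] FilterDepthRange(short[] rawDepthPixels)
{
    const int loThreshold = 1220;   // 4 feet
    const int hiThreshold = 3810;   // 12.5 feet
    short[] filtered = new short[rawDepthPixels.Length];

    for (int i = 0; i < rawDepthPixels.Length; i++)
    {
        // The upper 13 bits hold the depth in millimeters (see the player index discussion below).
        int depth = rawDepthPixels[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // Keep pixels inside the recommended band, zero out everything else (0 = "unknown").
        filtered[i] = (depth >= loThreshold && depth <= hiThreshold) ? rawDepthPixels[i] : (short)0;
    }

    return filtered;
}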

Processing depth image data with statistics is a very common approach. Thresholds can be based on the mean or the median of the depth data. Statistical methods help decide whether a point is noise, shadow, or something meaningful such as part of a user's hand. Sometimes the raw depth data can be mined without caring about the visual meaning of individual pixels. The goal of processing depth data is shape or object recognition; with that information the program can determine the position and movement of a person relative to the Kinect.
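
For instance, a data-driven threshold could be derived from the mean depth of a frame. This is only a sketch of the idea (the method name MeanDepthOfFrame is hypothetical), not code from the original article:

// Hypothetical example: average the non-zero depths of a frame and use that as a cut-off.
private int MeanDepthOfFrame(short[] rawDepthPixels)
{
    long sum = 0;
    int count = 0;

    for (int i = 0; i < rawDepthPixels.Length; i++)
    {
        int depth = rawDepthPixels[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;
        if (depth > 0)          // 0 means the depth could not be determined
        {
            sum += depth;
            count++;
        }
    }

    return count > 0 ? (int)(sum / count) : 0;  // e.g. treat pixels beyond this mean as background
}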

A histogram is an effective tool for summarizing how data is distributed; here we care about the distribution of depth values in a depth image. A histogram shows at a glance how often each depth value occurs and how the values cluster. From that we can pick thresholds and other criteria for filtering the image, so that the depth information it contains is revealed as clearly as possible.

<Window x:Class="TestDepthHist.MainWindow"
        xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
        xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
        Title="MainWindow" Height="800" Width="1200" WindowStartupLocation="CenterScreen">
    <Grid>
        <StackPanel>
            <StackPanel Orientation="Horizontal">
                <Image x:Name="DepthImage" Width="640" Height="480" />
                <Image x:Name="FilteredDepthImage" Width="640" Height="480" />
            </StackPanel>
            <ScrollViewer Margin="0,15" HorizontalScrollBarVisibility="Auto" VerticalScrollBarVisibility="Auto">
                <StackPanel x:Name="DepthHistogram" Orientation="Horizontal" Height="300" />
            </ScrollViewer>
        </StackPanel>
    </Grid>
</Window>

The XAML above uses a few new elements:

StackPanel stacks its child elements horizontally or vertically: depending on the value of its Orientation property it either piles the contained controls on top of one another or lays them out in a single horizontal row. If more controls are added than fit within the StackPanel's width, the overflow is clipped and not displayed.

Clearly worth taking some time to learn the WPF controls.

namespace TestDepthHist
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        private KinectSensor kinect;
        private WriteableBitmap depthImageBitMap;
        private Int32Rect depthImageBitmapRect;
        private Int32 depthImageStride;
        private DepthImageFrame lastDepthFrame;
        private short[] depthPixelData;

        private Int32 LoDepthThreshold = 0;
        private Int32 HiDepthThreshold = 3500;


        public KinectSensor Kinect // public access to the sensor
        {
            get { return kinect; }
            set
            {

                if (kinect != null)
                {
                    UninitializeKinectSensor(this.kinect);  // release the old sensor first
                    kinect = null;
                }
                if (value != null && value.Status == KinectStatus.Connected)
                {
                    kinect = value;
                    InitializeKinectSensor(this.kinect);    // store the newly connected sensor and initialize it
                }
            }
        }

        public MainWindow()
        {
            InitializeComponent();
            this.Loaded += (s, e) => DiscoverKinectSensor();
            this.Unloaded += (s, e) => this.Kinect = null;  // go through the property so the sensor gets uninitialized
        }

        private void DiscoverKinectSensor()
        {
            KinectSensor.KinectSensors.StatusChanged += new EventHandler<StatusChangedEventArgs>(KinectSensors_StatusChanged);
            this.Kinect = KinectSensor.KinectSensors.FirstOrDefault(sensor => sensor.Status == KinectStatus.Connected);
        }

        void KinectSensors_StatusChanged(object sender, StatusChangedEventArgs e)
        {
            // Handle status changes caused by plugging or unplugging the Kinect
            switch (e.Status)
            {
                case KinectStatus.Connected:
                    if (this.kinect == null)
                        this.Kinect = e.Sensor;    // assign through the property so initialization runs
                    break;
                case KinectStatus.Disconnected:
                    if (this.kinect == e.Sensor)
                    {
                        this.Kinect = null;
                        this.Kinect = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);
                        if (this.kinect == null)
                        {
                            //TODO: Notify the user that the Kinect has been unplugged
                        }
                    }
                    break;
                //TODO: Handle the remaining status values
            }
        }

        private void InitializeKinectSensor(KinectSensor kinectSensor)
        {
            if (kinectSensor != null)
            {
                DepthImageStream depthStream = kinectSensor.DepthStream;
                depthStream.Enable();

                depthImageBitMap = new WriteableBitmap(depthStream.FrameWidth, depthStream.FrameHeight, 96, 96,
                                                                            PixelFormats.Gray16, null);
                depthImageBitmapRect = new Int32Rect(0, 0, depthStream.FrameWidth, depthStream.FrameHeight);
                depthImageStride = depthStream.FrameWidth * depthStream.FrameBytesPerPixel;

                DepthImage.Source = depthImageBitMap;
                kinectSensor.DepthFrameReady += new EventHandler<DepthImageFrameReadyEventArgs>(KinectDevice_DepthFrameReady);
                kinectSensor.Start();
            }
        }

        private void UninitializeKinectSensor(KinectSensor kinect)
        {
            if (kinect != null)
            {
                kinect.Stop();  // stop the sensor
                kinect.DepthFrameReady -= new EventHandler<DepthImageFrameReadyEventArgs>(KinectDevice_DepthFrameReady);
            }
        }

        private void CreateDepthHistogram(DepthImageFrame depthFrame, short[] pixelData)
        {
            int depth;
            int[] depths = new int[4096];
            double chartBarWidth = Math.Max(3, DepthHistogram.ActualWidth / depths.Length);
            int maxValue = 0;


            DepthHistogram.Children.Clear();


            // Extract each depth value and count how many times it occurs
            for (int i = 0; i < pixelData.Length; i++)
            {
                depth = pixelData[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;

                if (depth >= LoDepthThreshold && depth <= HiDepthThreshold)
                {
                    depths[depth]++;
                }
            }


            // Find the largest bin count (used to scale the bar heights)
            for (int i = 0; i < depths.Length; i++)
            {
                maxValue = Math.Max(maxValue, depths[i]);
            }


            // Draw the histogram bars
            for (int i = 0; i < depths.Length; i++)
            {
                if (depths[i] > 0)
                {
                    Rectangle r = new Rectangle();
                    r.Fill = Brushes.Red;
                    r.Width = chartBarWidth;
                    r.Height = DepthHistogram.ActualHeight * (depths[i] / (double)maxValue);
                    r.Margin = new Thickness(1, 0, 1, 0);
                    r.VerticalAlignment = System.Windows.VerticalAlignment.Bottom;  // anchor each bar to the bottom of the panel
                    DepthHistogram.Children.Add(r);
                }
            }
        }

        private void CreateBetterShadesOfGray(DepthImageFrame depthFrame, short[] pixelData)
        {
            Int32 depth;
            Int32 gray;
            Int32 loThreashold = 0;
            Int32 bytePerPixel = 4; // Bgr32: four bytes per pixel, only the first three (B, G, R) are used
            Int32 hiThreshold = 3500;
            byte[] enhPixelData = new byte[depthFrame.Width * depthFrame.Height * bytePerPixel];
            for (int i = 0, j = 0; i < pixelData.Length; i++, j += bytePerPixel)
            {
                depth = pixelData[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;
                if (depth < loThreashold || depth > hiThreshold)
                {
                    gray = 0xFF;
                }
                else
                {
                    gray = (255 * depth / 0xFFF);
                }
                enhPixelData[j] = (byte)gray;
                enhPixelData[j + 1] = (byte)gray;
                enhPixelData[j + 2] = (byte)gray;

            }
            DepthImage.Source = BitmapSource.Create(depthFrame.Width, depthFrame.Height, 96, 96, PixelFormats.Bgr32, null, enhPixelData, depthFrame.Width * bytePerPixel);
        }

        private void KinectDevice_DepthFrameReady(Object sender, DepthImageFrameReadyEventArgs e)
        {
            using (DepthImageFrame frame = e.OpenDepthImageFrame())
            {
                if (frame != null)
                {
                    depthPixelData = new short[frame.PixelDataLength];
                    frame.CopyPixelDataTo(this.depthPixelData);
                    CreateBetterShadesOfGray(frame, this.depthPixelData);
                    CreateDepthHistogram(frame, this.depthPixelData);
                }
            }
        }
    }
}

In many cases a Kinect application will not do much processing of the depth data itself. When real processing is needed, it should be handed to a library such as OpenCV, because depth image processing is often computationally expensive and should not be written directly in a high-level language like C#.

The Kinect SDK can analyze the depth data and detect human (player) silhouettes, recognizing up to six players at a time. The SDK tags every tracked player with an index. The player index is stored in the lowest 3 bits of the depth data: each depth pixel is 16 bits, with bits 0-2 holding the player index and bits 3-15 holding the depth value. The bitmask 7 (0000 0111) extracts the player index from a depth pixel. Fortunately, the SDK defines constants for the player index bits: DepthImageFrame.PlayerIndexBitmaskWidth and DepthImageFrame.PlayerIndexBitmask. The former is 3 and the latter is 7; developers should use these SDK constants instead of hard-coding 3 or 7.
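
Putting the two constants together, pulling the player index and the depth out of a single raw pixel looks like this (the same pattern appears in the frame handlers below):

short rawPixel = pixelData[i];

// Bits 0-2: player index (0 = not a player, 1-6 = tracked player).
int playerIndex = rawPixel & DepthImageFrame.PlayerIndexBitmask;       // the mask is 7

// Bits 3-15: depth in millimeters.
int depth = rawPixel >> DepthImageFrame.PlayerIndexBitmaskWidth;       // the width is 3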


The player index ranges from 0 to 6, where 0 means the pixel does not belong to a player. Initializing the depth stream alone, however, does not turn on player tracking. Player tracking depends on skeleton tracking: when initializing the KinectSensor and the DepthImageStream, the SkeletonStream must be initialized as well. Only after the SkeletonStream has been initialized will the depth data carry player index information. Getting the player index does not require subscribing to the SkeletonFrameReady event.
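
In other words, the initialization has to enable both streams; a minimal version looks like this (kinectSensor stands for the connected sensor, and the complete setup is in the KinectDevice property of the sample below):

// Without SkeletonStream.Enable() the player index bits in the depth data stay 0.
kinectSensor.SkeletonStream.Enable();
kinectSensor.DepthStream.Enable();
kinectSensor.DepthFrameReady += KinectDevice_DepthFrameReady;
kinectSensor.Start();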

Do not code against specific player index values, because they change. The actual player index does not necessarily match the order of the people in front of the Kinect. For example, with only one player in the Kinect's field of view, the returned player index might be 3 or 4. Sometimes the first player's index is not 1: walk into the field of view and the index may be 1, walk out and back in and it may become something else. Kinect applications need to take this into account.

namespace TestDepthPlayer
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        #region Member Variables
        private KinectSensor _KinectDevice;
        private WriteableBitmap _RawDepthImage;
        private Int32Rect _RawDepthImageRect;
        private short[] _RawDepthPixelData;
        private int _RawDepthImageStride;
        private WriteableBitmap _EnhDepthImage;
        private Int32Rect _EnhDepthImageRect;
        private short[] _EnhDepthPixelData;
        private int _EnhDepthImageStride;
        private int _TotalFrames;
        private DateTime _StartFrameTime;
        #endregion Member Variables


        #region Constructor
        public MainWindow()
        {
            InitializeComponent();

            KinectSensor.KinectSensors.StatusChanged += KinectSensors_StatusChanged;
            this.KinectDevice = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);
        }
        #endregion Constructor


        #region Methods
        private void KinectSensors_StatusChanged(object sender, StatusChangedEventArgs e)
        {
            switch (e.Status)
            {
                case KinectStatus.Initializing:
                case KinectStatus.Connected:
                case KinectStatus.NotPowered:
                case KinectStatus.NotReady:
                case KinectStatus.DeviceNotGenuine:
                    this.KinectDevice = e.Sensor;
                    break;
                case KinectStatus.Disconnected:
                    //TODO: Give the user feedback to plug-in a Kinect device.                    
                    this.KinectDevice = null;
                    break;
                default:
                    //TODO: Show an error state
                    break;
            }
        }


        private void KinectDevice_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
        {
            using (DepthImageFrame frame = e.OpenDepthImageFrame())
            {
                if (frame != null)
                {
                    frame.CopyPixelDataTo(this._RawDepthPixelData);
                    this._RawDepthImage.WritePixels(this._RawDepthImageRect, this._RawDepthPixelData, this._RawDepthImageStride, 0);
                    CreatePlayerDepthImage(frame, this._RawDepthPixelData);
                }
            }


            //FramesPerSecondElement.Text = string.Format("{0:0} fps", (this._TotalFrames++ / DateTime.Now.Subtract(this._StartFrameTime).TotalSeconds));
        }


        private void CreatePlayerDepthImage(DepthImageFrame depthFrame, short[] pixelData)
        {
            int playerIndex;
            int depthBytePerPixel = 4;
            byte[] enhPixelData = new byte[depthFrame.Width * depthFrame.Height * depthBytePerPixel];


            for (int i = 0, j = 0; i < pixelData.Length; i++, j += depthBytePerPixel)
            {
                playerIndex = pixelData[i] & DepthImageFrame.PlayerIndexBitmask;

                if (playerIndex == 0)
                {
                    enhPixelData[j] = 0xFF;
                    enhPixelData[j + 1] = 0xFF;
                    enhPixelData[j + 2] = 0xFF;
                }
                else
                {
                    enhPixelData[j] = 0x00;
                    enhPixelData[j + 1] = 0x00;
                    enhPixelData[j + 2] = 0x00;
                }
            }


            this._EnhDepthImage.WritePixels(this._EnhDepthImageRect, enhPixelData, this._EnhDepthImageStride, 0);
        }
        #endregion Methods


        #region Properties
        public KinectSensor KinectDevice
        {
            get { return this._KinectDevice; }
            set
            {
                if (this._KinectDevice != value)
                {
                    //Uninitialize
                    if (this._KinectDevice != null)
                    {
                        this._KinectDevice.Stop();
                        this._KinectDevice.DepthFrameReady -= KinectDevice_DepthFrameReady;
                        this._KinectDevice.DepthStream.Disable();
                        this._KinectDevice.SkeletonStream.Disable();    // the skeleton stream is required for the player index data

                        this.RawDepthImage.Source = null;
                        this.EnhDepthImage.Source = null;
                    }

                    this._KinectDevice = value;

                    //Initialize
                    if (this._KinectDevice != null)
                    {
                        if (this._KinectDevice.Status == KinectStatus.Connected)
                        {
                            this._KinectDevice.SkeletonStream.Enable();
                            this._KinectDevice.DepthStream.Enable();

                            DepthImageStream depthStream = this._KinectDevice.DepthStream;
                            this._RawDepthImage = new WriteableBitmap(depthStream.FrameWidth, depthStream.FrameHeight, 96, 96, PixelFormats.Gray16, null);
                            this._RawDepthImageRect = new Int32Rect(0, 0, (int)Math.Ceiling(this._RawDepthImage.Width), (int)Math.Ceiling(this._RawDepthImage.Height));
                            this._RawDepthImageStride = depthStream.FrameWidth * depthStream.FrameBytesPerPixel;
                            this._RawDepthPixelData = new short[depthStream.FramePixelDataLength];
                            this.RawDepthImage.Source = this._RawDepthImage;    // bind the WriteableBitmap once; later WritePixels calls update the display, which keeps memory use low

                            this._EnhDepthImage = new WriteableBitmap(depthStream.FrameWidth, depthStream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
                            this._EnhDepthImageRect = new Int32Rect(0, 0, (int)Math.Ceiling(this._EnhDepthImage.Width), (int)Math.Ceiling(this._EnhDepthImage.Height));
                            this._EnhDepthImageStride = depthStream.FrameWidth * 4;
                            this._EnhDepthPixelData = new short[depthStream.FramePixelDataLength];
                            this.EnhDepthImage.Source = this._EnhDepthImage;


                            this._KinectDevice.DepthFrameReady += KinectDevice_DepthFrameReady; // hook up the frame-ready handler
                            this._KinectDevice.Start();

                            this._StartFrameTime = DateTime.Now;
                        }
                    }
                }
            }
        }
        #endregion Properties
    }

}

Key points:

SkeletonStream.Enable() must be called;

the WriteableBitmap usage pattern; the rest is straightforward logic.


2. Measuring Objects

A pixel's X and Y position does not correspond directly to a real-world width or height, but with some geometry it is possible to use them for measurement. Every camera has a field of view, and the focal length together with the size of the camera sensor determines the field-of-view angles. The Kinect camera's horizontal and vertical field-of-view angles are 57° and 43° respectively. Since we also know the depth value, trigonometry gives us an object's real width.


The camera's field of view forms an isosceles triangle whose base runs through the position of the person, and the person's depth is the height of that triangle. Splitting the isosceles triangle at the person's position into two right triangles lets us compute the length of the base. Once the base length is known, pixel widths can be converted into real-world widths. For example, if the computed base width is 1500mm, the player spans 100 pixels, and the depth image is 320 pixels wide, then the player's real width is 468.75mm ((1500/320)*100). The formula needs the player's depth and the number of pixels the player spans; the player's depth can be taken as the average of the depth values over the player's pixels.
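
The same arithmetic in code, as a sketch (the variable names are mine; the PlayerDepthData class further down wraps this logic in its RealWidthInches property):

// depth            : average depth of the player's pixels, in millimeters
// playerPixelWidth : number of pixel columns the player occupies
// frameWidth       : width of the depth image in pixels (e.g. 320)
double halfFov = 57.0 / 2.0 * Math.PI / 180.0;               // half the horizontal field of view, in radians
double baseWidthMm = 2.0 * depth * Math.Tan(halfFov);        // base of the isosceles triangle at that depth
double realWidthMm = baseWidthMm * playerPixelWidth / frameWidth;

// With the numbers from the text: baseWidthMm = 1500, playerPixelWidth = 100, frameWidth = 320
// realWidthMm = (1500 / 320) * 100 = 468.75 mm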

Calculating a person's height works on the same principle, using the vertical field-of-view angle and the height of the depth image instead.

A clever use of similar triangles.

Some new controls are used here:

<Grid>
        <StackPanel Orientation="Horizontal">
            <Image x:Name="DepthImage"/>
            <ItemsControl x:Name="PlayerDepthData" Width="300" TextElement.FontSize="20">
                <ItemsControl.ItemTemplate>
                    <DataTemplate>
                        <StackPanel Margin="0,15">
                            <StackPanel Orientation="Horizontal">
                                <TextBlock Text="PlayerId:" />
                                <TextBlock Text="{Binding Path=PlayerId}" />
                            </StackPanel>
                            <StackPanel Orientation="Horizontal">
                                <TextBlock Text="Width:" />
                                <TextBlock Text="{Binding Path=RealWidth}" />
                            </StackPanel>
                            <StackPanel Orientation="Horizontal">
                                <TextBlock Text="Height:" />
                                <TextBlock Text="{Binding Path=RealHeight}" />
                            </StackPanel>
                        </StackPanel>
                    </DataTemplate>
                </ItemsControl.ItemTemplate>
            </ItemsControl>
        </StackPanel>
    </Grid>
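
The corresponding code-behind: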
namespace TestDepthMeasure
{
    /// <summary>
    /// Interaction logic for MainWindow.xaml
    /// </summary>
    public partial class MainWindow : Window
    {
        private KinectSensor _KinectDevice;
        private WriteableBitmap _DepthImage;
        private Int32Rect _DepthImageRect;
        private short[] _DepthPixelData;
        private int _DepthImageStride;
        private int _TotalFrames;
        private DateTime _StartFrameTime;

        public KinectSensor KinectDevice
        {
            get { return this._KinectDevice; }
            set
            {
                if (this._KinectDevice!=value)
                {
                    if (this._KinectDevice != null)
                    {
                        this._KinectDevice.Stop();
                        this._KinectDevice.DepthFrameReady -= KinectDevice_DepthFrameReady;
                        this._KinectDevice.DepthStream.Disable();
                        this._KinectDevice.SkeletonStream.Disable();
                    }

                    this._KinectDevice = value;
                    if (this._KinectDevice!=null&& this._KinectDevice.Status==KinectStatus.Connected)
                    {
                        this._KinectDevice.SkeletonStream.Enable();
                        this._KinectDevice.DepthStream.Enable();

                        DepthImageStream depthStream = this._KinectDevice.DepthStream;
                        this._DepthImage = new WriteableBitmap(depthStream.FrameWidth, depthStream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
                        this._DepthImageRect = new Int32Rect(0, 0, (int)Math.Ceiling(this._DepthImage.Width), (int)Math.Ceiling(this._DepthImage.Height));
                        this._DepthImageStride = depthStream.FrameWidth * 4;
                        this._DepthPixelData = new short[depthStream.FramePixelDataLength];
                        this.DepthImage.Source = this._DepthImage;  // bind the WriteableBitmap to the Image control

                        this._KinectDevice.DepthFrameReady += KinectDevice_DepthFrameReady;
                        this._KinectDevice.Start();

                        this._StartFrameTime = DateTime.Now;
                    }
                }
            }
        }


        private void CreateBetterShadesOfGray(DepthImageFrame depthFrame, short[] pixelData)
        {
            int depth;
            int gray;
            int bytesPerPixel = 4;
            byte[] enPixelData=new byte[depthFrame.Width*depthFrame.Height*bytesPerPixel];
            int loThreshold = 1220;
            int hiThreshold = 3048;

            for (int i = 0, j = 0; i < pixelData.Length;i++,j+=bytesPerPixel )
            {
                depth = pixelData[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;
                if (depth<loThreshold||depth>hiThreshold)
                {
                    gray = 0xFF;
                } 
                else
                {
                    gray = 255 - (255 * depth / 0xFFF);
                }

                enPixelData[j] = (byte)gray;
                enPixelData[j + 1] = (byte)gray;
                enPixelData[j + 2] = (byte)gray;
            }
            this._DepthImage.WritePixels(this._DepthImageRect, enPixelData, this._DepthImageStride, 0);
        }

        private void KinectDevice_DepthFrameReady(Object sender, DepthImageFrameReadyEventArgs e)
        {
            using (DepthImageFrame frame = e.OpenDepthImageFrame())
            {
                if (frame!=null)
                {
                    frame.CopyPixelDataTo(this._DepthPixelData);
                    CreateBetterShadesOfGray(frame, this._DepthPixelData);
                    CalculatePlayerSize(frame, this._DepthPixelData);
                    
                }
            }
        }
        
        private void KinectSensors_StatusChanged(Object sender, StatusChangedEventArgs e)
        {
            switch (e.Status)
            {
                case KinectStatus.Initializing:
                case KinectStatus.Connected:
                case KinectStatus.NotPowered:
                case KinectStatus.NotReady:
                case KinectStatus.DeviceNotGenuine:
                    this.KinectDevice = e.Sensor;   // assign through the property so the streams get (re)initialized
                    break;
                case KinectStatus.Disconnected:
                    this.KinectDevice = null;
                    break;
                default:
                    break;
            }
        }

        private void CalculatePlayerSize(DepthImageFrame depthFrame, short[] pixelData)
        {
            int depth;
            int playerIndex;
            int pixelIndex;
            int bytesPerPixel = depthFrame.BytesPerPixel;
            PlayerDepthData[] players = new PlayerDepthData[6];


            for (int row = 0; row < depthFrame.Height; row++)
            {
                for (int col = 0; col < depthFrame.Width; col++)
                {
                    pixelIndex = col + (row * depthFrame.Width);
                    depth = pixelData[pixelIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

                    if (depth != 0)
                    {
                        playerIndex = (pixelData[pixelIndex] & DepthImageFrame.PlayerIndexBitmask) - 1;

                        if (playerIndex > -1)
                        {
                            if (players[playerIndex] == null)
                            {
                                players[playerIndex] = new PlayerDepthData(playerIndex + 1, depthFrame.Width, depthFrame.Height);
                            }

                            players[playerIndex].UpdateData(col, row, depth);
                        }
                    }
                }
            }


            PlayerDepthData.ItemsSource = players;  // PlayerDepthData here is the ItemsControl declared in the XAML, not the data class
        }

        public MainWindow()
        {
            InitializeComponent();

            KinectSensor.KinectSensors.StatusChanged += KinectSensors_StatusChanged;
            this.KinectDevice = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);
        }
    }
}
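
And the PlayerDepthData class that the ItemsControl binds to: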
namespace TestDepthMeasure
{
    class PlayerDepthData
    {
        #region Member Variables
        private const double MillimetersPerInch = 0.0393700787;    // note: despite the name, this is the mm-to-inch factor (inches per millimeter)
        private static readonly double HorizontalTanA = Math.Tan(57.0 / 2.0 * Math.PI / 180);          // tan of half the horizontal field of view
        private static readonly double VerticalTanA = Math.Abs(Math.Tan(43.0 / 2.0 * Math.PI / 180));  // tan of half the vertical field of view

        private int _DepthSum;
        private int _DepthCount;
        private int _LoWidth;
        private int _HiWidth;
        private int _LoHeight;
        private int _HiHeight;
        #endregion Member Variables


        #region Constructor
        public PlayerDepthData(int playerId, double frameWidth, double frameHeight)
        {
            this.PlayerId = playerId;
            this.FrameWidth = frameWidth;
            this.FrameHeight = frameHeight;


            this._LoWidth = int.MaxValue;
            this._HiWidth = int.MinValue;

            this._LoHeight = int.MaxValue;
            this._HiHeight = int.MinValue;
        }
        #endregion Constructor


        #region Methods
        public void UpdateData(int x, int y, int depth)
        {
            this._DepthCount++;
            this._DepthSum += depth;
            this._LoWidth = Math.Min(this._LoWidth, x);
            this._HiWidth = Math.Max(this._HiWidth, x);
            this._LoHeight = Math.Min(this._LoHeight, y);
            this._HiHeight = Math.Max(this._HiHeight, y);
        }
        #endregion Methods


        #region Properties
        public int PlayerId { get; private set; }
        public double FrameWidth { get; private set; }
        public double FrameHeight { get; private set; }


        public double Depth
        {
            get { return this._DepthSum / (double)this._DepthCount; }
        }


        public int PixelWidth
        {
            get { return this._HiWidth - this._LoWidth; }
        }


        public int PixelHeight
        {
            get { return this._HiHeight - this._LoHeight; }
        }


        public string RealWidth
        {
            get
            {
                double inches = this.RealWidthInches;
                //int feet = (int)(inches / 12);
                //inches %= 12;
                return string.Format("{0:0.0}mm", inches * 25.4);
                //return string.Format("{0:0.0}mm~{1}'{2:0.0}"", inches * 25.4, feet, inches);
            }
        }


        public string RealHeight
        {
            get
            {
                double inches = this.RealHeightInches;
                //int feet = (int)(inches / 12);
                //inches %= 12;
                return string.Format("{0:0.0}mm", inches * 25.4);
                //return string.Format("{0:0.0}mm~{1}'{2:0.0}"", inches * 25.4, feet, inches);
            }
        }


        public double RealWidthInches
        {
            get
            {
                double opposite = this.Depth * HorizontalTanA;
                return this.PixelWidth * 2 * opposite / this.FrameWidth * MillimetersPerInch;
            }
        }

        public double RealHeightInches
        {
            get
            {
                double opposite = this.Depth * VerticalTanA;
                return this.PixelHeight * 2 * opposite / this.FrameHeight * MillimetersPerInch;
            }
        }
        #endregion Properties
    }
}

3. Overlaying the Depth Image and the Video Image

This requires the correspondence between the depth image and the RGB image.

The idea is to take the pixels that belong to a player in the depth data, fetch the corresponding color pixels, and overlay them onto a video image. This is common in TV and film production, where it is called green-screen compositing (chroma keying): an actor or presenter stands in front of a green backdrop, and after recording, the green background is keyed out and replaced with another scene. It is the standard trick in science-fiction films whenever actors cannot perform on a real set, and it is also why ID photos are usually shot against a blue or red background, since a uniform background color makes the subject easy to cut out.

Pixels in the depth image cannot simply be carried over to the color image, even when both streams use the same resolution, because the two cameras sit at different positions on the Kinect and their images do not line up. It is like our two eyes: what you see with only the left eye open differs from what you see with only the right eye open, and the brain fuses the two views into a single composite image.

The KinectSensor class provides MapDepthToColorImagePoint, MapDepthToSkeletonPoint, MapSkeletonPointToColor, and MapSkeletonPointToDepth. The DepthImageFrame object has methods with slightly different names (MapFromSkeletonPoint, MapToColorImagePoint, and MapToSkeletonPoint) but similar functionality. In the example below we use MapDepthToColorImagePoint to map the player's pixels in the depth image to the corresponding points in the color image. Attentive readers will notice that there is no method for mapping a color pixel back to the corresponding depth pixel.
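
For a single depth pixel the mapping call looks like this; it is just an excerpt of the loop in RenderGreenScreen below, with the fields renamed to locals:

// Map the depth pixel at (depthX, depthY) to its position in the color frame.
ColorImagePoint colorPoint = kinectDevice.MapDepthToColorImagePoint(
    depthFrame.Format, depthX, depthY,
    depthPixelData[depthX + depthY * depthFrame.Width],   // raw depth value at that pixel
    colorFrame.Format);

// colorPoint.X and colorPoint.Y index into the color frame, which may use a different resolution.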

Create a new project and add two Image controls: the first is the background picture and the second the foreground image. In this example, to keep the depth frame and the color frame as close together as possible, we poll for frames. Every frame carries a Timestamp value, and by comparing the timestamps of two frames we can decide whether they are close enough. Subscribing to the KinectSensor's AllFramesReady event does not guarantee that frames from different streams are synchronized; they can never be produced at exactly the same instant, but polling lets us keep frames from the different streams as close together as possible. (The pixels do not correspond one-to-one and the streams are not in lockstep, which is why AllFramesReady is not used here.)
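
The sample below simply opens the next frame of each stream on every render pass; a stricter version could compare the frames' Timestamp values (in milliseconds) and skip pairs that are too far apart. A sketch of that check, not part of the original sample (sensor stands for the connected KinectSensor):

using (ColorImageFrame colorFrame = sensor.ColorStream.OpenNextFrame(100))
using (DepthImageFrame depthFrame = sensor.DepthStream.OpenNextFrame(100))
{
    if (colorFrame != null && depthFrame != null &&
        Math.Abs(colorFrame.Timestamp - depthFrame.Timestamp) < 100)   // only render frames captured within ~100 ms
    {
        RenderGreenScreen(sensor, colorFrame, depthFrame);
    }
}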

namespace DepthGreenScreen
{
    
    public partial class MainWindow : Window
    {
        private KinectSensor _KinectDevice;
        private WriteableBitmap _GreenScreenImage;
        private Int32Rect _GreenScreenImageRect;
        private int _GreenScreenImageStride;
        private short[] _DepthPixelData;
        private byte[] _ColorPixelData;
            // frames are pulled (polled) rather than pushed: depth and color pixels and frames do not line up


        public KinectSensor KinectDevice
        {
            get { return this._KinectDevice; }
            set
            {
                if (this._KinectDevice != value)
                {
                    //Uninitialize
                    if (this._KinectDevice != null)
                    {
                        UninitializeKinectSensor(this._KinectDevice);
                        this._KinectDevice = null;
                    }

                    this._KinectDevice = value;

                    //Initialize
                    if (this._KinectDevice != null)
                    {
                        if (this._KinectDevice.Status == KinectStatus.Connected)
                        {
                            InitializeKinectSensor(this._KinectDevice);
                        }
                    }
                }
            }
        }

        public MainWindow()
        {
            InitializeComponent();
            CompositionTarget.Rendering += CompositionTarget_Rendering;
        }

        private void UninitializeKinectSensor(KinectSensor sensor)
        {
            if (sensor != null)
            {
                sensor.Stop();
                sensor.ColorStream.Disable();
                sensor.DepthStream.Disable();
                sensor.SkeletonStream.Disable();
            }
        }

        private void InitializeKinectSensor(KinectSensor sensor)
        {
            if (sensor!=null)
            {
                sensor.DepthStream.Range = DepthRange.Default;
                sensor.SkeletonStream.Enable(); // required to segment out the player silhouette
                sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30); // choose the formats explicitly
                sensor.ColorStream.Enable(ColorImageFormat.RgbResolution1280x960Fps12);

                DepthImageStream depthStream = sensor.DepthStream;
                this._GreenScreenImage = new WriteableBitmap(depthStream.FrameWidth, depthStream.FrameHeight, 96, 96, PixelFormats.Bgra32, null);
                this._GreenScreenImageRect = new Int32Rect(0,0,(int)Math.Ceiling(this._GreenScreenImage.Width),(int)Math.Ceiling(this._GreenScreenImage.Height));
                this._GreenScreenImageStride = depthStream.FrameWidth * 4;
                this.GreenScreenImage.Source = this._GreenScreenImage;

                this._DepthPixelData = new short[this._KinectDevice.DepthStream.FramePixelDataLength];
                this._ColorPixelData = new byte[this._KinectDevice.ColorStream.FramePixelDataLength];

                sensor.Start();
            }
        }

        private void DiscoverKinect()
        {
            if (this._KinectDevice==null)
            {
                this.KinectDevice = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);

                if (this._KinectDevice!=null)
                {
                    InitializeKinectSensor(this._KinectDevice);
                }
            }
        }

        private void RenderGreenScreen(KinectSensor kinectDevice,ColorImageFrame colorFrame,DepthImageFrame depthFrame)
        {
            if (kinectDevice != null && depthFrame!=null && colorFrame !=null)
            {
                int depthPixelIndex;
                int playerIndex;
                int colorPixelIndex;
                ColorImagePoint colorPoint;
                int colorStride = colorFrame.BytesPerPixel * colorFrame.Width;
                int bytesPerPixel = 4;
                byte[] playerImage= new byte[depthFrame.Height * this._GreenScreenImageStride];
                int playerImageIndex = 0;

                depthFrame.CopyPixelDataTo(this._DepthPixelData);
                colorFrame.CopyPixelDataTo(this._ColorPixelData);

                for (int depthY = 0; depthY < depthFrame.Height;depthY++ )
                {
                    for (int depthX = 0; depthX < depthFrame.Width;depthX++,playerImageIndex+=bytesPerPixel )
                    {
                        depthPixelIndex = depthX + (depthY * depthFrame.Width);
                        playerIndex = this._DepthPixelData[depthPixelIndex] & DepthImageFrame.PlayerIndexBitmask;

                        if (playerIndex != 0)
                        {
                            colorPoint = kinectDevice.MapDepthToColorImagePoint(depthFrame.Format, depthX, depthY, this._DepthPixelData[depthPixelIndex], colorFrame.Format);
                            colorPixelIndex = (colorPoint.X * colorFrame.BytesPerPixel) + (colorPoint.Y * colorStride);

                            playerImage[playerImageIndex] = this._ColorPixelData[colorPixelIndex];         //Blue    
                            playerImage[playerImageIndex + 1] = this._ColorPixelData[colorPixelIndex + 1];     //Green
                            playerImage[playerImageIndex + 2] = this._ColorPixelData[colorPixelIndex + 2];     //Red
                            playerImage[playerImageIndex + 3] = 0xFF;  
                        }
                    }
                }

                this._GreenScreenImage.WritePixels(this._GreenScreenImageRect, playerImage, this._GreenScreenImageStride, 0);
            }

        }
        private void CompositionTarget_Rendering(Object sender, EventArgs e)
        {
            DiscoverKinect();

            if (this._KinectDevice!=null)
            {
                try
                {
                    using (ColorImageFrame colorFrame = this._KinectDevice.ColorStream.OpenNextFrame(100))
                    {
                        using (DepthImageFrame depthFrame = this._KinectDevice.DepthStream.OpenNextFrame(100))
                        {
                            RenderGreenScreen(this.KinectDevice, colorFrame, depthFrame);
                        }
                    }
                }
                catch (System.Exception)
                {
                    // do nothing
                }
            }
        }
    }
}
Original post: https://www.cnblogs.com/sprint1989/p/3838518.html