The application consists of three main classes: an application class derived from wxApp, a main window derived from wxFrame, and a canvas derived from wxWindow, which will display the image from the Kinect sensor.
#ifndef _KINECTTESTAPP_H_
#define _KINECTTESTAPP_H_
#include "wx/image.h"
#include "KinectTestMainFrame.h"
// Application object for the Kinect test program.
// Standard wxWidgets startup: OnInit() creates and shows the main frame,
// OnExit() runs on shutdown.
class KinectTestApp: public wxApp
{
DECLARE_CLASS( KinectTestApp )
DECLARE_EVENT_TABLE()
public :
// Default constructor; member setup is delegated to Init() (DialogBlocks style).
KinectTestApp();
// Second-phase member initialization.
void Init();
// Called on startup; returning false aborts the application.
virtual bool OnInit();
// Called on shutdown; the return value is the process exit code.
virtual int OnExit();
};
DECLARE_APP(KinectTestApp)
#endif
...
bool KinectTestApp::OnInit()
{
#if wxUSE_LIBPNG
wxImage::AddHandler( new wxPNGHandler);
#endif
#if wxUSE_LIBJPEG
wxImage::AddHandler( new wxJPEGHandler);
#endif
KinectTestMainFrame* mainWindow = new KinectTestMainFrame( NULL );
mainWindow->Show( true );
return true ;
}
// Main window: hosts the device list and the Kinect image canvas.
// Derives from wxThreadHelper so a worker thread (Entry()) can poll frames.
class KinectTestMainFrame: public wxFrame, public wxThreadHelper
{
DECLARE_CLASS( KinectTestMainFrame )
DECLARE_EVENT_TABLE()
public :
// Two-phase construction (DialogBlocks convention): default ctor + Create().
KinectTestMainFrame();
KinectTestMainFrame( wxWindow* parent,
wxWindowID id = SYMBOL_KINECTTESTMAINFRAME_IDNAME,
const wxString& caption = SYMBOL_KINECTTESTMAINFRAME_TITLE,
const wxPoint& pos = SYMBOL_KINECTTESTMAINFRAME_POSITION,
const wxSize& size = SYMBOL_KINECTTESTMAINFRAME_SIZE,
long style = SYMBOL_KINECTTESTMAINFRAME_STYLE );
bool Create( wxWindow* parent,
wxWindowID id = SYMBOL_KINECTTESTMAINFRAME_IDNAME,
const wxString& caption = SYMBOL_KINECTTESTMAINFRAME_TITLE,
const wxPoint& pos = SYMBOL_KINECTTESTMAINFRAME_POSITION,
const wxSize& size = SYMBOL_KINECTTESTMAINFRAME_SIZE,
long style = SYMBOL_KINECTTESTMAINFRAME_STYLE );
~KinectTestMainFrame();
// Member initialization and control creation (called from Create()).
void Init();
void CreateControls();
// Resource loaders generated by the form designer.
wxBitmap GetBitmapResource( const wxString& name );
wxIcon GetIconResource( const wxString& name );
// Worker-thread entry point (wxThreadHelper).
virtual wxThread::ExitCode Entry();
// Layout sizer and child controls (created in CreateControls()).
wxGridBagSizer* m_MainSizer;
wxListBox* m_DeviceListBox;
KinectCanvas* m_Canvas;
};
#endif
...
void KinectTestMainFrame::CreateControls()
{
    // Build the frame layout: device list on the left, Kinect image canvas
    // on the right, managed by a grid-bag sizer.
    KinectTestMainFrame * itemFrame1 = this;

    m_MainSizer = new wxGridBagSizer(0, 0);
    m_MainSizer->SetEmptyCellSize(wxSize(10, 20));
    itemFrame1->SetSizer(m_MainSizer);

    // Device list box; populated later by ShowDevices().
    wxArrayString m_DeviceListBoxStrings;
    m_DeviceListBox = new wxListBox( itemFrame1,
        ID_DEVICE_LISTBOX, wxDefaultPosition,
        wxDefaultSize, m_DeviceListBoxStrings,
        wxLB_SINGLE );
    // FIX: the original passed wxGROW|wxGROW|wxALL — the duplicated wxGROW
    // bit was redundant and has been removed.
    m_MainSizer->Add(m_DeviceListBox,
        wxGBPosition(0, 0), wxGBSpan(1, 1),
        wxGROW|wxALL, 5);

    // Canvas that displays the current Kinect frame (320x240).
    m_Canvas = new KinectCanvas( itemFrame1,
        ID_KINECT_CANVAS, wxDefaultPosition,
        wxSize(320, 240), wxSIMPLE_BORDER );
    m_MainSizer->Add(m_Canvas, wxGBPosition(0, 1),
        wxGBSpan(1, 1), wxALIGN_CENTER_HORIZONTAL|
        wxALIGN_CENTER_VERTICAL|wxALL, 5);

    // The canvas column/row absorbs any extra space.
    m_MainSizer->AddGrowableCol(1);
    m_MainSizer->AddGrowableRow(0);
}
...
// Worker-thread entry point (wxThreadHelper). Still a stub at this point in
// the article — it exits immediately.
wxThread::ExitCode KinectTestMainFrame::Entry()
{
return NULL;
}
...
// Canvas control that paints the current Kinect frame (or a placeholder).
class KinectCanvas: public wxWindow
{
DECLARE_DYNAMIC_CLASS( KinectCanvas )
DECLARE_EVENT_TABLE()
public :
// Two-phase construction: default ctor + Create().
KinectCanvas();
KinectCanvas(wxWindow* parent,
wxWindowID id = ID_KINECTCANVAS,
const wxPoint& pos = wxDefaultPosition,
const wxSize& size = wxSize(100, 100),
long style = wxSIMPLE_BORDER);
bool Create(wxWindow* parent,
wxWindowID id = ID_KINECTCANVAS,
const wxPoint& pos = wxDefaultPosition,
const wxSize& size = wxSize(100, 100),
long style = wxSIMPLE_BORDER);
~KinectCanvas();
void Init();
void CreateControls();
// Paint handler: draws m_CurrentImage centered, or a "No image" label.
void OnPaint( wxPaintEvent& event );
// The image is NOT owned by the canvas — the frame owns it and shares the
// pointer via SetCurrentImage().
wxImage * GetCurrentImage() const { return m_CurrentImage ; }
void SetCurrentImage(wxImage * value ) { m_CurrentImage = value ; }
wxBitmap GetBitmapResource( const wxString& name );
wxIcon GetIconResource( const wxString& name );
// Borrowed pointer to the frame's shared image buffer; may be NULL.
wxImage * m_CurrentImage;
};
#endif
...
// Run-time class info and event table for the canvas: repainting is the only
// event handled here.
IMPLEMENT_DYNAMIC_CLASS( KinectCanvas, wxWindow )
BEGIN_EVENT_TABLE( KinectCanvas, wxWindow )
EVT_PAINT( KinectCanvas::OnPaint )
END_EVENT_TABLE()
...
void KinectCanvas::OnPaint( wxPaintEvent& event )
{
    // Double-buffered paint DC to avoid flicker while frames arrive.
    wxAutoBufferedPaintDC dc(this);
    if (m_CurrentImage)
    {
        // Convert the shared RGB image to a bitmap and draw it centered.
        wxBitmap frame(*m_CurrentImage);
        int left = (dc.GetSize().GetWidth() - frame.GetWidth()) / 2;
        int top = (dc.GetSize().GetHeight() - frame.GetHeight()) / 2;
        dc.DrawBitmap(frame, left, top);
    }
    else
    {
        // No frame yet: clear the canvas and show a placeholder label.
        dc.SetBackground(wxBrush(GetBackgroundColour(), wxSOLID));
        dc.Clear();
        dc.DrawLabel(_( "No image" ), wxRect(dc.GetSize()),
            wxALIGN_CENTER_HORIZONTAL|wxALIGN_CENTER_VERTICAL);
    }
}
The generated classes also contain helper methods such as GetIconResource(),
GetBitmapResource()
and Init()
. All this is because the DialogBlocks form designer was used to create the application framework. This is a paid tool, but the functionality of the trial version is enough to create our application. The project must be built with the WXUSINGDLL
macro. This macro is also used when building wxWidgets dynamic libraries, and as a result, the settings of our project and wxWidgets will be the same (Fig. 3).wxUSE_NO_MANIFEST=1
to the preprocessor directives in the resource compiler settings. This is necessary in order to avoid conflicts of the manifest specified in the wxWidgets resource file ( % WXWIN% / include / msw / wx.rc ) and the manifest that Visual Studio automatically adds to the application.MSR_NuiGetDeviceCount()
function, which takes a pointer to an integer variable as a parameter, which, if successfully executed, will record the number of available sensors:NUIAPI HRESULT MSR_NuiGetDeviceCount(
int * pCount
);
INuiInstance::MSR_NuiGetPropsBlob()
method. This method takes as parameters:INDEX_UNIQUE_DEVICE_NAME
)virtual bool MSR_NuiGetPropsBlob(
MsrNui::NUI_PROPSINDEX Index,
void * pBlob,
DWORD * pdwInOutSize
);
#pragma once
#include <vector>
interface INuiInstance;
// Wraps the NUI device API: enumerates sensors, creates INuiInstance objects
// lazily and caches them together with their capture-stream handle.
class KinectHelper
{
protected :
// (instance, capture-stream handle) — the handle is NULL until grabbing starts.
typedef std::pair<INuiInstance *, HANDLE> InstanceInfo;
typedef std::vector<InstanceInfo> InstanceVector;
public :
KinectHelper();
virtual ~KinectHelper();
// Number of attached Kinect sensors (0 on API failure).
size_t GetDeviceCount();
// Human-readable unique name for the sensor at `index`.
wxString GetDeviceName(size_t index);
// True when an instance can be obtained (or created) for the device.
bool IsDeviceOK(size_t deviceIndex);
protected :
// Cache of created instances; released in Finalize().
InstanceVector m_Instances;
void Finalize();
// Returns the cached entry for the sensor, creating it on first use.
InstanceInfo * GetInstanceByIndex(size_t index);
};
#include <wx/wx.h>
#include "msr_nuiapi.h"
#include "KinectHelper.h"
// Nothing to do up front; sensor instances are created lazily in
// GetInstanceByIndex().
KinectHelper::KinectHelper()
{
}
// Shuts down open streams and releases all cached NUI instances.
KinectHelper::~KinectHelper()
{
Finalize();
}
size_t KinectHelper::GetDeviceCount()
{
    // Ask the NUI runtime how many Kinect sensors are attached; 0 on failure.
    // FIX: the original called MSR_NUIGetDeviceCount — wrong capitalization;
    // the beta SDK function is MSR_NuiGetDeviceCount.
    int result(0);
    if (FAILED(MSR_NuiGetDeviceCount(&result))) return 0;
    return (size_t)result;
}
KinectHelper::InstanceInfo * KinectHelper::GetInstanceByIndex(size_t index)
{
    // Return the cached entry for this sensor index, if we already have one.
    for (InstanceVector::iterator i = m_Instances.begin();
         i != m_Instances.end(); ++i)
    {
        if ((*i).first && (*i).first->InstanceIndex() == ( int )index)
            return &(*i);
    }
    // Not cached yet — create the instance now.
    // BUG FIX: the original tested the *last visited* pointer (`if (!instance)`),
    // so creation only ever happened when the vector was empty; a second sensor
    // could never be opened once the first one was cached.
    INuiInstance * instance = NULL;
    if (!FAILED(MSR_NuiCreateInstanceByIndex(( int )index, &instance)))
    {
        InstanceInfo info;
        info.first = instance;
        info.second = NULL;   // no capture stream open yet
        m_Instances.push_back(info);
        return &m_Instances.back();
    }
    return NULL;
}
void KinectHelper::Finalize()
{
    // Shut down every grabbing sensor and release all cached NUI instances.
    for (InstanceVector::const_iterator i = m_Instances.begin();
         i != m_Instances.end(); ++i)
    {
        if ((*i).first)
        {
            // Only sensors with an open capture stream were initialized.
            if ((*i).second)
                (*i).first->NuiShutdown();
            // FIX: destroy the instance unconditionally; the original leaked
            // instances that never started grabbing (second == NULL).
            MSR_NuiDestroyInstance((*i).first);
        }
    }
    // Prevent double-destruction if Finalize() is ever called twice.
    m_Instances.clear();
}
wxString KinectHelper::GetDeviceName(size_t index)
{
    // Query the sensor's unique device name via the props-blob API.
    BSTR result = NULL;
    // FIX: pdwInOutSize is an in/out parameter (per the API signature quoted
    // above) — it must be initialized with the receiving buffer's size; the
    // original passed it uninitialized.
    DWORD size = sizeof(result);
    InstanceInfo * info = GetInstanceByIndex(index);
    if (info != NULL)
    {
        INuiInstance * instance = info->first;
        if (instance != NULL && instance->MSR_NuiGetPropsBlob(
            MsrNui::INDEX_UNIQUE_DEVICE_NAME, &result, &size))
        {
            wxString name = result;
            SysFreeString(result);   // the blob hands us an owned BSTR
            return name;
        }
    }
    return wxT( "Unknown Kinect Sensor" );
}
bool KinectHelper::IsDeviceOK(size_t deviceIndex)
{
    // A device is usable when an instance can be obtained (or created) for it.
    InstanceInfo * info = GetInstanceByIndex(deviceIndex);
    return info != NULL;
}
The InstanceInfo
structure contains a pointer to an INuiInstance
instance, with which we can get the device name, as well as a handle to the stream in which the image is captured (which will be discussed later). The wxKinectHelper
class contains a vector of InstanceInfo
structures and methods for getting the number of devices, as well as the name of each device. In the destructor of the wxKinectHelper
class, the Finalize()
method is called, which closes all open image capture streams and then deletes all instances of INuiInstance
....
class KinectTestMainFrame: public wxFrame, public wxThreadHelper
{
...
void ShowDevices();
...
KinectHelper * m_KinectHelper;
}
...
...
void KinectTestMainFrame::ShowDevices()
{
size_t count = m_KinectHelper->GetDeviceCount();
m_DeviceListBox->Clear();
for (size_t i = 0; i < count; ++i)
{
int item = m_DeviceListBox->Append(
m_KinectHelper->GetDeviceName(i));
m_DeviceListBox->SetClientData(item, ( void *)i);
}
}
INuiInstance::NuiInitialize()
method, which takes a bit mask as a parameter that describes the list of subsystems of the device that we plan to use (depth sensor, camera, or search for players in the video).HRESULT NuiInitialize(
DWORD dwFlags,
);
INuiInstance:: NuiImageStreamOpen()
, which takes as parameters:NUI_IMAGE_STREAM_FRAME_LIMIT_MAXIMUM
is currently 4)NULL
, then the capture stream may not start)INuiInstance::NuiShutdown()
method, and after you finish working with the INuiInstance
instance, INuiInstance
need to free the memory using the MSR_NuiDestroyInstance()
function, in which parameter you must pass a pointer to an INuiInstance
object.INuiInstance:: NuiImageStreamOpen()
method and, as the first parameter, pass a value containing the NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX
or NUI_IMAGE_TYPE_DEPTH
. The buffer that was most suitable for subsequent processing was obtained using the NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX
flag. In the source code, such a call would look like this:if (FAILED(info->first->NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
NUI_IMAGE_RESOLUTION_320x240, 0,
3,
hDepthFrameEvent,
&info->second))) { /* Handle error here */ }
info->second
will be a handle to the image capture stream. hDepthFrameEvent
can create an hDepthFrameEvent
event hDepthFrameEvent
using the CreateEvent()
function.hDepthFrameEvent
event will be triggered. Waiting for this event can be implemented using the WaitForMultipleObjects()
function or WaitForSingleObject()
function.NuiImageStreamGetNextFrame()
method, which needs to be passed as parameters:virtual HRESULT NuiImageStreamGetNextFrame(
_In_ HANDLE hStream,
_In_ DWORD dwMillisecondsToWait,
_Deref_out_ CONST NUI_IMAGE_FRAME **ppcImageFrame
);
NUI_IMAGE_FRAME
we are currently most interested in the NuiImageBuffer *pFrameTexture
.LockRect()
method. The LockRect()
method takes
four parameters, of which two are used in the beta version of the API. The second parameter is a pointer to a KINECT_LOCKED_RECT
structure, in which, after successful completion of the function, the data for working with the buffer will be written. We pass NULL
as the third parameter, and 0 as the fourth.STDMETHODIMP LockRect(
UINT Level,
KINECT_LOCKED_RECT* pLockedRect,
CONST RECT* pRectUsuallyNull,
DWORD Flags
);
KINECT_LOCKED_RECT
structure KINECT_LOCKED_RECT
we are interested in the pBits
field, which contains the depth data itself. For each pixel of the image in the buffer 2 bytes are allocated. Judging by the FAQ on the official forum , the data format is as follows:NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX
flag, 12 lower bits are reserved for the depth value and the remaining 3 bits are used for the player index, the upper bit is not used.NUI_INITIALIZE_FLAG_USES_DEPTH
flag, 12 lower bits are allocated for the depth value, the rest are not used.USHORT RealDepth = (s & 0xfff8) >> 3;
BYTE l = 255 - (BYTE)(256*RealDepth/0x0fff);
RGBQUAD q;
q.rgbRed = q.rgbBlue = q.rgbGreen = l;
return q;
NuiImageStreamReleaseFrame()
method, which takes a stream handle and a pointer to an instance of NUI_IMAGE_FRAME
as parameters.NuiInitialize()
method.NuiImageStreamOpen()
method.NuiImageStreamOpen()
.NuiImageStreamGetNextFrame()
method.NuiImageBuffer::LockRect()
method.NuiImageStreamReleaseFrame()
method.NuiShutdown()
method.class KinectHelper
{
...
const wxSize & GetFrameSize();
BYTE * CreateDataBuffer();
void FreeDataBuffer(BYTE * data);
size_t GetDataBufferLength();
bool StartGrabbing(size_t deviceIndex, HANDLE hDepthFrameEvent);
bool ReadKinectFrame(size_t deviceIndex, BYTE * data);
bool IsDeviceOK(size_t deviceIndex);
bool IsGrabbingStarted(size_t deviceIndex);
static RGBQUAD Nui_ShortToQuad_Depth( USHORT s );
protected :
InstanceVector m_Instances;
wxSize m_FrameSize;
...
};
...
void ReadLockedRect(KINECT_LOCKED_RECT & LockedRect, int w, int h, BYTE * data)
{
    // Convert a locked 16-bit depth buffer into a packed 24-bit RGB buffer.
    // A zero pitch means the lock produced no usable data.
    if (LockedRect.Pitch == 0) return;

    // Depth pixels are 16-bit values read sequentially from the buffer.
    // NOTE(review): this assumes rows are tightly packed (pitch == w * 2) —
    // true for the 320x240 depth stream used here; verify for other modes.
    const USHORT * depthPixel = (const USHORT *)(BYTE*)LockedRect.pBits;
    for (int row = 0; row < h; ++row)
    {
        for (int col = 0; col < w; ++col)
        {
            RGBQUAD shade = KinectHelper::Nui_ShortToQuad_Depth(*depthPixel++);
            BYTE * out = data + (w * row + col) * 3;
            out[0] = shade.rgbRed;
            out[1] = shade.rgbGreen;
            out[2] = shade.rgbBlue;
        }
    }
}
...
BYTE * KinectHelper::CreateDataBuffer()
{
    // Allocate a zero-filled RGB frame buffer (width * height * 3 bytes).
    // Returns NULL when the allocation fails.
    size_t length = GetDataBufferLength();
    BYTE * result = (BYTE*)CoTaskMemAlloc(length);
    // FIX: guard against allocation failure before writing to the buffer;
    // the original memset a possibly NULL pointer.
    if (result != NULL)
        memset(result, 0, length);
    return result;
}
size_t KinectHelper::GetDataBufferLength()
{
    // Three bytes (RGB) per pixel of the configured frame size.
    const int width = m_FrameSize.GetWidth();
    const int height = m_FrameSize.GetHeight();
    return width * height * 3;
}
// Releases a buffer obtained from CreateDataBuffer().
// CoTaskMemFree accepts NULL, so no guard is needed.
void KinectHelper::FreeDataBuffer(BYTE * data)
{
CoTaskMemFree((LPVOID)data);
}
void KinectHelper::Finalize()
{
    // Shut down every grabbing sensor and release all cached NUI instances.
    for (InstanceVector::const_iterator i = m_Instances.begin();
         i != m_Instances.end(); ++i)
    {
        if ((*i).first)
        {
            // Only sensors with an open capture stream were initialized.
            if ((*i).second)
                (*i).first->NuiShutdown();
            // FIX: destroy the instance unconditionally; the original leaked
            // instances that never started grabbing (second == NULL).
            MSR_NuiDestroyInstance((*i).first);
        }
    }
    // Prevent double-destruction if Finalize() is ever called twice.
    m_Instances.clear();
}
bool KinectHelper::StartGrabbing(size_t deviceIndex, HANDLE hDepthFrameEvent)
{
    // Initialize the sensor and open its depth+player-index stream.
    // Returns true only when the stream was successfully opened.
    do
    {
        InstanceInfo * info = GetInstanceByIndex(deviceIndex);
        if (!info || !info->first) break ;
        if (FAILED(info->first->NuiInitialize(
            NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX))) break ;
        if (FAILED(info->first->NuiImageStreamOpen(
            NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
            NUI_IMAGE_RESOLUTION_320x240, 0,
            3,
            hDepthFrameEvent,
            &info->second))) break ;
        // FIX: the original always fell through to `return false`, so callers
        // could never detect that grabbing actually started.
        return true ;
    }
    while ( false );
    return false ;
}
bool KinectHelper::IsDeviceOK(size_t deviceIndex)
{
    // A device is usable when an instance can be obtained (or created) for it.
    InstanceInfo * info = GetInstanceByIndex(deviceIndex);
    return info != NULL;
}
bool KinectHelper::IsGrabbingStarted(size_t deviceIndex)
{
    // Grabbing is active when the device has both an instance and an open stream.
    InstanceInfo * info = GetInstanceByIndex(deviceIndex);
    if (info == NULL) return false;
    return info->first != NULL && info->second != NULL;
}
bool KinectHelper::ReadKinectFrame(size_t deviceIndex, BYTE * data)
{
    // Fetch one depth frame from the device and convert it into `data`.
    // Returns false when no frame was available within 200 ms.
    do
    {
        // NOTE: the original tested `deviceIndex < 0`, which is always false
        // for an unsigned size_t; the dead check has been removed.
        InstanceInfo * info = GetInstanceByIndex(deviceIndex);
        if (!info || !info->second) break ;
        const NUI_IMAGE_FRAME * pImageFrame;
        if (FAILED(NuiImageStreamGetNextFrame(
            info->second, 200, &pImageFrame))) break ;
        NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
        KINECT_LOCKED_RECT LockedRect;
        // FIX: check LockRect's result instead of reading a possibly unlocked
        // buffer; the frame is released in every case.
        if (!FAILED(pTexture->LockRect( 0, &LockedRect, NULL, 0 )))
        {
            ReadLockedRect(LockedRect, m_FrameSize.GetWidth(),
                m_FrameSize.GetHeight(), data);
        }
        NuiImageStreamReleaseFrame(info->second, pImageFrame);
        return true ;
    }
    while ( false );
    return false ;
}
RGBQUAD KinectHelper::Nui_ShortToQuad_Depth( USHORT s )
{
    // Strip the 3-bit player index (low bits) to get the raw depth value,
    // then map depth to grayscale: near objects bright, far objects dark.
    USHORT depth = (s & 0xfff8) >> 3;
    BYTE intensity = 255 - (BYTE)(256 * depth / 0x0fff);
    RGBQUAD pixel;
    pixel.rgbRed = intensity;
    pixel.rgbGreen = intensity;
    pixel.rgbBlue = intensity;
    return pixel;
}
class KinectTestMainFrame: public wxFrame, public wxThreadHelper
{
...
void OnDEVICELISTBOXSelected( wxCommandEvent& event );
...
void ShowDevices();
void StopGrabbing();
HANDLE m_NewDepthFrameEvent;
KinectHelper * m_KinectHelper;
BYTE * m_pDepthBuffer;
wxImage * m_CurrentImage;
int m_SelectedDeviceIndex;
};
...
// Route selection events from the device list box to the handler that starts
// grabbing from the chosen sensor.
BEGIN_EVENT_TABLE( KinectTestMainFrame, wxFrame )
EVT_LISTBOX( ID_DEVICE_LISTBOX, KinectTestMainFrame::OnDEVICELISTBOXSelected )
END_EVENT_TABLE()
...
void KinectTestMainFrame::Init()
{
m_NewDepthFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
m_KinectHelper = new KinectHelper;
m_pDepthBuffer = m_KinectHelper->CreateDataBuffer();
m_CurrentImage = new wxImage(
m_KinectHelper->GetFrameSize().GetWidth(),
m_KinectHelper->GetFrameSize().GetHeight(),
m_pDepthBuffer, true );
m_SelectedDeviceIndex = -1;
m_MainSizer = NULL;
m_DeviceListBox = NULL;
m_Canvas = NULL;
}
...
KinectTestMainFrame::~KinectTestMainFrame()
{
    // Stop the worker thread before tearing down the buffers it uses.
    StopGrabbing();
    wxDELETE(m_CurrentImage);
    m_KinectHelper->FreeDataBuffer(m_pDepthBuffer);
    wxDELETE(m_KinectHelper);
    // FIX: the event created in Init() was never closed (handle leak).
    if (m_NewDepthFrameEvent != NULL)
        CloseHandle(m_NewDepthFrameEvent);
}
...
wxThread::ExitCode KinectTestMainFrame::Entry()
{
    // Worker loop: wait (up to 100 ms at a time) for the "new depth frame"
    // event, read the frame into the shared buffer, then repaint the canvas.
    while (!GetThread()->TestDestroy())
    {
        int waitResult = WaitForMultipleObjects(
            1, &m_NewDepthFrameEvent, FALSE, 100);
        if (waitResult == 0)
        {
            // Serialize buffer access with the GUI thread.
            wxCriticalSectionLocker lock (m_CS);
            m_KinectHelper->ReadKinectFrame(
                m_SelectedDeviceIndex, m_pDepthBuffer);
            m_Canvas->Refresh();
        }
        // Timeouts and wait failures simply loop and re-check TestDestroy().
    }
    return NULL;
}
...
void KinectTestMainFrame::OnDEVICELISTBOXSelected( wxCommandEvent& event )
{
    // User picked a device: stop any current capture and start grabbing
    // from the newly selected sensor.
    do
    {
        StopGrabbing();
        size_t deviceIndex =
            (size_t)m_DeviceListBox->GetClientData( event.GetInt());
        // FIX: valid indices are [0, count) — the original used `>` and so
        // accepted deviceIndex == GetDeviceCount(). The `< 0` test was dead
        // code for an unsigned index and has been removed.
        if (deviceIndex >= m_KinectHelper->GetDeviceCount()) break ;
        m_SelectedDeviceIndex = deviceIndex;
        if (!m_KinectHelper->IsDeviceOK(deviceIndex)) break ;
        if (!m_KinectHelper->IsGrabbingStarted(deviceIndex))
        {
            m_KinectHelper->StartGrabbing(
                deviceIndex, m_NewDepthFrameEvent);
            if (CreateThread() != wxTHREAD_NO_ERROR) break ;
            // Hand the shared image to the canvas, then start polling frames.
            m_Canvas->SetCurrentImage(m_CurrentImage);
            GetThread()->Run();
        }
    }
    while ( false );
}
// Asks the worker thread (managed by wxThreadHelper) to stop, and releases
// the wxThread object so CreateThread() can be called again later.
void KinectTestMainFrame::StopGrabbing()
{
if (GetThread())
{
if (GetThread()->IsAlive())
{
// Delete() requests cooperative termination (TestDestroy() returns true).
GetThread()->Delete();
}
// m_kind / m_thread are protected members inherited from wxThreadHelper.
if (m_kind == wxTHREAD_JOINABLE)
{
if (GetThread()->IsAlive())
{
// Joinable threads must be waited on to release their resources.
GetThread()->Wait();
}
wxDELETE(m_thread);
}
else
{
// Detached threads delete themselves; just forget the pointer.
m_thread = NULL;
}
}
}
When the main frame is initialized, the wxKinectHelper
object allocates memory for the depth buffer, in accordance with the resolution (320x240x24). The allocated memory is then transferred as an RGB buffer to the m_CurrentImage
object. When a device is selected
in the list of available devices, the image capture stream from the device is launched, and the m_CurrentImage
object is
associated with the canvas. The Entry()
method waits for a new image from the device. When the image is available, the RGB buffer is filled with new values, and then the canvas is redrawn.NUI_INITIALIZE_FLAG_USES_COLOR
flag when calling the NuiInitialize()
method, as well as specify a resolution of at least 640x480 when you call the NuiImageStreamOpen()
method.if (FAILED(info->first->NuiInitialize(
NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX|
NUI_INITIALIZE_FLAG_USES_COLOR))) break ;
if (FAILED(info->first->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR,
NUI_IMAGE_RESOLUTION_640x480, 0,
3,
hDepthFrameEvent,
&info->second))) break ;
KINECT_LOCKED_RECT
structure KINECT_LOCKED_RECT
contained in the RGBA format (the RGBQUAD
structure, available in the SDK, is quite suitable for data access). Thus, the code to get the RGB buffer will look like this:if ( LockedRect.Pitch != 0 )
{
BYTE * pBuffer = (BYTE*) LockedRect.pBits;
for ( int y = 0 ; y < h ; y++ )
{
for ( int x = 0 ; x < w ; x++ )
{
RGBQUAD * quad = ((RGBQUAD*)pBuffer) + x;
int offset = (w * y + x) * 3;
data[offset + 0] = quad->rgbRed;
data[offset + 1] = quad->rgbGreen;
data[offset + 2] = quad->rgbBlue;
}
pBuffer += LockedRect.Pitch;
}
}
NuiInitialize()
needs to pass a flag containing the value NUI_INITIALIZE_FLAG_USES_SKELETON
, and then call NuiSkeletonTrackingEnable()
, which as a first parameter to pass the event descriptor that will be called when receiving a new piece of data with segments, and the second parameter is a set of flags (the beta version of the SDK ignores this parameter, so you can pass 0).NuiSkeletonTrackingDisable()
method.if (FAILED(info->first->NuiSkeletonTrackingEnable(hSkeletonFrameEvent, 0)))
{ /* error */ };
NuiSkeletonGetNextFrame()
method, which takes as parameters:NUI_SKELETON_FRAME
structure, which, if the function completes successfully, will contain a pointer to a data buffer.NuiSkeletonGetNextFrame()
method, we get an instance of the NUI_SKELETON_FRAME structure. Let's look at it in more detail.struct _NUI_SKELETON_FRAME {
LARGE_INTEGER liTimeStamp;
DWORD dwFrameNumber;
DWORD dwFlags;
Vector4 vFloorClipPlane;
Vector4 vNormalToGravity;
NUI_SKELETON_DATA SkeletonData[NUI_SKELETON_COUNT];
} NUI_SKELETON_FRAME;
liTimeStamp
- the date / time of receiving the depth buffer from which the skeleton segments were obtained.dwFlag
is a bitmask containing flags.vFloorClipPlane
- floor coordinates (calculated inside the library), which were used to cut off everything that is below the floor.vNormalToGravity
is a normal vector.dwFrameNumber
- frame number.SkeletonData
is an array of NUI_SKELETON_DATA
structures, each of which contains data about the segmentation of the skeleton of one player.NUI_SKELETON_FRAME
structure, a limited number of players are supported (in the current SDK version, the value of NUI_SKELETON_COUNT
is 6).NUI_SKELETON_DATA
:struct _NUI_SKELETON_DATA {
NUI_SKELETON_TRACKING_STATE eTrackingState;
DWORD dwTrackingID;
DWORD dwEnrollmentIndex;
DWORD dwUserIndex;
Vector4 Position;
Vector4 SkeletonPositions[NUI_SKELETON_POSITION_COUNT];
NUI_SKELETON_POSITION_TRACKING_STATE
eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_COUNT];
DWORD dwQualityFlags;
} NUI_SKELETON_DATA;
eTrackingState
is the value from the NUI_SKELETON_TRACKING_STATE
enumeration. It may indicate that the player was not found, only the coordinates of the player were found (without skeleton segments), or that the coordinates and skeletal segments were found.dwEnrollmentIndex
- judging by the documentation (p. 20), is not used in the current version.dwUserIndex
- in the current version of the SDK is always equal to XUSER_INDEX_NONE
.dwTrackingID
- number of the player being tracked.Position
- the coordinates of the player.SkeletonPositions
- the list of coordinates of the joints of the skeleton segmentseSkeletonPositionTrackingState
— A list of flags that indicate whether skeletal segment junctions are found.NUI_SKELETON_DATA
structure, the number of supported articulations of the segments is limited to a number equal to NUI_SKELETON_POSITION_COUNT
....
// Per-device bundle of the three NUI stream/event handles; NULL means the
// corresponding stream (depth / color / skeleton) is not open.
struct KinectStreams
{
HANDLE hDepth;
HANDLE hColor;
HANDLE hSkeleton;
KinectStreams() : hDepth(NULL), hColor(NULL), hSkeleton(NULL) {}
};
...
void KinectHelper::Finalize()
{
for (InstanceVector::const_iterator i = m_Instances.begin();
i != m_Instances.end(); i++)
{
if ((*i).first)
{
...
if ((*i).second.hSkeleton != NULL)
{
(*i).first->NuiSkeletonTrackingDisable();
}
MSR_NuiDestroyInstance((*i).first);
}
}
}
bool KinectHelper::StartGrabbing(size_t deviceIndex,
HANDLE hDepthFrameEvent,
HANDLE hColorFrameEvent,
HANDLE hSkeletonFrameEvent)
{
do
{
if (hDepthFrameEvent == NULL &&
hColorFrameEvent == NULL &&
hSkeletonFrameEvent == NULL) break ;
InstanceInfo * info = GetInstanceByIndex(deviceIndex);
if (!info || !info->first) break ;
if (FAILED(info->first->NuiInitialize(
NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX |
NUI_INITIALIZE_FLAG_USES_COLOR |
NUI_INITIALIZE_FLAG_USES_SKELETON))) break ;
...
if (hSkeletonFrameEvent != NULL)
{
if (FAILED(info->first->NuiSkeletonTrackingEnable(
hSkeletonFrameEvent, 0))) break ;
info->second.hSkeleton = hSkeletonFrameEvent;
}
}
while ( false );
return false ;
}
void * KinectHelper::ReadSkeletonFrame(size_t deviceIndex)
{
    // Fetch the next skeleton frame. The caller owns the returned
    // NUI_SKELETON_FRAME; NULL is returned on failure.
    // NOTE: the original `deviceIndex < 0` test was dead code for size_t.
    InstanceInfo * info = GetInstanceByIndex(deviceIndex);
    // FIX: gate on the skeleton stream handle — the original checked hColor,
    // so skeleton reads depended on the *color* stream being open.
    if (!info || !info->first || !info->second.hSkeleton) return NULL;
    NUI_SKELETON_FRAME * frame = new NUI_SKELETON_FRAME;
    if (FAILED(info->first->NuiSkeletonGetNextFrame(200, frame)))
    {
        // FIX: the original leaked `frame` on this failure path.
        delete frame;
        return NULL;
    }
    return frame;
}
NUI_SKELETON_FRAME
, you need to send them for preprocessing. The NuiTransformSmooth()
method performs smoothing of the skeleton data
- it filters the coordinates of the segments in order to avoid jerks and sudden movements. As parameters, the NuiTransformSmooth()
method takes a pointer to a NUI_SKELETON_FRAME
structure and, optionally, a pointer to a NUI_TRANSFORM_SMOOTH_PARAMETERS
object containing the preprocessing parameters.HRESULT NuiTransformSmooth(
NUI_SKELETON_FRAME *pSkeletonFrame,
CONST NUI_TRANSFORM_SMOOTH_PARAMETERS *pSmoothingParams
);
NuiTransformSkeletonToDepthImageF()
method, which takes as parameters:Vector4
structure.VOID NuiTransformSkeletonToDepthImageF(
Vector4 vPoint,
_Out_ FLOAT *pfDepthX,
_Out_ FLOAT *pfDepthY
);
#pragma once
#include <wx/wx.h>
class SkeletonPainterImpl;
// Facade that hides the platform-specific skeleton renderer.
class SkeletonPainter
{
public :
SkeletonPainter();
~SkeletonPainter();
// Draws the skeletons contained in `data` (opaque pointer to the platform's
// frame structure — NUI_SKELETON_FRAME on MSW) onto `dc`.
void DrawSkeleton(wxDC & dc, void * data);
private :
// Owned; deleted in the destructor. May be NULL on non-MSW platforms.
// NOTE(review): the class is copyable but owns a raw pointer — copying
// would double-delete m_Impl; consider disallowing copies.
SkeletonPainterImpl * m_Impl;
};
#include "SkeletonPainter.h"
#if defined(__WXMSW__)
#include "SkeletonPainterImplMSW.h"
#endif
// Picks the platform implementation; only MSW is available so far, so the
// painter is a no-op elsewhere (m_Impl == NULL).
SkeletonPainter::SkeletonPainter()
{
#if defined(__WXMSW__)
m_Impl = new SkeletonPainterImplMSW;
#else
m_Impl = NULL;
#endif
}
SkeletonPainter::~SkeletonPainter()
{
// wxDELETE deletes and NULLs the pointer; safe when m_Impl is already NULL.
wxDELETE(m_Impl);
}
void SkeletonPainter::DrawSkeleton(wxDC & dc, void * data)
{
    // Forward to the platform implementation when one exists; otherwise no-op.
    if (!m_Impl) return;
    m_Impl->DrawSkeleton(dc, data);
}
#pragma once
#include <wx/wx.h>
// Interface for platform-specific skeleton rendering back-ends.
class SkeletonPainterImpl
{
public :
virtual ~SkeletonPainterImpl() {}
// `data` is an opaque pointer to the platform's skeleton frame structure.
virtual void DrawSkeleton(wxDC & dc, void * data) = 0;
};
#pragma once
#include "SkeletonPainterImpl.h"
#include "msr_nuiapi.h"
// MSW implementation: renders NUI_SKELETON_FRAME data with wxDC calls.
class SkeletonPainterImplMSW : public SkeletonPainterImpl
{
public :
~SkeletonPainterImplMSW();
// `data` must point to a NUI_SKELETON_FRAME.
void DrawSkeleton(wxDC & dc, void * data);
private :
// Draws one tracked skeleton (index = slot in SkeletonData).
void Nui_DrawSkeleton(wxDC & dc, NUI_SKELETON_DATA * data, size_t index);
// Draws a polyline through the joints named by the trailing vararg list.
void Nui_DrawSkeletonSegment(wxDC & dc, wxPoint * points, int numJoints, ... );
// Per-player pens. NOTE(review): currently unused — all segments are drawn
// with wxBLUE_PEN in Nui_DrawSkeletonSegment.
static wxPen m_SkeletonPen[6];
};
#include "SkeletonPainterImplMSW.h"
wxPen SkeletonPainterImplMSW::m_SkeletonPen[6] =
{
wxPen(wxColor(255, 0, 0), wxSOLID),
...
};
// No owned resources; the pens are static.
SkeletonPainterImplMSW::~SkeletonPainterImplMSW()
{
}
void SkeletonPainterImplMSW::DrawSkeleton(wxDC & dc, void * data)
{
    // Smooth and render every tracked skeleton in the frame onto `dc`.
    NUI_SKELETON_FRAME * frame =
        reinterpret_cast<NUI_SKELETON_FRAME*>(data);
    if (!frame) return;

    // Bail out early when no skeleton is currently tracked.
    bool anyTracked = false;
    for ( int i = 0 ; i < NUI_SKELETON_COUNT ; i++ )
    {
        if (frame->SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED)
        {
            anyTracked = true;
            break;
        }
    }
    if (!anyTracked) return;

    // Filter joint coordinates to suppress jitter (default parameters).
    NuiTransformSmooth(frame, NULL);

    for (size_t i = 0 ; i < NUI_SKELETON_COUNT ; i++ )
    {
        if (frame->SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED)
            Nui_DrawSkeleton(dc, &frame->SkeletonData[i], i );
    }
}
void SkeletonPainterImplMSW::Nui_DrawSkeleton(wxDC & dc,
    NUI_SKELETON_DATA * data, size_t index)
{
    // Project every joint from skeleton space into DC pixel coordinates.
    wxPoint joints[NUI_SKELETON_POSITION_COUNT];
    wxSize canvasSize = dc.GetSize();
    for (size_t joint = 0; joint < NUI_SKELETON_POSITION_COUNT; joint++)
    {
        float fx(0), fy(0);
        NuiTransformSkeletonToDepthImageF(
            data->SkeletonPositions[joint], &fx, &fy);
        // Scale the normalized coordinates to the DC size, rounding to nearest.
        joints[joint].x = ( int ) ( fx * canvasSize.GetWidth() + 0.5f );
        joints[joint].y = ( int ) ( fy * canvasSize.GetHeight() + 0.5f );
    }

    // Torso and head.
    Nui_DrawSkeletonSegment(dc, joints, 4,
        NUI_SKELETON_POSITION_HIP_CENTER,
        NUI_SKELETON_POSITION_SPINE,
        NUI_SKELETON_POSITION_SHOULDER_CENTER,
        NUI_SKELETON_POSITION_HEAD);
    // Left arm.
    Nui_DrawSkeletonSegment(dc, joints, 5,
        NUI_SKELETON_POSITION_SHOULDER_CENTER,
        NUI_SKELETON_POSITION_SHOULDER_LEFT,
        NUI_SKELETON_POSITION_ELBOW_LEFT,
        NUI_SKELETON_POSITION_WRIST_LEFT,
        NUI_SKELETON_POSITION_HAND_LEFT);
    // Right arm.
    Nui_DrawSkeletonSegment(dc, joints, 5,
        NUI_SKELETON_POSITION_SHOULDER_CENTER,
        NUI_SKELETON_POSITION_SHOULDER_RIGHT,
        NUI_SKELETON_POSITION_ELBOW_RIGHT,
        NUI_SKELETON_POSITION_WRIST_RIGHT,
        NUI_SKELETON_POSITION_HAND_RIGHT);
    // Left leg.
    Nui_DrawSkeletonSegment(dc, joints, 5,
        NUI_SKELETON_POSITION_HIP_CENTER,
        NUI_SKELETON_POSITION_HIP_LEFT,
        NUI_SKELETON_POSITION_KNEE_LEFT,
        NUI_SKELETON_POSITION_ANKLE_LEFT,
        NUI_SKELETON_POSITION_FOOT_LEFT);
    // Right leg.
    Nui_DrawSkeletonSegment(dc, joints, 5,
        NUI_SKELETON_POSITION_HIP_CENTER,
        NUI_SKELETON_POSITION_HIP_RIGHT,
        NUI_SKELETON_POSITION_KNEE_RIGHT,
        NUI_SKELETON_POSITION_ANKLE_RIGHT,
        NUI_SKELETON_POSITION_FOOT_RIGHT);
}
void SkeletonPainterImplMSW::Nui_DrawSkeletonSegment(wxDC & dc,
    wxPoint * points, int numJoints, ...)
{
    // Draw one polyline through the joint indices given as varargs.
    wxPoint polyline[NUI_SKELETON_POSITION_COUNT];
    va_list args;
    va_start(args, numJoints);
    for ( int n = 0; n < numJoints; ++n)
    {
        // Each vararg names a joint; copy its projected position.
        NUI_SKELETON_POSITION_INDEX jointIndex =
            va_arg(args, NUI_SKELETON_POSITION_INDEX);
        polyline[n] = points[jointIndex];
    }
    va_end(args);
    // NOTE(review): m_SkeletonPen is never used — every segment is drawn with
    // the stock blue pen, exactly as in the original code.
    dc.SetPen(*wxBLUE_PEN);
    dc.DrawLines(numJoints, polyline);
}
SkeletonPainter
class in an application will look like this:...
class KinectTestMainFrame: public wxFrame, public wxThreadHelper
{
...
HANDLE m_NewSkeletonFrameEvent;
wxImage m_SkeletonImage;
...
};
...
...
wxThread::ExitCode KinectTestMainFrame::Entry()
{
HANDLE eventHandles[3];
eventHandles[0] = m_NewDepthFrameEvent;
eventHandles[1] = m_NewColorFrameEvent;
eventHandles[2] = m_NewSkeletonFrameEvent;
SkeletonPainter painter;
while (!GetThread()->TestDestroy())
{
int mEventIndex = WaitForMultipleObjects(
_countof(eventHandles), eventHandles, FALSE, 100);
switch (mEventIndex)
{
...
case 2:
{
void * frame = m_KinectHelper->ReadSkeletonFrame(
m_SelectedDeviceIndex);
if (frame)
{
wxBitmap bmp(
m_SkeletonImage.GetWidth(),
m_SkeletonImage.GetHeight());
wxMemoryDC dc(bmp);
painter.DrawSkeleton(dc, frame);
m_KinectHelper->ReleaseSkeletonFrame(frame);
dc.SelectObject(wxNullBitmap);
m_SkeletonImage = bmp.ConvertToImage();
m_SkeletonCanvas->Refresh();
}
}
break ;
default :
break ;
}
}
return NULL;
}
KinectHelper
class to obtaining a list of devices and creating instances of the grabber:#pragma once
#include <wx/wx.h>
// Abstract grabber: one instance per Kinect device. Frame availability is
// reported to m_Handler via the KINECT_*_FRAME_RECEIVED events declared below.
class KinectGrabberBase
{
public :
KinectGrabberBase(wxEvtHandler * handler);
virtual ~KinectGrabberBase();
// Copy the latest depth frame (converted to RGB) into `data`; false on failure.
virtual bool GrabDepthFrame(unsigned char * data) = 0;
// Copy the latest color frame into `data`; false on failure.
virtual bool GrabColorFrame(unsigned char * data) = 0;
// Opaque pointer to the latest skeleton frame, or NULL when unavailable.
virtual void * GrabSkeletonFrame() = 0;
// Start/stop the capture worker.
virtual bool Start() = 0;
virtual bool Stop() = 0;
virtual bool IsStarted() = 0;
const wxSize & GetDepthFrameSize();
const wxSize & GetColorFrameSize();
protected :
wxSize m_DepthFrameSize;
wxSize m_ColorFrameSize;
// Not owned; receives the frame-received events. May be NULL.
wxEvtHandler * m_Handler;
};
BEGIN_DECLARE_EVENT_TYPES()
DECLARE_LOCAL_EVENT_TYPE(KINECT_DEPTH_FRAME_RECEIVED, -1)
DECLARE_LOCAL_EVENT_TYPE(KINECT_COLOR_FRAME_RECEIVED, -1)
DECLARE_LOCAL_EVENT_TYPE(KINECT_SKELETON_FRAME_RECEIVED, -1)
END_DECLARE_EVENT_TYPES()
#include "KinectGrabberBase.h"
DEFINE_EVENT_TYPE(KINECT_DEPTH_FRAME_RECEIVED)
DEFINE_EVENT_TYPE(KINECT_COLOR_FRAME_RECEIVED)
DEFINE_EVENT_TYPE(KINECT_SKELETON_FRAME_RECEIVED)
...
#pragma once
#include "KinectGrabberBase.h"
#include "MSR_NuiApi.h"
class KinectGrabberMSW : public KinectGrabberBase, public wxThreadHelper
{
...
private :
virtual wxThread::ExitCode Entry();
BYTE * CreateDepthDataBuffer();
BYTE * CreateColorDataBuffer();
size_t GetDepthDataBufferLength();
size_t GetColorDataBufferLength();
void FreeDataBuffer(BYTE * data);
bool ReadDepthFrame();
bool ReadColorFrame();
bool ReadSkeletonFrame();
void ReadDepthLockedRect(KINECT_LOCKED_RECT & LockedRect,
int w, int h, BYTE * data);
void ReadColorLockedRect(KINECT_LOCKED_RECT & LockedRect,
int w, int h, BYTE * data);
static RGBQUAD Nui_ShortToQuad_Depth( USHORT s );
void ResetEvents();
void StopThread();
bool CopyLocalBuffer(BYTE * src, BYTE * dst, size_t count);
HANDLE m_NewDepthFrameEvent;
HANDLE m_NewColorFrameEvent;
HANDLE m_NewSkeletonFrameEvent;
HANDLE m_DepthStreamHandle;
HANDLE m_ColorStreamHandle;
BYTE * m_DepthBuffer;
BYTE * m_ColorBuffer;
INuiInstance * m_Instance;
size_t m_DeviceIndex;
NUI_SKELETON_FRAME m_SkeletonFrame;
};
#include "KinectGrabberMSW.h"
KinectGrabberMSW::KinectGrabberMSW(wxEvtHandler * handler, size_t deviceIndex)
: KinectGrabberBase(handler), m_DeviceIndex(deviceIndex), m_Instance(NULL)
{
    // Allocate conversion buffers and clear all event/stream handles, then
    // create and initialize the NUI instance for this device.
    m_DepthBuffer = CreateDepthDataBuffer();
    m_ColorBuffer = CreateColorDataBuffer();
    // FIX: the stream handles were never initialized in the original, so
    // Start()-failure paths could read garbage handles.
    m_DepthStreamHandle = NULL;
    m_ColorStreamHandle = NULL;
    ResetEvents();
    do
    {
        if (FAILED(MSR_NuiCreateInstanceByIndex(( int )m_DeviceIndex, &m_Instance))) break ;
        if (FAILED(m_Instance->NuiInitialize(
            NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX |
            NUI_INITIALIZE_FLAG_USES_COLOR |
            NUI_INITIALIZE_FLAG_USES_SKELETON)))
        {
            // FIX: on initialization failure the original kept a non-NULL but
            // uninitialized instance; release it so Start() sees NULL.
            MSR_NuiDestroyInstance(m_Instance);
            m_Instance = NULL;
            break ;
        }
    }
    while ( false );
}
...
void * KinectGrabberMSW::GrabSkeletonFrame()
{
    // The skeleton frame is only meaningful while the worker thread is alive
    // and skeleton tracking has an event to signal on.
    if (!GetThread() || !GetThread()->IsAlive()) return NULL;
    if (!m_Instance || !m_NewSkeletonFrameEvent) return NULL;
    return &m_SkeletonFrame;
}
// Opens the depth, color and skeleton streams and launches the worker thread.
// Returns true only when everything (thread, events, all three streams) is up.
// NOTE(review): on the failure paths below the freshly created events (and a
// possibly created thread) are not cleaned up here — presumably
// ResetEvents()/StopThread() handle that elsewhere; verify, otherwise a
// failed Start() leaks three event handles per call.
bool KinectGrabberMSW::Start()
{
do
{
if (!m_Instance) break ;
if (GetThread() && GetThread()->IsAlive()) break ;
if (CreateThread() != wxTHREAD_NO_ERROR) break ;
// Manual-reset events signalled by the runtime when frames are ready.
m_NewDepthFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
m_NewColorFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
m_NewSkeletonFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
// Depth + player index stream at 320x240, 3 buffered frames.
if (FAILED(m_Instance->NuiImageStreamOpen(
NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
NUI_IMAGE_RESOLUTION_320x240, 0,
3,
m_NewDepthFrameEvent,
&m_DepthStreamHandle))) break ;
// Color stream at 640x480, 4 buffered frames.
if (FAILED(m_Instance->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR,
NUI_IMAGE_RESOLUTION_640x480, 0,
4,
m_NewColorFrameEvent,
&m_ColorStreamHandle))) break ;
if (FAILED(m_Instance->NuiSkeletonTrackingEnable(
m_NewSkeletonFrameEvent, 0))) break ;
GetThread()->Run();
return true ;
}
while ( false );
return false ;
}
...
wxThread::ExitCode KinectGrabberMSW::Entry()
{
    // Worker loop: wait (100 ms at a time) for any of the three NUI events
    // and dispatch to the matching frame reader.
    HANDLE eventHandles[3] =
    {
        m_NewDepthFrameEvent,
        m_NewColorFrameEvent,
        m_NewSkeletonFrameEvent
    };
    while (!GetThread()->TestDestroy())
    {
        int signalled = WaitForMultipleObjects(
            _countof(eventHandles), eventHandles, FALSE, 100);
        if (signalled == 0) ReadDepthFrame();
        else if (signalled == 1) ReadColorFrame();
        else if (signalled == 2) ReadSkeletonFrame();
        // Any other result (timeout/failure) just loops and re-checks.
    }
    return NULL;
}
...
// Stops the capture thread and clears wxThreadHelper's bookkeeping so a
// later Start() can create a fresh thread.
// NOTE(review): this reaches into wxThreadHelper's protected m_kind/m_thread
// members directly — confirm this matches the wxWidgets version in use.
void KinectGrabberMSW::StopThread()
{
if (GetThread())
{
if (GetThread()->IsAlive())
{
// Ask the thread to exit at its next TestDestroy() check.
GetThread()->Delete();
}
if (m_kind == wxTHREAD_JOINABLE)
{
// Joinable threads must be joined and deleted by the owner.
if (GetThread()->IsAlive())
{
GetThread()->Wait();
}
wxDELETE(m_thread);
}
else
{
// Detached threads delete themselves; just drop the stale pointer.
m_thread = NULL;
}
}
// Process events the thread may have posted before it went away.
wxYield();
}
// Fetches the next depth frame from the NUI stream, converts it into the
// RGB depth buffer, and notifies the handler (if any) with a
// KINECT_DEPTH_FRAME_RECEIVED event carrying the device index.
// Returns false when the sensor is missing or no frame is available within
// the 200 ms timeout.
bool KinectGrabberMSW::ReadDepthFrame()
{
    do
    {
        // BUGFIX: dropped the tautological "m_DeviceIndex < 0" test —
        // m_DeviceIndex is a size_t, so it can never be negative.
        if (!m_Instance) break;
        const NUI_IMAGE_FRAME * pImageFrame;
        if (FAILED(NuiImageStreamGetNextFrame(
            m_DepthStreamHandle, 200, &pImageFrame))) break;
        NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
        KINECT_LOCKED_RECT LockedRect;
        pTexture->LockRect( 0, &LockedRect, NULL, 0 );
        // Convert the 16-bit depth+player-index pixels into RGB bytes.
        ReadDepthLockedRect(LockedRect,
            m_DepthFrameSize.GetWidth(),
            m_DepthFrameSize.GetHeight(),
            m_DepthBuffer);
        NuiImageStreamReleaseFrame(m_DepthStreamHandle, pImageFrame);
        if (m_Handler)
        {
            // AddPendingEvent is thread-safe, so it is fine to post from
            // the capture thread to the GUI thread.
            wxCommandEvent e(KINECT_DEPTH_FRAME_RECEIVED, wxID_ANY);
            e.SetInt(m_DeviceIndex);
            m_Handler->AddPendingEvent(e);
        }
        return true;
    }
    while (false);
    return false;
}
// Fetches the next color frame from the NUI stream, converts it into the
// RGB color buffer, and notifies the handler (if any) with a
// KINECT_COLOR_FRAME_RECEIVED event carrying the device index.
// Returns false when the sensor is missing or no frame is available within
// the 200 ms timeout.
bool KinectGrabberMSW::ReadColorFrame()
{
    do
    {
        // BUGFIX: dropped the tautological "m_DeviceIndex < 0" test —
        // m_DeviceIndex is a size_t, so it can never be negative.
        if (!m_Instance) break;
        const NUI_IMAGE_FRAME * pImageFrame;
        if (FAILED(NuiImageStreamGetNextFrame(
            m_ColorStreamHandle, 200, &pImageFrame))) break;
        NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
        KINECT_LOCKED_RECT LockedRect;
        pTexture->LockRect( 0, &LockedRect, NULL, 0 );
        // Repack the 32-bit frame into the 24-bit RGB buffer.
        ReadColorLockedRect(LockedRect,
            m_ColorFrameSize.GetWidth(),
            m_ColorFrameSize.GetHeight(),
            m_ColorBuffer);
        NuiImageStreamReleaseFrame(m_ColorStreamHandle, pImageFrame);
        if (m_Handler)
        {
            // AddPendingEvent is thread-safe, so it is fine to post from
            // the capture thread to the GUI thread.
            wxCommandEvent e(KINECT_COLOR_FRAME_RECEIVED, wxID_ANY);
            e.SetInt(m_DeviceIndex);
            m_Handler->AddPendingEvent(e);
        }
        return true;
    }
    while (false);
    return false;
}
// Fetches the next skeleton frame into m_SkeletonFrame and notifies the
// handler (if any) with a KINECT_SKELETON_FRAME_RECEIVED event carrying
// the device index. Returns false when the sensor is missing or no frame
// arrives within the 200 ms timeout.
bool KinectGrabberMSW::ReadSkeletonFrame()
{
    do
    {
        // BUGFIX: dropped the tautological "m_DeviceIndex < 0" test —
        // m_DeviceIndex is a size_t, so it can never be negative.
        if (!m_Instance) break;
        if (FAILED(m_Instance->NuiSkeletonGetNextFrame(200, &m_SkeletonFrame))) break;
        if (m_Handler)
        {
            // The event only announces availability; consumers pull the
            // data via GrabSkeletonFrame().
            wxCommandEvent e(KINECT_SKELETON_FRAME_RECEIVED, wxID_ANY);
            e.SetInt(m_DeviceIndex);
            m_Handler->AddPendingEvent(e);
        }
        return true;
    }
    while (false);
    return false;
}
// Converts a locked 16-bit depth+player-index frame into the packed 24-bit
// RGB buffer "data" (w*h*3 bytes), mapping each depth sample to a color via
// Nui_ShortToQuad_Depth.
void KinectGrabberMSW::ReadDepthLockedRect(KINECT_LOCKED_RECT & LockedRect, int w, int h, BYTE * data)
{
    if ( LockedRect.Pitch != 0 )
    {
        BYTE * pRow = (BYTE*) LockedRect.pBits;
        for ( int y = 0 ; y < h ; y++ )
        {
            // BUGFIX: advance row-by-row using the surface pitch instead of
            // assuming rows are tightly packed (w * sizeof(USHORT)).
            // ReadColorLockedRect already handles the pitch this way; when
            // Pitch == w * 2 the behavior is unchanged.
            USHORT * pBufferRun = (USHORT*) pRow;
            for ( int x = 0 ; x < w ; x++ )
            {
                RGBQUAD quad = KinectGrabberMSW::Nui_ShortToQuad_Depth( *pBufferRun );
                pBufferRun++;
                int offset = (w * y + x) * 3;
                data[offset + 0] = quad.rgbRed;
                data[offset + 1] = quad.rgbGreen;
                data[offset + 2] = quad.rgbBlue;
            }
            pRow += LockedRect.Pitch;
        }
    }
}
// Copies a locked 32-bit color frame into the packed 24-bit RGB buffer
// "data" (w*h*3 bytes), honouring the surface pitch between rows and
// dropping the reserved/alpha byte of each RGBQUAD.
void KinectGrabberMSW::ReadColorLockedRect(KINECT_LOCKED_RECT & LockedRect, int w, int h, BYTE * data)
{
    if ( LockedRect.Pitch == 0 )
        return;
    BYTE * rowStart = (BYTE*) LockedRect.pBits;
    BYTE * out = data;
    for ( int row = 0 ; row < h ; ++row )
    {
        RGBQUAD * pixels = (RGBQUAD*) rowStart;
        for ( int col = 0 ; col < w ; ++col )
        {
            // Output is sequential: 3 bytes per pixel, row-major.
            *out++ = pixels[col].rgbRed;
            *out++ = pixels[col].rgbGreen;
            *out++ = pixels[col].rgbBlue;
        }
        rowStart += LockedRect.Pitch;
    }
}
...
#pragma once
class KinectGrabberBase;
// Facade over the Kinect runtime: enumerates attached sensors and creates
// grabbers bound to a specific device, hiding the platform SDK from the GUI.
class KinectHelper
{
public :
KinectHelper();
~KinectHelper();
// Number of Kinect sensors currently attached to the system.
size_t GetDeviceCount();
// Human-readable name of the sensor at "index" (a placeholder string is
// returned if the device cannot be queried).
wxString GetDeviceName(size_t index);
// Factory: creates a platform-specific grabber that posts frame events to
// "handler". May return NULL on platforms with no Kinect backend.
KinectGrabberBase * CreateGrabber(wxEvtHandler * handler, size_t index);
};
...
// Returns the unique device name reported by the sensor at "index", or
// "Unknown Kinect Sensor" if the sensor cannot be opened or queried.
// A temporary NUI instance is created just for the query and destroyed
// before returning.
wxString KinectHelper::GetDeviceName(size_t index)
{
    BSTR result;
    DWORD size;
    INuiInstance * instance(NULL);
    wxString name = wxT( "Unknown Kinect Sensor" );
    // Cast to int for consistency with the other MSR_NuiCreateInstanceByIndex
    // call sites; SUCCEEDED() reads more clearly than !FAILED().
    if (SUCCEEDED(MSR_NuiCreateInstanceByIndex(( int )index, &instance)) &&
        instance != NULL)
    {
        if (instance->MSR_NuiGetPropsBlob(
            MsrNui::INDEX_UNIQUE_DEVICE_NAME,
            &result, &size))
        {
            name = result;
            // The BSTR is owned by the caller once returned; free it.
            SysFreeString(result);
        }
        // Always release the temporary instance — we only needed the name.
        MSR_NuiDestroyInstance(instance);
    }
    return name;
}
// Factory for a platform-specific grabber. Only the Microsoft SDK backend
// exists, so on non-Windows builds there is nothing to create and NULL is
// returned — callers must check for that.
KinectGrabberBase * KinectHelper::CreateGrabber(wxEvtHandler * handler, size_t index)
{
#if defined(__WXMSW__)
    KinectGrabberBase * grabber = new KinectGrabberMSW(handler, index);
    return grabber;
#else
    // Silence unused-parameter warnings on platforms without a backend.
    (void)handler;
    (void)index;
    return NULL;
#endif
}
...
// Main application window: owns the grabber and the images it fills.
// (Excerpt — elided members are marked with "...".)
class KinectTestMainFrame: public wxFrame
{
...
// Handlers for the grabber's notifications, posted from the capture thread.
void OnDepthFrame(wxCommandEvent & event );
void OnColorFrame(wxCommandEvent & event );
void OnSkeletonFrame(wxCommandEvent & event );
...
// Depth image currently displayed by the depth canvas.
wxImage m_CurrentImage;
// Index of the device chosen in the list box.
int m_SelectedDeviceIndex;
wxImage m_ColorImage;
wxImage m_SkeletonImage;
// Active grabber; NULL when no capture is running.
KinectGrabberBase * m_Grabber;
...
};
...
// Route the custom grabber notifications (posted from the capture thread via
// AddPendingEvent) to the frame's handlers.
BEGIN_EVENT_TABLE( KinectTestMainFrame, wxFrame )
...
// New depth frame is ready to be pulled from the grabber.
EVT_COMMAND (wxID_ANY, KINECT_DEPTH_FRAME_RECEIVED, \
KinectTestMainFrame::OnDepthFrame)
// New color frame is ready to be pulled from the grabber.
EVT_COMMAND (wxID_ANY, KINECT_COLOR_FRAME_RECEIVED, \
KinectTestMainFrame::OnColorFrame)
// New skeleton frame is ready to be pulled from the grabber.
EVT_COMMAND (wxID_ANY, KINECT_SKELETON_FRAME_RECEIVED, \
KinectTestMainFrame::OnSkeletonFrame)
END_EVENT_TABLE()
...
// Called when the user picks a device in the list box: validates the index
// stored as the entry's client data and (re)starts grabbing from it.
void KinectTestMainFrame::OnDEVICELISTBOXSelected( wxCommandEvent& event )
{
    // The device index is smuggled through the list-box client-data pointer.
    size_t deviceIndex =
        (size_t)m_DeviceListBox->GetClientData(event.GetInt());
    // BUGFIX: deviceIndex is unsigned, so the old "< 0" test could never
    // fire, and "> GetDeviceCount()" let the out-of-range value
    // deviceIndex == count slip through; valid indices are 0..count-1.
    if (deviceIndex >= m_KinectHelper->GetDeviceCount())
        return;
    m_SelectedDeviceIndex = deviceIndex;
    StartGrabbing();
}
// Creates a grabber for the selected device, sizes the display images to
// the grabber's frame sizes, attaches them to the canvases, and starts
// capture. Any previous grabber is stopped first.
void KinectTestMainFrame::StartGrabbing()
{
    StopGrabbing();
    m_Grabber = m_KinectHelper->CreateGrabber( this , m_SelectedDeviceIndex);
    // BUGFIX: CreateGrabber() returns NULL on platforms without a Kinect
    // backend; the unconditional dereferences below would crash.
    if (!m_Grabber)
        return;
    m_CurrentImage = wxImage(
        m_Grabber->GetDepthFrameSize().GetWidth(),
        m_Grabber->GetDepthFrameSize().GetHeight());
    m_ColorImage = wxImage(
        m_Grabber->GetColorFrameSize().GetWidth(),
        m_Grabber->GetColorFrameSize().GetHeight());
    // The skeleton is drawn at the depth-frame resolution.
    m_SkeletonImage = wxImage(
        m_Grabber->GetDepthFrameSize().GetWidth(),
        m_Grabber->GetDepthFrameSize().GetHeight());
    m_DepthCanvas->SetCurrentImage(&m_CurrentImage);
    m_ColorCanvas->SetCurrentImage(&m_ColorImage);
    m_SkeletonCanvas->SetCurrentImage(&m_SkeletonImage);
    if (!m_Grabber->Start())
    {
        // Capture could not be started — release everything again.
        StopGrabbing();
    }
}
...
// A new depth frame is available: copy it straight into the image the depth
// canvas displays and schedule a repaint.
void KinectTestMainFrame::OnDepthFrame(wxCommandEvent & event )
{
    if (!m_Grabber)
        return;
    m_Grabber->GrabDepthFrame(m_CurrentImage.GetData());
    m_DepthCanvas->Refresh();
}
// A new color frame is available: copy it straight into the image the color
// canvas displays and schedule a repaint.
void KinectTestMainFrame::OnColorFrame(wxCommandEvent & event )
{
    if (!m_Grabber)
        return;
    m_Grabber->GrabColorFrame(m_ColorImage.GetData());
    m_ColorCanvas->Refresh();
}
// A new skeleton frame is available: render it into an off-screen bitmap
// and swap the result into the skeleton canvas image.
void KinectTestMainFrame::OnSkeletonFrame(wxCommandEvent & event )
{
    do
    {
        if (!m_Grabber) break;
        // BUGFIX: GrabSkeletonFrame() returns NULL when the capture thread
        // is not running; do not hand a NULL frame to the painter.
        void * skeletonFrame = m_Grabber->GrabSkeletonFrame();
        if (!skeletonFrame) break;
        SkeletonPainter painter;
        wxBitmap bmp(m_SkeletonImage.GetWidth(), m_SkeletonImage.GetHeight());
        wxMemoryDC mdc(bmp);
        painter.DrawSkeleton(mdc, skeletonFrame);
        // Deselect before converting so the bitmap contents are final.
        mdc.SelectObject(wxNullBitmap);
        m_SkeletonImage = bmp.ConvertToImage();
        m_SkeletonCanvas->Refresh();
    }
    while (false);
}
The grabber sends its notifications to a wxEvtHandler object (the wxFrame class in wxWidgets is derived from wxEvtHandler). The form defines event handlers that are called when these notifications arrive from the grabber.
The reason the KinectGrabberBase::GrabSkeletonFrame() method returns void* is also quite simple: if you build capture implementations on top of various SDKs (including unofficial ones), there is no guarantee that all of them report player positions using identical data structures. In any case the coordinates must be handed off for post-processing, and the code that receives the pointer from the grabber knows which concrete type to cast it to. The GUI does not need to know anything about the grabber's internal structure.
Source: https://habr.com/ru/post/123588/
All Articles