Reprinted from: https://www.parful.com/blog/article/110
Before starting this tutorial, you first need to know how to use the Visual Gesture Builder (VGB) tool to create gesture database files with the .gba or .gbd suffix. If you lack this background, please read the following articles in order:
- Kinect for Windows v2 gesture recognition: using Kinect Studio
- Kinect for Windows v2 gesture recognition: using Visual Gesture Builder
- Kinect for Windows v2 gesture recognition: detecting discrete gestures
Developing your first Kinect gesture-matching program
You need to configure your Visual Studio project's include directory and library directory: the include directory should point at the `inc` directory of the Kinect SDK installation, and the library directory at the `Lib` directory of the Kinect SDK installation (it contains both x64 and x86 subfolders — select the version you need).
Then include the header files and link the import libraries:
#include "Kinect.h"
#include "Kinect.VisualGestureBuilder.h"
#pragma comment(lib,"Kinect20.lib")
#pragma comment(lib,"Kinect20.VisualGestureBuilder.lib")
// Obtain the default Kinect sensor and open it for streaming.
IKinectSensor* pSensor = nullptr;
if (GetDefaultKinectSensor(&pSensor) != S_OK)
{
    return 1;
}
// Fixed: the original ignored the HRESULT of Open(); an unopened sensor
// delivers no frames, so bail out early on failure as well.
if (pSensor->Open() != S_OK)
{
    return 1;
}
// Load the gesture database (.gbd) produced by Visual Gesture Builder.
wstring gestureDatabasePath = L"Seat.gbd";
IVisualGestureBuilderDatabase* pGestureDatabase = nullptr;
if (CreateVisualGestureBuilderDatabaseInstanceFromFile(gestureDatabasePath.c_str(), &pGestureDatabase) != S_OK)
{
    // Fixed message typo: "exit" -> "exists".
    wcout << "Can not read the database, Please check if it exists!" << endl;
    return 1;
}
Getting the number of gestures and the gesture information from the database
// Query how many gestures the database contains.
UINT numGesture = 0;
pGestureDatabase->get_AvailableGesturesCount(&numGesture);
wcout << "Total gesture count:" << numGesture << endl;
// Get the list of gestures.
IGesture** gestureList = new IGesture*[numGesture];
pGestureDatabase->get_AvailableGestures(numGesture, gestureList);
// GestureType enum: each gesture is either discrete (boolean match) or
// continuous (progress value).
GestureType gestureType;
wchar_t gestureName[300];
// Fixed: loop index is UINT to match numGesture (was a signed/unsigned
// comparison with int).
for (UINT index = 0; index < numGesture; ++index)
{
    if (gestureList[index]->get_GestureType(&gestureType) != S_OK)
    {
        continue;
    }
    // The name query is identical for both gesture kinds, so hoist it out
    // of the branches (the original duplicated it). Skip on failure so we
    // never print an uninitialized buffer.
    if (gestureList[index]->get_Name(300, gestureName) != S_OK)
    {
        continue;
    }
    if (gestureType == GestureType::GestureType_Discrete)
    {
        wcout << "discrete_gesture_" << index << "_name:" << gestureName << endl;
    }
    else if (gestureType == GestureType::GestureType_Continuous)
    {
        // Fixed: the original omitted the ':' separator in this line only.
        wcout << "continuous_gesture_" << index << "_name:" << gestureName << endl;
    }
}
Creating the gesture data streams and readers
Each data stream corresponds to one stream reader. The Kinect sensor can track up to six bodies simultaneously (numBody = 6), so to detect gestures for all of them at once we create six gesture frame sources and six gesture frame readers:
IVisualGestureBuilderFrameSource** gestureSources = new IVisualGestureBuilderFrameSource*[numBody];
IVisualGestureBuilderFrameReader** gestureReaders = new IVisualGestureBuilderFrameReader*[numBody];
for (int bodyIndex = 0; bodyIndex < numBody; ++bodyIndex)
{
CreateVisualGestureBuilderFrameSource(pSensor, bodyIndex, &gestureSources[bodyIndex]);
gestureSources[bodyIndex]->AddGestures(numGesture, gestureList);
gestureSources[bodyIndex]->OpenReader(&gestureReaders[bodyIndex]);
}
Acquiring the body (skeleton) tracking data
// Per-iteration body frame; released before leaving the if-block below.
IBodyFrame* pFrame = nullptr;
// Gets the latest frame. pFrameReader/bodies/numBody are assumed to be set
// up earlier in the program (not shown in this fragment).
if (pFrameReader->AcquireLatestFrame(&pFrame) == S_OK)
{
    // For each frame, refresh the cached IBody array with current data.
    if (pFrame->GetAndRefreshBodyData(numBody, bodies) == S_OK)
    {
        for (int bodyIndex = 0; bodyIndex < numBody; ++bodyIndex)
        {
            BOOLEAN tracked = false;
            IBody* pBody = bodies[bodyIndex];
            // Only process bodies the sensor is actively tracking.
            if (pBody->get_IsTracked(&tracked) == S_OK&&tracked)
            {
                // Process body (tracking-ID binding and gesture matching
                // go here — see the following fragments).
            }
        }
    }
    // Release the COM frame so the reader can deliver the next one.
    pFrame->Release();
}
Binding the tracked body's tracking ID to its gesture source
// Keep the gesture source bound to the body currently being tracked:
// whenever the body's tracking ID differs from the one the source already
// holds, rebind the source to the new ID.
UINT64 trackingId = 0;
if (pBody->get_TrackingId(&trackingId) == S_OK)
{
    UINT64 gestureId = 0;
    const bool haveSourceId = gestureSources[bodyIndex]->get_TrackingId(&gestureId) == S_OK;
    if (haveSourceId && gestureId != trackingId)
    {
        // Bind tracking ID
        gestureSources[bodyIndex]->put_TrackingId(trackingId);
        wcout << "Gesture Source " << bodyIndex << " start to track user " << trackingId << endl;
    }
}
Matching the body data against the gesture database and getting results in real time
Finally, there are two possible kinds of result: IDiscreteGestureResult (for discrete gestures) and IContinuousGestureResult (for continuous gestures).
- An IDiscreteGestureResult provides a BOOLEAN detection flag; when the match succeeds, it also provides a float confidence value (range 0-1).
- An IContinuousGestureResult provides a float progress value (range 0-1) representing how far the continuous action has advanced; this value is always present.
// Poll the latest gesture frame for this body and report every gesture's
// current result (confidence for discrete, progress for continuous).
IVisualGestureBuilderFrame* pGestureFrame = nullptr;
// Get latest frame
if (gestureReaders[bodyIndex]->CalculateAndAcquireLatestFrame(&pGestureFrame) == S_OK)
{
    BOOLEAN bGestureTracked = false;
    // Only meaningful when the source is bound to a valid tracking ID.
    if (pGestureFrame->get_IsTrackingIdValid(&bGestureTracked) == S_OK&&bGestureTracked)
    {
        for (UINT gestureIndex = 0; gestureIndex < numGesture; ++gestureIndex)
        {
            GestureType gestureType;
            gestureList[gestureIndex]->get_GestureType(&gestureType);
            wchar_t gestureName[260];
            gestureList[gestureIndex]->get_Name(260, gestureName);
            // Discrete gesture
            if (gestureType == GestureType::GestureType_Discrete)
            {
                IDiscreteGestureResult* pGestureResult = nullptr;
                if (pGestureFrame->get_DiscreteGestureResult(gestureList[gestureIndex], &pGestureResult) == S_OK)
                {
                    BOOLEAN detected = false;
                    if (pGestureResult->get_Detected(&detected) == S_OK&&detected)
                    {
                        float confidence = 0.0f;
                        pGestureResult->get_Confidence(&confidence);
                        wcout <<gestureName<<"-Confidence:" << confidence<<endl;
                    }
                    // Fixed leak: the original only released the result when
                    // 'detected' was true; release it whenever it was acquired.
                    pGestureResult->Release();
                }
            }
            // Continuous gesture
            else if (gestureType == GestureType::GestureType_Continuous)
            {
                IContinuousGestureResult* pGestureResult = nullptr;
                if (pGestureFrame->get_ContinuousGestureResult(gestureList[gestureIndex], &pGestureResult) == S_OK)
                {
                    float progress = 0.0f;
                    if (pGestureResult->get_Progress(&progress) == S_OK)
                    {
                        wcout << gestureName << "-progress:" << progress << endl;
                    }
                    pGestureResult->Release();
                }
            }
        }
    }
    pGestureFrame->Release();
}
-
Please read the complete original article at: https://www.parful.com/blog/article/110