Using the Kinect for Fun and Profit — Tam Hanna
2. About /me
• Tam HANNA
– Director,
Tamoggemon Holding
k.s.
– Runs web sites about
mobile computing
– Writes scientific books
3. Agenda
• Kinect – what is that?
• Streams
• Skeletons
• Facial tracking
• libfreenect
• OpenNI
6. History - I
• Depth: PrimeSense technology
– Not from Redmond
• First public mention: 2007
– Bill Gates, D3 conference
– „Camera for game control“
11. History - II
• 2008: Wii ships
– Best-selling console of its generation
• 2009: E3 conference
– Announcement of „Project Natal“
• 2010: no CPU in sensor
– Takes 10% of XBox 360 CPU
12. History - III
• 4. November 2010
– First shipment
– “We will sue anyone who reverse engineers“
• June 2011
– Official SDK
14. Kinect provides
• Video stream
• Depth stream
– (IR stream)
• Accelerometer data
• Rest: computed
15. Family tree
• Kinect for XBOX
– Normal USB
• Kinect bundle
– MS-Fucked USB
– Needs PSU
• Kinect for Windows
– Costs more
– Legal to deploy
19. Color stream
• Two modes
– VGA@30fps
– 1280x960@12fps
• Simple data format
– 8 bits / component
– R / G / B / A components
21. Depth stream - II
• 16bit words
• Special encoding for limited range:
26. Code - I
public partial class MainWindow : Window
{
KinectSensor mySensor;
KinectSensorChooser myChooser;
public MainWindow()
{
InitializeComponent();
myChooser = new KinectSensorChooser();
myChooser.KinectChanged += new
EventHandler<KinectChangedEventArgs>(myChooser_KinectChanged);
this.SensorChooserUI.KinectSensorChooser = myChooser;
myChooser.Start();
27. Code - II
void myChooser_KinectChanged(object sender,
KinectChangedEventArgs e)
{
if (null != e.OldSensor)
{
if (mySensor != null)
{
mySensor.Dispose();
}
}
if (null != e.NewSensor)
{
mySensor = e.NewSensor;
29. Process stream
void mySensor_AllFramesReady(object sender,
AllFramesReadyEventArgs e)
{
ColorImageFrame c = e.OpenColorImageFrame();
DepthImageFrame d = e.OpenDepthImageFrame();
if (c == null || d == null) return;
c.CopyPixelDataTo(myColorArray);
d.CopyPixelDataTo(myArray);
32. On Push mode
• Kinect can push data to application
• Preferred mode of operation
• But: sensitive to proc time
• If handler takes too long -> App stops
36. Get joints
void mySensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
ColorImageFrame c = e.OpenColorImageFrame();
SkeletonFrame s = e.OpenSkeletonFrame();
if (c == null || s == null) return;
c.CopyPixelDataTo(myColorArray);
s.CopySkeletonDataTo(mySkeletonArray);
foreach (Skeleton aSkeleton in mySkeletonArray)
{
DrawBone(aSkeleton.Joints[JointType.HandLeft],
aSkeleton.Joints[JointType.WristLeft], armPen, drawingContext);
37. Use joints
private void DrawBone(Joint jointFrom, Joint jointTo, Pen aPen,
DrawingContext aContext)
{
if (jointFrom.TrackingState == JointTrackingState.NotTracked ||
jointTo.TrackingState == JointTrackingState.NotTracked)
{}
if (jointFrom.TrackingState == JointTrackingState.Inferred ||
jointTo.TrackingState == JointTrackingState.Inferred)
{
ColorImagePoint p1 =
mySensor.CoordinateMapper.MapSkeletonPointToColorPoint(jointFrom.Position,
ColorImageFormat.RgbResolution640x480Fps30);
}
if (jointFrom.TrackingState == JointTrackingState.Tracked ||
jointTo.TrackingState == JointTrackingState.Tracked)
45. Feed face tracker
// Feed color + depth + skeleton data to the Kinect face tracker.
// Scans the skeleton array for the first fully tracked skeleton whose
// face the tracker can lock onto, then stops looking.
// NOTE(review): myFaceTracker, mySkeletonArray, myColorArray and myArray
// are members populated elsewhere (see the AllFramesReady handlers) —
// myArray is presumably the depth buffer; confirm against slide 29.
FaceTrackFrame myFrame = null;
foreach (Skeleton aSkeleton in mySkeletonArray)
{
// Only fully tracked skeletons are worth handing to the face tracker;
// Inferred/NotTracked ones would waste the (expensive) Track() call.
if (aSkeleton.TrackingState == SkeletonTrackingState.Tracked)
{
// Track() needs the formats to match the streams the arrays came from.
myFrame =
myFaceTracker.Track(ColorImageFormat.RgbResolution640x480Fps30,
myColorArray, DepthImageFormat.Resolution640x480Fps30, myArray,
aSkeleton);
// First successful face lock wins — stop scanning further skeletons.
if (myFrame.TrackSuccessful == true)
{
break;
}
}
}
48. What is it
• Result of Kinect hacking competition
• Bundled with most Linux distributions
• „Basic Kinect data parser“
49. Set-up
• /etc/udev/rules.d/66-kinect.rules
#Rules for Kinect
SYSFS{idVendor}=="045e", SYSFS{idProduct}=="02ae", MODE="0660", GROUP="video"
SYSFS{idVendor}=="045e", SYSFS{idProduct}=="02ad", MODE="0660", GROUP="video"
SYSFS{idVendor}=="045e", SYSFS{idProduct}=="02b0", MODE="0660", GROUP="video"
### END
50. Set-up II
• sudo adduser $USER plugdev
• sudo usermod -a -G video tamhan
• tamhan@tamhan-X360:~$ freenect-glview
Kinect camera test
Number of devices found: 1
Could not claim interface on camera: -6
Could not open device
52. Problems
• gspca-kinect
– Kernel module, uses Kinect as webcam
– Blocks other libraries
– sudo modprobe -r gspca_kinect
• Outdated version widely deployed
– API not compatible
53. Update library
• sudo foo
• sudo add-apt-repository ppa:floe/libtisch
• sudo apt-get update
• sudo apt-get install libfreenect libfreenect-dev libfreenect-demos
56. The .pro file
QT += core gui
TARGET = QtDepthFrame
CONFIG += i386
DEFINES += USE_FREENECT
LIBS += -lfreenect
57. The freenect thread
• Library needs processing time
– Does not multithread itself
• Should be provided outside of main app
// Worker thread that drives the libfreenect event loop.
// libfreenect does not spawn threads of its own (slide 57), so its
// event processing must run outside the Qt GUI main loop.
class QFreenectThread : public QThread
{
Q_OBJECT
public:
explicit QFreenectThread(QObject *parent = 0);
// QThread entry point — presumably loops on freenect_process_events()
// while myActive is set; body not shown here, confirm in the .cpp.
void run();
signals:
public slots:
public:
// Loop-control flag — cleared by the owner to stop the thread (assumed;
// TODO confirm against run()'s implementation).
bool myActive;
// libfreenect context to pump; set by QFreenect before the thread starts
// (assumed from the bring-up code on slide 64 — verify).
freenect_context *myContext;
};
60. QFreenect
• Main engine module
– Contact point between Kinect and app
• Fires off signals on frame availability
61. • class QFreenect : public QObject
• {
• Q_OBJECT
• public:
• explicit QFreenect(QObject *parent = 0);
• ~QFreenect();
• void processVideo(void *myVideo, uint32_t myTimestamp=0);
• void processDepth(void *myDepth, uint32_t myTimestamp=0);
• signals:
• void videoDataReady(uint8_t* myRGBBuffer);
• void depthDataReady(uint16_t* myDepthBuffer);
• public slots:
62. • private:
• freenect_context *myContext;
• freenect_device *myDevice;
• QFreenectThread *myWorker;
• uint8_t* myRGBBuffer;
• uint16_t* myDepthBuffer;
• QMutex* myMutex;
• public:
• bool myWantDataFlag;
• bool myFlagFrameTaken;
• bool myFlagDFrameTaken;
• static QFreenect* mySelf;
• };
63. Some C++
// Storage for the singleton back-pointer (set in the QFreenect ctor,
// slide 64). libfreenect's C callbacks carry no user pointer here, so
// this static is the only way back into the C++ object.
QFreenect* QFreenect::mySelf;
// C-style trampoline: libfreenect calls this with the raw RGB frame;
// forward it to the singleton's processVideo(). Must stay a plain
// function — it is registered via freenect_set_video_callback().
static inline void videoCallback(freenect_device *myDevice, void
*myVideo, uint32_t myTimestamp=0)
{
QFreenect::mySelf->processVideo(myVideo, myTimestamp);
}
// Same trampoline pattern for depth frames -> processDepth().
// (myDevice is unused in both; the default argument on myTimestamp is
// irrelevant when invoked through the registered function pointer.)
static inline void depthCallback(freenect_device *myDevice, void
*myVideo, uint32_t myTimestamp=0)
{
QFreenect::mySelf->processDepth(myVideo, myTimestamp);
}
64. Bring-up
• QFreenect::QFreenect(QObject *parent) :
• QObject(parent)
• {
• myMutex=NULL;
• myRGBBuffer=NULL;
• myMutex=new QMutex();
• myWantDataFlag=false;
• myFlagFrameTaken=true;
• mySelf=this;
• if (freenect_init(&myContext, NULL) < 0)
• {
• qDebug("init failed");
• QApplication::exit(1);
• }
65. Bring-up – II
• freenect_set_log_level(myContext, FREENECT_LOG_FATAL);
• int nr_devices = freenect_num_devices (myContext);
• if (nr_devices < 1)
• {
• freenect_shutdown(myContext);
• qDebug("No Kinect found!");
• QApplication::exit(1);
• }
• if (freenect_open_device(myContext, &myDevice, 0) < 0)
• {
• qDebug("Open Device Failed!");
• freenect_shutdown(myContext);
• QApplication::exit(1);
• }
66. • myRGBBuffer = (uint8_t*)malloc(640*480*3);
• freenect_set_video_callback(myDevice,
videoCallback);
• freenect_set_video_buffer(myDevice,
myRGBBuffer);
• freenect_frame_mode vFrame =
freenect_find_video_mode(FREENECT_RESOLUTION_MEDIUM, FREENECT_VIDEO_RGB);
• freenect_set_video_mode(myDevice,vFrame);
• freenect_start_video(myDevice);
69. Data passing
/// Hand a freshly captured RGB frame to the application.
/// Called on the freenect thread via videoCallback(); copies the
/// 640x480 RGB24 frame into a fresh heap buffer and emits it.
/// @param myVideo      raw frame owned by libfreenect (640*480*3 bytes)
/// @param myTimestamp  libfreenect capture timestamp (currently unused)
/// Ownership of the emitted buffer passes to the receiving slot, which
/// is expected to free() it and re-arm myFlagFrameTaken — TODO confirm
/// against the slot implementation (not shown here).
void QFreenect::processVideo(void *myVideo, uint32_t myTimestamp)
{
    (void)myTimestamp;
    QMutexLocker locker(myMutex);  // guards the hand-off flags
    // Copy only when the app requested data AND consumed the previous
    // frame; otherwise drop this frame (no queueing, keeps latency low).
    if (myWantDataFlag && myFlagFrameTaken)
    {
        const size_t frameBytes = 640 * 480 * 3;  // RGB24 @ FREENECT_RESOLUTION_MEDIUM
        uint8_t* mySecondBuffer = (uint8_t*)malloc(frameBytes);
        if (mySecondBuffer == NULL)
        {
            // Out of memory: drop the frame instead of crashing in memcpy.
            return;
        }
        memcpy(mySecondBuffer, myVideo, frameBytes);
        myFlagFrameTaken = false;
        emit videoDataReady(mySecondBuffer);
    }
}
71. Format of data word - II
// Convert the raw RGB24 buffer into the on-screen QImage, one pixel at
// a time (setPixel per pixel is slow but simple — fine for a demo).
// Buffer layout: 3 bytes per pixel, row-major, stride 640 (see the
// 640*480*3 malloc on slide 66).
// NOTE(review): x starts at 2, so the two leftmost columns are never
// drawn — presumably to hide Bayer-demosaic edge artifacts; confirm.
for(int x=2; x<640;x++)
{
for(int y=0;y<480;y++)
{
r=(myRGBBuffer[3*(x+y*640)+0]);
g=(myRGBBuffer[3*(x+y*640)+1]);
b=(myRGBBuffer[3*(x+y*640)+2]);
myVideoImage->setPixel(x,y,qRgb(r,g,b));
}
}
74. Extra processing
/// Hand a freshly captured depth frame to the application.
/// Called on the freenect thread via depthCallback(); copies the
/// 640x480 frame of 16-bit depth words into a fresh heap buffer.
/// @param myDepth      raw frame owned by libfreenect (640*480 uint16_t)
/// @param myTimestamp  libfreenect capture timestamp (currently unused)
/// Ownership of the emitted buffer passes to the receiving slot, which
/// is expected to free() it and re-arm myFlagDFrameTaken — TODO confirm
/// against the slot implementation (not shown here).
void QFreenect::processDepth(void *myDepth, uint32_t myTimestamp)
{
    (void)myTimestamp;
    QMutexLocker locker(myMutex);  // guards the hand-off flags
    // Mirror of processVideo(): copy only when requested and the last
    // depth frame was consumed; otherwise drop this one.
    if (myWantDataFlag && myFlagDFrameTaken)
    {
        const size_t frameBytes = 640 * 480 * sizeof(uint16_t);
        uint16_t* mySecondBuffer = (uint16_t*)malloc(frameBytes);
        if (mySecondBuffer == NULL)
        {
            // Out of memory: drop the frame instead of crashing in memcpy.
            return;
        }
        memcpy(mySecondBuffer, myDepth, frameBytes);
        myFlagDFrameTaken = false;
        emit depthDataReady(mySecondBuffer);
    }
}
76. Data is in meters
// Color one depth pixel for display (calcval, x, y, r, g, b come from
// the enclosing loop, not shown on this slide).
// NOTE(review): despite the slide title "Data is in meters", the
// FREENECT_DEPTH_MM constant and the 1000..2000 window strongly suggest
// calcval is in MILLIMETERS (i.e. a 1m-2m band) — confirm.
if(calcval==FREENECT_DEPTH_MM_NO_VALUE)
{
// No depth reading at this pixel (shadow/out of range): paint it red.
r=255; g=0;b=0;
}
else if(calcval>1000 && calcval < 2000)
{
// Pixel lies in the band of interest: show the matching video pixel.
QRgb aVal=myVideoImage->pixel(x,y);
r=qRed(aVal);
g=qGreen(aVal);
b=qBlue(aVal);
}
else
{
// Valid depth but outside the band: black it out.
r=0;g=0;b=0;
}
myDepthImage->setPixel(x,y,qRgb(r,g,b));
79. What is OpenNI?
• Open standard for Natural Interfaces
– Very Asus-Centric
• Provides generic NI framework
• VERY complex API