Kinect

作者:追风剑情 发布于:2019-9-19 13:11 分类:Unity3d

官方文档

1111.png

一、安装开发环境

1、官方下载 Kinect for Windows SDK 2.0,并安装。

2、测试Kinect设备是否正常工作。

1111.png

1111.png

绿勾代表正常工作

2222.png


3、下载示例并导入到Unity2017或更高版本中: 百度网盘下载 提取码: 5yjs

1111.png

=====================================================================================

»KinectManager接口功能

1.检查Kinect是否完成了初始化
if ( KinectManager.IsKinectInitialized() ) { }

2.获取摄像头画面(一般作为背景)
Texture2D background = manager.GetUsersClrTex();

3.判断指定用户的某关节是否正处于跟踪中
long userId = manager.GetUserIdByIndex(0);//0代表第1个用户
int joint = (int)KinectInterop.JointType.HandRight;//判断右手
if (manager.IsJointTracked (userId, joint)) {}

4.获取关节坐标及旋转角

//关节坐标
long userId = manager.GetUserIdByIndex(0);
int joint = (int)KinectInterop.JointType.HandRight;//右手
Rect backgroundRect = foregroundCamera.pixelRect;
Vector3 posJoint = manager.GetJointPosColorOverlay(userId, joint, foregroundCamera, backgroundRect);
//关节旋转角度
Vector3 vForward = foregroundCamera ? foregroundCamera.transform.forward : Vector3.forward;
bool objFlipped = (Vector3.Dot(overlayObject.forward, vForward) < 0);//点积为负表示两者朝向相反(夹角为钝角)
Quaternion rotJoint = manager.GetJointOrientation(userId, joint, !objFlipped);

5.获取父关节
KinectInterop.JointType jointParent = manager.GetParentJoint(KinectInterop.JointType.HandRight);

6.是否至少有1个用户被跟踪
if (!manager.IsUserDetected()) {
//没用户时,可以暂停或停止游戏
}

7.判断指定用户ID是否在跟踪用户列表中
if (manager.IsUserTracked(userId)) {}

8.清除所有被跟踪的用户
manager.ClearKinectUsers();//通常在游戏结束时调用

9.骨骼枚举
Kinect中的定义:KinectInterop.JointType
Unity中的定义: HumanBodyBones
PS: 注意定义中的对应关系
例如:KinectInterop.JointType.ShoulderLeft对应的是HumanBodyBones.LeftUpperArm

10.获取骨骼
//获取KinectInterop.JointType.ShoulderLeft
animatorComponent.GetBoneTransform(HumanBodyBones.LeftUpperArm)

11.改变人类化身姿态

//确保对象不为null,且为人类
if (animatorComponent && animatorComponent.avatar && animatorComponent.avatar.isHuman)
{
	HumanPoseHandler humanPoseHandler = new HumanPoseHandler(animatorComponent.avatar, rootTransform);
	HumanPose humanPose = new HumanPose();
	humanPoseHandler.GetHumanPose(ref humanPose);
	humanPose.bodyPosition = 新值;
	humanPose.bodyRotation = 新值;
	humanPose.muscles = 新值;//当前姿势的肌肉值的数组
	humanPoseHandler.SetHumanPose(ref humanPose);
}

12.刷新姿态监听器列表(通常进入场景后调用一次)
找出场景中实现了KinectGestures.GestureListenerInterface的脚本,并加入到监听列表,同时找出场景中
KinectGestures脚本。
manager.refreshGestureListeners();

13.刷新化身控制器列表(通常进入场景后调用一次)
找出场景中的AvatarController脚本并加入avatarControllers列表
manager.refreshAvatarControllers();

14.获取主用户ID
获取主用户(第一个或最近的用户)的userid,如果没有检测到用户,则获取0
manager.GetPrimaryUserID();

15.获取前景图像(通常指用户)

backManager = BackgroundRemovalManager.Instance;
if (backManager && backManager.IsBackgroundRemovalInitialized()) {
	foregroundTex = (RenderTexture)backManager.GetForegroundTex ();
}

16.获取坐标点深度值

//获取头部关节坐标
Vector3 posHeadRaw = kinectManager.GetJointKinectPosition(userId, (int)KinectInterop.JointType.Head);
if(posHeadRaw != Vector3.zero)
{
	//转到深度坐标系
	Vector2 posDepthHead = kinectManager.MapSpacePointToDepthCoords(posHeadRaw);
	//得到深度值
	ushort depthHead = kinectManager.GetDepthForPixel((int)posDepthHead.x, (int)posDepthHead.y);
}

17.获取面部矩形区域

if (faceManager.IsFaceTrackingInitialized() && faceManager.IsTrackingFace(userId)) 
{
    Rect faceJointRect = faceManager.GetFaceColorRect(userId);
}

18.调整前景裁剪边缘
禾.png
Erode Iterations: 缩小裁剪边缘
Dilate Iterations: 扩大裁剪边缘
2222.png

19.获取面部模型数据

KinectInterop.SensorData sensorData = kinectManager.GetSensorData();
//获取面部模型顶点数
int iNumVertices = sensorData.sensorInterface.GetFaceModelVerticesCount(0);
if (iNumVertices <= 0)
	return;
Vector3[] avModelVertices = new Vector3[iNumVertices];
//获取面部模型顶点
bool bGotModelVertices = sensorData.sensorInterface.GetFaceModelVertices(0, ref avModelVertices);

//获取三角形数量
int iNumTriangles = sensorData.sensorInterface.GetFaceModelTrianglesCount();
if(iNumTriangles <= 0)
	return;
//获取三角形顶点索引
int[] avModelTriangles = new int[iNumTriangles];
bool bGotModelTriangles = sensorData.sensorInterface.GetFaceModelTriangles(mirroredModelMesh, ref avModelTriangles);

//获取头在kinect中的坐标
Vector3 headPos = Vector3.zero;
bGotHeadPos = sensorData.sensorInterface.GetHeadPosition(primaryUserID, ref headPos);
//从kinect坐标系转到Unity世界坐标系
Matrix4x4 kinectToWorld = kinectManager ? kinectManager.GetKinectToWorldMatrix() : Matrix4x4.identity;
Vector3 headPosWorld = kinectToWorld.MultiplyPoint3x4(headPos);

//将顶点坐标从kinect坐标系转到Unity世界坐标
Vector3[] vMeshVertices = new Vector3[avModelVertices.Length];
for(int i = 0; i < avModelVertices.Length; i++)
{
	//世界坐标转面部模型的本地坐标
	vMeshVertices[i] = kinectToWorld.MultiplyPoint3x4(avModelVertices[i]) - headPosWorld;
}

//创建Mesh
if (faceModelMesh) 
{
	Mesh mesh = new Mesh();
	mesh.name = "FaceMesh";
	faceModelMesh.GetComponent<MeshFilter>().mesh = mesh;
	mesh.vertices = vMeshVertices;
	mesh.triangles = avModelTriangles;
	mesh.RecalculateNormals();
}

//为面部Mesh设置贴图
Texture texColorMap = kinectManager ? kinectManager.GetUsersClrTex() : null;
RenderTexture faceMeshTexture = new RenderTexture(texColorMap.width, texColorMap.height, 0);
faceModelMesh.GetComponent<MeshRenderer>().material.mainTexture = faceMeshTexture;
if (faceMeshTexture && texColorMap) 
{
	// update the color texture
	Graphics.Blit(texColorMap, faceMeshTexture);
}

20.获取采集图像尺寸

//摄像头采集的图像尺寸
kinectManager.GetColorImageWidth();
kinectManager.GetColorImageHeight();
//红外摄像头采集的深度图像尺寸
kinectManager.GetDepthImageWidth();
kinectManager.GetDepthImageHeight();

21.获取面部模型特征点在Unity中的世界坐标

FacetrackingManager faceManager = FacetrackingManager.Instance;
int iVertCount = faceManager.GetUserFaceVertexCount(userId);
Vector3[] faceVertices = new Vector3[iVertCount];
//这里的顶点坐标系是Kinect设备
if (faceManager.GetUserFaceVertices(userId, ref faceVertices))
{
	//Kinect坐标到Unity世界坐标系的转换矩阵
	Matrix4x4 kinectToWorld = kinectManager.GetKinectToWorldMatrix();
	//面部关键点枚举数组
	HighDetailFacePoints[] facePoints = (HighDetailFacePoints[])System.Enum.GetValues(typeof(HighDetailFacePoints));

	Dictionary<HighDetailFacePoints, Vector3> dictFacePoints = new Dictionary<HighDetailFacePoints, Vector3>();
	for (int i = 0; i < facePoints.Length; i++) 
	{
		HighDetailFacePoints point = facePoints[i];
		//转换到Unity世界坐标系
		dictFacePoints[point] = kinectToWorld.MultiplyPoint3x4(faceVertices[(int)point]);
	}
	
	//让面具模型坐标跟随鼻尖(即,实现面具跟随面部)
	//NoseTip: 鼻尖
	HighDetailFacePoints facePoint = HighDetailFacePoints.NoseTip;
	Vector3 facePointPos = faceVertices[(int)facePoint];
	facePointTransform.position = facePointPos;
}

22.调整背景画面显示大小及位置 
//显示摄像头画面
KinectManager manager = KinectManager.Instance;
mGUITexture.texture = manager.GetUsersClrTex();
2222.png1111.png

23.设置前景像机(渲染3D物体)与Kinect摄像头的视场角相等 
KinectManager manager = KinectManager.Instance;
KinectInterop.SensorData sensorData = manager.GetSensorData();
foregroundCamera.fieldOfView = sensorData.colorCameraFOV;

24.获取关节在Camera坐标系中的坐标

Rect backgroundRect = foregroundCamera.pixelRect;
PortraitBackground portraitBack = PortraitBackground.Instance;
if(portraitBack && portraitBack.enabled)
{
	backgroundRect = portraitBack.GetBackgroundRect();
}
//返回关节在Unity中的世界坐标
Vector3 posColorOverlay = kinectManager.GetJointPosColorOverlay(
primaryUserID, (int)KinectInterop.JointType.Head, foregroundCamera, backgroundRect);
//让目标3D对象跟随关节
faceModelMesh.transform.position = posColorOverlay;

25.计算面部模型顶点对应的UV贴图坐标

//获取模型顶点坐标
KinectInterop.SensorData sensorData = kinectManager.GetSensorData();
int iNumVertices = sensorData.sensorInterface.GetFaceModelVerticesCount(primaryUserID);
Vector3[] avModelVertices = new Vector3[iNumVertices];
sensorData.sensorInterface.GetFaceModelVertices(primaryUserID, ref avModelVertices);
//Kinect摄像头采集的画面宽高
float colorWidth = (float)kinectManager.GetColorImageWidth();
float colorHeight = (float)kinectManager.GetColorImageHeight();
//计算模型顶点对应的UV坐标
for(int i = 0; i < avModelVertices.Length; i++)
{
	//顶点对应的深度图坐标
	Vector2 posDepth = kinectManager.MapSpacePointToDepthCoords(avModelVertices[i]);
	if(posDepth != Vector2.zero)
	{
		//得到深度值
		ushort depth = kinectManager.GetDepthForPixel((int)posDepth.x, (int)posDepth.y);
		//得到顶点在Color图像中的坐标
		Vector2 posColor = kinectManager.MapDepthPointToColorCoords(posDepth, depth);
		if(posColor != Vector2.zero && !float.IsInfinity(posColor.x) && !float.IsInfinity(posColor.y))
		{
			//转成[0,1]区间的UV坐标
			avModelUV[i] = new Vector2(posColor.x / colorWidth, posColor.y / colorHeight);
		}
	}
}
//设置uv
mesh.uv = avModelUV;
//如果顶点有变化,别忘了重新计算下
mesh.RecalculateNormals();
mesh.RecalculateBounds();

26.获取面部细节点坐标

int NoseTip = (int)Microsoft.Kinect.Face.HighDetailFacePoints.NoseTip;
Vector3 NoseTipPos = avModelVertices[NoseTip];//鼻尖坐标

27.表情动画

//获取动画单元(AU)权重值,Shape各动画表情权重值
//AU是中性形状的增量,您可以使用它来对动画化身模型上的目标进行变形,
//以使化身像被跟踪的用户一样起作用。例如,AU定义是否张口,抬起眉毛以及其他面部表情细节。
Dictionary<KinectInterop.FaceShapeAnimations, float> dictAU = 
	new Dictionary<KinectInterop.FaceShapeAnimations, float>();
sensorData.sensorInterface.GetAnimUnits(userId, ref dictAU);

//获取形状单位(SU)权重
//SU估计用户头部的特定形状:眉毛,鼻子,脸颊,嘴巴或下巴等特征的形状。
Dictionary<KinectInterop.FaceShapeDeformations, float> dictSU =
	new Dictionary<KinectInterop.FaceShapeDeformations, float>();
sensorData.sensorInterface.GetShapeUnits(userId, ref dictSU);

»需要开发人员自己实现的接口:

// Gesture listener skeleton: implements KinectGestures.GestureListenerInterface
// so KinectManager can report gesture events for tracked users.
public class MyGestureListener : KinectGestures.GestureListenerInterface 
{
	// Called when a new user enters the scene and starts being tracked.
	// NOTE: interface implementations must be public (the originals had no
	// access modifier, which defaults to private and does not compile).
	public void UserDetected(long userId, int userIndex)
	{
		KinectManager manager = KinectManager.Instance;
		// Register the gestures we want detected for this user.
		manager.DetectGesture(userId, KinectGestures.Gestures.SwipeLeft);
		manager.DetectGesture(userId, KinectGestures.Gestures.SwipeRight);
	}
	
	// Called when a tracked user leaves / is lost.
	public void UserLost(long userId, int userIndex)
	{
	}
	
	// Reports progress of a gesture that is still being performed.
	// Qualified as KinectGestures.Gestures for consistency with the other methods.
	public void GestureInProgress(long userId, int userIndex, KinectGestures.Gestures gesture, float progress, 
		                       KinectInterop.JointType joint, Vector3 screenPos)
	{
	}
	
	// Called when a gesture completes.
	// Returning true makes the manager restart detection of this gesture.
	public bool GestureCompleted(long userId, int userIndex, KinectGestures.Gestures gesture,
		                      KinectInterop.JointType joint, Vector3 screenPos)
	{
		// Branch on the gesture type and react accordingly.
		switch (gesture)
		{
			case KinectGestures.Gestures.SwipeLeft:
			
				break;
			case KinectGestures.Gestures.SwipeRight:
			
				break;
		}
		// BUG FIX: the original bool method had no return statement,
		// which is a compile error. true = restart gesture detection.
		return true;
	}
	
	// Called when a gesture is cancelled before completion.
	// Returning true restarts detection of this gesture.
	public bool GestureCancelled(long userId, int userIndex, KinectGestures.Gestures gesture, 
		                      KinectInterop.JointType joint)
	{
		return true;
	}
}

// Interaction listener skeleton: implements InteractionListenerInterface so
// the InteractionManager can report hand grip/release/click events.
public class MyInteractionListener : InteractionListenerInterface 
{
	// Called when a hand grip (closed fist) is detected.
	// NOTE: interface implementations must be public (the originals had no
	// access modifier, which defaults to private and does not compile).
	public void HandGripDetected(long userId, int userIndex, bool isRightHand, 
                              bool isHandInteracting, Vector3 handScreenPos)
	{
	
	}
	
	// Called when a hand release (open hand) is detected.
	public void HandReleaseDetected(long userId, int userIndex, bool isRightHand, 
                                 bool isHandInteracting, Vector3 handScreenPos)
	{
	
	}
	
	// Called when the left or right hand stays in place for at least
	// 2.5 seconds (a "click"). Returning true restarts click detection.
	public bool HandClickDetected(long userId, int userIndex, bool isRightHand, Vector3 handScreenPos)
	{
		// BUG FIX: the original bool method had no return statement,
		// which is a compile error. true = restart click detection.
		return true;
	}
}

»手势识别
Kinect手势类型定义在KinectGestures.cs中 KinectGestures.Gestures

// Gesture types recognized by KinectGestures.CheckForGesture().
public enum Gestures
{
	None = 0,
	RaiseRightHand,// right hand raised above the shoulder and held for at least a second
	RaiseLeftHand,// left hand raised above the shoulder and held for at least a second
	Psi,// both hands raised above the shoulders and held for at least a second
	Tpose,// original note said "touch" (触摸) — presumably a T-pose (arms extended); verify against KinectGestures.cs
	Stop,// both hands hanging down
	Wave,// left or right hand raised and waved back and forth
	Click,// left or right hand held still in place for at least 2.5 seconds
	SwipeLeft,// right hand swipes to the left
	SwipeRight,// left hand swipes to the right
	SwipeUp,// left or right hand swipes upward
	SwipeDown,// left or right hand swipes downward
	RightHandCursor,// pseudo gesture, used to make a cursor follow the hand
	LeftHandCursor,// pseudo gesture, used to make a cursor follow the hand
	ZoomIn,// elbows down, palms at least 0.7 m apart, then slowly brought together
	ZoomOut,// elbows down, palms together (prayer pose), then slowly moved apart
	Wheel,// imagine holding a steering wheel with both hands and turning it left/right
	Jump,// hip center rises at least 10 cm within 1.5 seconds (jump)
	Squat,// hip center drops at least 10 cm within 1.5 seconds (squat)
	Push,// push the left or right hand outward within 1.5 seconds
	Pull,// pull the left or right hand inward within 1.5 seconds
	ShoulderLeftFront,// left shoulder leaned forward
	ShoulderRightFront,// right shoulder leaned forward
	LeanLeft, // body leans to the left
	LeanRight, // body leans to the right
	LeanForward,// body leans forward
	LeanBack,// body leans backward
	KickLeft,// kick with the left foot
	KickRight,// kick with the right foot
	Run,// run
	RaisedRightHorizontalLeftHand,// original note: left hand held horizontally (name suggests raised right hand + horizontal left hand — verify)
	RaisedLeftHorizontalRightHand,// original note: right hand held horizontally (name suggests raised left hand + horizontal right hand — verify)
	
	// user-defined custom gestures
	UserGesture1 = 101,
	UserGesture2 = 102,
	UserGesture3 = 103,
	UserGesture4 = 104,
	UserGesture5 = 105,
	UserGesture6 = 106,
	UserGesture7 = 107,
	UserGesture8 = 108,
	UserGesture9 = 109,
	UserGesture10 = 110,
}
每种手势的识别处理在KinectGestures.CheckForGesture()方法中,可以在这个方法中扩展自定义手势识别。


»关节
每个姿态由手、肘、肩、脊柱、臀部、膝、踝关节共同组成,参见KinectGestures.GetNeededJointIndexes()方法:

/// <summary>
/// Gets the list of joint indexes needed for gesture detection
/// (hands, elbows, shoulders, spine, hips, knees and ankles).
/// NOTE(review): the index fields assigned below are declared on the
/// enclosing class, outside this excerpt.
/// </summary>
public virtual int[] GetNeededJointIndexes(KinectManager manager)
{
	// hands
	leftHandIndex = manager.GetJointIndex(KinectInterop.JointType.HandLeft);
	rightHandIndex = manager.GetJointIndex(KinectInterop.JointType.HandRight);
	// elbows
	leftElbowIndex = manager.GetJointIndex(KinectInterop.JointType.ElbowLeft);
	rightElbowIndex = manager.GetJointIndex(KinectInterop.JointType.ElbowRight);
	// shoulders
	leftShoulderIndex = manager.GetJointIndex(KinectInterop.JointType.ShoulderLeft);
	rightShoulderIndex = manager.GetJointIndex(KinectInterop.JointType.ShoulderRight);
	// spine (hip center and shoulder center)
	hipCenterIndex = manager.GetJointIndex(KinectInterop.JointType.SpineBase);
	shoulderCenterIndex = manager.GetJointIndex(KinectInterop.JointType.SpineShoulder);
	// hips
	leftHipIndex = manager.GetJointIndex(KinectInterop.JointType.HipLeft);
	rightHipIndex = manager.GetJointIndex(KinectInterop.JointType.HipRight);
	// knees
	leftKneeIndex = manager.GetJointIndex(KinectInterop.JointType.KneeLeft);
	rightKneeIndex = manager.GetJointIndex(KinectInterop.JointType.KneeRight);
	// ankles
	leftAnkleIndex = manager.GetJointIndex(KinectInterop.JointType.AnkleLeft);
	rightAnkleIndex = manager.GetJointIndex(KinectInterop.JointType.AnkleRight);
	
	int[] neededJointIndexes = {
		leftHandIndex, rightHandIndex, leftElbowIndex, rightElbowIndex, leftShoulderIndex, 
                rightShoulderIndex, hipCenterIndex, shoulderCenterIndex, leftHipIndex, rightHipIndex, 
                leftKneeIndex, rightKneeIndex, leftAnkleIndex, rightAnkleIndex
	};

	return neededJointIndexes;
}


标签: Unity3d

Powered by emlog  蜀ICP备18021003号   sitemap

川公网安备 51019002001593号