Because this project cannot use an ordinary webcam and must use the Azure Kinect camera instead, the example was converted as follows.

1. Replace the script with the version listed below (the core of the swap is sketched right after this list).

2. Set the Kinect to 1280*720.

3. Assign the corresponding prefabs.

4. Add the KinectController from the Azure Kinect asset.

5. Change the ArCamera to 2000.
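
The heart of the conversion is in Update(): instead of pulling frames from a WebCamTextureToMatHelper, each frame the color image is read from the KinectManager singleton (from the Azure Kinect Examples asset, namespace com.rfilkov.kinect) and copied into an OpenCV Mat. Below is a minimal sketch of just that swap, with a hypothetical class name and the buffers reduced to the essentials; the calls used (KinectManager.Instance, GetColorImageTex, Utils.texture2DToMat) are exactly the ones the full script uses.

using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
using UnityEngine;
using com.rfilkov.kinect;

// Minimal sketch of the webcam -> Kinect frame-grab swap (see the full script below).
public class KinectFrameGrabSketch : MonoBehaviour
{
    KinectManager manager;   // the Azure Kinect Examples singleton, cached in Start()
    Mat userFrameMat;        // 720x1280 RGBA buffer matching the Kinect color stream

    void Start()
    {
        manager = KinectManager.Instance;
        userFrameMat = new Mat(720, 1280, CvType.CV_8UC4);
    }

    void Update()
    {
        // Old webcam version: Mat frame = webCamTextureToMatHelper.GetMat();
        // Kinect version: read the latest color frame (sensor index 0) as a Texture2D...
        Texture2D colorTex = manager.GetColorImageTex(0) as Texture2D;
        // ...and copy it into the OpenCV Mat (flipped, as in the full script).
        Utils.texture2DToMat(colorTex, userFrameMat, true, 1);
        // dlib face detection then runs on userFrameMat exactly as in the webcam example.
    }
}

Everything downstream of that Mat (dlib landmark detection, solvePnP, the AR transform) is unchanged from the webcam example, as the full script shows.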

using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
using DlibFaceLandmarkDetector;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using com.rfilkov.kinect;

namespace DlibFaceLandmarkDetectorExample
{
    /// <summary>
    /// AR Head WebCamTexture Example.
    /// This example refers to http://www.morethantechnical.com/2012/10/17/head-pose-estimation-with-opencv-opengl-revisited-w-code/
    /// and uses the effect asset from http://ktk-kumamoto.hatenablog.com/entry/2014/09/14/092400.
    /// </summary>
    //[RequireComponent(typeof(WebCamTextureToMatHelper))]
    public class TintARHeadWebcamTexture : MonoBehaviour
    {
        /// <summary>
        /// Determines if the face points are displayed.
        /// </summary>
        public bool displayFacePoints => true;

        /// <summary>
        /// Determines if the axes are displayed.
        /// </summary>
        public bool displayAxes => true;

        /// <summary>
        /// Determines if the head is displayed.
        /// </summary>
        public bool displayHead => true;

        /// <summary>
        /// Determines if the effects are displayed.
        /// </summary>
        public bool displayEffects => true;

        [Space(10)]

        /// <summary>
        /// The axes.
        /// </summary>
        public GameObject axes;

        /// <summary>
        /// The head.
        /// </summary>
        public GameObject head;

        /// <summary>
        /// The right eye.
        /// </summary>
        public GameObject rightEye;

        /// <summary>
        /// The left eye.
        /// </summary>
        public GameObject leftEye;

        /// <summary>
        /// The mouth.
        /// </summary>
        public GameObject mouth;

        /// <summary>
        /// The AR camera.
        /// </summary>
        public Camera ARCamera;

        /// <summary>
        /// The AR game object.
        /// </summary>
        public GameObject ARGameObject;

        [Space(10)]

        /// <summary>
        /// Determines if the AR camera should be moved instead of the AR object.
        /// </summary>
        public bool shouldMoveARCamera;

        [Space(10)]

        /// <summary>
        /// Determines if the low pass filter is enabled.
        /// </summary>
        public bool enableLowPassFilter;

        /// <summary>
        /// The position low pass. (Value in meters)
        /// </summary>
        public float positionLowPass = 4f;

        /// <summary>
        /// The rotation low pass. (Value in degrees)
        /// </summary>
        public float rotationLowPass = 2f;

        /// <summary>
        /// The old pose data.
        /// </summary>
        PoseData oldPoseData;

        /// <summary>
        /// The mouth particle systems.
        /// </summary>
        ParticleSystem[] mouthParticleSystem;

        /// <summary>
        /// The texture.
        /// </summary>
        Texture2D texture;

        /// <summary>
        /// The face landmark detector.
        /// </summary>
        FaceLandmarkDetector faceLandmarkDetector;

        /// <summary>
        /// The camera param matrix.
        /// </summary>
        Mat camMatrix;

        /// <summary>
        /// The distortion coeffs.
        /// </summary>
        MatOfDouble distCoeffs;

        /// <summary>
        /// The matrix that inverts the Y-axis.
        /// </summary>
        Matrix4x4 invertYM;

        /// <summary>
        /// The matrix that inverts the Z-axis.
        /// </summary>
        Matrix4x4 invertZM;

        /// <summary>
        /// The transformation matrix.
        /// </summary>
        Matrix4x4 transformationM = new Matrix4x4();

        /// <summary>
        /// The transformation matrix for AR.
        /// </summary>
        Matrix4x4 ARM;

        /// <summary>
        /// The 3d face object points (68-point model).
        /// </summary>
        MatOfPoint3f objectPoints68;

        /// <summary>
        /// The 3d face object points (5-point model).
        /// </summary>
        MatOfPoint3f objectPoints5;

        /// <summary>
        /// The image points.
        /// </summary>
        MatOfPoint2f imagePoints;

        /// <summary>
        /// The rvec.
        /// </summary>
        Mat rvec;

        /// <summary>
        /// The tvec.
        /// </summary>
        Mat tvec;

        /// <summary>
        /// The FPS monitor.
        /// </summary>
        FpsMonitor fpsMonitor;

        /// <summary>
        /// The dlib shape predictor file name.
        /// </summary>
        string dlibShapePredictorFileName = "sp_human_face_68.dat";

        /// <summary>
        /// The dlib shape predictor file path.
        /// </summary>
        string dlibShapePredictorFilePath;

#if UNITY_WEBGL && !UNITY_EDITOR
        IEnumerator getFilePath_Coroutine;
#endif

        public static TintARHeadWebcamTexture Instance;

        private void Awake()
        {
            Instance = this;
        }

        KinectManager manager;
        Mat userFrameMat;
        public int screenWidth => 1280;
        public int screenHeight => 720;
        public Texture2D userColorTex;
        public Camera ARHeadCamere;

        // Use this for initialization
        void Start()
        {
            if (KinectManager.Instance) manager = KinectManager.Instance;

            fpsMonitor = GetComponent<FpsMonitor>();

            // Buffers sized to the Kinect color stream (1280x720, RGBA).
            userFrameMat = new Mat(screenHeight, screenWidth, CvType.CV_8UC4);
            userColorTex = new Texture2D(screenWidth, screenHeight, TextureFormat.RGBA32, false);

            dlibShapePredictorFileName = DlibFaceLandmarkDetectorExample.dlibShapePredictorFileName;
#if UNITY_WEBGL && !UNITY_EDITOR
            getFilePath_Coroutine = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsync(dlibShapePredictorFileName, (result) => {
                getFilePath_Coroutine = null;
                dlibShapePredictorFilePath = result;
                Run();
            });
            StartCoroutine(getFilePath_Coroutine);
#else
            dlibShapePredictorFilePath = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath(dlibShapePredictorFileName);
            Run();
#endif
        }

        private void Run()
        {
            // Set 3d face object points.
            objectPoints68 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  // l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   // r eye (Interpupillary breadth)
                new Point3(0.0, 50, 120), // nose (Nose top)
                new Point3(-26, 15, 83),  // l mouth (Mouth breadth)
                new Point3(26, 15, 83),   // r mouth (Mouth breadth)
                new Point3(-79, 90, 0.0), // l ear (Bitragion breadth)
                new Point3(79, 90, 0.0)   // r ear (Bitragion breadth)
            );
            objectPoints5 = new MatOfPoint3f(
                new Point3(-23, 90, 83),  // l eye (Inner corner of the eye)
                new Point3(23, 90, 83),   // r eye (Inner corner of the eye)
                new Point3(-50, 90, 80),  // l eye (Tail of the eye)
                new Point3(50, 90, 80),   // r eye (Tail of the eye)
                new Point3(0.0, 50, 120)  // nose (Nose top)
            );
            imagePoints = new MatOfPoint2f();

            faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

#if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
            // Note: the WebCamTextureToMatHelper component was removed in this Kinect version,
            // so the original line below would no longer compile and is commented out.
            //webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif

            OnWebCamTextureToMatHelperInitialized();
        }

        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            // Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(screenWidth, screenHeight, TextureFormat.RGBA32, false);
            gameObject.GetComponent<Renderer>().material.mainTexture = texture;
            gameObject.transform.localScale = new Vector3(screenWidth, screenHeight, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width = screenWidth;
            float height = screenHeight;
            float imageSizeScale = 1.0f;
            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale)
            {
                ARHeadCamere.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                ARHeadCamere.orthographicSize = height / 2;
            }

            // Set camera param.
            int max_d = (int)Mathf.Max(width, height);
            double fx = max_d;
            double fy = max_d;
            double cx = width / 2.0f;
            double cy = height / 2.0f;
            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            // Calibration camera.
            Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point(0, 0);
            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio[0]);

            // To convert the difference of the FOV value of the OpenCV and Unity.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);

            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
            }

            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());
            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());

            axes.SetActive(false);
            head.SetActive(false);
            rightEye.SetActive(false);
            leftEye.SetActive(false);
            mouth.SetActive(false);

            mouthParticleSystem = mouth.GetComponentsInChildren<ParticleSystem>(true);
        }

        /// <summary>
        /// Raises the web cam texture to mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

            if (texture != null)
            {
                Texture2D.Destroy(texture);
                texture = null;
            }

            camMatrix.Dispose();
            distCoeffs.Dispose();
        }

        /// <summary>
        /// Raises the web cam texture to mat helper error occurred event.
        /// </summary>
        /// <param name="errorCode">Error code.</param>
        public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
        {
            Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
        }

        // Update is called once per frame
        void Update()
        {
            // if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            { // Braces kept from the original webcam example's per-frame guard.
                // Mat userFrameMat = webCamTextureToMatHelper.GetMat();

                // Pull the current color frame from the Kinect instead of a webcam.
                userColorTex = manager.GetColorImageTex(0) as Texture2D;
                Utils.texture2DToMat(userColorTex, userFrameMat, true, 1);

                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, userFrameMat);

                // Detect face rects.
                List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    // Detect landmark points.
                    List<Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult[0]);

                    if (displayFacePoints)
                        OpenCVForUnityUtils.DrawFaceLandmark(userFrameMat, points, new Scalar(0, 255, 0, 255), 2);

                    MatOfPoint3f objectPoints = null;
                    bool isRightEyeOpen = false;
                    bool isLeftEyeOpen = false;
                    bool isMouthOpen = false;
                    if (points.Count == 68)
                    {
                        objectPoints = objectPoints68;

                        imagePoints.fromArray(
                            new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), // l eye (Interpupillary breadth)
                            new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), // r eye (Interpupillary breadth)
                            new Point(points[30].x, points[30].y), // nose (Nose top)
                            new Point(points[48].x, points[48].y), // l mouth (Mouth breadth)
                            new Point(points[54].x, points[54].y), // r mouth (Mouth breadth)
                            new Point(points[0].x, points[0].y),   // l ear (Bitragion breadth)
                            new Point(points[16].x, points[16].y)  // r ear (Bitragion breadth)
                        );

                        if (Mathf.Abs((float)(points[43].y - points[46].y)) > Mathf.Abs((float)(points[42].x - points[45].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points[38].y - points[41].y)) > Mathf.Abs((float)(points[39].x - points[36].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance = Mathf.Abs((float)(points[27].y - points[33].y));
                        float mouseDistance = Mathf.Abs((float)(points[62].y - points[66].y));
                        if (mouseDistance > noseDistance / 5.0)
                        {
                            isMouthOpen = true;
                        }
                        else
                        {
                            isMouthOpen = false;
                        }
                    }
                    else if (points.Count == 5)
                    {
                        objectPoints = objectPoints5;

                        imagePoints.fromArray(
                            new Point(points[3].x, points[3].y), // l eye (Inner corner of the eye)
                            new Point(points[1].x, points[1].y), // r eye (Inner corner of the eye)
                            new Point(points[2].x, points[2].y), // l eye (Tail of the eye)
                            new Point(points[0].x, points[0].y), // r eye (Tail of the eye)
                            new Point(points[4].x, points[4].y)  // nose (Nose top)
                        );

                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "This example supports mainly the face landmark points of 68 points.";
                        }
                    }

                    // Estimate head pose.
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }

                    double tvec_z = tvec.get(2, 0)[0];

                    if (double.IsNaN(tvec_z) || tvec_z < 0)
                    {
                        // If tvec is wrong data, do not use extrinsic guesses.
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }
                    // Debug.Log (tvec.dump());

                    if (!double.IsNaN(tvec_z))
                    {
                        if (displayHead)
                            head.SetActive(true);
                        if (displayAxes)
                            axes.SetActive(true);

                        if (displayEffects)
                        {
                            rightEye.SetActive(isRightEyeOpen);
                            leftEye.SetActive(isLeftEyeOpen);

                            if (isMouthOpen)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = true;
#if UNITY_5_5_OR_NEWER
                                    var main = ps.main;
                                    main.startSizeMultiplier = 20;
#else
                                    ps.startSize = 20;
#endif
                                }
                            }
                            else
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvec.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvec.get(0, 0, tvecArr);
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
                    }

                    // Right-handed coordinates system (OpenCV) to left-handed one (Unity).
                    ARM = invertYM * transformationM;

                    // Apply Z-axis inverted matrix.
                    ARM = ARM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }

                // Imgproc.putText (userFrameMat, "W:" + userFrameMat.width () + " H:" + userFrameMat.height () + " SO:" + Screen.orientation, new Point (5, userFrameMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(userFrameMat, texture);
            }
        }

        /// <summary>
        /// Raises the destroy event.
        /// </summary>
        void OnDestroy()
        {
            //if (webCamTextureToMatHelper != null)
            //    webCamTextureToMatHelper.Dispose();

            if (faceLandmarkDetector != null)
                faceLandmarkDetector.Dispose();

#if UNITY_WEBGL && !UNITY_EDITOR
            if (getFilePath_Coroutine != null)
            {
                StopCoroutine(getFilePath_Coroutine);
                ((IDisposable)getFilePath_Coroutine).Dispose();
            }
#endif
        }

        /// <summary>
        /// Raises the back button click event.
        /// </summary>
        public void OnBackButtonClick()
        {
            SceneManager.LoadScene("DlibFaceLandmarkDetectorExample");
        }
    }
}
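
A note on the camera parameters: since no real calibration of the Kinect color camera is performed, the script builds an approximate pinhole intrinsics matrix from the image size alone (fx = fy = max(width, height), principal point at the image center) and passes zero distortion coefficients. With the 1280*720 resolution from step 2, the vertical FOV reported by calibrationMatrixValues, which is what lands on ARCamera.fieldOfView on a 16:9 display, works out to roughly 31.4 degrees. A quick standalone check of that arithmetic (not part of the script):

using System;

// Standalone check of the FOV the script assigns for a 1280x720 color stream.
// Mirrors the camMatrix setup above: fx = fy = max(w, h), cx = w/2, cy = h/2.
class FovCheck
{
    static void Main()
    {
        double w = 1280, h = 720;
        double fy = Math.Max(w, h);   // 1280
        double cy = h / 2.0;          // 360

        // Same formula calibrationMatrixValues uses for the vertical FOV (in degrees).
        double fovy = (Math.Atan2(cy, fy) + Math.Atan2(h - cy, fy)) * 180.0 / Math.PI;

        // fovYScale in the script is 1 here, because cy sits exactly at the image center.
        Console.WriteLine(fovy);      // prints about 31.4
    }
}

If the overlay drifts as the head moves toward the edges of the frame, swapping these guessed intrinsics for the factory calibration that the Azure Kinect sensor SDK exposes would be a natural improvement.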
