1. Framework Overview

(Figure: Main.png, the framework diagram)

2. Key Code

Three scripts do the heavy lifting: WebCamTextureARSample estimates the head pose from Dlib landmarks and drives the AR effects, TrackedMeshOverlay manages a pool of tracked face meshes, and WebCamTextureFaceMaskExample ties detection, tracking, and the overlay together to implement the face swap.

WebCamTextureARSample

using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using UnityEngine.UI;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using OpenCVForUnity;
using DlibFaceLandmarkDetector;

namespace DlibFaceLandmarkDetectorSample
{
    /// <summary>
    /// Face tracker AR from WebCamTexture sample.
    /// The head-pose estimation follows http://www.morethantechnical.com/2012/10/17/head-pose-estimation-with-opencv-opengl-revisited-w-code/
    /// and the effect assets come from http://ktk-kumamoto.hatenablog.com/entry/2014/09/14/092400
    /// </summary>
    [RequireComponent (typeof(WebCamTextureToMatHelper))]
    public class WebCamTextureARSample : MonoBehaviour
    {

        /// <summary>
        /// Whether to show the detected face landmark points.
        /// </summary>
        public bool isShowingFacePoints;

        /// <summary>
        /// The "show face points" toggle.
        /// </summary>
        public Toggle isShowingFacePointsToggle;

        /// <summary>
        /// Whether to show the pose axes.
        /// </summary>
        public bool isShowingAxes;

        /// <summary>
        /// The "show axes" toggle.
        /// </summary>
        public Toggle isShowingAxesToggle;

        /// <summary>
        /// Whether to show the head model.
        /// </summary>
        public bool isShowingHead;

        /// <summary>
        /// The "show head" toggle.
        /// </summary>
        public Toggle isShowingHeadToggle;

        /// <summary>
        /// Whether to show the particle effects.
        /// </summary>
        public bool isShowingEffects;

        /// <summary>
        /// The "show effects" toggle.
        /// </summary>
        public Toggle isShowingEffectsToggle;

        /// <summary>
        /// The axes object.
        /// </summary>
        public GameObject axes;

        /// <summary>
        /// The head object.
        /// </summary>
        public GameObject head;

        /// <summary>
        /// The right-eye effect object.
        /// </summary>
        public GameObject rightEye;

        /// <summary>
        /// The left-eye effect object.
        /// </summary>
        public GameObject leftEye;

        /// <summary>
        /// The mouth effect object.
        /// </summary>
        public GameObject mouth;

        /// <summary>
        /// The mouth particle systems.
        /// </summary>
        ParticleSystem[] mouthParticleSystem;

        /// <summary>
        /// The texture the webcam frame is rendered into.
        /// </summary>
        Texture2D texture;

        /// <summary>
        /// The face landmark detector.
        /// </summary>
        FaceLandmarkDetector faceLandmarkDetector;

        /// <summary>
        /// The AR camera.
        /// </summary>
        public Camera ARCamera;

        /// <summary>
        /// The camera intrinsic matrix.
        /// </summary>
        Mat camMatrix;

        /// <summary>
        /// The distortion coefficients.
        /// </summary>
        MatOfDouble distCoeffs;

        /// <summary>
        /// Matrix that inverts the Y axis.
        /// </summary>
        Matrix4x4 invertYM;

        /// <summary>
        /// The transformation matrix.
        /// </summary>
        Matrix4x4 transformationM = new Matrix4x4 ();

        /// <summary>
        /// Matrix that inverts the Z axis.
        /// </summary>
        Matrix4x4 invertZM;

        /// <summary>
        /// The AR transformation matrix.
        /// </summary>
        Matrix4x4 ARM;

        /// <summary>
        /// The AR game object.
        /// </summary>
        public GameObject ARGameObject;

        /// <summary>
        /// Whether to move the AR camera instead of the AR object.
        /// </summary>
        public bool shouldMoveARCamera;

        /// <summary>
        /// The 3D face object points.
        /// </summary>
        MatOfPoint3f objectPoints;

        /// <summary>
        /// The 2D image points.
        /// </summary>
        MatOfPoint2f imagePoints;

        /// <summary>
        /// The rotation vector from solvePnP.
        /// </summary>
        Mat rvec;

        /// <summary>
        /// The translation vector from solvePnP.
        /// </summary>
        Mat tvec;

        /// <summary>
        /// The rotation matrix.
        /// </summary>
        Mat rotM;

        /// <summary>
        /// The webcam-texture-to-Mat helper.
        /// </summary>
        WebCamTextureToMatHelper webCamTextureToMatHelper;

        /// <summary>
        /// Path of the shape_predictor_68_face_landmarks.dat model file.
        /// </summary>
        private string shape_predictor_68_face_landmarks_dat_filepath;

        // Use this for initialization
        void Start ()
        {
            isShowingFacePointsToggle.isOn = isShowingFacePoints;
            isShowingAxesToggle.isOn = isShowingAxes;
            isShowingHeadToggle.isOn = isShowingHead;
            isShowingEffectsToggle.isOn = isShowingEffects;

#if UNITY_WEBGL && !UNITY_EDITOR
            StartCoroutine (DlibFaceLandmarkDetector.Utils.getFilePathAsync ("shape_predictor_68_face_landmarks.dat", (result) => {
                shape_predictor_68_face_landmarks_dat_filepath = result;
                Run ();
            }));
#else
            shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat");
            Run ();
#endif
        }

        private void Run ()
        {
            // Set the 3D face object points (model coordinates).
            objectPoints = new MatOfPoint3f (
                new Point3 (-31, 72, 86),  // left eye
                new Point3 (31, 72, 86),   // right eye
                new Point3 (0, 40, 114),   // nose
                new Point3 (-20, 15, 90),  // left mouth corner
                new Point3 (20, 15, 90),   // right mouth corner
                new Point3 (-69, 76, -2),  // left ear
                new Point3 (69, 76, -2)    // right ear
            );
            imagePoints = new MatOfPoint2f ();
            rvec = new Mat ();
            tvec = new Mat ();
            rotM = new Mat (3, 3, CvType.CV_64FC1);

            faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
            webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
            webCamTextureToMatHelper.Init ();
        }

        /// <summary>
        /// Raises the webcam texture to Mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited ()
        {
            Debug.Log ("OnWebCamTextureToMatHelperInited");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
            texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
            gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
            Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width = webCamTextureMat.width ();
            float height = webCamTextureMat.height ();
            float imageSizeScale = 1.0f;
            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale) {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            } else {
                Camera.main.orthographicSize = height / 2;
            }

            // (The rest of this method, which builds camMatrix/distCoeffs and the
            // invertYM/invertZM conversion matrices consumed by Update() below, is
            // missing from the original excerpt; see the note after this listing.
            // imageSizeScale above is used by that omitted setup.)
        }

        // Update is called once per frame
        void Update ()
        {
            if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {

                Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
                OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);

                // Detect face rects.
                List<UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect ();
                if (detectResult.Count > 0) {

                    // Detect landmark points.
                    List<Vector2> points = faceLandmarkDetector.DetectLandmark (detectResult [0]);
                    if (points.Count > 0) {
                        if (isShowingFacePoints)
                            OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, points, new Scalar (0, 255, 0, 255), 2);

                        imagePoints.fromArray (
                            new Point ((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), // left eye
                            new Point ((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), // right eye
                            new Point (points [33].x, points [33].y), // nose
                            new Point (points [48].x, points [48].y), // left mouth corner
                            new Point (points [54].x, points [54].y), // right mouth corner
                            new Point (points [0].x, points [0].y),   // left ear
                            new Point (points [16].x, points [16].y)  // right ear
                        );

                        // Estimate the head pose.
                        Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        // Eye effects.
                        if (tvec.get (2, 0) [0] > 0) {
                            if (Mathf.Abs ((float)(points [43].y - points [46].y)) > Mathf.Abs ((float)(points [42].x - points [45].x)) / 6.0) {
                                if (isShowingEffects)
                                    rightEye.SetActive (true);
                            }
                            if (Mathf.Abs ((float)(points [38].y - points [41].y)) > Mathf.Abs ((float)(points [39].x - points [36].x)) / 6.0) {
                                if (isShowingEffects)
                                    leftEye.SetActive (true);
                            }
                            if (isShowingHead)
                                head.SetActive (true);
                            if (isShowingAxes)
                                axes.SetActive (true);

                            // Mouth effects.
                            float noseDistance = Mathf.Abs ((float)(points [27].y - points [33].y));
                            float mouseDistance = Mathf.Abs ((float)(points [62].y - points [66].y));
                            if (mouseDistance > noseDistance / 5.0) {
                                if (isShowingEffects) {
                                    mouth.SetActive (true);
                                    foreach (ParticleSystem ps in mouthParticleSystem) {
                                        ps.enableEmission = true;
                                        ps.startSize = 500 * (mouseDistance / noseDistance);
                                    }
                                }
                            } else {
                                if (isShowingEffects) {
                                    foreach (ParticleSystem ps in mouthParticleSystem) {
                                        ps.enableEmission = false;
                                    }
                                }
                            }
                        }

                        // Convert the OpenCV pose (rvec/tvec) to a Unity transformation matrix.
                        Calib3d.Rodrigues (rvec, rotM);
                        transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
                        transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
                        transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
                        transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));

                        if (shouldMoveARCamera) {
                            if (ARGameObject != null) {
                                ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                                ARUtils.SetTransformFromMatrix (ARCamera.transform, ref ARM);
                                ARGameObject.SetActive (true);
                            }
                        } else {
                            ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
                            if (ARGameObject != null) {
                                ARUtils.SetTransformFromMatrix (ARGameObject.transform, ref ARM);
                                ARGameObject.SetActive (true);
                            }
                        }
                    }
                }

                // Draw the resolution text at the bottom of the frame.
                // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors ());
            }
        }
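
        // How the effect triggers above work: both are aspect-ratio tests on
        // Dlib's 68-point landmark layout. An eye counts as open when its
        // height exceeds 1/6 of its width; the mouth particle effect fires when
        // the inner-lip gap (points 62/66) exceeds 1/5 of the nose-bridge
        // length (points 27/33), and the particle start size scales with that
        // same mouth/nose ratio.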

        /// <summary>
        /// Raises the disable event.
        /// </summary>
        void OnDisable ()
        {
            if (webCamTextureToMatHelper != null)
                webCamTextureToMatHelper.Dispose ();

            if (faceLandmarkDetector != null)
                faceLandmarkDetector.Dispose ();
        }

        /// <summary>
        /// Raises the back button event.
        /// </summary>
        public void OnBackButton ()
        {
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
            // SceneManager.LoadScene ("DlibFaceLandmarkDetectorSample");
            SceneManager.LoadScene ("FaceMask");
#else
            Application.LoadLevel ("FaceMask");
            // Application.LoadLevel ("DlibFaceLandmarkDetectorSample");
#endif
        }

        /// <summary>
        /// Raises the play button event.
        /// </summary>
        public void OnPlayButton ()
        {
            webCamTextureToMatHelper.Play ();
        }

        /// <summary>
        /// Raises the pause button event.
        /// </summary>
        public void OnPauseButton ()
        {
            webCamTextureToMatHelper.Pause ();
        }

        /// <summary>
        /// Raises the stop button event.
        /// </summary>
        public void OnStopButton ()
        {
            webCamTextureToMatHelper.Stop ();
        }

        /// <summary>
        /// Raises the change camera button event.
        /// </summary>
        public void OnChangeCameraButton ()
        {
            webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
        }
    }
}

WebCamTextureToMatHelper

(Excerpt from the webcam helper component used above. The field declarations and most of the initialization coroutine are omitted; the listing resumes at the coroutine's init-timeout check.)

public class WebCamTextureToMatHelper : MonoBehaviour
{
    // ... (fields and the start of the initialization coroutine omitted) ...

            if (isTimeout) {
                // Debug.Log ("Init time out.");
                webCamTexture.Stop ();
                webCamTexture = null;
                initWaiting = false;

                if (OnErrorOccurredEvent != null)
                    OnErrorOccurredEvent.Invoke (ErrorCode.TIMEOUT);
            }
    }

    /// <summary>
    /// Returns whether the helper has finished initializing.
    /// </summary>
    /// <returns><c>true</c> if initialization is done; otherwise, <c>false</c>.</returns>
    public bool IsInited ()
    {
        return initDone;
    }

    /// <summary>
    /// Starts the webcam.
    /// </summary>
    public void Play ()
    {
        if (initDone)
            webCamTexture.Play ();
    }

    /// <summary>
    /// Pauses the webcam.
    /// </summary>
    public void Pause ()
    {
        if (initDone)
            webCamTexture.Pause ();
    }

    /// <summary>
    /// Stops the webcam.
    /// </summary>
    public void Stop ()
    {
        if (initDone)
            webCamTexture.Stop ();
    }

    /// <summary>
    /// Returns whether the webcam is playing.
    /// </summary>
    /// <returns><c>true</c> if the webcam is playing; otherwise, <c>false</c>.</returns>
    public bool IsPlaying ()
    {
        if (!initDone)
            return false;
        return webCamTexture.isPlaying;
    }

    /// <summary>
    /// Gets the webcam texture.
    /// </summary>
    /// <returns>The webcam texture.</returns>
    public WebCamTexture GetWebCamTexture ()
    {
        return (initDone) ? webCamTexture : null;
    }

    /// <summary>
    /// Gets the webcam device.
    /// </summary>
    /// <returns>The webcam device.</returns>
    public WebCamDevice GetWebCamDevice ()
    {
        return webCamDevice;
    }

    /// <summary>
    /// Returns whether the webcam texture was updated this frame.
    /// </summary>
    /// <returns><c>true</c> if the texture was updated this frame; otherwise, <c>false</c>.</returns>
    public bool DidUpdateThisFrame ()
    {
        if (!initDone)
            return false;

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
            return true;
        } else {
            return false;
        }
#else
        return webCamTexture.didUpdateThisFrame;
#endif
    }

    /// <summary>
    /// Gets the current frame as a Mat, rotated and flipped to match the device orientation.
    /// </summary>
    /// <returns>The Mat.</returns>
    public Mat GetMat ()
    {
        if (!initDone || !webCamTexture.isPlaying) {
            if (rotatedRgbaMat != null) {
                return rotatedRgbaMat;
            } else {
                return rgbaMat;
            }
        }

        Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

        if (rotatedRgbaMat != null) {
            using (Mat transposeRgbaMat = rgbaMat.t ()) {
                Core.flip (transposeRgbaMat, rotatedRgbaMat, 1);
            }
            flipMat (rotatedRgbaMat);
            return rotatedRgbaMat;
        } else {
            flipMat (rgbaMat);
            return rgbaMat;
        }
    }

    /// <summary>
    /// Flips the Mat to compensate for the camera orientation and the flip flags.
    /// </summary>
    /// <param name="mat">Mat.</param>
    private void flipMat (Mat mat)
    {
        int flipCode = int.MinValue;

        if (webCamDevice.isFrontFacing) {
            if (webCamTexture.videoRotationAngle == 0) {
                flipCode = 1;
            } else if (webCamTexture.videoRotationAngle == 90) {
                flipCode = 1;
            }
            if (webCamTexture.videoRotationAngle == 180) {
                flipCode = 0;
            } else if (webCamTexture.videoRotationAngle == 270) {
                flipCode = 0;
            }
        } else {
            if (webCamTexture.videoRotationAngle == 180) {
                flipCode = -1;
            } else if (webCamTexture.videoRotationAngle == 270) {
                flipCode = -1;
            }
        }

        if (flipVertical) {
            if (flipCode == int.MinValue) {
                flipCode = 0;
            } else if (flipCode == 0) {
                flipCode = int.MinValue;
            } else if (flipCode == 1) {
                flipCode = -1;
            } else if (flipCode == -1) {
                flipCode = 1;
            }
        }

        if (flipHorizontal) {
            if (flipCode == int.MinValue) {
                flipCode = 1;
            } else if (flipCode == 0) {
                flipCode = -1;
            } else if (flipCode == 1) {
                flipCode = int.MinValue;
            } else if (flipCode == -1) {
                flipCode = 0;
            }
        }

        if (flipCode > int.MinValue) {
            Core.flip (mat, mat, flipCode);
        }
    }
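
    // For reference, Core.flip's flipCode convention: 0 flips around the
    // x-axis (vertical flip), a positive code flips around the y-axis
    // (mirror), and a negative code flips around both. flipMat() uses
    // int.MinValue as a "no flip" sentinel, and the flipVertical /
    // flipHorizontal blocks compose one extra flip on top of whatever the
    // rotation logic already chose.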

    /// <summary>
    /// Gets the buffer colors.
    /// </summary>
    /// <returns>The buffer colors.</returns>
    public Color32[] GetBufferColors ()
    {
        return colors;
    }

    /// <summary>
    /// Releases the resources allocated by the init method.
    /// </summary>
    private void dispose ()
    {
        initWaiting = false;
        initDone = false;

        if (webCamTexture != null) {
            webCamTexture.Stop ();
            webCamTexture = null;
        }
        if (rgbaMat != null) {
            rgbaMat.Dispose ();
            rgbaMat = null;
        }
        if (rotatedRgbaMat != null) {
            rotatedRgbaMat.Dispose ();
            rotatedRgbaMat = null;
        }

        if (OnDisposedEvent != null)
            OnDisposedEvent.Invoke ();
    }

    /// <summary>
    /// Releases all resources used by the helper. Call this when you are
    /// finished with the helper; afterwards it is unusable until re-initialized,
    /// and all references to it should be released so the garbage collector can
    /// reclaim its memory.
    /// </summary>
    public void Dispose ()
    {
        if (initDone)
            dispose ();

        colors = null;
    }

}
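
A gap note on the AR sample: OnWebCamTextureToMatHelperInited is cut off above, right before it builds camMatrix, distCoeffs, invertYM and invertZM, all of which Update() consumes. Below is a minimal sketch of what that tail has to contain, assuming a pinhole model with the principal point at the image centre and max(width, height) as an approximate focal length; this is a reconstruction under those assumptions, not the author's exact values.

// Continuation of OnWebCamTextureToMatHelperInited (reconstructed sketch):
float maxD = Mathf.Max (width, height);
camMatrix = new Mat (3, 3, CvType.CV_64FC1);
camMatrix.put (0, 0, maxD); camMatrix.put (0, 1, 0);    camMatrix.put (0, 2, width / 2.0f);
camMatrix.put (1, 0, 0);    camMatrix.put (1, 1, maxD); camMatrix.put (1, 2, height / 2.0f);
camMatrix.put (2, 0, 0);    camMatrix.put (2, 1, 0);    camMatrix.put (2, 2, 1.0f);
distCoeffs = new MatOfDouble (0, 0, 0, 0); // assume an undistorted image

// OpenCV's camera frame is right-handed (Y down, Z forward); Unity's is
// left-handed (Y up). Sandwiching the estimated pose between a Y-flip and a
// Z-flip, as Update() does, converts one convention into the other.
invertYM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, -1, 1));
invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));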

TrackedMeshOverlay

using System;
using System.Collections.Generic;
using UnityEngine;
using OpenCVForUnity.RectangleTrack;

namespace FaceMaskExample
{
    public class TrackedMeshOverlay : MonoBehaviour
    {
        public int Interval = 1;
        public int PoolSize = 10;

        [SerializeField]
        private GameObject baseObject;

        public GameObject BaseObject {
            get {
                return baseObject;
            }
            set {
                baseObject = value;
                setBaseObject (baseObject);
            }
        }

        public float Width {
            get {
                return targetWidth;
            }
        }

        public float Height {
            get {
                return targetHeight;
            }
        }

        protected Transform targetTransform;
        protected float targetWidth = 0;
        protected float targetHeight = 0;
        protected Transform overlayTransform;
        protected ObjectPool objectPool;
        protected Dictionary<int, TrackedMesh> showingObjects = new Dictionary<int, TrackedMesh> ();

        void Awake ()
        {
            init ("MeshOverlay");
        }

        void OnDestroy ()
        {
            overlayTransform = null;
            targetTransform = null;
            targetWidth = 0;
            targetHeight = 0;
            showingObjects.Clear ();
            if (objectPool != null) {
                Destroy (objectPool.gameObject);
                objectPool = null;
            }
        }

        protected GameObject getPoolObject (Transform parent)
        {
            if (objectPool == null)
                return null;

            GameObject newObj = objectPool.GetInstance (parent);
            if (newObj != null) {
                newObj.transform.SetParent (parent, false);
                return newObj;
            } else {
                return null;
            }
        }

        protected virtual void init (String name)
        {
            GameObject obj = new GameObject (name);
            overlayTransform = obj.transform;
            overlayTransform.parent = gameObject.transform.parent;

            if (baseObject != null)
                setBaseObject (baseObject);
        }

        protected virtual void setBaseObject (GameObject obj)
        {
            if (obj.GetComponent<TrackedMesh> () == null) {
                Debug.LogWarning ("Object is not TrackedMesh.");
                return;
            }

            if (objectPool != null) {
                Destroy (objectPool);
            }

            objectPool = overlayTransform.gameObject.AddComponent<ObjectPool> ();
            objectPool.prefab = obj;
            objectPool.maxCount = PoolSize;
            objectPool.prepareCount = PoolSize / 2;
            objectPool.Interval = Interval;
        }

        public virtual void UpdateOverlayTransform (Transform targetTransform)
        {
            if (targetTransform == null) {
                this.targetTransform = null;
                return;
            }

            targetWidth = targetTransform.localScale.x;
            targetHeight = targetTransform.localScale.y;
            this.targetTransform = targetTransform;
            overlayTransform.localPosition = targetTransform.localPosition;
            overlayTransform.localRotation = targetTransform.localRotation;
            overlayTransform.localScale = targetTransform.localScale;
        }

        public virtual TrackedMesh GetObjectById (int id)
        {
            if (showingObjects.ContainsKey (id)) {
                return showingObjects [id];
            }
            return null;
        }

        public virtual TrackedMesh CreateObject (int id, Texture2D tex = null)
        {
            if (!showingObjects.ContainsKey (id)) {
                GameObject obj = getPoolObject (overlayTransform);
                if (obj == null)
                    return null;

                TrackedMesh tm = obj.GetComponent<TrackedMesh> ();
                if (tm != null) {
                    tm.Id = id;
                    tm.transform.localPosition = Vector3.zero;
                    tm.transform.localRotation = Quaternion.identity;
                    tm.transform.localScale = Vector3.one;
                    if (tex != null) {
                        Renderer tmRenderer = tm.transform.GetComponent<Renderer> ();
                        tmRenderer.sharedMaterial.SetTexture ("_MainTex", tex);
                    }
                    showingObjects.Add (id, tm);
                }
                return tm;
            } else {
                return null;
            }
        }

        public virtual void UpdateObject (int id, Vector3[] vertices, int[] triangles = null, Vector2[] uv = null)
        {
            if (showingObjects.ContainsKey (id)) {
                TrackedMesh tm = showingObjects [id];

                if (vertices.Length != tm.MeshFilter.mesh.vertices.Length)
                    Debug.LogError ("The number of vertices does not match.");
                tm.MeshFilter.mesh.vertices = vertices;

                if (triangles != null) {
                    tm.MeshFilter.mesh.triangles = triangles;
                }
                if (uv != null) {
                    tm.MeshFilter.mesh.uv = uv;
                }

                tm.MeshFilter.mesh.RecalculateBounds ();
                tm.MeshFilter.mesh.RecalculateNormals ();
            }
        }

        public virtual void DeleteObject (int id)
        {
            if (showingObjects.ContainsKey (id)) {
                if (showingObjects [id] != null)
                    showingObjects [id].gameObject.SetActive (false);
                showingObjects.Remove (id);
            }
        }

        public virtual void Reset ()
        {
            foreach (int key in showingObjects.Keys) {
                if (showingObjects [key] != null)
                    showingObjects [key].gameObject.SetActive (false);
            }
            showingObjects.Clear ();
        }
    }
}
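
How the overlay is driven each frame, distilled from the Update() loop of the next listing (the variable names here are illustrative):

foreach (TrackedRect tr in trackedRects) {
    if (tr.state == TrackedState.NEW)
        overlay.CreateObject (tr.id, maskTexture);         // take a pooled mesh
    if (tr.state < TrackedState.DELETED)
        overlay.UpdateObject (tr.id, vertices, null, uv);  // reshape it to the landmarks
    else if (tr.state == TrackedState.DELETED)
        overlay.DeleteObject (tr.id);                      // deactivate and forget it
}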

WebCamTextureFaceMaskExample

using System.Collections;
using System.Collections.Generic;
using System.IO;
using UnityEngine;
using UnityEngine.UI;
using DlibFaceLandmarkDetector;
using OpenCVForUnity;
using OpenCVForUnity.RectangleTrack;
using WebGLFileUploader;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif

namespace FaceMaskExample
{
    public class WebCamTextureFaceMaskExample : MonoBehaviour
    {
        // (The field declarations, such as webCamTextureToMatHelper, meshOverlay,
        // rectangleTracker, faceLandmarkDetector, cascade, the UI toggles and
        // flags, and faceMaskTexture/faceMaskMat, are omitted in this excerpt;
        // their names and types follow from the code below.)

#if UNITY_WEBGL && !UNITY_EDITOR
        private IEnumerator getFilePathCoroutine ()
        {
            var getFilePathAsync_0_Coroutine = StartCoroutine (OpenCVForUnity.Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
                haarcascade_frontalface_alt_xml_filepath = result;
            }));
            var getFilePathAsync_1_Coroutine = StartCoroutine (DlibFaceLandmarkDetector.Utils.getFilePathAsync ("shape_predictor_68_face_landmarks.dat", (result) => {
                shape_predictor_68_face_landmarks_dat_filepath = result;
            }));

            yield return getFilePathAsync_0_Coroutine;
            yield return getFilePathAsync_1_Coroutine;

            Run ();
            uploadFaceMaskButton.interactable = true;
        }
#endif

        private void Run ()
        {
            meshOverlay = this.GetComponent<TrackedMeshOverlay> ();

            shader_FadeID = Shader.PropertyToID ("_Fade");

            rectangleTracker = new RectangleTracker ();
            faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
            frontalFaceParam = new FrontalFaceParam ();

            webCamTextureToMatHelper.Init ();

            isShowingFaceRectsToggle.isOn = isShowingFaceRects;
            useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
            isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
            isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
        }

        /// <summary>
        /// Raises the webcam texture to Mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited ()
        {
            Debug.Log ("OnWebCamTextureToMatHelperInited");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

            colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
            texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

            gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
            Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;
            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale) {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            } else {
                Camera.main.orthographicSize = height / 2;
            }

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
            cascade = new CascadeClassifier (haarcascade_frontalface_alt_xml_filepath);
            if (cascade.empty ()) {
                Debug.LogError ("cascade file is not loaded. Please copy it from \"FaceTrackerExample/StreamingAssets/\" to the \"Assets/StreamingAssets/\" folder.");
            }

            meshOverlay.UpdateOverlayTransform (gameObject.transform);

            OnChangeFaceMaskButton ();
        }

        /// <summary>
        /// Raises the webcam texture to Mat helper disposed event.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed ()
        {
            Debug.Log ("OnWebCamTextureToMatHelperDisposed");

            grayMat.Dispose ();
            rectangleTracker.Reset ();
            meshOverlay.Reset ();
        }

        /// <summary>
        /// Raises the webcam texture to Mat helper error occurred event.
        /// </summary>
        /// <param name="errorCode">Error code.</param>
        public void OnWebCamTextureToMatHelperErrorOccurred (WebCamTextureToMatHelper.ErrorCode errorCode)
        {
            Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
        }

        // Update is called once per frame
        void Update ()
        {
            if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {

                Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

                // Detect faces.
                List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter) {
                    OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
                    List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
                    foreach (var unityRect in result) {
                        detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                } else {
                    // Convert the image to grayscale.
                    Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                    using (Mat equalizeHistMat = new Mat ())
                    using (MatOfRect faces = new MatOfRect ()) {
                        Imgproc.equalizeHist (grayMat, equalizeHistMat);
                        cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
                        detectResult = faces.toList ();
                    }

                    // Adjust to Dlib's result.
                    foreach (OpenCVForUnity.Rect r in detectResult) {
                        r.y += (int)(r.height * 0.1f);
                    }
                }

                // Face tracking.
                rectangleTracker.UpdateTrackedObjects (detectResult);
                List<TrackedRect> trackedRects = new List<TrackedRect> ();
                rectangleTracker.GetObjects (trackedRects, true);

                // Detect face landmarks.
                OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
                List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
                for (int i = 0; i < trackedRects.Count; i++) {
                    TrackedRect tr = trackedRects [i];
                    UnityEngine.Rect rect = new UnityEngine.Rect (tr.x, tr.y, tr.width, tr.height);
                    List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
                    landmarkPoints.Add (points);
                }

                // Face masking.
                if (faceMaskTexture != null && landmarkPoints.Count >= 1) {
                    OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);

                    float imageWidth = meshOverlay.Width;
                    float imageHeight = meshOverlay.Height;
                    float maskImageWidth = faceMaskTexture.width;
                    float maskImageHeight = faceMaskTexture.height;

                    TrackedRect tr;
                    TrackedMesh tm;
                    for (int i = 0; i < trackedRects.Count; i++) {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW) {
                            meshOverlay.CreateObject (tr.id, faceMaskTexture);
                        }
                        if (tr.state < TrackedState.DELETED) {
                            tm = meshOverlay.GetObjectById (tr.id);

                            Vector3[] vertices = tm.MeshFilter.mesh.vertices;
                            if (vertices.Length == landmarkPoints [i].Count) {
                                for (int j = 0; j < vertices.Length; j++) {
                                    vertices [j].x = landmarkPoints [i] [j].x / imageWidth - 0.5f;
                                    vertices [j].y = 0.5f - landmarkPoints [i] [j].y / imageHeight;
                                }
                            }
                            Vector2[] uv = tm.MeshFilter.mesh.uv;
                            if (uv.Length == faceLandmarkPointsInMask.Count) {
                                for (int jj = 0; jj < uv.Length; jj++) {
                                    uv [jj].x = faceLandmarkPointsInMask [jj].x / maskImageWidth;
                                    uv [jj].y = (maskImageHeight - faceLandmarkPointsInMask [jj].y) / maskImageHeight;
                                }
                            }
                            meshOverlay.UpdateObject (tr.id, vertices, null, uv);

                            if (tr.numFramesNotDetected > 3) {
                                tm.Material.SetFloat (shader_FadeID, 1f);
                            } else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3) {
                                tm.Material.SetFloat (shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                            } else {
                                tm.Material.SetFloat (shader_FadeID, 0.3f);
                            }

                            // Filter out non-frontal faces.
                            if (isFilteringNonFrontalFaces && frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
                                tm.Material.SetFloat (shader_FadeID, 1f);
                            }
                        } else if (tr.state == TrackedState.DELETED) {
                            meshOverlay.DeleteObject (tr.id);
                        }
                    }
                } else if (landmarkPoints.Count >= 1) {
                    // No mask image selected: map the first detected face (from the
                    // webcam texture itself) onto every tracked face instead.
                    float imageWidth = meshOverlay.Width;
                    float imageHeight = meshOverlay.Height;
                    float maskImageWidth = texture.width;
                    float maskImageHeight = texture.height;

                    TrackedRect tr;
                    TrackedMesh tm;
                    for (int i = 0; i < trackedRects.Count; i++) {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW) {
                            meshOverlay.CreateObject (tr.id, texture);
                        }
                        if (tr.state < TrackedState.DELETED) {
                            tm = meshOverlay.GetObjectById (tr.id);

                            Vector3[] vertices = tm.MeshFilter.mesh.vertices;
                            if (vertices.Length == landmarkPoints [i].Count) {
                                for (int j = 0; j < vertices.Length; j++) {
                                    vertices [j].x = landmarkPoints [i] [j].x / imageWidth - 0.5f;
                                    vertices [j].y = 0.5f - landmarkPoints [i] [j].y / imageHeight;
                                }
                            }
                            Vector2[] uv = tm.MeshFilter.mesh.uv;
                            if (uv.Length == landmarkPoints [0].Count) {
                                for (int jj = 0; jj < uv.Length; jj++) {
                                    uv [jj].x = landmarkPoints [0] [jj].x / maskImageWidth;
                                    uv [jj].y = (maskImageHeight - landmarkPoints [0] [jj].y) / maskImageHeight;
                                }
                            }
                            meshOverlay.UpdateObject (tr.id, vertices, null, uv);

                            if (tr.numFramesNotDetected > 3) {
                                tm.Material.SetFloat (shader_FadeID, 1f);
                            } else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3) {
                                tm.Material.SetFloat (shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                            } else {
                                tm.Material.SetFloat (shader_FadeID, 0.3f);
                            }

                            // Filter out non-frontal faces.
                            if (isFilteringNonFrontalFaces && frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
                                tm.Material.SetFloat (shader_FadeID, 1f);
                            }
                        } else if (tr.state == TrackedState.DELETED) {
                            meshOverlay.DeleteObject (tr.id);
                        }
                    }
                }

                // Draw face rects.
                if (isShowingFaceRects) {
                    for (int i = 0; i < detectResult.Count; i++) {
                        UnityEngine.Rect rect = new UnityEngine.Rect (detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect, new Scalar (255, 0, 0, 255), 2);
                    }
                    for (int i = 0; i < trackedRects.Count; i++) {
                        UnityEngine.Rect rect = new UnityEngine.Rect (trackedRects [i].x, trackedRects [i].y, trackedRects [i].width, trackedRects [i].height);
                        OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect, new Scalar (255, 255, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getAngleOfFrontalFace (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                // Draw face points.
                if (isShowingDebugFacePoints) {
                    for (int i = 0; i < landmarkPoints.Count; i++) {
                        OpenCVForUnityUtils.DrawFaceLandmark (rgbaMat, landmarkPoints [i], new Scalar (0, 255, 0, 255), 2);
                    }
                }

                // Display the face mask image in the top-right corner of the frame.
                if (faceMaskTexture != null && faceMaskMat != null) {
                    if (isShowingFaceRects) {
                        OpenCVForUnityUtils.DrawFaceRect (faceMaskMat, faceRectInMask, new Scalar (255, 0, 0, 255), 2);
                    }
                    if (isShowingDebugFacePoints) {
                        OpenCVForUnityUtils.DrawFaceLandmark (faceMaskMat, faceLandmarkPointsInMask, new Scalar (0, 255, 0, 255), 2);
                    }

                    float scale = (rgbaMat.width () / 4f) / faceMaskMat.width ();
                    float tx = rgbaMat.width () - faceMaskMat.width () * scale;
                    float ty = 0.0f;
                    Mat trans = new Mat (2, 3, CvType.CV_32F); // scale, 0, tx; 0, scale, ty
                    trans.put (0, 0, scale);
                    trans.put (0, 1, 0.0f);
                    trans.put (0, 2, tx);
                    trans.put (1, 0, 0.0f);
                    trans.put (1, 1, scale);
                    trans.put (1, 2, ty);

                    Imgproc.warpAffine (faceMaskMat, rgbaMat, trans, rgbaMat.size (), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar (0));
                }

                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, colors);
            }
        }
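
        // Fade behaviour above: a face tracked this frame gets _Fade = 0.3
        // (mask mostly opaque). Each missed detection frame ramps the fade up
        // by 0.7/4, and past three missed frames, or when the face is judged
        // non-frontal, _Fade is driven to 1, which the Hide/FadeShader
        // presumably treats as fully faded out.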

        /// <summary>
        /// Raises the disable event.
        /// </summary>
        void OnDisable ()
        {
            WebGLFileUploadManager.FileUploadEventHandler -= fileUploadHandler;
            WebGLFileUploadManager.Dispose ();

            webCamTextureToMatHelper.Dispose ();

            if (cascade != null)
                cascade.Dispose ();
            if (rectangleTracker != null)
                rectangleTracker.Dispose ();
            if (faceLandmarkDetector != null)
                faceLandmarkDetector.Dispose ();
            if (frontalFaceParam != null)
                frontalFaceParam.Dispose ();
        }

        /// <summary>
        /// Raises the back button event.
        /// </summary>
        public void OnBackButton ()
        {
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
            SceneManager.LoadScene ("FaceMask");
#else
            Application.LoadLevel ("FaceMask");
#endif
        }

        /// <summary>
        /// Raises the play button event.
        /// </summary>
        public void OnPlayButton ()
        {
            webCamTextureToMatHelper.Play ();
        }

        /// <summary>
        /// Raises the pause button event.
        /// </summary>
        public void OnPauseButton ()
        {
            webCamTextureToMatHelper.Pause ();
        }

        /// <summary>
        /// Raises the change camera button event.
        /// </summary>
        public void OnChangeCameraButton ()
        {
            webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
        }

        /// <summary>
        /// Raises the "show face rects" toggle event.
        /// </summary>
        public void OnIsShowingFaceRectsToggle ()
        {
            isShowingFaceRects = isShowingFaceRectsToggle.isOn;
        }

        /// <summary>
        /// Raises the "use Dlib face detector" toggle event.
        /// </summary>
        public void OnUseDlibFaceDetecterToggle ()
        {
            useDlibFaceDetecter = useDlibFaceDetecterToggle.isOn;
        }

        /// <summary>
        /// Raises the "filter non-frontal faces" toggle event.
        /// </summary>
        public void OnIsFilteringNonFrontalFacesToggle ()
        {
            isFilteringNonFrontalFaces = isFilteringNonFrontalFacesToggle.isOn;
        }

        /// <summary>
        /// Raises the "show debug face points" toggle event.
        /// </summary>
        public void OnIsShowingDebugFacePointsToggle ()
        {
            isShowingDebugFacePoints = isShowingDebugFacePointsToggle.isOn;
        }

        /// <summary>
        /// Raises the change face mask button event.
        /// </summary>
        public void OnChangeFaceMaskButton ()
        {
            removeFaceMask ();

            ExampleMaskData maskData = ExampleDataSet.GetData ();

            faceMaskTexture = Resources.Load (maskData.FileName) as Texture2D;
            faceMaskMat = new Mat (faceMaskTexture.height, faceMaskTexture.width, CvType.CV_8UC4);
            OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);
            Debug.Log ("faceMaskMat ToString " + faceMaskMat.ToString ());

            if (maskData.LandmarkPoints != null) {
                // The bundled mask ships with pre-measured landmark points.
                faceRectInMask = maskData.FaceRect;
                faceLandmarkPointsInMask = maskData.LandmarkPoints;
            } else {
                // Otherwise run detection on the mask image itself.
                faceRectInMask = detectFace (faceMaskMat);
                faceLandmarkPointsInMask = detectFaceLandmarkPoints (faceMaskMat, faceRectInMask);
            }
            ExampleDataSet.Next ();

            if (faceRectInMask.width == 0 && faceRectInMask.height == 0) {
                removeFaceMask ();
                Debug.Log ("A face could not be detected from the input image.");
            }

            //dumpRect (faceRectInMask);
            //dumpVector2 (faceLandmarkPointsInMask);
            //dumpVector3 (faceLandmarkPointsInMask);
            //MeshFilter mf = createFaceMesh (faceMaskTexture.width, faceMaskTexture.height);
            //ObjExporter.MeshToFile (mf, "Assets/FaceMaskExample/Resources/FaceMesh.obj");
        }

        /// <summary>
        /// Raises the scan face mask button event.
        /// </summary>
        public void OnScanFaceMaskButton ()
        {
            removeFaceMask ();

            // Capture a webcam frame.
            if (webCamTextureToMatHelper.IsPlaying ()) {

                Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

                faceRectInMask = detectFace (rgbaMat);
                if (faceRectInMask.width == 0 && faceRectInMask.height == 0) {
                    Debug.Log ("A face could not be detected from the input image.");
                    return;
                }

                OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect ((int)faceRectInMask.x, (int)faceRectInMask.y, (int)faceRectInMask.width, (int)faceRectInMask.height);
                rect.inflate (rect.x / 5, rect.y / 5);
                rect = rect.intersect (new OpenCVForUnity.Rect (0, 0, rgbaMat.width (), rgbaMat.height ()));

                faceMaskTexture = new Texture2D (rect.width, rect.height, TextureFormat.RGBA32, false);
                faceMaskMat = new Mat (rgbaMat, rect).clone ();
                OpenCVForUnity.Utils.matToTexture2D (faceMaskMat, faceMaskTexture);
                Debug.Log ("faceMaskMat ToString " + faceMaskMat.ToString ());

                faceRectInMask = detectFace (faceMaskMat);
                faceLandmarkPointsInMask = detectFaceLandmarkPoints (faceMaskMat, faceRectInMask);
                if (faceRectInMask.width == 0 && faceRectInMask.height == 0) {
                    removeFaceMask ();
                    Debug.Log ("A face could not be detected from the input image.");
                }
            }
        }
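
        // Scan flow above: grab the current webcam frame, detect a face,
        // inflate the rect, crop that region out as the new mask texture, then
        // re-run detection on the crop so that faceRectInMask and
        // faceLandmarkPointsInMask end up in mask-image coordinates.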

        /// <summary>
        /// Raises the upload face mask button event.
        /// </summary>
        public void OnUploadFaceMaskButton ()
        {
            WebGLFileUploadManager.PopupDialog (null, "Select frontal face image file (.png|.jpg|.gif)");
        }

        /// <summary>
        /// Raises the remove face mask button event.
        /// </summary>
        public void OnRemoveFaceMaskButton ()
        {
            removeFaceMask ();
        }

        private void removeFaceMask ()
        {
            faceMaskTexture = null;
            if (faceMaskMat != null) {
                faceMaskMat.Dispose ();
                faceMaskMat = null;
            }

            rectangleTracker.Reset ();
            meshOverlay.Reset ();
        }

        /// <summary>
        /// Handles uploaded files.
        /// </summary>
        /// <param name="result">Upload result.</param>
        private void fileUploadHandler (UploadedFileInfo[] result)
        {
            if (result.Length == 0) {
                Debug.Log ("File upload Error!");
                return;
            }

            removeFaceMask ();

            foreach (UploadedFileInfo file in result) {
                if (file.isSuccess) {
                    Debug.Log ("file.filePath: " + file.filePath + " exists:" + File.Exists (file.filePath));
                    faceMaskTexture = new Texture2D (2, 2);
                    byte[] byteArray = File.ReadAllBytes (file.filePath);
                    faceMaskTexture.LoadImage (byteArray);
                    break;
                }
            }

            if (faceMaskTexture != null) {
                faceMaskMat = new Mat (faceMaskTexture.height, faceMaskTexture.width, CvType.CV_8UC4);
                OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);
                Debug.Log ("faceMaskMat ToString " + faceMaskMat.ToString ());

                faceRectInMask = detectFace (faceMaskMat);
                faceLandmarkPointsInMask = detectFaceLandmarkPoints (faceMaskMat, faceRectInMask);
                if (faceRectInMask.width == 0 && faceRectInMask.height == 0) {
                    removeFaceMask ();
                    Debug.Log ("A face could not be detected from the input image.");
                }
            }
        }

        private UnityEngine.Rect detectFace (Mat mat)
        {
            if (useDlibFaceDetecter) {
                OpenCVForUnityUtils.SetImage (faceLandmarkDetector, mat);
                List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
                if (result.Count >= 1)
                    return result [0];
            } else {
                using (Mat grayMat = new Mat ())
                using (Mat equalizeHistMat = new Mat ())
                using (MatOfRect faces = new MatOfRect ()) {
                    // Convert the image to grayscale.
                    Imgproc.cvtColor (mat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                    Imgproc.equalizeHist (grayMat, equalizeHistMat);
                    cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
                    List<OpenCVForUnity.Rect> faceList = faces.toList ();
                    if (faceList.Count >= 1) {
                        UnityEngine.Rect r = new UnityEngine.Rect (faceList [0].x, faceList [0].y, faceList [0].width, faceList [0].height);
                        // Adjust to Dlib's result.
                        r.y += (int)(r.height * 0.1f);
                        return r;
                    }
                }
            }
            return new UnityEngine.Rect ();
        }

        private List<Vector2> detectFaceLandmarkPoints (Mat mat, UnityEngine.Rect rect)
        {
            OpenCVForUnityUtils.SetImage (faceLandmarkDetector, mat);
            List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
            return points;
        }

        /*
        // NOTE: the generic type arguments in this disabled block were lost in
        // extraction and have been reconstructed; treat them as a best guess.
        private void dumpRect (UnityEngine.Rect rect)
        {
            string r = "new Rect(" + rect.x + ", " + rect.y + ", " + rect.width + ", " + rect.height + ")";
            Debug.Log ("dumpRect:" + "\n" + r);
        }

        private void dumpVector2 (List<Vector2> points)
        {
            string p = "";
            int i = 0;
            foreach (var item in points) {
                p += "new Vector2(" + item.x + ", " + item.y + "),\n";
                i++;
            }
            Debug.Log ("dumpMeshVector2:" + "\n" + p);
        }

        private void dumpVector3 (List<Vector2> points)
        {
            string p = "";
            int i = 0;
            foreach (var item in points) {
                //p += ", " + i + ":" + item;
                p += "new Vector3(" + item.x + ", " + item.y + "),\n";
                i++;
            }
            Debug.Log ("dumpMeshVector3:" + "\n" + p);
        }

        private MeshFilter createFaceMesh (float textureWidth, float textureHeight)
        {
            GameObject newObj = new GameObject ("FaceMesh");
            MeshFilter meshFilter = newObj.AddComponent<MeshFilter> ();
            newObj.AddComponent<MeshRenderer> ();
            MeshRenderer meshRenderer = newObj.GetComponent<MeshRenderer> ();
            meshRenderer.material = new Material (Shader.Find ("Hide/FadeShader"));

            Vector3[] vertices2 = (Vector3[])vertices.Clone ();
            for (int j = 0; j < vertices2.Length; j++) {
                vertices2 [j].x = vertices2 [j].x - textureWidth / 2;
                vertices2 [j].y = textureHeight / 2 - vertices2 [j].y;
            }
            // Flip the X axis.
            for (int j = 0; j < vertices2.Length; j++) {
                vertices2 [j].x = -vertices2 [j].x;
            }
            meshFilter.mesh.vertices = vertices2;

            // Flip the triangle winding order to match the X-axis flip.
            for (int j = 0; j < triangles.Length; j = j + 3) {
                int a = triangles [j + 1];
                int b = triangles [j + 2];
                triangles [j + 1] = b;
                triangles [j + 2] = a;
            }
            meshFilter.mesh.triangles = triangles;

            // UVs.
            Vector2[] uv = new Vector2[68];
            for (int j = 0; j < uv.Length; j++) {
                uv [j].x = vertices [j].x / textureWidth;
                uv [j].y = (textureHeight - vertices [j].y) / textureHeight;
            }
            meshFilter.mesh.uv = uv;

            meshFilter.mesh.RecalculateBounds ();
            meshFilter.mesh.RecalculateNormals ();
            return meshFilter;
        }
        */

}

}
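
The heart of the face swap is the coordinate mapping inside Update(): the detected landmarks drive the mesh vertices, while the mask's own landmarks drive the UVs. Pulled out as standalone helpers for clarity (illustrative only, not part of the original class):

// Image-space landmark (pixels, Y grows downward) -> local vertex on the
// overlay quad, which spans -0.5..+0.5 in X and Y and is scaled to the
// webcam quad's size.
static Vector3 LandmarkToVertex (Vector2 p, float imageWidth, float imageHeight)
{
    return new Vector3 (p.x / imageWidth - 0.5f, 0.5f - p.y / imageHeight, 0f);
}

// Mask-image landmark (pixels, Y grows downward) -> UV in 0..1, with V
// flipped because texture coordinates start at the bottom-left.
static Vector2 LandmarkToUV (Vector2 p, float maskWidth, float maskHeight)
{
    return new Vector2 (p.x / maskWidth, (maskHeight - p.y) / maskHeight);
}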

3. Results

4. Demo download address:
