using DlibFaceLandmarkDetector;
using NrealLightWithOpenCVForUnity.UnityUtils.Helper;
+ using NrealLightWithDlibFaceLandmarkDetectorExample.RectangleTrack;
using NRKernal;
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.ObjdetectModule;
- using OpenCVForUnity.RectangleTrack;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using System;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
using Rect = OpenCVForUnity.CoreModule.Rect;
+ using System.Threading;

namespace NrealLightWithDlibFaceLandmarkDetectorExample
{
/// <summary>
/// Nreal AR Head Example
/// An example of AR head projection using OpenCVForUnity and DlibLandmarkDetector on Hololens.
/// </summary>
- [RequireComponent(typeof(NRCamTextureToMatHelper), typeof(ImageOptimizationHelper))]
+ [RequireComponent(typeof(NRCamTexture2MatHelper), typeof(ImageOptimizationHelper))]
public class NrealARHeadExample : MonoBehaviour
{
[SerializeField, HeaderAttribute("Preview")]
@@ -262,7 +263,7 @@ public class NrealARHeadExample : MonoBehaviour
/// <summary>
/// The webcam texture to mat helper.
/// </summary>
- NRCamTextureToMatHelper webCamTextureToMatHelper;
+ NRCamTexture2MatHelper webCamTextureToMatHelper;

/// <summary>
/// The image optimization helper.
@@ -364,7 +365,7 @@ bool hasUpdatedDetectionResult
/// </summary>
Camera mainCamera;

-
+ /*
// Use this for initialization
protected void Start()
{
@@ -379,8 +380,8 @@ protected void Start()
enableLerpFilterToggle.isOn = enableLerpFilter;

imageOptimizationHelper = gameObject.GetComponent<ImageOptimizationHelper>();
- webCamTextureToMatHelper = gameObject.GetComponent<NRCamTextureToMatHelper>();
- webCamTextureToMatHelper.outputColorFormat = WebCamTextureToMatHelper.ColorFormat.GRAY;
+ webCamTextureToMatHelper = gameObject.GetComponent<NRCamTexture2MatHelper>();
+ webCamTextureToMatHelper.outputColorFormat = Source2MatHelperColorFormat.GRAY;
webCamTextureToMatHelper.Initialize();

rectangleTracker = new RectangleTracker();
@@ -446,6 +447,132 @@ protected void Start()
opticalFlowFilter = new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());
opticalFlowFilter.diffCheckSensitivity /= imageOptimizationHelper.downscaleRatio;
}
+ */
+
+
+ //////
+ string cascade_filepath;
+ string cascade4Thread_filepath;
+ string dlibShapePredictor_filepath;
+ string dlibShapePredictor4Thread_filepath;
+
+ /// <summary>
+ /// The CancellationTokenSource.
+ /// </summary>
+ CancellationTokenSource cts = new CancellationTokenSource();
+
+ // Use this for initialization
+ async void Start()
+ {
+ displayCameraPreviewToggle.isOn = displayCameraPreview;
+ enableDownScaleToggle.isOn = enableDownScale;
+ useSeparateDetectionToggle.isOn = useSeparateDetection;
+ useOpenCVDetectorToggle.isOn = useOpenCVDetector;
+ displayAxesToggle.isOn = displayAxes;
+ displayHeadToggle.isOn = displayHead;
+ displayEffectsToggle.isOn = displayEffects;
+ enableOpticalFlowFilterToggle.isOn = enableOpticalFlowFilter;
+ enableLerpFilterToggle.isOn = enableLerpFilter;
+
+ imageOptimizationHelper = gameObject.GetComponent<ImageOptimizationHelper>();
+ webCamTextureToMatHelper = gameObject.GetComponent<NRCamTexture2MatHelper>();
+
+ rectangleTracker = new RectangleTracker();
+
+
+ // Asynchronously retrieves the readable file path from the StreamingAssets directory.
+ Debug.Log("Preparing file access...");
+
+ cascade_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml", cancellationToken: cts.Token);
+ cascade4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("OpenCVForUnity/objdetect/haarcascade_frontalface_alt.xml", cancellationToken: cts.Token);
+ dlibShapePredictorFileName = NrealLightWithDlibFaceLandmarkDetectorExample.dlibShapePredictorFileName;
+ dlibShapePredictor_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask(dlibShapePredictorFileName, cancellationToken: cts.Token);
+ dlibShapePredictor4Thread_filepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask("DlibFaceLandmarkDetector/sp_human_face_6.dat", cancellationToken: cts.Token);
+
+ Debug.Log("Preparing file access complete!");
+
+ Run();
+ }
+
+ // Use this for initialization
+ void Run()
+ {
+ cascade = new CascadeClassifier();
+ cascade.load(cascade_filepath);
+ if (cascade.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
+ }
+
+ cascade4Thread = new CascadeClassifier();
+ cascade4Thread.load(cascade4Thread_filepath);
+ if (cascade4Thread.empty())
+ {
+ Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
+ }
+
+ if (string.IsNullOrEmpty(dlibShapePredictor_filepath))
+ {
+ Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
+ }
+ faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictor_filepath);
+
+ if (string.IsNullOrEmpty(dlibShapePredictor4Thread_filepath))
+ {
+ Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/DlibFaceLandmarkDetector/” to “Assets/StreamingAssets/DlibFaceLandmarkDetector/” folder. ");
+ }
+ faceLandmarkDetector4Thread = new FaceLandmarkDetector(dlibShapePredictor4Thread_filepath);
+
+
+ // set 3d face object points. (right-handed coordinates system)
+ objectPoints68 = new MatOfPoint3f(
+ new Point3(-34, 90, 83), //l eye (Interpupillary breadth)
+ new Point3(34, 90, 83), //r eye (Interpupillary breadth)
+ new Point3(0.0, 50, 117), //nose (Tip)
+ new Point3(0.0, 32, 97), //nose (Subnasale)
+ new Point3(-79, 90, 10), //l ear (Bitragion breadth)
+ new Point3(79, 90, 10) //r ear (Bitragion breadth)
+ );
+
+ objectPoints17 = new MatOfPoint3f(
+ new Point3(-34, 90, 83), //l eye (Interpupillary breadth)
+ new Point3(34, 90, 83), //r eye (Interpupillary breadth)
+ new Point3(0.0, 50, 117), //nose (Tip)
+ new Point3(0.0, 32, 97), //nose (Subnasale)
+ new Point3(-79, 90, 10), //l ear (Bitragion breadth)
+ new Point3(79, 90, 10) //r ear (Bitragion breadth)
+ );
+
+ objectPoints6 = new MatOfPoint3f(
+ new Point3(-34, 90, 83), //l eye (Interpupillary breadth)
+ new Point3(34, 90, 83), //r eye (Interpupillary breadth)
+ new Point3(0.0, 50, 117), //nose (Tip)
+ new Point3(0.0, 32, 97) //nose (Subnasale)
+ );
+
+ objectPoints5 = new MatOfPoint3f(
+ new Point3(-23, 90, 83), //l eye (Inner corner of the eye)
+ new Point3(23, 90, 83), //r eye (Inner corner of the eye)
+ new Point3(-50, 90, 80), //l eye (Tail of the eye)
+ new Point3(50, 90, 80), //r eye (Tail of the eye)
+ new Point3(0.0, 32, 97) //nose (Subnasale)
+ );
+
+ // adjust object points to the scale of real world space.
+ AjustPointScale(objectPoints68, 0.001);
+ AjustPointScale(objectPoints17, 0.001);
+ AjustPointScale(objectPoints6, 0.001);
+ AjustPointScale(objectPoints5, 0.001);
+
+ imagePoints = new MatOfPoint2f();
+
+ opticalFlowFilter = new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts());
+ opticalFlowFilter.diffCheckSensitivity /= imageOptimizationHelper.downscaleRatio;
+
+ webCamTextureToMatHelper.outputColorFormat = Source2MatHelperColorFormat.GRAY;
+ webCamTextureToMatHelper.Initialize();
+ }
+ //////

/// <summary>
/// Raises the web cam texture to mat helper initialized event.
@@ -559,7 +686,7 @@ public void OnWebCamTextureToMatHelperInitialized()
mouthParticleSystem = mouth.GetComponentsInChildren<ParticleSystem>(true);

-
+ /*
//grayMat = new Mat();
cascade = new CascadeClassifier();
cascade.load(Utils.getFilePath("OpenCVForUnity/objdetect/lbpcascade_frontalface.xml"));
@@ -581,6 +708,11 @@ public void OnWebCamTextureToMatHelperInitialized()
Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/OpenCVForUnity/objdetect/” to “Assets/StreamingAssets/OpenCVForUnity/objdetect/” folder. ");
}
#endif
+ */
+
+ /////
+ grayMat4Thread = new Mat();
+ /////
}

/// <summary>
@@ -604,14 +736,14 @@ public void OnWebCamTextureToMatHelperDisposed()
texture = null;
}

- if (cascade != null)
- cascade.Dispose();
+ // if (cascade != null)
+ // cascade.Dispose();

if (grayMat4Thread != null)
grayMat4Thread.Dispose();

- if (cascade4Thread != null)
- cascade4Thread.Dispose();
+ // if (cascade4Thread != null)
+ // cascade4Thread.Dispose();

rectangleTracker.Reset();
@@ -635,12 +767,13 @@ public void OnWebCamTextureToMatHelperDisposed()
}

/// <summary>
- /// Raises the web cam texture to mat helper error occurred event.
+ /// Raises the webcam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
- public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
+ /// <param name="message">Message.</param>
+ public void OnWebCamTextureToMatHelperErrorOccurred(Source2MatHelperErrorCode errorCode, string message)
{
- Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
+ Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode + ":" + message);
}

// Update is called once per frame
@@ -816,7 +949,7 @@ void Update()

if (displayCameraPreview)
{
- Utils.fastMatToTexture2D(grayMat, texture);
+ Utils.matToTexture2D(grayMat, texture);
}
}
}
@@ -1243,6 +1376,15 @@ void OnDestroy()

if (opticalFlowFilter != null)
opticalFlowFilter.Dispose();
+
+ if (cascade != null)
+ cascade.Dispose();
+
+ if (cascade4Thread != null)
+ cascade4Thread.Dispose();
+
+ if (cts != null)
+ cts.Dispose();
}

/// <summary>
@@ -1282,7 +1424,7 @@ public void OnStopButtonClick()
/// </summary>
public void OnChangeCameraButtonClick()
{
- webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing();
+ webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.requestedIsFrontFacing;
}

/// <summary>
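Note (not part of the diff above): the core of this commit is moving the cascade and shape-predictor setup out of OnWebCamTextureToMatHelperInitialized() into an async Start() that awaits getFilePathAsyncTask() with a CancellationTokenSource, then runs the remaining synchronous setup in Run() and disposes the token source in OnDestroy(). The following is a condensed, hypothetical sketch of that pattern under the same APIs seen in the diff; the class name, field names, and the sp_human_face_68.dat file name are illustrative assumptions, not taken from the commit.

using System.Threading;
using UnityEngine;
using DlibFaceLandmarkDetector;
using OpenCVForUnity.ObjdetectModule;

public class AsyncFilePathLoadSketch : MonoBehaviour
{
    string cascadeFilepath;
    string shapePredictorFilepath;

    // Allows pending getFilePathAsyncTask calls to be cancelled/cleaned up.
    CancellationTokenSource cts = new CancellationTokenSource();

    async void Start()
    {
        // Resolve readable paths for files shipped in StreamingAssets before using them.
        cascadeFilepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask(
            "OpenCVForUnity/objdetect/lbpcascade_frontalface.xml", cancellationToken: cts.Token);
        shapePredictorFilepath = await DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsyncTask(
            "DlibFaceLandmarkDetector/sp_human_face_68.dat", cancellationToken: cts.Token);

        Run();
    }

    // Synchronous setup that consumes the resolved file paths.
    void Run()
    {
        var cascade = new CascadeClassifier();
        cascade.load(cascadeFilepath);
        if (cascade.empty())
            Debug.LogError("cascade file is not loaded.");

        var faceLandmarkDetector = new FaceLandmarkDetector(shapePredictorFilepath);
        Debug.Log("Initialization complete: " + faceLandmarkDetector.GetShapePredictorNumParts() + " landmark parts.");
    }

    void OnDestroy()
    {
        // Mirrors the commit: release the token source when the component goes away.
        if (cts != null)
            cts.Dispose();
    }
}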