Explorar el Código

Add AR web cam functionality

master
Adwaith Rao hace 4 años
padre
commit
c3aaf71dbe
Se han modificado 27 ficheros con 859 adiciones y 402 borrados
  1. +557
    -398
      package-lock.json
  2. +3
    -1
      package.json
  3. +7
    -1
      src/app/ar-fan-cam/ar-fan-cam.page.html
  4. +42
    -1
      src/app/ar-fan-cam/ar-fan-cam.page.scss
  5. +98
    -1
      src/app/ar-fan-cam/ar-fan-cam.page.ts
  6. BIN
      src/assets/ar-accessories/glass.png
  7. BIN
      src/assets/ar-accessories/hat.png
  8. BIN
      src/assets/ar-accessories/turban.jpg
  9. BIN
      src/assets/weights/age_gender_model-shard1
  10. +1
    -0
      src/assets/weights/age_gender_model-weights_manifest.json
  11. BIN
      src/assets/weights/face_expression_model-shard1
  12. +1
    -0
      src/assets/weights/face_expression_model-weights_manifest.json
  13. BIN
      src/assets/weights/face_landmark_68_model-shard1
  14. +1
    -0
      src/assets/weights/face_landmark_68_model-weights_manifest.json
  15. BIN
      src/assets/weights/face_landmark_68_tiny_model-shard1
  16. +1
    -0
      src/assets/weights/face_landmark_68_tiny_model-weights_manifest.json
  17. BIN
      src/assets/weights/face_recognition_model-shard1
  18. +6
    -0
      src/assets/weights/face_recognition_model-shard2
  19. +1
    -0
      src/assets/weights/face_recognition_model-weights_manifest.json
  20. BIN
      src/assets/weights/mtcnn_model-shard1
  21. +1
    -0
      src/assets/weights/mtcnn_model-weights_manifest.json
  22. BIN
      src/assets/weights/ssd_mobilenetv1_model-shard1
  23. +137
    -0
      src/assets/weights/ssd_mobilenetv1_model-shard2
  24. +1
    -0
      src/assets/weights/ssd_mobilenetv1_model-weights_manifest.json
  25. BIN
      src/assets/weights/tiny_face_detector_model-shard1
  26. +1
    -0
      src/assets/weights/tiny_face_detector_model-weights_manifest.json
  27. +1
    -0
      tsconfig.json

+ 557
- 398
package-lock.json
La diferencia del archivo ha sido suprimida porque es demasiado grande
Ver fichero


+ 3
- 1
package.json Ver fichero

@@ -28,12 +28,14 @@
"@ionic-native/status-bar": "^5.0.0",
"@ionic/angular": "^5.0.0",
"cordova-res": "^0.15.2",
"face-api.js": "^0.22.2",
"faker": "^5.1.0",
"firebase": "^8.2.6",
"hammerjs": "^2.0.8",
"moment": "^2.29.1",
"rxjs": "~6.5.5",
"sharp": "^0.27.0",
"three": "^0.125.2",
"tslib": "^2.0.0",
"uninstall": "0.0.0",
"zone.js": "~0.10.3"
@@ -98,4 +100,4 @@
"android"
]
}
}
}

+ 7
- 1
src/app/ar-fan-cam/ar-fan-cam.page.html Ver fichero

@@ -1,3 +1,9 @@
<ion-content>
  <div class="container">
    <!-- Glasses overlay; its transform (position/scale/rotation) is driven by glassProperties in the component -->
    <img class="glass-image" [ngStyle]="glassProperties" src="/assets/ar-accessories/glass.png" alt=""/>
    <video id="playback-video" #videoElement autoplay></video>
    <!-- Controls only appear once the face-api.js models have loaded -->
    <button *ngIf="areNeuralNetsLoaded" (click)="getCameraStream()">Get access</button>
    <button *ngIf="areNeuralNetsLoaded" (click)="stopCameraStream()">Stop stream</button>
    <!-- Fixed: a stray '}' after this button was being rendered as literal text -->
    <button *ngIf="areNeuralNetsLoaded" (click)="toggleDetection()">{{ isDetecting ? 'Stop detection' : 'Detect and draw' }}</button>
  </div>
</ion-content>

+ 42
- 1
src/app/ar-fan-cam/ar-fan-cam.page.scss Ver fichero

@@ -1 +1,42 @@
// Deduplicated: '../colors' was imported twice.
@import '../colors';

// Positioning context for the absolutely-positioned overlay elements.
.container {
  position: relative;
}

// Glasses overlay image; its transform is set from the component via [ngStyle].
// transform-origin sits on the left lens so rotation pivots around the left eye.
.glass-image {
  position: absolute;
  top: 0px;
  left: 0px;
  transform-origin: 15% 50%;
}

// Video feed and overlays share the same 4:3 footprint, capped at 600x450.
#playback-video, #result-canvas, #three-container {
  display: block;
  width: 100vw;
  height: 75vw;

  max-width: 600px;
  max-height: 450px;
}

#result-canvas, #three-container {
  position: absolute;
  top: 0;
  left: 0;
}

// Debug markers for the detected eye positions.
.left-eye, .right-eye {
  position: absolute;
  width: 4px;
  height: 4px;
  border-radius: 50%;
}

.left-eye {
  background-color: blue;
}

.right-eye {
  background-color: red;
}

+ 98
- 1
src/app/ar-fan-cam/ar-fan-cam.page.ts Ver fichero

@@ -1,4 +1,5 @@
import { Component, OnInit } from '@angular/core';
import { Component, ElementRef, OnInit, ViewChild } from '@angular/core';
import { detectSingleFace, loadFaceExpressionModel, loadFaceLandmarkModel, loadFaceLandmarkTinyModel, loadFaceRecognitionModel, loadSsdMobilenetv1Model, loadTinyFaceDetectorModel, TinyFaceDetectorOptions } from 'face-api.js';

@Component({
selector: 'app-ar-fan-cam',
@@ -6,10 +7,106 @@ import { Component, OnInit } from '@angular/core';
styleUrls: ['./ar-fan-cam.page.scss'],
})
export class ArFanCamPage implements OnInit {
  // Reference to the <video> element that plays the live camera feed.
  // Fixed: the second ViewChild argument was `null`, which is not a valid
  // options object; { static: false } preserves the default resolution timing.
  @ViewChild('videoElement', { static: false }) videoElement: ElementRef<HTMLVideoElement>;

  // Active camera stream; null while the camera is off.
  mediaStream: MediaStream|null = null;

  // Inline styles bound to the glasses overlay via [ngStyle].
  // Starts at scale(0) so the overlay is invisible until a face is detected.
  glassProperties: {
    [property: string]: any,
  } = {
    transform: 'scale(0)',
  };

  // True once every face-api.js model has finished loading (gates the UI buttons).
  areNeuralNetsLoaded = false;
  // True while the detect-and-draw animation loop is running.
  isDetecting = false;

  // Most recently detected eye positions, in video pixel coordinates.
  leftEyeX = 0;
  leftEyeY = 0;

  rightEyeX = 0;
  rightEyeY = 0;

  // Requested camera feed size (4:3 aspect); recomputed from the viewport in ngOnInit.
  width = 360;
  height = 270;

  constructor() { }

  /**
   * Loads every face-api.js model from /assets/weights, then flips
   * areNeuralNetsLoaded so the template shows the camera controls.
   */
  loadNeuralNetModels = async () => {
    await loadTinyFaceDetectorModel('/assets/weights');
    await loadSsdMobilenetv1Model('/assets/weights');

    await loadFaceLandmarkTinyModel('/assets/weights');
    await loadFaceLandmarkModel('/assets/weights');

    await loadFaceRecognitionModel('/assets/weights');
    await loadFaceExpressionModel('/assets/weights');

    this.areNeuralNetsLoaded = true;
  }

  /** Requests the front ("user") camera and pipes the stream into the video element. */
  getCameraStream = async () => {
    const stream = await window.navigator.mediaDevices.getUserMedia({
      video: {
        facingMode: 'user',
        width: this.width,
      },
    });

    this.mediaStream = stream;
    this.videoElement.nativeElement.srcObject = stream;
  };

  /** Stops every video track and releases the camera. */
  stopCameraStream = async () => {
    if (this.mediaStream) {
      this.mediaStream.getVideoTracks().forEach(track => track.stop());
      this.mediaStream = null;
    }
  }

  /**
   * One iteration of the AR loop: detects a single face in the current video
   * frame, positions/scales/rotates the glasses overlay over the eyes, and
   * schedules the next frame while detection remains enabled.
   */
  detectAndDrawFace = async () => {
    const tinyFaceDetectorOptions = new TinyFaceDetectorOptions();

    const detectionWithLandmarks = await detectSingleFace(this.videoElement.nativeElement, tinyFaceDetectorOptions).withFaceLandmarks(true);
    if (detectionWithLandmarks) {
      const leftEye = detectionWithLandmarks.landmarks.getLeftEye();
      const rightEye = detectionWithLandmarks.landmarks.getRightEye();

      this.leftEyeX = leftEye[0].x;
      this.leftEyeY = leftEye[0].y;

      this.rightEyeX = rightEye[0].x;
      this.rightEyeY = rightEye[0].y;

      // Scale with the apparent eye distance, rotate with the head tilt (degrees).
      const distanceBetweenEyes = Math.hypot(this.rightEyeX - this.leftEyeX, this.rightEyeY - this.leftEyeY);
      const angleOfRotation = (Math.atan2(this.rightEyeY - this.leftEyeY, this.rightEyeX - this.leftEyeX) * 180) / Math.PI;

      // 130px between eyes corresponds to the overlay's natural size (scale 1).
      const scaleMultiplier = distanceBetweenEyes / 130;

      this.glassProperties = {
        transform: `translate(calc(-14.5% + ${this.leftEyeX}px), calc(-45% + ${this.leftEyeY}px)) scale(${scaleMultiplier}) rotate(${angleOfRotation}deg)`,
      };
    }

    // BUG FIX: the next frame was previously scheduled unconditionally, so
    // toggling detection off never actually stopped the loop. Only continue
    // while detection is enabled.
    if (this.isDetecting) {
      requestAnimationFrame(this.detectAndDrawFace);
    }
  }

  /** Starts the detection loop if idle, otherwise flags it to stop. */
  toggleDetection = () => {
    if (!this.isDetecting) {
      this.isDetecting = true;
      this.detectAndDrawFace();
    } else {
      this.isDetecting = false;
    }
  }

  ngOnInit() {
    this.loadNeuralNetModels();

    // Fit the feed to the viewport width, capped at 600px, keeping a 4:3 aspect.
    const feedWidth = window.innerWidth > 600 ? 600 : window.innerWidth;
    const feedHeight = feedWidth * 0.75;

    this.width = feedWidth;
    this.height = feedHeight;
  }

}

BIN
src/assets/ar-accessories/glass.png Ver fichero

Antes Después
Anchura: 260  |  Altura: 106  |  Tamaño: 5.0 KiB

BIN
src/assets/ar-accessories/hat.png Ver fichero

Antes Después
Anchura: 4000  |  Altura: 2347  |  Tamaño: 825 KiB

BIN
src/assets/ar-accessories/turban.jpg Ver fichero

Antes Después
Anchura: 500  |  Altura: 500  |  Tamaño: 21 KiB

BIN
src/assets/weights/age_gender_model-shard1 Ver fichero


+ 1
- 0
src/assets/weights/age_gender_model-weights_manifest.json
La diferencia del archivo ha sido suprimida porque es demasiado grande
Ver fichero


BIN
src/assets/weights/face_expression_model-shard1 Ver fichero


+ 1
- 0
src/assets/weights/face_expression_model-weights_manifest.json
La diferencia del archivo ha sido suprimida porque es demasiado grande
Ver fichero


BIN
src/assets/weights/face_landmark_68_model-shard1 Ver fichero


+ 1
- 0
src/assets/weights/face_landmark_68_model-weights_manifest.json
La diferencia del archivo ha sido suprimida porque es demasiado grande
Ver fichero


BIN
src/assets/weights/face_landmark_68_tiny_model-shard1 Ver fichero


+ 1
- 0
src/assets/weights/face_landmark_68_tiny_model-weights_manifest.json Ver fichero

@@ -0,0 +1 @@
[{"weights":[{"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008194216092427571,"min":-0.9423348506291708}},{"name":"dense0/conv0/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006839508168837603,"min":-0.8412595047670252}},{"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009194007106855804,"min":-1.2779669878529567}},{"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0036026100317637128,"min":-0.3170296827952067}},{"name":"dense0/conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000740380117706224,"min":-0.06367269012273527}},{"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},{"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},{"name":"dense0/conv2/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0037702228508743585,"min":-0.6220867703942692}},{"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0033707996209462483,"min":-0.421349952618281}},{"name":"dense1/conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014611541991140328,"min":-1.8556658328748217}},{"name":"dense1/conv0/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002832523046755323,"min":-0.30307996600281956}},{"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006593170586754294,"min":-0.6329443763284123}},{"name":"dense1/conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.012215249211180444,"min
":-1.6001976466646382}},{"name":"dense1/conv1/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002384825547536214,"min":-0.3028728445370992}},{"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005859645441466687,"min":-0.7617539073906693}},{"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013121426806730382,"min":-1.7845140457153321}},{"name":"dense1/conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032247188044529336,"min":-0.46435950784122243}},{"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002659512618008782,"min":-0.32977956463308894}},{"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015499923743453681,"min":-1.9839902391620712}},{"name":"dense2/conv0/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032450980999890497,"min":-0.522460794098237}},{"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005911862382701799,"min":-0.792189559282041}},{"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021025861478319356,"min":-2.2077154552235325}},{"name":"dense2/conv1/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.00349616945958605,"min":-0.46149436866535865}},{"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008104994250278847,"min":-1.013124281284856}},{"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029337059282789044,"min":-3.5791212325002633}},{"name":"dense2/conv2/bias","shape":[128],
"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0038808938334969913,"min":-0.4230174278511721}},{"name":"fc/weights","shape":[128,136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014016061670639936,"min":-1.8921683255363912}},{"name":"fc/bias","shape":[136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029505149698724935,"min":0.088760145008564}}],"paths":["face_landmark_68_tiny_model-shard1"]}]

BIN
src/assets/weights/face_recognition_model-shard1 Ver fichero


+ 6
- 0
src/assets/weights/face_recognition_model-shard2
La diferencia del archivo ha sido suprimida porque es demasiado grande
Ver fichero


+ 1
- 0
src/assets/weights/face_recognition_model-weights_manifest.json
La diferencia del archivo ha sido suprimida porque es demasiado grande
Ver fichero


BIN
src/assets/weights/mtcnn_model-shard1 Ver fichero


+ 1
- 0
src/assets/weights/mtcnn_model-weights_manifest.json Ver fichero

@@ -0,0 +1 @@
[{"paths":["mtcnn_model-shard1"],"weights":[{"dtype":"float32","name":"pnet/conv1/weights","shape":[3,3,3,10]},{"dtype":"float32","name":"pnet/conv1/bias","shape":[10]},{"dtype":"float32","name":"pnet/prelu1_alpha","shape":[10]},{"dtype":"float32","name":"pnet/conv2/weights","shape":[3,3,10,16]},{"dtype":"float32","name":"pnet/conv2/bias","shape":[16]},{"dtype":"float32","name":"pnet/prelu2_alpha","shape":[16]},{"dtype":"float32","name":"pnet/conv3/weights","shape":[3,3,16,32]},{"dtype":"float32","name":"pnet/conv3/bias","shape":[32]},{"dtype":"float32","name":"pnet/prelu3_alpha","shape":[32]},{"dtype":"float32","name":"pnet/conv4_1/weights","shape":[1,1,32,2]},{"dtype":"float32","name":"pnet/conv4_1/bias","shape":[2]},{"dtype":"float32","name":"pnet/conv4_2/weights","shape":[1,1,32,4]},{"dtype":"float32","name":"pnet/conv4_2/bias","shape":[4]},{"dtype":"float32","name":"rnet/conv1/weights","shape":[3,3,3,28]},{"dtype":"float32","name":"rnet/conv1/bias","shape":[28]},{"dtype":"float32","name":"rnet/prelu1_alpha","shape":[28]},{"dtype":"float32","name":"rnet/conv2/weights","shape":[3,3,28,48]},{"dtype":"float32","name":"rnet/conv2/bias","shape":[48]},{"dtype":"float32","name":"rnet/prelu2_alpha","shape":[48]},{"dtype":"float32","name":"rnet/conv3/weights","shape":[2,2,48,64]},{"dtype":"float32","name":"rnet/conv3/bias","shape":[64]},{"dtype":"float32","name":"rnet/prelu3_alpha","shape":[64]},{"dtype":"float32","name":"rnet/fc1/weights","shape":[576,128]},{"dtype":"float32","name":"rnet/fc1/bias","shape":[128]},{"dtype":"float32","name":"rnet/prelu4_alpha","shape":[128]},{"dtype":"float32","name":"rnet/fc2_1/weights","shape":[128,2]},{"dtype":"float32","name":"rnet/fc2_1/bias","shape":[2]},{"dtype":"float32","name":"rnet/fc2_2/weights","shape":[128,4]},{"dtype":"float32","name":"rnet/fc2_2/bias","shape":[4]},{"dtype":"float32","name":"onet/conv1/weights","shape":[3,3,3,32]},{"dtype":"float32","name":"onet/conv1/bias","shape":[32]},{"dtype":"float32","name":"onet/prelu
1_alpha","shape":[32]},{"dtype":"float32","name":"onet/conv2/weights","shape":[3,3,32,64]},{"dtype":"float32","name":"onet/conv2/bias","shape":[64]},{"dtype":"float32","name":"onet/prelu2_alpha","shape":[64]},{"dtype":"float32","name":"onet/conv3/weights","shape":[3,3,64,64]},{"dtype":"float32","name":"onet/conv3/bias","shape":[64]},{"dtype":"float32","name":"onet/prelu3_alpha","shape":[64]},{"dtype":"float32","name":"onet/conv4/weights","shape":[2,2,64,128]},{"dtype":"float32","name":"onet/conv4/bias","shape":[128]},{"dtype":"float32","name":"onet/prelu4_alpha","shape":[128]},{"dtype":"float32","name":"onet/fc1/weights","shape":[1152,256]},{"dtype":"float32","name":"onet/fc1/bias","shape":[256]},{"dtype":"float32","name":"onet/prelu5_alpha","shape":[256]},{"dtype":"float32","name":"onet/fc2_1/weights","shape":[256,2]},{"dtype":"float32","name":"onet/fc2_1/bias","shape":[2]},{"dtype":"float32","name":"onet/fc2_2/weights","shape":[256,4]},{"dtype":"float32","name":"onet/fc2_2/bias","shape":[4]},{"dtype":"float32","name":"onet/fc2_3/weights","shape":[256,10]},{"dtype":"float32","name":"onet/fc2_3/bias","shape":[10]}]}]

BIN
src/assets/weights/ssd_mobilenetv1_model-shard1 Ver fichero


+ 137
- 0
src/assets/weights/ssd_mobilenetv1_model-shard2
La diferencia del archivo ha sido suprimida porque es demasiado grande
Ver fichero


+ 1
- 0
src/assets/weights/ssd_mobilenetv1_model-weights_manifest.json
La diferencia del archivo ha sido suprimida porque es demasiado grande
Ver fichero


BIN
src/assets/weights/tiny_face_detector_model-shard1 Ver fichero


+ 1
- 0
src/assets/weights/tiny_face_detector_model-weights_manifest.json Ver fichero

@@ -0,0 +1 @@
[{"weights":[{"name":"conv0/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009007044399485869,"min":-1.2069439495311063}},{"name":"conv0/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005263455241334205,"min":-0.9211046672334858}},{"name":"conv1/depthwise_filter","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004001977630690033,"min":-0.5042491814669441}},{"name":"conv1/pointwise_filter","shape":[1,1,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013836609615999109,"min":-1.411334180831909}},{"name":"conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0015159862590771096,"min":-0.30926119685173037}},{"name":"conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002666276225856706,"min":-0.317286870876948}},{"name":"conv2/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015265831292844286,"min":-1.6792414422128714}},{"name":"conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0020280554598453,"min":-0.37113414915168985}},{"name":"conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006100742489683862,"min":-0.8907084034938438}},{"name":"conv3/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016276211832083907,"min":-2.0508026908425725}},{"name":"conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003394414279975143,"min":-0.7637432129944072}},{"name":"conv4/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006716050119961009,"min":-0.8059260143953211}},{"name":"conv4/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021875603993733724,"min":-2.887579727172
8514}},{"name":"conv4/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0041141652009066415,"min":-0.8187188749804216}},{"name":"conv5/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008423839597141042,"min":-0.9013508368940915}},{"name":"conv5/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.030007277283014035,"min":-3.8709387695088107}},{"name":"conv5/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008402082966823203,"min":-1.4871686851277068}},{"name":"conv8/filters","shape":[1,1,512,25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.028336129469030042,"min":-4.675461362389957}},{"name":"conv8/bias","shape":[25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002268134028303857,"min":-0.41053225912299807}}],"paths":["tiny_face_detector_model-shard1"]}]

+ 1
- 0
tsconfig.json Ver fichero

@@ -11,6 +11,7 @@
"moduleResolution": "node",
"importHelpers": true,
"target": "es2015",
"skipLibCheck": true,
"lib": [
"es2018",
"dom"


Cargando…
Cancelar
Guardar