|
|
@@ -1,6 +1,5 @@ |
|
|
|
import { Component, ElementRef, OnInit, ViewChild } from '@angular/core'; |
|
|
|
import { detectSingleFace, loadFaceExpressionModel, loadFaceLandmarkModel, loadFaceLandmarkTinyModel, loadFaceRecognitionModel, loadSsdMobilenetv1Model, loadTinyFaceDetectorModel, TinyFaceDetectorOptions } from 'face-api.js'; |
|
|
|
import domtoimage from 'dom-to-image'; |
|
|
|
import { Location } from '@angular/common'; |
|
|
|
|
|
|
|
@Component({ |
|
|
@@ -10,8 +9,11 @@ import { Location } from '@angular/common'; |
|
|
|
}) |
|
|
|
export class ArFanCamPage implements OnInit { |
|
|
|
@ViewChild('videoElement', null) videoElement: ElementRef<HTMLVideoElement>; |
|
|
|
@ViewChild('glassesElement', null) glassesElement: ElementRef<HTMLImageElement>; |
|
|
|
@ViewChild('canvasElement', null) canvasElement: ElementRef<HTMLCanvasElement>; |
|
|
|
|
|
|
|
mediaStream: MediaStream|null = null; |
|
|
|
capturedImageStrings: Array<string> = []; |
|
|
|
|
|
|
|
glassProperties: { |
|
|
|
[property: string]: any, |
|
|
@@ -25,9 +27,7 @@ export class ArFanCamPage implements OnInit { |
|
|
|
width = 360; |
|
|
|
height = 270; |
|
|
|
|
|
|
|
temp: any; |
|
|
|
|
|
|
|
|
|
|
|
temp: string; |
|
|
|
stream: any; |
|
|
|
|
|
|
|
constructor( |
|
|
@@ -77,15 +77,38 @@ export class ArFanCamPage implements OnInit { |
|
|
|
} |
|
|
|
|
|
|
|
/**
 * Captures the current AR scene as PNG images.
 *
 * Two captures happen per call:
 *  1. A DOM snapshot of the `#container` element (video + glasses overlay as
 *     rendered) via dom-to-image, stored in `this.temp`.
 *  2. A manual canvas composite: the raw video frame is drawn first, then the
 *     glasses image is drawn with the same CSS transform currently applied to
 *     the overlay element. The result is pushed onto `capturedImageStrings`
 *     as a PNG data URI.
 *
 * Fixes over the previous version:
 *  - no longer mixes `await` with `.then()/.catch()` on the same promise;
 *  - guards the `null` cases of `querySelector` and `getContext('2d')`;
 *  - tolerates a computed transform of `"none"` (old code threw on `split`);
 *  - clears the full canvas instead of `window.innerWidth/Height`, so a
 *    canvas larger than the viewport is not left partially dirty.
 */
async capture() {
  // 1) DOM snapshot of the whole container (video + positioned overlay).
  const container = document.querySelector<HTMLElement>('#container');
  if (container) {
    try {
      this.temp = await domtoimage.toPng(container);
      console.log(this.temp);
    } catch (error) {
      console.error('oops, something went wrong!', error);
    }
  }

  // 2) Manual composite onto the capture canvas.
  const canvas = this.canvasElement.nativeElement;
  const context = canvas.getContext('2d');
  if (!context) {
    // 2D context unavailable (should not happen on a real canvas element).
    return;
  }

  // Draw the live video frame, scaled to the canvas dimensions.
  context.drawImage(this.videoElement.nativeElement, 0, 0, canvas.width, canvas.height);

  // Read the CSS transform currently applied to the glasses overlay so the
  // same rotation/translation/scale can be replayed on the canvas.
  const glassesStyles = window.getComputedStyle(this.glassesElement.nativeElement);
  const glassesTransformValue =
    glassesStyles.getPropertyValue('-webkit-transform') ||
    glassesStyles.getPropertyValue('-moz-transform') ||
    glassesStyles.getPropertyValue('-ms-transform') ||
    glassesStyles.getPropertyValue('-o-transform') ||
    glassesStyles.getPropertyValue('transform');

  // A computed transform is either "none" or "matrix(a, b, c, d, e, f)".
  // Skip the transform when it is "none" — the old code threw here because
  // `split('(')[1]` is undefined in that case.
  if (glassesTransformValue && glassesTransformValue !== 'none') {
    const [a, b, c, d, e, f] = glassesTransformValue
      .split('(')[1]
      .split(')')[0]
      .split(',')
      .map(Number);
    context.transform(a, b, c, d, e, f);
  }

  // Draw the glasses image; the transform above positions it over the face.
  context.drawImage(this.glassesElement.nativeElement, 0, 0);

  const dataURI = canvas.toDataURL('image/png');
  this.capturedImageStrings.push(dataURI);

  // Reset the transform and wipe the whole canvas so the next capture
  // starts clean (previously only the window-sized region was cleared).
  context.resetTransform();
  context.clearRect(0, 0, canvas.width, canvas.height);
}
|
|
|
|
|
|
|
detectAndDrawFace = async () => { |
|
|
|