faceRecognition1.js
const fs = require('fs');
const path = require('path');
const cv = require('../');
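// outside of this repository the bindings would be loaded via require('opencv4nodejs')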
if (!cv.xmodules.face) {
  throw new Error('exiting: opencv4nodejs compiled without face module');
}
const basePath = '../data/face-recognition';
const imgsPath = path.resolve(basePath, 'imgs');
const nameMappings = ['daryl', 'rick', 'negan'];
const imgFiles = fs.readdirSync(imgsPath);
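// Haar cascade used to locate frontal faces in both the training images and the test image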
const classifier = new cv.CascadeClassifier(cv.HAAR_FRONTALFACE_ALT2);
// detect faces in a grayscale image and return the region of the first one
const getFaceImage = (grayImg) => {
  const faceRects = classifier.detectMultiScale(grayImg).objects;
  if (!faceRects.length) {
    throw new Error('failed to detect faces');
  }
  return grayImg.getRegion(faceRects[0]);
};
const trainImgs = imgFiles
  // get absolute file path
  .map(file => path.resolve(imgsPath, file))
  // read image
  .map(filePath => cv.imread(filePath))
  // face recognizer works with grayscale images
  .map(img => img.bgrToGray())
  // detect and extract face
  .map(getFaceImage)
  // face images must be equally sized
  .map(faceImg => faceImg.resize(80, 80));
// make labels
const labels = imgFiles
  .map(file => nameMappings.findIndex(name => file.includes(name)));
const lbph = new cv.LBPHFaceRecognizer();
lbph.train(trainImgs, labels);
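// The contrib face module also exposes cv.EigenFaceRecognizer and
// cv.FisherFaceRecognizer; a minimal sketch, assuming those bindings are
// available, since they share the train/predict interface and likewise expect
// equally sized grayscale faces:
//   const eigen = new cv.EigenFaceRecognizer();
//   eigen.train(trainImgs, labels);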
const twoFacesImg = cv.imread(path.resolve(basePath, 'daryl-rick.jpg'));
const result = classifier.detectMultiScale(twoFacesImg.bgrToGray());
const minDetections = 10;
result.objects.forEach((faceRect, i) => {
  // skip weak detections
  if (result.numDetections[i] < minDetections) {
    return;
  }
  // crop the detected face and predict which person it is
  const faceImg = twoFacesImg.getRegion(faceRect).bgrToGray();
  const who = nameMappings[lbph.predict(faceImg).label];
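  // lbph.predict(faceImg) also returns a confidence value (for LBPH this is a
  // distance, so lower means a closer match); a rough sketch for rejecting
  // unknown faces, assuming a hand-tuned maxDistance threshold:
  //   const { label, confidence } = lbph.predict(faceImg);
  //   const who = confidence < maxDistance ? nameMappings[label] : 'unknown';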
  // draw a rectangle around the face and a text box with the predicted name
  const rect = cv.drawDetection(
    twoFacesImg,
    faceRect,
    { color: new cv.Vec(255, 0, 0), segmentFraction: 4 }
  );
  const alpha = 0.4;
  cv.drawTextBox(
    twoFacesImg,
    new cv.Point(rect.x, rect.y + rect.height + 10),
    [{ text: who }],
    alpha
  );
});
cv.imshowWait('result', twoFacesImg);
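// Alternatively the annotated image could be written to disk with cv.imwrite,
// assuming an output path of your choosing:
//   cv.imwrite('./result.png', twoFacesImg);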