Use OpenSeeFace to control Live2D model
[facial-landmarks-for-cubism.git] / src / facial_landmark_detector.cpp
/****
Copyright (c) 2020-2021 Adrian I. Lam

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
****/

#include <stdexcept>
#include <fstream>
#include <string>
#include <sstream>
#include <cmath>

#include <cinttypes>
#include <sys/types.h>
#include <sys/socket.h>
#include <arpa/inet.h>
#include <unistd.h>

#include "facial_landmark_detector.h"
#include "math_utils.h"


static void filterPush(std::deque<double>& buf, double newval,
                       std::size_t numTaps)
{
    buf.push_back(newval);
    while (buf.size() > numTaps)
    {
        buf.pop_front();
    }
}
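
/* The averaged read-out of these buffers is done by an avg() helper that is
 * not defined in this file (it presumably comes from the class header or
 * math_utils.h). Judging from the call sites in getParams() below, it behaves
 * roughly like this hypothetical sketch: the mean of the buffered samples,
 * with a fallback value for an empty buffer. Illustration only; the name and
 * the default argument are assumptions, not part of this codebase.
 */
static double filterAvgSketch(const std::deque<double>& buf, double fallback = 0)
{
    if (buf.empty()) return fallback;

    double sum = 0;
    for (double v : buf) sum += v;
    return sum / buf.size();
}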

FacialLandmarkDetector::FacialLandmarkDetector(std::string cfgPath)
    : m_stop(false)
{
    parseConfig(cfgPath);

    struct sockaddr_in addr;
    addr.sin_family = AF_INET;
    addr.sin_port = htons(m_cfg.osfPort);
    addr.sin_addr.s_addr = inet_addr(m_cfg.osfIpAddress.c_str());

    m_sock = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
    if (m_sock < 0)
    {
        throw std::runtime_error("Cannot create UDP socket");
    }

    int ret = bind(m_sock, (struct sockaddr *)&addr, sizeof addr);
    if (ret != 0)
    {
        throw std::runtime_error("Cannot bind socket");
    }
}
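
/* Note: the socket above is bound to osfIpAddress:osfPort from the config,
 * so the OpenSeeFace tracker must be configured to send its UDP packets to
 * that same address and port (how to do that depends on how you launch OSF).
 */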

FacialLandmarkDetector::~FacialLandmarkDetector()
{
    close(m_sock);
}

FacialLandmarkDetector::Params FacialLandmarkDetector::getParams(void) const
{
    Params params;

    params.faceXAngle = avg(m_faceXAngle);
    params.faceYAngle = avg(m_faceYAngle) + m_cfg.faceYAngleCorrection;
    // The correction (+10 by default) compensates for the angle between
    // the computer monitor and the webcam
    params.faceZAngle = avg(m_faceZAngle);
    params.mouthOpenness = avg(m_mouthOpenness);
    params.mouthForm = avg(m_mouthForm);

    double leftEye = avg(m_leftEyeOpenness, 1);
    double rightEye = avg(m_rightEyeOpenness, 1);
    bool sync = !m_cfg.winkEnable;

    if (m_cfg.winkEnable)
    {
        if (rightEye < 0.1 && leftEye > 0.2)
        {
            leftEye = 1;
            rightEye = 0;
        }
        else if (leftEye < 0.1 && rightEye > 0.2)
        {
            leftEye = 0;
            rightEye = 1;
        }
        else
        {
            sync = true;
        }
    }

    if (sync)
    {
        // Combine the two to get better synchronized blinks
        double bothEyes = (leftEye + rightEye) / 2;
        leftEye = bothEyes;
        rightEye = bothEyes;
    }

    params.leftEyeOpenness = leftEye;
    params.rightEyeOpenness = rightEye;

    if (leftEye <= m_cfg.eyeSmileEyeOpenThreshold &&
        rightEye <= m_cfg.eyeSmileEyeOpenThreshold &&
        params.mouthForm > m_cfg.eyeSmileMouthFormThreshold &&
        params.mouthOpenness > m_cfg.eyeSmileMouthOpenThreshold)
    {
        params.leftEyeSmile = 1;
        params.rightEyeSmile = 1;
    }
    else
    {
        params.leftEyeSmile = 0;
        params.rightEyeSmile = 0;
    }

    params.autoBlink = m_cfg.autoBlink;
    params.autoBreath = m_cfg.autoBreath;
    params.randomMotion = m_cfg.randomMotion;

    return params;
}

void FacialLandmarkDetector::stop(void)
{
    m_stop = true;
}

void FacialLandmarkDetector::mainLoop(void)
{
    while (!m_stop)
    {
        // Read UDP packet from OSF
        static const int nPoints = 68;
        static const int packetFrameSize = 8 + 4 + 2 * 4 + 2 * 4 + 1 + 4 + 3 * 4 + 3 * 4
                                         + 4 * 4 + 4 * 68 + 4 * 2 * 68 + 4 * 3 * 70 + 4 * 14;

        static const int landmarksOffset = 8 + 4 + 2 * 4 + 2 * 4 + 1 + 4 + 3 * 4 + 3 * 4
                                         + 4 * 4 + 4 * 68;

        uint8_t buf[packetFrameSize];
        ssize_t recvSize = recv(m_sock, buf, sizeof buf, 0);

        if (recvSize != packetFrameSize) continue;
        // Note: this read is endianness-dependent; we assume the OSF
        // instance runs on a machine with the same endianness as this one.
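        // The pointer casts below also reinterpret the raw byte buffer in
        // place; on a platform or compiler that is strict about alignment or
        // type aliasing, copying each field out with memcpy would be the more
        // portable way to read these values.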
        int recvFaceId = *(int *)(buf + 8);
        if (recvFaceId != m_faceId) continue; // We only support one face

        Point landmarks[nPoints];

        for (int i = 0; i < nPoints; i++)
        {
            float x = *(float *)(buf + landmarksOffset + i * 2 * sizeof(float));
            float y = *(float *)(buf + landmarksOffset + (i * 2 + 1) * sizeof(float));

            landmarks[i].x = x;
            landmarks[i].y = y;
        }

        /* The coordinates seem to be rather noisy in general.
         * We will push everything through some moving average filters
         * to reduce noise. The number of taps is determined empirically
         * until we get something good.
         * An alternative method would be to get some better dataset -
         * perhaps even to train on a custom data set just for the user.
         */

        // Face rotation: X direction (left-right)
        double faceXRot = calcFaceXAngle(landmarks);
        filterPush(m_faceXAngle, faceXRot, m_cfg.faceXAngleNumTaps);

        // Mouth form (smile / laugh) detection
        double mouthForm = calcMouthForm(landmarks);
        filterPush(m_mouthForm, mouthForm, m_cfg.mouthFormNumTaps);

        // Face rotation: Y direction (up-down)
        double faceYRot = calcFaceYAngle(landmarks, faceXRot, mouthForm);
        filterPush(m_faceYAngle, faceYRot, m_cfg.faceYAngleNumTaps);

        // Face rotation: Z direction (head tilt)
        double faceZRot = calcFaceZAngle(landmarks);
        filterPush(m_faceZAngle, faceZRot, m_cfg.faceZAngleNumTaps);

        // Mouth openness
        double mouthOpen = calcMouthOpenness(landmarks, mouthForm);
        filterPush(m_mouthOpenness, mouthOpen, m_cfg.mouthOpenNumTaps);

        // Eye openness
        double eyeLeftOpen = calcEyeOpenness(LEFT, landmarks, faceYRot);
        filterPush(m_leftEyeOpenness, eyeLeftOpen, m_cfg.leftEyeOpenNumTaps);
        double eyeRightOpen = calcEyeOpenness(RIGHT, landmarks, faceYRot);
        filterPush(m_rightEyeOpenness, eyeRightOpen, m_cfg.rightEyeOpenNumTaps);

        // Eyebrows: the landmark detection doesn't work very well for my face,
        // so I've not implemented them.
    }
}
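
/* For orientation, a minimal sketch of how this class is presumably driven
 * (the real wiring lives elsewhere in the project): mainLoop() runs on its
 * own thread and keeps the filters fed, while the renderer polls getParams()
 * once per frame and calls stop() on shutdown. Hypothetical illustration
 * only -- names like updateModel() are placeholders, not project API.
 *
 *     FacialLandmarkDetector detector("config.txt");
 *     std::thread worker(&FacialLandmarkDetector::mainLoop, &detector);
 *
 *     while (rendering)
 *     {
 *         FacialLandmarkDetector::Params p = detector.getParams();
 *         updateModel(p);   // apply to the Live2D model parameters
 *     }
 *
 *     detector.stop();
 *     worker.join();
 */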

double FacialLandmarkDetector::calcEyeAspectRatio(
    Point& p1, Point& p2,
    Point& p3, Point& p4,
    Point& p5, Point& p6) const
{
    double eyeWidth = dist(p1, p4);
    double eyeHeight1 = dist(p2, p6);
    double eyeHeight2 = dist(p3, p5);

    return (eyeHeight1 + eyeHeight2) / (2 * eyeWidth);
}
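
// For reference: with the defaults set in populateDefaultConfig() below, a
// (pitch-corrected) aspect ratio of around 0.18 is treated as a closed eye
// and around 0.21 as fully open; see calcEyeOpenness().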

double FacialLandmarkDetector::calcEyeOpenness(
    LeftRight eye,
    Point landmarks[],
    double faceYAngle) const
{
    double eyeAspectRatio;
    if (eye == LEFT)
    {
        eyeAspectRatio = calcEyeAspectRatio(landmarks[42], landmarks[43], landmarks[44],
                                            landmarks[45], landmarks[46], landmarks[47]);
    }
    else
    {
        eyeAspectRatio = calcEyeAspectRatio(landmarks[36], landmarks[37], landmarks[38],
                                            landmarks[39], landmarks[40], landmarks[41]);
    }

    // Apply correction due to faceYAngle
    double corrEyeAspRat = eyeAspectRatio / std::cos(degToRad(faceYAngle));

    return linearScale01(corrEyeAspRat, m_cfg.eyeClosedThreshold, m_cfg.eyeOpenThreshold);
}
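
/* linearScale01() comes from math_utils.h and is not shown in this file.
 * Judging from its call sites, it maps a value linearly from [min, max] onto
 * [0, 1], with the two optional bools apparently controlling whether the
 * result is clamped at the low and high ends. A hypothetical sketch of those
 * assumed semantics (the real signature may differ):
 */
static double linearScale01Sketch(double val, double min, double max,
                                  bool clampLow = true, bool clampHigh = true)
{
    double scaled = (val - min) / (max - min);
    if (clampLow && scaled < 0) scaled = 0;
    if (clampHigh && scaled > 1) scaled = 1;
    return scaled;
}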



double FacialLandmarkDetector::calcMouthForm(Point landmarks[]) const
{
    /* Mouth form parameter: 0 for normal mouth, 1 for fully smiling / laughing.
     * Compare distance between the two corners of the mouth
     * to the distance between the two eyes.
     */

    /* An alternative (my initial attempt) was to compare the corners of
     * the mouth to the top of the upper lip - they almost lie on a
     * straight line when smiling / laughing. But that is only true
     * when facing straight at the camera. When looking up / down,
     * the angle changes. So here we'll use the distance approach instead.
     */

    auto eye1 = centroid(landmarks[36], landmarks[37], landmarks[38],
                         landmarks[39], landmarks[40], landmarks[41]);
    auto eye2 = centroid(landmarks[42], landmarks[43], landmarks[44],
                         landmarks[45], landmarks[46], landmarks[47]);
    double distEyes = dist(eye1, eye2);
    double distMouth = dist(landmarks[58], landmarks[62]);

    double form = linearScale01(distMouth / distEyes,
                                m_cfg.mouthNormalThreshold,
                                m_cfg.mouthSmileThreshold);

    return form;
}
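
// For reference: with the defaults below, a mouth-corner distance of about
// 0.75x the inter-eye distance is treated as a neutral mouth (form 0) and
// about 1.0x as a full smile / laugh (form 1).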

double FacialLandmarkDetector::calcMouthOpenness(
    Point landmarks[],
    double mouthForm) const
{
    // Use points for the bottom of the upper lip, and top of the lower lip
    // We have 3 pairs of points available, which give the mouth height
    // on the left, in the middle, and on the right, resp.
    // First let's try to use an average of all three.
    double heightLeft = dist(landmarks[61], landmarks[63]);
    double heightMiddle = dist(landmarks[60], landmarks[64]);
    double heightRight = dist(landmarks[59], landmarks[65]);

    double avgHeight = (heightLeft + heightMiddle + heightRight) / 3;

    // Now, normalize it with the width of the mouth.
    double width = dist(landmarks[58], landmarks[62]);

    double normalized = avgHeight / width;

    double scaled = linearScale01(normalized,
                                  m_cfg.mouthClosedThreshold,
                                  m_cfg.mouthOpenThreshold,
                                  true, false);

    // Apply correction according to mouthForm
    // Notice that when you smile / laugh, width is increased
    scaled *= (1 + m_cfg.mouthOpenLaughCorrection * mouthForm);

    return scaled;
}

double FacialLandmarkDetector::calcFaceXAngle(Point landmarks[]) const
{
    // This function will be easier to understand if you refer to the
    // diagram in faceXAngle.png

    // Construct the y-axis using (1) the average of four points on the nose
    // and (2) the average of five points on the upper lip.

    auto y0 = centroid(landmarks[27], landmarks[28], landmarks[29],
                       landmarks[30]);
    auto y1 = centroid(landmarks[48], landmarks[49], landmarks[50],
                       landmarks[51], landmarks[52]);

    // Now drop a perpendicular from the left and right edges of the face,
    // and calculate the ratio between the lengths of these perpendiculars

    auto left = centroid(landmarks[14], landmarks[15], landmarks[16]);
    auto right = centroid(landmarks[0], landmarks[1], landmarks[2]);

    // Constructing a perpendicular:
    // Join the left/right point and the upper lip. The included angle
    // can now be determined using the cosine rule.
    // The sine of this angle times the newly created line then gives
    // the length of the perpendicular.
    double opp = dist(right, y0);
    double adj1 = dist(y0, y1);
    double adj2 = dist(y1, right);
    double angle = solveCosineRuleAngle(opp, adj1, adj2);
    double perpRight = adj2 * std::sin(angle);

    opp = dist(left, y0);
    adj2 = dist(y1, left);
    angle = solveCosineRuleAngle(opp, adj1, adj2);
    double perpLeft = adj2 * std::sin(angle);

    // Model the head as a sphere and look at it from above.
    double theta = std::asin((perpRight - perpLeft) / (perpRight + perpLeft));

    theta = radToDeg(theta);
    if (theta < -30) theta = -30;
    if (theta > 30) theta = 30;
    return theta;
}
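
/* solveCosineRuleAngle() is provided by math_utils.h and not shown here. From
 * the way it is called above, it appears to return the angle opposite the
 * side `opp` of a triangle whose other two sides are `adj1` and `adj2`, i.e.
 * the cosine rule solved for that angle. A hypothetical sketch of that
 * assumed behaviour:
 */
static double cosineRuleAngleSketch(double opp, double adj1, double adj2)
{
    // c^2 = a^2 + b^2 - 2ab cos(C)  =>  C = acos((a^2 + b^2 - c^2) / (2ab))
    return std::acos((adj1 * adj1 + adj2 * adj2 - opp * opp)
                     / (2 * adj1 * adj2));
}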

double FacialLandmarkDetector::calcFaceYAngle(Point landmarks[], double faceXAngle, double mouthForm) const
{
    // Use the nose: the angle between the two left/right points and the tip
    double c = dist(landmarks[31], landmarks[35]);
    double a = dist(landmarks[30], landmarks[31]);
    double b = dist(landmarks[30], landmarks[35]);

    double angle = solveCosineRuleAngle(c, a, b);

    // This probably varies a lot from person to person...

    // Best is probably to work out some trigonometry again,
    // but just linear interpolation seems to work ok...

    // Correct for X rotation
    double corrAngle = angle * (1 + (std::abs(faceXAngle) / 30
                                     * m_cfg.faceYAngleXRotCorrection));

    // Correct for smiles / laughs - this increases the angle
    corrAngle *= (1 - mouthForm * m_cfg.faceYAngleSmileCorrection);

    if (corrAngle >= m_cfg.faceYAngleZeroValue)
    {
        return -30 * linearScale01(corrAngle,
                                   m_cfg.faceYAngleZeroValue,
                                   m_cfg.faceYAngleDownThreshold,
                                   false, false);
    }
    else
    {
        return 30 * (1 - linearScale01(corrAngle,
                                       m_cfg.faceYAngleUpThreshold,
                                       m_cfg.faceYAngleZeroValue,
                                       false, false));
    }
}
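
// Worked example with the defaults below (and assuming linearScale01 maps
// [min, max] linearly onto [0, 1]): a corrected nose angle of about 2.05
// (radians, as returned by solveCosineRuleAngle) sits halfway between
// faceYAngleZeroValue (1.8) and faceYAngleDownThreshold (2.3), so this
// returns -30 * 0.5 = -15 degrees, i.e. the head pitched halfway down.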

double FacialLandmarkDetector::calcFaceZAngle(Point landmarks[]) const
{
    // Average the tilt of the line between the eyes and of the line
    // across the nose

    auto eyeRight = centroid(landmarks[36], landmarks[37], landmarks[38],
                             landmarks[39], landmarks[40], landmarks[41]);
    auto eyeLeft = centroid(landmarks[42], landmarks[43], landmarks[44],
                            landmarks[45], landmarks[46], landmarks[47]);

    auto noseLeft = landmarks[35];
    auto noseRight = landmarks[31];

    double eyeYDiff = eyeRight.y - eyeLeft.y;
    double eyeXDiff = eyeRight.x - eyeLeft.x;

    double angle1 = std::atan(eyeYDiff / eyeXDiff);

    double noseYDiff = noseRight.y - noseLeft.y;
    double noseXDiff = noseRight.x - noseLeft.x;

    double angle2 = std::atan(noseYDiff / noseXDiff);

    return radToDeg((angle1 + angle2) / 2);
}

void FacialLandmarkDetector::parseConfig(std::string cfgPath)
{
    populateDefaultConfig();
    if (cfgPath != "")
    {
        std::ifstream file(cfgPath);

        if (!file)
        {
            throw std::runtime_error("Failed to open config file");
        }

        std::string line;
        unsigned int lineNum = 0;

        while (std::getline(file, line))
        {
            lineNum++;

            if (line[0] == '#')
            {
                continue;
            }

            std::istringstream ss(line);
            std::string paramName;
            if (ss >> paramName)
            {
                if (paramName == "osfIpAddress")
                {
                    if (!(ss >> m_cfg.osfIpAddress))
                    {
                        throwConfigError(paramName, "std::string",
                                         line, lineNum);
                    }
                }
                else if (paramName == "osfPort")
                {
                    if (!(ss >> m_cfg.osfPort))
                    {
                        throwConfigError(paramName, "int",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleCorrection")
                {
                    if (!(ss >> m_cfg.faceYAngleCorrection))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "eyeSmileEyeOpenThreshold")
                {
                    if (!(ss >> m_cfg.eyeSmileEyeOpenThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "eyeSmileMouthFormThreshold")
                {
                    if (!(ss >> m_cfg.eyeSmileMouthFormThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "eyeSmileMouthOpenThreshold")
                {
                    if (!(ss >> m_cfg.eyeSmileMouthOpenThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceXAngleNumTaps")
                {
                    if (!(ss >> m_cfg.faceXAngleNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleNumTaps")
                {
                    if (!(ss >> m_cfg.faceYAngleNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceZAngleNumTaps")
                {
                    if (!(ss >> m_cfg.faceZAngleNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthFormNumTaps")
                {
                    if (!(ss >> m_cfg.mouthFormNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthOpenNumTaps")
                {
                    if (!(ss >> m_cfg.mouthOpenNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "leftEyeOpenNumTaps")
                {
                    if (!(ss >> m_cfg.leftEyeOpenNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "rightEyeOpenNumTaps")
                {
                    if (!(ss >> m_cfg.rightEyeOpenNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "eyeClosedThreshold")
                {
                    if (!(ss >> m_cfg.eyeClosedThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "eyeOpenThreshold")
                {
                    if (!(ss >> m_cfg.eyeOpenThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "winkEnable")
                {
                    if (!(ss >> m_cfg.winkEnable))
                    {
                        throwConfigError(paramName, "bool",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthNormalThreshold")
                {
                    if (!(ss >> m_cfg.mouthNormalThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthSmileThreshold")
                {
                    if (!(ss >> m_cfg.mouthSmileThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthClosedThreshold")
                {
                    if (!(ss >> m_cfg.mouthClosedThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthOpenThreshold")
                {
                    if (!(ss >> m_cfg.mouthOpenThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthOpenLaughCorrection")
                {
                    if (!(ss >> m_cfg.mouthOpenLaughCorrection))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleXRotCorrection")
                {
                    if (!(ss >> m_cfg.faceYAngleXRotCorrection))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleSmileCorrection")
                {
                    if (!(ss >> m_cfg.faceYAngleSmileCorrection))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleZeroValue")
                {
                    if (!(ss >> m_cfg.faceYAngleZeroValue))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleUpThreshold")
                {
                    if (!(ss >> m_cfg.faceYAngleUpThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleDownThreshold")
                {
                    if (!(ss >> m_cfg.faceYAngleDownThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "autoBlink")
                {
                    if (!(ss >> m_cfg.autoBlink))
                    {
                        throwConfigError(paramName, "bool",
                                         line, lineNum);
                    }
                }
                else if (paramName == "autoBreath")
                {
                    if (!(ss >> m_cfg.autoBreath))
                    {
                        throwConfigError(paramName, "bool",
                                         line, lineNum);
                    }
                }
                else if (paramName == "randomMotion")
                {
                    if (!(ss >> m_cfg.randomMotion))
                    {
                        throwConfigError(paramName, "bool",
                                         line, lineNum);
                    }
                }
                else
                {
                    std::ostringstream oss;
                    oss << "Unrecognized parameter name at line " << lineNum
                        << ": " << paramName;
                    throw std::runtime_error(oss.str());
                }
            }
        }
    }
}
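
/* For reference, the config file read above is plain text with one
 * "parameterName value" pair per line; lines starting with '#' are comments,
 * and anything not listed falls back to the defaults below. A small
 * hypothetical example (values are illustrative, not recommendations):
 *
 *     # Where OpenSeeFace sends its UDP packets
 *     osfIpAddress 127.0.0.1
 *     osfPort 11573
 *
 *     faceYAngleCorrection 10
 *     winkEnable 1          # bools are read as 0 / 1
 *     eyeClosedThreshold 0.18
 *     eyeOpenThreshold 0.21
 */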

void FacialLandmarkDetector::populateDefaultConfig(void)
{
    // These are values that I've personally tested to work OK for my face.
    // Your mileage may vary - hence the config file.

    m_cfg.faceYAngleCorrection = 10;
    m_cfg.eyeSmileEyeOpenThreshold = 0.6;
    m_cfg.eyeSmileMouthFormThreshold = 0.75;
    m_cfg.eyeSmileMouthOpenThreshold = 0.5;
    m_cfg.faceXAngleNumTaps = 7;
    m_cfg.faceYAngleNumTaps = 7;
    m_cfg.faceZAngleNumTaps = 7;
    m_cfg.mouthFormNumTaps = 3;
    m_cfg.mouthOpenNumTaps = 3;
    m_cfg.leftEyeOpenNumTaps = 3;
    m_cfg.rightEyeOpenNumTaps = 3;
    m_cfg.eyeClosedThreshold = 0.18;
    m_cfg.eyeOpenThreshold = 0.21;
    m_cfg.winkEnable = true;
    m_cfg.mouthNormalThreshold = 0.75;
    m_cfg.mouthSmileThreshold = 1.0;
    m_cfg.mouthClosedThreshold = 0.1;
    m_cfg.mouthOpenThreshold = 0.4;
    m_cfg.mouthOpenLaughCorrection = 0.2;
    m_cfg.faceYAngleXRotCorrection = 0.15;
    m_cfg.faceYAngleSmileCorrection = 0.075;
    m_cfg.faceYAngleZeroValue = 1.8;
    m_cfg.faceYAngleDownThreshold = 2.3;
    m_cfg.faceYAngleUpThreshold = 1.3;
    m_cfg.autoBlink = false;
    m_cfg.autoBreath = false;
    m_cfg.randomMotion = false;
}

void FacialLandmarkDetector::throwConfigError(std::string paramName,
                                              std::string expectedType,
                                              std::string line,
                                              unsigned int lineNum)
{
    std::ostringstream ss;
    ss << "Error parsing config file for parameter " << paramName
       << "\nAt line " << lineNum << ": " << line
       << "\nExpecting value of type " << expectedType;

    throw std::runtime_error(ss.str());
}