src/facial_landmark_detector.cpp
/****
Copyright (c) 2020-2021 Adrian I. Lam

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
****/

#include <stdexcept>
#include <fstream>
#include <string>
#include <sstream>
#include <cmath>

#include <cstdint>
#include <cinttypes>
#ifdef _WIN32
# include <WinSock2.h>
# include <ws2tcpip.h>
# include <basetsd.h>
#else
# include <sys/types.h>
# include <sys/socket.h>
# include <arpa/inet.h>
# include <unistd.h>
#endif

#include "facial_landmark_detector.h"
#include "math_utils.h"


static void filterPush(std::deque<double>& buf, double newval,
                       std::size_t numTaps)
{
    buf.push_back(newval);
    while (buf.size() > numTaps)
    {
        buf.pop_front();
    }
}
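
// Illustrative note (not part of the original code): filterPush() together
// with avg() from math_utils.h acts as a simple moving-average (boxcar FIR)
// filter - filterPush() keeps only the last numTaps samples, and avg() is
// assumed here to return their arithmetic mean. A minimal sketch:
//
//     std::deque<double> buf;
//     filterPush(buf, 1.0, 3);
//     filterPush(buf, 2.0, 3);
//     filterPush(buf, 3.0, 3);
//     filterPush(buf, 4.0, 3);    // oldest sample (1.0) is dropped
//     double smoothed = avg(buf); // (2.0 + 3.0 + 4.0) / 3 == 3.0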

FacialLandmarkDetector::FacialLandmarkDetector(std::string cfgPath)
    : m_stop(false)
{
    parseConfig(cfgPath);

#ifdef _WIN32 // WinSock2 must be initialized before use
    WSADATA wsaData;
    if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0)
    {
        throw std::runtime_error("WSAStartup failed");
    }
#endif

    struct sockaddr_in addr;
    addr.sin_family = AF_INET;
    addr.sin_port = htons(m_cfg.osfPort);
    addr.sin_addr.s_addr = inet_addr(m_cfg.osfIpAddress.c_str());

    m_sock = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
    if (m_sock < 0)
    {
        throw std::runtime_error("Cannot create UDP socket");
    }

    int ret = bind(m_sock, (struct sockaddr *)&addr, sizeof addr);
    if (ret != 0)
    {
        throw std::runtime_error("Cannot bind socket");
    }
}

FacialLandmarkDetector::~FacialLandmarkDetector()
{
#ifdef _WIN32
    closesocket(m_sock);
#else
    close(m_sock);
#endif
}

FacialLandmarkDetector::Params FacialLandmarkDetector::getParams(void) const
{
    Params params;

    params.faceXAngle = avg(m_faceXAngle);
    params.faceYAngle = avg(m_faceYAngle) + m_cfg.faceYAngleCorrection;
    // e.g. +10 corrects for the angle between the computer monitor and the webcam
    params.faceZAngle = avg(m_faceZAngle);
    params.mouthOpenness = avg(m_mouthOpenness);
    params.mouthForm = avg(m_mouthForm);

    double leftEye = avg(m_leftEyeOpenness, 1);
    double rightEye = avg(m_rightEyeOpenness, 1);
    bool sync = !m_cfg.winkEnable;

    if (m_cfg.winkEnable)
    {
        if (rightEye < 0.1 && leftEye > 0.2)
        {
            leftEye = 1;
            rightEye = 0;
        }
        else if (leftEye < 0.1 && rightEye > 0.2)
        {
            leftEye = 0;
            rightEye = 1;
        }
        else
        {
            sync = true;
        }
    }

    if (sync)
    {
        // Combine the two to get better synchronized blinks
        double bothEyes = (leftEye + rightEye) / 2;
        leftEye = bothEyes;
        rightEye = bothEyes;
    }

    params.leftEyeOpenness = leftEye;
    params.rightEyeOpenness = rightEye;

    if (leftEye <= m_cfg.eyeSmileEyeOpenThreshold &&
        rightEye <= m_cfg.eyeSmileEyeOpenThreshold &&
        params.mouthForm > m_cfg.eyeSmileMouthFormThreshold &&
        params.mouthOpenness > m_cfg.eyeSmileMouthOpenThreshold)
    {
        params.leftEyeSmile = 1;
        params.rightEyeSmile = 1;
    }
    else
    {
        params.leftEyeSmile = 0;
        params.rightEyeSmile = 0;
    }

    params.autoBlink = m_cfg.autoBlink;
    params.autoBreath = m_cfg.autoBreath;
    params.randomMotion = m_cfg.randomMotion;

    return params;
}

void FacialLandmarkDetector::stop(void)
{
    m_stop = true;
}

void FacialLandmarkDetector::mainLoop(void)
{
    while (!m_stop)
    {
        // Read UDP packet from OSF
        static const int nPoints = 68;
        static const int packetFrameSize = 8 + 4 + 2 * 4 + 2 * 4 + 1 + 4 + 3 * 4 + 3 * 4
                                           + 4 * 4 + 4 * 68 + 4 * 2 * 68 + 4 * 3 * 70 + 4 * 14;

        static const int landmarksOffset = 8 + 4 + 2 * 4 + 2 * 4 + 1 + 4 + 3 * 4 + 3 * 4
                                           + 4 * 4 + 4 * 68;

        char buf[packetFrameSize];
        auto recvSize = recv(m_sock, buf, sizeof buf, 0);

        if (recvSize != packetFrameSize) continue;
        // Note: this depends on endianness; we assume that the OSF
        // instance runs on a machine with the same endianness as our
        // current machine.
        int recvFaceId = *(int *)(buf + 8);
        if (recvFaceId != m_faceId) continue; // We only support one face

        Point landmarks[nPoints];

        for (int i = 0; i < nPoints; i++)
        {
            float x = *(float *)(buf + landmarksOffset + i * 2 * sizeof(float));
            float y = *(float *)(buf + landmarksOffset + (i * 2 + 1) * sizeof(float));

            landmarks[i].x = x;
            landmarks[i].y = y;
        }

        /* The coordinates seem to be rather noisy in general.
         * We will push everything through some moving average filters
         * to reduce noise. The number of taps is determined empirically
         * until we get something good.
         * An alternative method would be to get some better dataset -
         * perhaps even to train on a custom data set just for the user.
         */

        // Face rotation: X direction (left-right)
        double faceXRot = calcFaceXAngle(landmarks);
        filterPush(m_faceXAngle, faceXRot, m_cfg.faceXAngleNumTaps);

        // Mouth form (smile / laugh) detection
        double mouthForm = calcMouthForm(landmarks);
        filterPush(m_mouthForm, mouthForm, m_cfg.mouthFormNumTaps);

        // Face rotation: Y direction (up-down)
        double faceYRot = calcFaceYAngle(landmarks, faceXRot, mouthForm);
        filterPush(m_faceYAngle, faceYRot, m_cfg.faceYAngleNumTaps);

        // Face rotation: Z direction (head tilt)
        double faceZRot = calcFaceZAngle(landmarks);
        filterPush(m_faceZAngle, faceZRot, m_cfg.faceZAngleNumTaps);

        // Mouth openness
        double mouthOpen = calcMouthOpenness(landmarks, mouthForm);
        filterPush(m_mouthOpenness, mouthOpen, m_cfg.mouthOpenNumTaps);

        // Eye openness
        double eyeLeftOpen = calcEyeOpenness(LEFT, landmarks, faceYRot);
        filterPush(m_leftEyeOpenness, eyeLeftOpen, m_cfg.leftEyeOpenNumTaps);
        double eyeRightOpen = calcEyeOpenness(RIGHT, landmarks, faceYRot);
        filterPush(m_rightEyeOpenness, eyeRightOpen, m_cfg.rightEyeOpenNumTaps);

        // Eyebrows: the landmark detection doesn't work very well for my face,
        // so I've not implemented them.
    }
}
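
// Illustrative sketch (not part of the original code): the pointer casts in
// mainLoop() assume that the buffer offsets are suitably aligned and that this
// machine shares OSF's byte order, as noted above. A stricter way to extract
// each value is to memcpy it out of the packet first; "readFloat" below is a
// hypothetical helper, and <cstring> would be needed for std::memcpy:
//
//     static float readFloat(const char *buf, std::size_t offset)
//     {
//         float f;
//         std::memcpy(&f, buf + offset, sizeof f); // avoids unaligned/aliased reads
//         return f;                                // still assumes matching endianness
//     }
//
//     // e.g. landmarks[i].x = readFloat(buf, landmarksOffset + i * 2 * sizeof(float));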

double FacialLandmarkDetector::calcEyeAspectRatio(
    Point& p1, Point& p2,
    Point& p3, Point& p4,
    Point& p5, Point& p6) const
{
    double eyeWidth = dist(p1, p4);
    double eyeHeight1 = dist(p2, p6);
    double eyeHeight2 = dist(p3, p5);

    return (eyeHeight1 + eyeHeight2) / (2 * eyeWidth);
}
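
// For reference, this is the standard eye aspect ratio (EAR):
//
//     EAR = (|p2 - p6| + |p3 - p5|) / (2 * |p1 - p4|)
//
// where p1/p4 are the eye corners and p2/p3 (top) pair with p6/p5 (bottom).
// Illustrative numbers (not from the original code): an open eye typically
// gives an EAR of roughly 0.2-0.3, and the value drops towards 0 as the eye
// closes, which is what the thresholds in calcEyeOpenness() rely on.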

double FacialLandmarkDetector::calcEyeOpenness(
    LeftRight eye,
    Point landmarks[],
    double faceYAngle) const
{
    double eyeAspectRatio;
    if (eye == LEFT)
    {
        eyeAspectRatio = calcEyeAspectRatio(landmarks[42], landmarks[43], landmarks[44],
                                            landmarks[45], landmarks[46], landmarks[47]);
    }
    else
    {
        eyeAspectRatio = calcEyeAspectRatio(landmarks[36], landmarks[37], landmarks[38],
                                            landmarks[39], landmarks[40], landmarks[41]);
    }

    // Apply correction due to faceYAngle
    double corrEyeAspRat = eyeAspectRatio / std::cos(degToRad(faceYAngle));

    return linearScale01(corrEyeAspRat, m_cfg.eyeClosedThreshold, m_cfg.eyeOpenThreshold);
}
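
// Illustrative example (not part of the original code), assuming linearScale01()
// maps its input linearly from [low, high] onto [0, 1]: with the default
// thresholds (eyeClosedThreshold = 0.18, eyeOpenThreshold = 0.21), a corrected
// aspect ratio of 0.18 or less reads as 0 (closed), 0.21 or more reads as 1
// (open), and e.g. 0.195 gives (0.195 - 0.18) / (0.21 - 0.18) = 0.5.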


double FacialLandmarkDetector::calcMouthForm(Point landmarks[]) const
{
    /* Mouth form parameter: 0 for normal mouth, 1 for fully smiling / laughing.
     * Compare distance between the two corners of the mouth
     * to the distance between the two eyes.
     */

    /* An alternative (my initial attempt) was to compare the corners of
     * the mouth to the top of the upper lip - they almost lie on a
     * straight line when smiling / laughing. But that is only true
     * when facing straight at the camera. When looking up / down,
     * the angle changes. So here we'll use the distance approach instead.
     */

    auto eye1 = centroid(landmarks[36], landmarks[37], landmarks[38],
                         landmarks[39], landmarks[40], landmarks[41]);
    auto eye2 = centroid(landmarks[42], landmarks[43], landmarks[44],
                         landmarks[45], landmarks[46], landmarks[47]);
    double distEyes = dist(eye1, eye2);
    double distMouth = dist(landmarks[58], landmarks[62]);

    double form = linearScale01(distMouth / distEyes,
                                m_cfg.mouthNormalThreshold,
                                m_cfg.mouthSmileThreshold);

    return form;
}

double FacialLandmarkDetector::calcMouthOpenness(
    Point landmarks[],
    double mouthForm) const
{
    // Use points for the bottom of the upper lip, and top of the lower lip.
    // We have 3 pairs of points available, which give the mouth height
    // on the left, in the middle, and on the right, respectively.
    // First let's try to use an average of all three.
    double heightLeft = dist(landmarks[61], landmarks[63]);
    double heightMiddle = dist(landmarks[60], landmarks[64]);
    double heightRight = dist(landmarks[59], landmarks[65]);

    double avgHeight = (heightLeft + heightMiddle + heightRight) / 3;

    // Now, normalize it with the width of the mouth.
    double width = dist(landmarks[58], landmarks[62]);

    double normalized = avgHeight / width;

    double scaled = linearScale01(normalized,
                                  m_cfg.mouthClosedThreshold,
                                  m_cfg.mouthOpenThreshold,
                                  true, false);

    // Apply correction according to mouthForm.
    // Notice that when you smile / laugh, width is increased.
    scaled *= (1 + m_cfg.mouthOpenLaughCorrection * mouthForm);

    return scaled;
}

double FacialLandmarkDetector::calcFaceXAngle(Point landmarks[]) const
{
    // This function will be easier to understand if you refer to the
    // diagram in faceXAngle.png

    // Construct the y-axis using (1) average of four points on the nose and
    // (2) average of five points on the upper lip.

    auto y0 = centroid(landmarks[27], landmarks[28], landmarks[29],
                       landmarks[30]);
    auto y1 = centroid(landmarks[48], landmarks[49], landmarks[50],
                       landmarks[51], landmarks[52]);

    // Now drop a perpendicular from the left and right edges of the face,
    // and calculate the ratio between the lengths of these perpendiculars.

    auto left = centroid(landmarks[14], landmarks[15], landmarks[16]);
    auto right = centroid(landmarks[0], landmarks[1], landmarks[2]);

    // Constructing a perpendicular:
    // Join the left/right point and the upper lip. The included angle
    // can now be determined using the cosine rule.
    // Then sine of this angle is the perpendicular divided by the newly
    // created line.
    double opp = dist(right, y0);
    double adj1 = dist(y0, y1);
    double adj2 = dist(y1, right);
    double angle = solveCosineRuleAngle(opp, adj1, adj2);
    double perpRight = adj2 * std::sin(angle);

    opp = dist(left, y0);
    adj2 = dist(y1, left);
    angle = solveCosineRuleAngle(opp, adj1, adj2);
    double perpLeft = adj2 * std::sin(angle);

    // Model the head as a sphere and look from above.
    double theta = std::asin((perpRight - perpLeft) / (perpRight + perpLeft));

    theta = radToDeg(theta);
    if (theta < -30) theta = -30;
    if (theta > 30) theta = 30;
    return theta;
}
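
// For reference (my own reconstruction of the geometry, not from the original
// code): with opp, adj1, adj2 the three sides of the triangle, the function
// solveCosineRuleAngle() is assumed to return the angle A opposite side opp,
//
//     A = acos((adj1^2 + adj2^2 - opp^2) / (2 * adj1 * adj2))
//
// so perp = adj2 * sin(A) is the distance from a face edge to the y0-y1 axis.
// Viewing the head from above as a sphere of radius r, the silhouette edges
// stay roughly at +/-r while the facial midline shifts to about r * sin(theta),
// making the perpendiculars roughly r * (1 + sin(theta)) and r * (1 - sin(theta)),
// hence sin(theta) = (perpRight - perpLeft) / (perpRight + perpLeft).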

double FacialLandmarkDetector::calcFaceYAngle(Point landmarks[], double faceXAngle, double mouthForm) const
{
    // Use the nose:
    // angle between the two left/right points and the tip
    double c = dist(landmarks[31], landmarks[35]);
    double a = dist(landmarks[30], landmarks[31]);
    double b = dist(landmarks[30], landmarks[35]);

    double angle = solveCosineRuleAngle(c, a, b);

    // This probably varies a lot from person to person...

    // Best is probably to work out some trigonometry again,
    // but just linear interpolation seems to work ok...

    // Correct for X rotation
    double corrAngle = angle * (1 + (std::abs(faceXAngle) / 30
                                     * m_cfg.faceYAngleXRotCorrection));

    // Correct for smiles / laughs - this increases the angle
    corrAngle *= (1 - mouthForm * m_cfg.faceYAngleSmileCorrection);

    if (corrAngle >= m_cfg.faceYAngleZeroValue)
    {
        return -30 * linearScale01(corrAngle,
                                   m_cfg.faceYAngleZeroValue,
                                   m_cfg.faceYAngleDownThreshold,
                                   false, false);
    }
    else
    {
        return 30 * (1 - linearScale01(corrAngle,
                                       m_cfg.faceYAngleUpThreshold,
                                       m_cfg.faceYAngleZeroValue,
                                       false, false));
    }
}
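
// Illustrative summary (not part of the original code), assuming linearScale01()
// maps [low, high] linearly onto [0, 1]: with the default config values the
// corrected nose angle corrAngle maps to the output as follows:
//
//     corrAngle >= faceYAngleZeroValue (1.8): 0 deg, falling to -30 deg at
//                                             faceYAngleDownThreshold (2.3), i.e. looking down
//     corrAngle <  faceYAngleZeroValue (1.8): 0 deg, rising to +30 deg at
//                                             faceYAngleUpThreshold (1.3), i.e. looking up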

double FacialLandmarkDetector::calcFaceZAngle(Point landmarks[]) const
{
    // Use average of eyes and nose

    auto eyeRight = centroid(landmarks[36], landmarks[37], landmarks[38],
                             landmarks[39], landmarks[40], landmarks[41]);
    auto eyeLeft = centroid(landmarks[42], landmarks[43], landmarks[44],
                            landmarks[45], landmarks[46], landmarks[47]);

    auto noseLeft = landmarks[35];
    auto noseRight = landmarks[31];

    double eyeYDiff = eyeRight.y - eyeLeft.y;
    double eyeXDiff = eyeRight.x - eyeLeft.x;

    double angle1 = std::atan(eyeYDiff / eyeXDiff);

    double noseYDiff = noseRight.y - noseLeft.y;
    double noseXDiff = noseRight.x - noseLeft.x;

    double angle2 = std::atan(noseYDiff / noseXDiff);

    return radToDeg((angle1 + angle2) / 2);
}

void FacialLandmarkDetector::parseConfig(std::string cfgPath)
{
    populateDefaultConfig();
    if (cfgPath != "")
    {
        std::ifstream file(cfgPath);

        if (!file)
        {
            throw std::runtime_error("Failed to open config file");
        }

        std::string line;
        unsigned int lineNum = 0;

        while (std::getline(file, line))
        {
            lineNum++;

            if (line[0] == '#')
            {
                continue;
            }

            std::istringstream ss(line);
            std::string paramName;
            if (ss >> paramName)
            {
                if (paramName == "osfIpAddress")
                {
                    if (!(ss >> m_cfg.osfIpAddress))
                    {
                        throwConfigError(paramName, "std::string",
                                         line, lineNum);
                    }
                }
                else if (paramName == "osfPort")
                {
                    if (!(ss >> m_cfg.osfPort))
                    {
                        throwConfigError(paramName, "int",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleCorrection")
                {
                    if (!(ss >> m_cfg.faceYAngleCorrection))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "eyeSmileEyeOpenThreshold")
                {
                    if (!(ss >> m_cfg.eyeSmileEyeOpenThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "eyeSmileMouthFormThreshold")
                {
                    if (!(ss >> m_cfg.eyeSmileMouthFormThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "eyeSmileMouthOpenThreshold")
                {
                    if (!(ss >> m_cfg.eyeSmileMouthOpenThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceXAngleNumTaps")
                {
                    if (!(ss >> m_cfg.faceXAngleNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleNumTaps")
                {
                    if (!(ss >> m_cfg.faceYAngleNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceZAngleNumTaps")
                {
                    if (!(ss >> m_cfg.faceZAngleNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthFormNumTaps")
                {
                    if (!(ss >> m_cfg.mouthFormNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthOpenNumTaps")
                {
                    if (!(ss >> m_cfg.mouthOpenNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "leftEyeOpenNumTaps")
                {
                    if (!(ss >> m_cfg.leftEyeOpenNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "rightEyeOpenNumTaps")
                {
                    if (!(ss >> m_cfg.rightEyeOpenNumTaps))
                    {
                        throwConfigError(paramName, "std::size_t",
                                         line, lineNum);
                    }
                }
                else if (paramName == "eyeClosedThreshold")
                {
                    if (!(ss >> m_cfg.eyeClosedThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "eyeOpenThreshold")
                {
                    if (!(ss >> m_cfg.eyeOpenThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "winkEnable")
                {
                    if (!(ss >> m_cfg.winkEnable))
                    {
                        throwConfigError(paramName, "bool",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthNormalThreshold")
                {
                    if (!(ss >> m_cfg.mouthNormalThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthSmileThreshold")
                {
                    if (!(ss >> m_cfg.mouthSmileThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthClosedThreshold")
                {
                    if (!(ss >> m_cfg.mouthClosedThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthOpenThreshold")
                {
                    if (!(ss >> m_cfg.mouthOpenThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "mouthOpenLaughCorrection")
                {
                    if (!(ss >> m_cfg.mouthOpenLaughCorrection))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleXRotCorrection")
                {
                    if (!(ss >> m_cfg.faceYAngleXRotCorrection))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleSmileCorrection")
                {
                    if (!(ss >> m_cfg.faceYAngleSmileCorrection))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleZeroValue")
                {
                    if (!(ss >> m_cfg.faceYAngleZeroValue))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleUpThreshold")
                {
                    if (!(ss >> m_cfg.faceYAngleUpThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "faceYAngleDownThreshold")
                {
                    if (!(ss >> m_cfg.faceYAngleDownThreshold))
                    {
                        throwConfigError(paramName, "double",
                                         line, lineNum);
                    }
                }
                else if (paramName == "autoBlink")
                {
                    if (!(ss >> m_cfg.autoBlink))
                    {
                        throwConfigError(paramName, "bool",
                                         line, lineNum);
                    }
                }
                else if (paramName == "autoBreath")
                {
                    if (!(ss >> m_cfg.autoBreath))
                    {
                        throwConfigError(paramName, "bool",
                                         line, lineNum);
                    }
                }
                else if (paramName == "randomMotion")
                {
                    if (!(ss >> m_cfg.randomMotion))
                    {
                        throwConfigError(paramName, "bool",
                                         line, lineNum);
                    }
                }
                else
                {
                    std::ostringstream oss;
                    oss << "Unrecognized parameter name at line " << lineNum
                        << ": " << paramName;
                    throw std::runtime_error(oss.str());
                }
            }
        }
    }
}
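
// Illustrative example (not part of the original code) of the config file
// format that parseConfig() accepts: one "paramName value" pair per line,
// with '#' as the first character marking a comment line. The values shown
// here are the defaults from populateDefaultConfig() below; booleans are
// read with the default stream format, i.e. 1 or 0.
//
//     # Where OpenSeeFace sends its UDP packets
//     osfIpAddress 127.0.0.1
//     osfPort 11573
//
//     # Smoothing and eye thresholds
//     faceXAngleNumTaps 7
//     eyeClosedThreshold 0.18
//     eyeOpenThreshold 0.21
//     winkEnable 1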

void FacialLandmarkDetector::populateDefaultConfig(void)
{
    // These are values that I've personally tested to work OK for my face.
    // Your mileage may vary - hence the config file.

    m_cfg.osfIpAddress = "127.0.0.1";
    m_cfg.osfPort = 11573;
    m_cfg.faceYAngleCorrection = 10;
    m_cfg.eyeSmileEyeOpenThreshold = 0.6;
    m_cfg.eyeSmileMouthFormThreshold = 0.75;
    m_cfg.eyeSmileMouthOpenThreshold = 0.5;
    m_cfg.faceXAngleNumTaps = 7;
    m_cfg.faceYAngleNumTaps = 7;
    m_cfg.faceZAngleNumTaps = 7;
    m_cfg.mouthFormNumTaps = 3;
    m_cfg.mouthOpenNumTaps = 3;
    m_cfg.leftEyeOpenNumTaps = 3;
    m_cfg.rightEyeOpenNumTaps = 3;
    m_cfg.eyeClosedThreshold = 0.18;
    m_cfg.eyeOpenThreshold = 0.21;
    m_cfg.winkEnable = true;
    m_cfg.mouthNormalThreshold = 0.75;
    m_cfg.mouthSmileThreshold = 1.0;
    m_cfg.mouthClosedThreshold = 0.1;
    m_cfg.mouthOpenThreshold = 0.4;
    m_cfg.mouthOpenLaughCorrection = 0.2;
    m_cfg.faceYAngleXRotCorrection = 0.15;
    m_cfg.faceYAngleSmileCorrection = 0.075;
    m_cfg.faceYAngleZeroValue = 1.8;
    m_cfg.faceYAngleDownThreshold = 2.3;
    m_cfg.faceYAngleUpThreshold = 1.3;
    m_cfg.autoBlink = false;
    m_cfg.autoBreath = false;
    m_cfg.randomMotion = false;
}

void FacialLandmarkDetector::throwConfigError(std::string paramName,
                                              std::string expectedType,
                                              std::string line,
                                              unsigned int lineNum)
{
    std::ostringstream ss;
    ss << "Error parsing config file for parameter " << paramName
       << "\nAt line " << lineNum << ": " << line
       << "\nExpecting value of type " << expectedType;

    throw std::runtime_error(ss.str());
}