@@ -43,6 +43,8 @@ void dmonitoring_init(DMonitoringModelState* s) {
 #else
   s->m = new SNPEModel("../../models/dmonitoring_model_q.dlc", &s->output[0], OUTPUT_SIZE, USE_DSP_RUNTIME);
 #endif
+
+  s->m->addCalib(s->calib, CALIB_LEN);
 }
 
 static inline auto get_yuv_buf(std::vector<uint8_t> &buf, const int width, int height) {
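Note on the init change above: the model runner is now handed a calibration buffer in addition to the output buffer. The sketch below shows the header-side state this hunk appears to assume (a CALIB_LEN-sized float array owned by DMonitoringModelState); the exact declarations live in the header, which this section of the diff does not show, so treat the names and values here as inferred rather than authoritative.

// Inferred sketch only -- the real declarations live in the header, not in this hunk.
class RunModel;                      // assumed base class of SNPEModel and the fallback runtime

constexpr int CALIB_LEN = 3;         // assumption: device-to-road calibration as roll/pitch/yaw
constexpr int OUTPUT_SIZE = 45;      // assumption: at least indices [0..44], which the decode below reads

struct DMonitoringModelState {
  RunModel *m;                       // SNPEModel or the runtime chosen by the #ifdef above
  float output[OUTPUT_SIZE];         // raw network outputs, filled by execute()
  float calib[CALIB_LEN];            // calibration inputs, registered once via addCalib()
};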
@@ -65,7 +67,7 @@ void crop_yuv(uint8_t *raw, int width, int height, uint8_t *y, uint8_t *u, uint8
   }
 }
 
-DMonitoringResult dmonitoring_eval_frame(DMonitoringModelState* s, void* stream_buf, int width, int height) {
+DMonitoringResult dmonitoring_eval_frame(DMonitoringModelState* s, void* stream_buf, int width, int height, float *calib) {
   Rect crop_rect;
   if (width == TICI_CAM_WIDTH) {
     const int cropped_height = tici_dm_crop::width / 1.33;
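For context on the widened signature: the caller is now expected to supply a pointer to CALIB_LEN floats along with the frame. A minimal, illustrative call site might look like the following; the variable names and the source of the calibration values are assumptions, not taken from this diff.

// Illustrative call only -- buffer names and the calibration source are assumed.
float calib[CALIB_LEN] = {0.0f, 0.0f, 0.0f};   // e.g. filled from the latest calibration message
DMonitoringResult res = dmonitoring_eval_frame(&model_state, frame_buf, frame_width, frame_height, calib);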
@@ -167,29 +169,38 @@ DMonitoringResult dmonitoring_eval_frame(DMonitoringModelState* s, void* stream_
 
   double t1 = millis_since_boot();
   s->m->addImage(net_input_buf, yuv_buf_len);
+  for (int i = 0; i < CALIB_LEN; i++) {
+    s->calib[i] = calib[i];
+  }
   s->m->execute();
   double t2 = millis_since_boot();
 
   DMonitoringResult ret = {0};
   for (int i = 0; i < 3; ++i) {
-    ret.face_orientation[i] = s->output[i];
-    ret.face_orientation_meta[i] = softplus(s->output[6 + i]);
+    ret.face_orientation[i] = s->output[i] * REG_SCALE;
+    ret.face_orientation_meta[i] = exp(s->output[6 + i]);
+  }
+  for (int i = 0; i < 2; ++i) {
+    ret.face_position[i] = s->output[3 + i] * REG_SCALE;
+    ret.face_position_meta[i] = exp(s->output[9 + i]);
+  }
+  for (int i = 0; i < 4; ++i) {
+    ret.ready_prob[i] = sigmoid(s->output[39 + i]);
   }
   for (int i = 0; i < 2; ++i) {
-    ret.face_position[i] = s->output[3 + i];
-    ret.face_position_meta[i] = softplus(s->output[9 + i]);
+    ret.not_ready_prob[i] = sigmoid(s->output[43 + i]);
   }
-  ret.face_prob = s->output[12];
-  ret.left_eye_prob = s->output[21];
-  ret.right_eye_prob = s->output[30];
-  ret.left_blink_prob = s->output[31];
-  ret.right_blink_prob = s->output[32];
-  ret.sg_prob = s->output[33];
-  ret.poor_vision = s->output[34];
-  ret.partial_face = s->output[35];
-  ret.distracted_pose = s->output[36];
-  ret.distracted_eyes = s->output[37];
-  ret.occluded_prob = s->output[38];
+  ret.face_prob = sigmoid(s->output[12]);
+  ret.left_eye_prob = sigmoid(s->output[21]);
+  ret.right_eye_prob = sigmoid(s->output[30]);
+  ret.left_blink_prob = sigmoid(s->output[31]);
+  ret.right_blink_prob = sigmoid(s->output[32]);
+  ret.sg_prob = sigmoid(s->output[33]);
+  ret.poor_vision = sigmoid(s->output[34]);
+  ret.partial_face = sigmoid(s->output[35]);
+  ret.distracted_pose = sigmoid(s->output[36]);
+  ret.distracted_eyes = sigmoid(s->output[37]);
+  ret.occluded_prob = sigmoid(s->output[38]);
   ret.dsp_execution_time = (t2 - t1) / 1000.;
   return ret;
 }
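The decode step above changes from consuming pre-activated outputs to applying the activations on the C++ side: regression channels are rescaled by REG_SCALE, the uncertainty (meta) channels go through exp() instead of softplus(), and every probability logit goes through sigmoid(). A standalone sketch of those helpers follows; the REG_SCALE value is illustrative, since the real constant is defined elsewhere in the tree.

#include <cmath>

// Sketch of the helpers the decode relies on; REG_SCALE's value here is illustrative.
constexpr float REG_SCALE = 0.25f;       // assumption: regression outputs were scaled down during training

inline float sigmoid(float x) {
  return 1.0f / (1.0f + std::exp(-x));   // logit -> probability in (0, 1)
}

inline float softplus(float x) {
  return std::log1p(std::exp(x));        // the activation the old decode applied to the meta channels
}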
@@ -217,6 +228,8 @@ void dmonitoring_publish(PubMaster &pm, uint32_t frame_id, const DMonitoringResu
   framed.setDistractedPose(res.distracted_pose);
   framed.setDistractedEyes(res.distracted_eyes);
   framed.setOccludedProb(res.occluded_prob);
+  framed.setReadyProb(res.ready_prob);
+  framed.setNotReadyProb(res.not_ready_prob);
   if (send_raw_pred) {
     framed.setRawPredictions(raw_pred.asBytes());
   }
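The publish step now also forwards the per-frame ready and not-ready probabilities. The field sizes below are inferred from the decode loops earlier in this diff (four ready and two not-ready sigmoid outputs); the authoritative definition of DMonitoringResult is in the header, which this section does not show, so read this as a sketch.

// Inferred sketch of the new result fields -- sizes follow the decode loops above.
struct DMonitoringResult {
  float face_orientation[3];
  float face_orientation_meta[3];
  float face_position[2];
  float face_position_meta[2];
  float ready_prob[4];       // sigmoid(s->output[39 + i]), i = 0..3
  float not_ready_prob[2];   // sigmoid(s->output[43 + i]), i = 0..1
  // ... existing scalar probabilities (face_prob, blink probs, distraction flags, etc.) ...
  float dsp_execution_time;
};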