5_user_asynchronous.cpp (forked from CMU-Perceptual-Computing-Lab/openpose)
// ------------------------- OpenPose Library Tutorial - Wrapper - Example 1 - Asynchronous -------------------------
// Asynchronous mode: ideal for fast prototyping when performance is not an issue. The user emplaces/pushes frames into the OpenPose wrapper and pops the results
// whenever desired.
// This example shows the user how to use the OpenPose wrapper class:
// 1. User reads images
// 2. Extract and render the keypoints / heatmaps / PAFs of that image
// 3. Save the results on disk
// 4. User displays the rendered pose
// Everything in a multi-thread scenario
// In addition to the previous OpenPose modules, we also need to use:
// 1. `core` module:
// For the Array<float> class that the `pose` module needs
// For the Datum struct that the `thread` module sends between the queues
// 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively
// This file is meant only as a reference from which the user can take specific examples.
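// A minimal sketch of the asynchronous push/pop pattern demonstrated below (assuming an already
// configured and started op::Wrapper<std::vector<UserDatum>> named opWrapper, and the helper classes
// defined later in this file):
//     auto datumToProcess = userInputClass.createDatum();
//     if (datumToProcess != nullptr && opWrapper.waitAndEmplace(datumToProcess))
//     {
//         std::shared_ptr<std::vector<UserDatum>> datumProcessed;
//         if (opWrapper.waitAndPop(datumProcessed)) // blocks until OpenPose returns the processed result
//             userOutputClass.display(datumProcessed);
//     }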
// C++ std library dependencies
#include <chrono> // `std::chrono::` functions and classes, e.g. std::chrono::milliseconds
#include <thread> // std::this_thread
// Other 3rdparty dependencies
// GFlags: DEFINE_bool, _int32, _int64, _uint64, _double, _string
#include <gflags/gflags.h>
// Allow Google Flags in Ubuntu 14
#ifndef GFLAGS_GFLAGS_H_
namespace gflags = google;
#endif
// OpenPose dependencies
#include <openpose/headers.hpp>
// See all the available parameter options with the `--help` flag. E.g. `build/examples/openpose/openpose.bin --help`
// Note: This command will show you flags for other unnecessary 3rdparty files. Check only the flags for the OpenPose
// executable. E.g. for `openpose.bin`, look for `Flags from examples/openpose/openpose.cpp:`.
// Debugging/Other
DEFINE_int32(logging_level, 3, "The logging level. Integer in the range [0, 255]. 0 will output any log() message, while"
" 255 will not output any. Current OpenPose library messages are in the range 0-4: 1 for"
" low priority messages and 4 for important ones.");
DEFINE_bool(disable_multi_thread, false, "It slightly reduces the frame rate in order to greatly reduce the lag. Mainly useful"
" for 1) cases where low latency is needed (e.g. webcam in real-time scenarios with"
" low-range GPU devices); and 2) debugging OpenPose when it is crashing, in order to locate the"
" error.");
DEFINE_int32(profile_speed, 1000, "If PROFILER_ENABLED was set in CMake or Makefile.config files, OpenPose will show some"
" runtime statistics at this frame number.");
// Producer
DEFINE_string(image_dir, "examples/media/", "Process a directory of images. Read all standard formats (jpg, png, bmp, etc.).");
DEFINE_double(camera_fps, 30.0, "Frame rate for the webcam (also used when saving video). Set this value to the minimum"
" value between the OpenPose displayed speed and the webcam real frame rate.");
// OpenPose
DEFINE_string(model_folder, "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located.");
DEFINE_string(output_resolution, "-1x-1", "The image resolution (display and output). Use \"-1x-1\" to force the program to use the"
" input image resolution.");
DEFINE_int32(num_gpu, -1, "The number of GPU devices to use. If negative, it will use all the available GPUs in your"
" machine.");
DEFINE_int32(num_gpu_start, 0, "GPU device start number.");
DEFINE_int32(keypoint_scale, 0, "Scaling of the (x,y) coordinates of the final pose data array, i.e. the scale of the (x,y)"
" coordinates that will be saved with the `write_json` & `write_keypoint` flags."
" Select `0` to scale it to the original source resolution; `1`to scale it to the net output"
" size (set with `net_resolution`); `2` to scale it to the final output size (set with"
" `resolution`); `3` to scale it in the range [0,1], where (0,0) would be the top-left"
" corner of the image, and (1,1) the bottom-right one; and 4 for range [-1,1], where"
" (-1,-1) would be the top-left corner of the image, and (1,1) the bottom-right one. Non"
" related with `scale_number` and `scale_gap`.");
DEFINE_int32(number_people_max, -1, "This parameter will limit the maximum number of people detected, by keeping the people with"
" top scores. The score is based in person area over the image, body part score, as well as"
" joint score (between each pair of connected body parts). Useful if you know the exact"
" number of people in the scene, so it can remove false positives (if all the people have"
" been detected. However, it might also include false negatives by removing very small or"
" highly occluded people. -1 will keep them all.");
// OpenPose Body Pose
DEFINE_bool(body_disable, false, "Disable body keypoint detection. Option only possible for faster (but less accurate) face"
" keypoint detection.");
DEFINE_string(model_pose, "BODY_25", "Model to be used. E.g. `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), "
"`MPI_4_layers` (15 keypoints, even faster but less accurate).");
DEFINE_string(net_resolution, "-1x368", "Multiples of 16. If it is increased, the accuracy potentially increases. If it is"
" decreased, the speed increases. For maximum speed-accuracy balance, it should keep the"
" closest aspect ratio possible to the images or videos to be processed. Using `-1` in"
" any of the dimensions, OP will choose the optimal aspect ratio depending on the user's"
" input value. E.g. the default `-1x368` is equivalent to `656x368` in 16:9 resolutions,"
" e.g. full HD (1980x1080) and HD (1280x720) resolutions.");
DEFINE_int32(scale_number, 1, "Number of scales to average.");
DEFINE_double(scale_gap, 0.3, "Scale gap between scales. No effect unless scale_number > 1. Initial scale is always 1."
" If you want to change the initial scale, you actually want to multiply the"
" `net_resolution` by your desired initial scale.");
// OpenPose Body Pose Heatmaps and Part Candidates
DEFINE_bool(heatmaps_add_parts, false, "If true, it will fill op::Datum::poseHeatMaps array with the body part heatmaps, and"
" analogously face & hand heatmaps to op::Datum::faceHeatMaps & op::Datum::handHeatMaps."
" If more than one `add_heatmaps_X` flag is enabled, it will place then in sequential"
" memory order: body parts + bkg + PAFs. It will follow the order on"
" POSE_BODY_PART_MAPPING in `src/openpose/pose/poseParameters.cpp`. Program speed will"
" considerably decrease. Not required for OpenPose, enable it only if you intend to"
" explicitly use this information later.");
DEFINE_bool(heatmaps_add_bkg, false, "Same functionality as `heatmaps_add_parts`, but adding the heatmap corresponding to the"
" background.");
DEFINE_bool(heatmaps_add_PAFs, false, "Same functionality as `heatmaps_add_parts`, but adding the PAFs.");
DEFINE_int32(heatmaps_scale, 2, "Set 0 to scale op::Datum::poseHeatMaps in the range [-1,1], 1 for [0,1]; 2 for integer"
" rounded [0,255]; and 3 for no scaling.");
DEFINE_bool(part_candidates, false, "Also enable `write_json` in order to save this information. If true, it will fill the"
" op::Datum::poseCandidates array with the body part candidates. Candidates refer to all"
" the detected body parts, before being assembled into people. Note that the number of"
" candidates is equal or higher than the number of final body parts (i.e. after being"
" assembled into people). The empty body parts are filled with 0s. Program speed will"
" slightly decrease. Not required for OpenPose, enable it only if you intend to explicitly"
" use this information.");
// OpenPose Face
DEFINE_bool(face, false, "Enables face keypoint detection. It will share some parameters from the body pose, e.g."
" `model_folder`. Note that this will considerable slow down the performance and increse"
" the required GPU memory. In addition, the greater number of people on the image, the"
" slower OpenPose will be.");
DEFINE_string(face_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the face keypoint"
" detector. 320x320 usually works fine while giving a substantial speed up when multiple"
" faces on the image.");
// OpenPose Hand
DEFINE_bool(hand, false, "Enables hand keypoint detection. It will share some parameters from the body pose, e.g."
" `model_folder`. Analogously to `--face`, it will also slow down the performance, increase"
" the required GPU memory and its speed depends on the number of people.");
DEFINE_string(hand_net_resolution, "368x368", "Multiples of 16 and squared. Analogous to `net_resolution` but applied to the hand keypoint"
" detector.");
DEFINE_int32(hand_scale_number, 1, "Analogous to `scale_number` but applied to the hand keypoint detector. Our best results"
" were found with `hand_scale_number` = 6 and `hand_scale_range` = 0.4.");
DEFINE_double(hand_scale_range, 0.4, "Analogous purpose to `scale_gap` but applied to the hand keypoint detector. Total range"
" between smallest and biggest scale. The scales will be centered in ratio 1. E.g. if"
" scaleRange = 0.4 and scalesNumber = 2, then there will be 2 scales, 0.8 and 1.2.");
DEFINE_bool(hand_tracking, false, "Adding hand tracking might improve hand keypoints detection for webcam (if the frame rate"
" is high enough, i.e. >7 FPS per GPU) and video. This is not person ID tracking, it"
" simply looks for hands in positions at which hands were located in previous frames, but"
" it does not guarantee the same person ID among frames.");
// OpenPose 3-D Reconstruction
DEFINE_bool(3d, false, "Running OpenPose 3-D reconstruction demo: 1) Reading from a stereo camera system."
" 2) Performing 3-D reconstruction from the multiple views. 3) Displaying 3-D reconstruction"
" results. Note that it will only display 1 person. If multiple people is present, it will"
" fail.");
DEFINE_int32(3d_min_views, -1, "Minimum number of views required to reconstruct each keypoint. By default (-1), it will"
" require all the cameras to see the keypoint in order to reconstruct it.");
DEFINE_int32(3d_views, 1, "Complementary option to `--image_dir` or `--video`. OpenPose will read as many images per"
" iteration, allowing tasks such as stereo camera processing (`--3d`). Note that"
" `--camera_parameters_folder` must be set. OpenPose must find as many `xml` files in the"
" parameter folder as this number indicates.");
// Extra algorithms
DEFINE_bool(identification, false, "Experimental, not available yet. Whether to enable people identification across frames.");
DEFINE_int32(tracking, -1, "Experimental, not available yet. Whether to enable people tracking across frames. The"
" value indicates the number of frames where tracking is run between each OpenPose keypoint"
" detection. Select -1 (default) to disable it or 0 to run simultaneously OpenPose keypoint"
" detector and tracking for potentially higher accurary than only OpenPose.");
DEFINE_int32(ik_threads, 0, "Experimental, not available yet. Whether to enable inverse kinematics (IK) from 3-D"
" keypoints to obtain 3-D joint angles. By default (0 threads), it is disabled. Increasing"
" the number of threads will increase the speed but also the global system latency.");
// OpenPose Rendering
DEFINE_int32(part_to_show, 0, "Prediction channel to visualize (default: 0). 0 for all the body parts, 1-18 for each body"
" part heat map, 19 for the background heat map, 20 for all the body part heat maps"
" together, 21 for all the PAFs, 22-40 for each body part pair PAF.");
DEFINE_bool(disable_blending, false, "If enabled, it will render the results (keypoint skeletons or heatmaps) on a black"
" background, instead of being rendered into the original image. Related: `part_to_show`,"
" `alpha_pose`, and `alpha_pose`.");
// OpenPose Rendering Pose
DEFINE_double(render_threshold, 0.05, "Only estimated keypoints whose score confidences are higher than this threshold will be"
" rendered. Generally, a high threshold (> 0.5) will only render very clear body parts;"
" while small thresholds (~0.1) will also output guessed and occluded keypoints, but also"
" more false positives (i.e. wrong detections).");
DEFINE_int32(render_pose, -1, "Set to 0 for no rendering, 1 for CPU rendering (slightly faster), and 2 for GPU rendering"
" (slower but greater functionality, e.g. `alpha_X` flags). If -1, it will pick CPU if"
" CPU_ONLY is enabled, or GPU if CUDA is enabled. If rendering is enabled, it will render"
" both `outputData` and `cvOutputData` with the original image and desired body part to be"
" shown (i.e. keypoints, heat maps or PAFs).");
DEFINE_double(alpha_pose, 0.6, "Blending factor (range 0-1) for the body part rendering. 1 will show it completely, 0 will"
" hide it. Only valid for GPU rendering.");
DEFINE_double(alpha_heatmap, 0.7, "Blending factor (range 0-1) between heatmap and original frame. 1 will only show the"
" heatmap, 0 will only show the frame. Only valid for GPU rendering.");
// OpenPose Rendering Face
DEFINE_double(face_render_threshold, 0.4, "Analogous to `render_threshold`, but applied to the face keypoints.");
DEFINE_int32(face_render, -1, "Analogous to `render_pose` but applied to the face. Extra option: -1 to use the same"
" configuration that `render_pose` is using.");
DEFINE_double(face_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to face.");
DEFINE_double(face_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to face.");
// OpenPose Rendering Hand
DEFINE_double(hand_render_threshold, 0.2, "Analogous to `render_threshold`, but applied to the hand keypoints.");
DEFINE_int32(hand_render, -1, "Analogous to `render_pose` but applied to the hand. Extra option: -1 to use the same"
" configuration that `render_pose` is using.");
DEFINE_double(hand_alpha_pose, 0.6, "Analogous to `alpha_pose` but applied to hand.");
DEFINE_double(hand_alpha_heatmap, 0.7, "Analogous to `alpha_heatmap` but applied to hand.");
// Result Saving
DEFINE_string(write_images, "", "Directory to write rendered frames in `write_images_format` image format.");
DEFINE_string(write_images_format, "png", "File extension and format for `write_images`, e.g. png, jpg or bmp. Check the OpenCV"
" function cv::imwrite for all compatible extensions.");
DEFINE_string(write_video, "", "Full file path to write rendered frames in motion JPEG video format. It might fail if the"
" final path does not finish in `.avi`. It internally uses cv::VideoWriter. Flag"
" `camera_fps` controls FPS.");
DEFINE_string(write_json, "", "Directory to write OpenPose output in JSON format. It includes body, hand, and face pose"
" keypoints (2-D and 3-D), as well as pose candidates (if `--part_candidates` enabled).");
DEFINE_string(write_coco_json, "", "Full file path to write people pose data with JSON COCO validation format.");
DEFINE_string(write_coco_foot_json, "", "Full file path to write people foot pose data with JSON COCO validation format.");
DEFINE_string(write_heatmaps, "", "Directory to write body pose heatmaps in PNG format. At least 1 `add_heatmaps_X` flag"
" must be enabled.");
DEFINE_string(write_heatmaps_format, "png", "File extension and format for `write_heatmaps`, analogous to `write_images_format`."
" For lossless compression, recommended `png` for integer `heatmaps_scale` and `float` for"
" floating values.");
DEFINE_string(write_keypoint, "", "(Deprecated, use `write_json`) Directory to write the people pose keypoint data. Set format"
" with `write_keypoint_format`.");
DEFINE_string(write_keypoint_format, "yml", "(Deprecated, use `write_json`) File extension and format for `write_keypoint`: json, xml,"
" yaml & yml. Json not available for OpenCV < 3.0, use `write_json` instead.");
// Result Saving - Extra Algorithms
DEFINE_string(write_video_adam, "", "Experimental, not available yet. E.g.: `~/Desktop/adamResult.avi`. Flag `camera_fps`"
" controls FPS.");
DEFINE_string(write_bvh, "", "Experimental, not available yet. E.g.: `~/Desktop/mocapResult.bvh`.");
// UDP communication
DEFINE_string(udp_host, "", "Experimental, not available yet. IP for UDP communication. E.g., `192.168.0.1`.");
DEFINE_string(udp_port, "8051", "Experimental, not available yet. Port number for UDP communication.");
// If the user needs their own variables, they can inherit from the op::Datum struct and add them
// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum, just define Wrapper<UserDatum> instead of
// Wrapper<op::Datum>
struct UserDatum : public op::Datum
{
bool boolThatUserNeedsForSomeReason;
UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) :
boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_}
{}
};
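// E.g., an illustrative sketch of filling the extra field after creating a datum (the same creation
// pattern used in UserInputClass::createDatum below; the assigned value is only an example):
//     auto datumsPtr = std::make_shared<std::vector<UserDatum>>();
//     datumsPtr->emplace_back();
//     datumsPtr->at(0).boolThatUserNeedsForSomeReason = true;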
// The W-classes can be implemented either as a template or as simple classes, given
// that the user usually knows which kind of data will be moved between the queues;
// in this case we assume a std::shared_ptr of a std::vector of UserDatum
// This worker will just read and return all the jpg files in a directory
class UserInputClass
{
public:
UserInputClass(const std::string& directoryPath) :
mImageFiles{op::getFilesOnDirectory(directoryPath, "jpg")},
// If we want "jpg" + "png" images
// mImageFiles{op::getFilesOnDirectory(directoryPath, std::vector<std::string>{"jpg", "png"})},
mCounter{0},
mClosed{false}
{
if (mImageFiles.empty())
op::error("No images found on: " + directoryPath, __LINE__, __FUNCTION__, __FILE__);
}
std::shared_ptr<std::vector<UserDatum>> createDatum()
{
// Close program when empty frame
if (mClosed || mImageFiles.size() <= mCounter)
{
op::log("Last frame read and added to queue. Closing program after it is processed.", op::Priority::High);
// This function stops this worker, which will eventually stop the whole thread system once all the frames
// have been processed
mClosed = true;
return nullptr;
}
else // if (!mClosed)
{
// Create new datum
auto datumsPtr = std::make_shared<std::vector<UserDatum>>();
datumsPtr->emplace_back();
auto& datum = datumsPtr->at(0);
// Fill datum
datum.cvInputData = cv::imread(mImageFiles.at(mCounter++));
// If empty frame -> return nullptr
if (datum.cvInputData.empty())
{
op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.",
op::Priority::High);
mClosed = true;
datumsPtr = nullptr;
}
return datumsPtr;
}
}
bool isFinished() const
{
return mClosed;
}
private:
const std::vector<std::string> mImageFiles;
unsigned long long mCounter;
bool mClosed;
};
// This worker will just display the rendered output and print the estimated keypoints
class UserOutputClass
{
public:
bool display(const std::shared_ptr<std::vector<UserDatum>>& datumsPtr)
{
// User's displaying/saving/other processing here
// datum.cvOutputData: rendered frame with pose or heatmaps
// datum.poseKeypoints: Array<float> with the estimated pose
char key = ' ';
if (datumsPtr != nullptr && !datumsPtr->empty())
{
cv::imshow("User worker GUI", datumsPtr->at(0).cvOutputData);
// Display the image and sleep for at least 1 ms (it usually sleeps ~5-10 msec to display the image)
key = (char)cv::waitKey(1);
}
else
op::log("Nullptr or empty datumsPtr found.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__);
return (key == 27);
}
void printKeypoints(const std::shared_ptr<std::vector<UserDatum>>& datumsPtr)
{
// Example: How to use the pose keypoints
if (datumsPtr != nullptr && !datumsPtr->empty())
{
op::log("\nKeypoints:");
// Accessing each element of the keypoints
const auto& poseKeypoints = datumsPtr->at(0).poseKeypoints;
op::log("Person pose keypoints:");
for (auto person = 0 ; person < poseKeypoints.getSize(0) ; person++)
{
op::log("Person " + std::to_string(person) + " (x, y, score):");
for (auto bodyPart = 0 ; bodyPart < poseKeypoints.getSize(1) ; bodyPart++)
{
std::string valueToPrint;
for (auto xyscore = 0 ; xyscore < poseKeypoints.getSize(2) ; xyscore++)
valueToPrint += std::to_string( poseKeypoints[{person, bodyPart, xyscore}] ) + " ";
op::log(valueToPrint);
}
}
op::log(" ");
// Alternative: just getting std::string equivalent
op::log("Face keypoints: " + datumsPtr->at(0).faceKeypoints.toString());
op::log("Left hand keypoints: " + datumsPtr->at(0).handKeypoints[0].toString());
op::log("Right hand keypoints: " + datumsPtr->at(0).handKeypoints[1].toString());
// Heatmaps
const auto& poseHeatMaps = datumsPtr->at(0).poseHeatMaps;
if (!poseHeatMaps.empty())
{
op::log("Pose heatmaps size: [" + std::to_string(poseHeatMaps.getSize(0)) + ", "
+ std::to_string(poseHeatMaps.getSize(1)) + ", "
+ std::to_string(poseHeatMaps.getSize(2)) + "]");
const auto& faceHeatMaps = datumsPtr->at(0).faceHeatMaps;
op::log("Face heatmaps size: [" + std::to_string(faceHeatMaps.getSize(0)) + ", "
+ std::to_string(faceHeatMaps.getSize(1)) + ", "
+ std::to_string(faceHeatMaps.getSize(2)) + ", "
+ std::to_string(faceHeatMaps.getSize(3)) + "]");
const auto& handHeatMaps = datumsPtr->at(0).handHeatMaps;
op::log("Left hand heatmaps size: [" + std::to_string(handHeatMaps[0].getSize(0)) + ", "
+ std::to_string(handHeatMaps[0].getSize(1)) + ", "
+ std::to_string(handHeatMaps[0].getSize(2)) + ", "
+ std::to_string(handHeatMaps[0].getSize(3)) + "]");
op::log("Right hand heatmaps size: [" + std::to_string(handHeatMaps[1].getSize(0)) + ", "
+ std::to_string(handHeatMaps[1].getSize(1)) + ", "
+ std::to_string(handHeatMaps[1].getSize(2)) + ", "
+ std::to_string(handHeatMaps[1].getSize(3)) + "]");
}
}
else
op::log("Nullptr or empty datumsPtr found.", op::Priority::High, __LINE__, __FUNCTION__, __FILE__);
}
};
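// Note on keypoint indexing: op::Array<float> supports multi-dimensional access with an initializer
// list, as used in printKeypoints above. E.g., a sketch to read the x coordinate and confidence score
// of body part `bodyPart` of person `person` (dimension 2 holds x, y, score in that order):
//     const auto x = poseKeypoints[{person, bodyPart, 0}];
//     const auto score = poseKeypoints[{person, bodyPart, 2}];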
int openPoseTutorialWrapper3()
{
try
{
op::log("Starting OpenPose demo...", op::Priority::High);
const auto timerBegin = std::chrono::high_resolution_clock::now();
// logging_level
op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.",
__LINE__, __FUNCTION__, __FILE__);
op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level);
op::Profiler::setDefaultX(FLAGS_profile_speed);
// Applying user defined configuration - Google flags to program variables
// outputSize
const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1");
// netInputSize
const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368");
// faceNetInputSize
const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)");
// handNetInputSize
const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)");
// poseModel
const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose);
// JSON saving
if (!FLAGS_write_keypoint.empty())
op::log("Flag `write_keypoint` is deprecated and will eventually be removed."
" Please, use `write_json` instead.", op::Priority::Max);
// keypointScale
const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale);
// heatmaps to add
const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg,
FLAGS_heatmaps_add_PAFs);
const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale);
// >1 camera view?
const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1);
// Enabling Google Logging
const bool enableGoogleLogging = true;
// Logging
op::log("", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__);
// Configure OpenPose
op::log("Configuring OpenPose wrapper...", op::Priority::Low, __LINE__, __FUNCTION__, __FILE__);
op::Wrapper<std::vector<UserDatum>> opWrapper{op::ThreadManagerMode::Asynchronous};
// Pose configuration (use WrapperStructPose{} for default and recommended configuration)
const op::WrapperStructPose wrapperStructPose{
!FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start,
FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView),
poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap,
FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates,
(float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging};
// Face configuration (use op::WrapperStructFace{} to disable it)
const op::WrapperStructFace wrapperStructFace{
FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose),
(float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold};
// Hand configuration (use op::WrapperStructHand{} to disable it)
const op::WrapperStructHand wrapperStructHand{
FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking,
op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose,
(float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold};
// Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
const op::WrapperStructExtra wrapperStructExtra{
FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads};
// Consumer (comment or use default argument to disable any output)
const auto displayMode = op::DisplayMode::NoDisplay;
const bool guiVerbose = false;
const bool fullScreen = false;
const op::WrapperStructOutput wrapperStructOutput{
displayMode, guiVerbose, fullScreen, FLAGS_write_keypoint,
op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json, FLAGS_write_coco_json,
FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format, FLAGS_write_video,
FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format, FLAGS_write_video_adam,
FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port};
// Configure wrapper
opWrapper.configure(wrapperStructPose, wrapperStructFace, wrapperStructHand, wrapperStructExtra,
op::WrapperStructInput{}, wrapperStructOutput);
// Set to single-thread running (to debug and/or reduce latency)
if (FLAGS_disable_multi_thread)
opWrapper.disableMultiThreading();
op::log("Starting thread(s)...", op::Priority::High);
opWrapper.start();
// User processing
UserInputClass userInputClass(FLAGS_image_dir);
UserOutputClass userOutputClass;
bool userWantsToExit = false;
while (!userWantsToExit && !userInputClass.isFinished())
{
// Push frame
auto datumToProcess = userInputClass.createDatum();
if (datumToProcess != nullptr)
{
auto successfullyEmplaced = opWrapper.waitAndEmplace(datumToProcess);
// Pop frame
std::shared_ptr<std::vector<UserDatum>> datumProcessed;
if (successfullyEmplaced && opWrapper.waitAndPop(datumProcessed))
{
userWantsToExit = userOutputClass.display(datumProcessed);
userOutputClass.printKeypoints(datumProcessed);
}
else
op::log("Processed datum could not be emplaced.", op::Priority::High,
__LINE__, __FUNCTION__, __FILE__);
}
}
op::log("Stopping thread(s)", op::Priority::High);
opWrapper.stop();
// Measuring total time
const auto now = std::chrono::high_resolution_clock::now();
const auto totalTimeSec = (double)std::chrono::duration_cast<std::chrono::nanoseconds>(now-timerBegin).count()
* 1e-9;
const auto message = "OpenPose demo successfully finished. Total time: "
+ std::to_string(totalTimeSec) + " seconds.";
op::log(message, op::Priority::High);
// Return successful message
return 0;
}
catch (const std::exception& e)
{
op::error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return -1;
}
}
int main(int argc, char *argv[])
{
// Parsing command line flags
gflags::ParseCommandLineFlags(&argc, &argv, true);
// Running openPoseTutorialWrapper3
return openPoseTutorialWrapper3();
}
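// Example usage (a sketch; the exact binary path depends on your build configuration, assuming here the
// default OpenPose CMake build layout):
//     ./build/examples/tutorial_wrapper/5_user_asynchronous.bin --image_dir examples/media/ --num_gpu 1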