/******************************************************************************
 * Copyright (c) 2018, Texas Instruments Incorporated - http://www.ti.com/
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of Texas Instruments Incorporated nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 *****************************************************************************/

#include <signal.h>
#include <getopt.h>
#include <iostream>
#include <iomanip>
#include <fstream>
#include <cassert>
#include <string>
#include <functional>
#include <queue>
#include <algorithm>
#include <time.h>
#include <memory.h>
#include <string.h>

#include "executor.h"
#include "execution_object.h"
#include "execution_object_pipeline.h"
#include "configuration.h"
#include "avg_fps_window.h"
#include "imgutil.h"

#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/videoio.hpp"

//#define TWO_ROIs
#define LIVE_DISPLAY
#define PERF_VERBOSE
//#define RMT_GST_STREAMER

#define MAX_NUM_ROI 4

int  live_input = 1;
char video_clip[320];

#ifdef TWO_ROIs
#define RES_X      400
#define RES_Y      300
#define NUM_ROI_X  2
#define NUM_ROI_Y  1
#define X_OFFSET   0
#define X_STEP     176
#define Y_OFFSET   52
#define Y_STEP     224
#else
#define RES_X      480
#define RES_Y      480
#define NUM_ROI_X  1
#define NUM_ROI_Y  1
#define X_OFFSET   10
#define X_STEP     460
#define Y_OFFSET   10
#define Y_STEP     460
#endif

#define NUM_ROI (NUM_ROI_X * NUM_ROI_Y)

// Number of top-scoring candidates considered when post-processing each frame
int TOP_CANDIDATES = 3;

using namespace tidl;
using namespace cv;

#ifdef LIVE_DISPLAY
char imagenet_win[160];
char tmp_classwindow_string[160];
Mat  classlist_image;

void imagenetCallBackFunc(int event, int x, int y, int flags, void* userdata)
{
    if (event == EVENT_RBUTTONDOWN)
    {
        std::cout << "Right mouse button clicked at position (" << x << ", "
                  << y << ") ... prepare to exit!" << std::endl;
        exit(0);
    }
}
#endif

Mat in_image, image, r_image, cnn_image, show_image, bgr_frames[3];
Mat to_stream;
Rect rectCrop[NUM_ROI];
// Report average FPS across a sliding window of 16 frames
AvgFPSWindow fps_window(16);

static int tf_postprocess(uchar *in, int size, int roi_idx, int frame_idx, int f_id);
static int ShowRegion(int roi_history[]);
// Classification history per ROI, ordered from most recent (index 0) to oldest
static int selclass_history[MAX_NUM_ROI][3];

bool RunConfiguration(const std::string& config_file, int num_layers_groups,
                      uint32_t num_dsps, uint32_t num_eves);
bool CreateExecutionObjectPipelines(uint32_t num_eves, uint32_t num_dsps,
                                    Configuration& configuration,
                                    uint32_t num_layers_groups,
                                    Executor*& e_eve, Executor*& e_dsp,
                                    std::vector<ExecutionObjectPipeline*>& eops);
void AllocateMemory(const std::vector<ExecutionObjectPipeline*>& eops);
void SetupLiveDisplay(uint32_t num_eves, uint32_t num_dsps);
bool SetupInput(VideoCapture& cap, VideoWriter& writer);
bool ReadFrame(ExecutionObjectPipeline* eop, const Configuration& c,
               int frame_idx, VideoCapture &cap, VideoWriter& writer);
void DisplayFrame(const ExecutionObjectPipeline* eop, VideoWriter& writer,
                  uint32_t frame_idx, uint32_t num_eops,
                  uint32_t num_eves, uint32_t num_dsps);
static void ProcessArgs(int argc, char *argv[],
                        std::string& config_file,
                        uint32_t& num_dsps, uint32_t& num_eves,
                        int& num_layers_groups);
static void DisplayHelp();

extern std::string labels_classes[];
extern int IMAGE_CLASSES_NUM;
extern int selected_items_size;
extern int selected_items[];
extern int  populate_selected_items(char *filename);
extern void populate_labels(char *filename);

bool verbose = false;

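// Example invocation (file names below are placeholders, adjust to your setup):
//   ./tidl_classification -c config.txt -l labels.txt -s selected.txt \
//                         -d 1 -e 2 -g 2 -i 1
// runs the network described by config.txt on 1 DSP and 2 EVE cores split
// into two layer groups, classifying frames captured from camera input 1.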
int main(int argc, char *argv[])
{
    // Catch abort/termination signals to ensure a clean exit
    signal(SIGABRT, exit);
    signal(SIGTERM, exit);

    // If there are no devices capable of offloading TIDL on the SoC, exit
    uint32_t num_eves = Executor::GetNumDevices(DeviceType::EVE);
    uint32_t num_dsps = Executor::GetNumDevices(DeviceType::DSP);
    int num_layers_groups = 1;

    if (num_eves == 0 && num_dsps == 0)
    {
        std::cout << "TI DL not supported on this SoC." << std::endl;
        return EXIT_SUCCESS;
    }

    // Process arguments
    std::string config_file;
    ProcessArgs(argc, argv, config_file, num_dsps, num_eves, num_layers_groups);

    bool status = false;
    if (!config_file.empty())
    {
        std::cout << "Run single configuration: " << config_file << std::endl;
        status = RunConfiguration(config_file, num_layers_groups, num_dsps, num_eves);
    }

    if (!status)
    {
        std::cout << "tidl FAILED" << std::endl;
        return EXIT_FAILURE;
    }

    std::cout << "tidl PASSED" << std::endl;
    return EXIT_SUCCESS;
}

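// Read the TIDL network configuration, build the ExecutionObjectPipelines,
// and run the frame-processing loop. Frames are dispatched to EOPs in
// round-robin order; ProcessFrameWait()/ProcessFrameStartAsync() overlap
// host-side pre/post-processing with device execution. Returns false on
// configuration or runtime errors.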
bool RunConfiguration(const std::string& config_file, int num_layers_groups,
                      uint32_t num_dsps, uint32_t num_eves)
{
    // Read the TI DL configuration file
    Configuration configuration;
    if (!configuration.ReadFromFile(config_file))
        return false;

    if (verbose)
        configuration.enableApiTrace = true;

    if (num_layers_groups == 1)
        configuration.runFullNet = true; // Force all layers into the same group

    try
    {
        // Create ExecutionObjectPipelines
        Executor *e_eve = nullptr;
        Executor *e_dsp = nullptr;
        std::vector<ExecutionObjectPipeline *> eops;
        if (!CreateExecutionObjectPipelines(num_eves, num_dsps, configuration,
                                            num_layers_groups, e_eve, e_dsp, eops))
            return false;

        // Allocate input/output memory for each EOP
        AllocateMemory(eops);

        // Setup live display
        SetupLiveDisplay(num_eves, num_dsps);

        // Setup input
        VideoCapture cap;
        VideoWriter writer;  // gstreamer
        if (!SetupInput(cap, writer)) return false;

        // More initialization
        for (int k = 0; k < NUM_ROI; k++)
            for (int i = 0; i < 3; i++)
                selclass_history[k][i] = -1;
        std::cout << "About to start ProcessFrame loop!!" << std::endl;

        // Process frames with the available EOPs in a pipelined manner;
        // num_eops additional iterations flush the pipeline (epilogue)
        int num_eops = eops.size();
        for (int frame_idx = 0;
             frame_idx < configuration.numFrames + num_eops; frame_idx++)
        {
            ExecutionObjectPipeline* eop = eops[frame_idx % num_eops];

            // Wait for the previous frame on the same EOP to finish processing
            if (eop->ProcessFrameWait())
            {
                DisplayFrame(eop, writer, frame_idx, num_eops,
                             num_eves, num_dsps);
            }
            fps_window.Tick();

            if (ReadFrame(eop, configuration, frame_idx, cap, writer))
                eop->ProcessFrameStartAsync();
        }

        // Cleanup
        for (auto eop : eops)
        {
            free(eop->GetInputBufferPtr());
            free(eop->GetOutputBufferPtr());
            delete eop;
        }
        if (e_dsp) delete e_dsp;
        if (e_eve) delete e_eve;
    }
    catch (tidl::Exception &e)
    {
        std::cerr << e.what() << std::endl;
        return false;
    }

    return true;
}

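// Build the Executors and ExecutionObjectPipelines according to the number
// of layer groups:
//   1 layer group  - each EOP wraps a single EVE or DSP ExecutionObject and
//                    processes whole frames independently.
//   2 layer groups - each EOP chains an EVE EO (layersGroupId 1) and a DSP EO
//                    (layersGroupId 2), so one frame flows through both.
// EOPs are duplicated buffer_factor (2) times for double buffering.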
bool CreateExecutionObjectPipelines(uint32_t num_eves, uint32_t num_dsps,
                                    Configuration& configuration,
                                    uint32_t num_layers_groups,
                                    Executor*& e_eve, Executor*& e_dsp,
                                    std::vector<ExecutionObjectPipeline*>& eops)
{
    DeviceIds ids_eve, ids_dsp;
    for (uint32_t i = 0; i < num_eves; i++)
        ids_eve.insert(static_cast<DeviceId>(i));
    for (uint32_t i = 0; i < num_dsps; i++)
        ids_dsp.insert(static_cast<DeviceId>(i));
    const uint32_t buffer_factor = 2;

    switch (num_layers_groups)
    {
        case 1:   // Single layer group
            e_eve = num_eves == 0 ? nullptr :
                    new Executor(DeviceType::EVE, ids_eve, configuration);
            e_dsp = num_dsps == 0 ? nullptr :
                    new Executor(DeviceType::DSP, ids_dsp, configuration);

            // Construct ExecutionObjectPipelines with a single ExecutionObject
            // each, so frames are processed in parallel on as many DSP and EVE
            // cores as are available.
            // With buffer_factor == 2, EOPs are duplicated for double buffering,
            // overlapping host pre/post-processing with device processing.
            for (uint32_t j = 0; j < buffer_factor; j++)
            {
                for (uint32_t i = 0; i < num_eves; i++)
                    eops.push_back(new ExecutionObjectPipeline({(*e_eve)[i]}));
                for (uint32_t i = 0; i < num_dsps; i++)
                    eops.push_back(new ExecutionObjectPipeline({(*e_dsp)[i]}));
            }
            break;

        case 2:   // Two layer groups
            // Two layer groups require both device types
            if (num_eves == 0 || num_dsps == 0)
            {
                std::cout << "Two layer groups require at least one EVE and one DSP!"
                          << std::endl;
                return false;
            }

            // Create Executors with the appropriate core type, number of cores
            // and configuration specified:
            // EVE runs layersGroupId 1 of the network, DSP runs layersGroupId 2
            e_eve = new Executor(DeviceType::EVE, ids_eve, configuration, 1);
            e_dsp = new Executor(DeviceType::DSP, ids_dsp, configuration, 2);

            // Construct ExecutionObjectPipelines that use multiple
            // ExecutionObjects to process a single frame; each ExecutionObject
            // processes one layer group of the network.
            // With buffer_factor == 2, EOPs are duplicated for pipelining at
            // the EO level rather than the EOP level, in addition to double
            // buffering and overlapping host pre/post-processing with device
            // processing.
            for (uint32_t j = 0; j < buffer_factor; j++)
            {
                for (uint32_t i = 0; i < std::max(num_eves, num_dsps); i++)
                    eops.push_back(new ExecutionObjectPipeline(
                        {(*e_eve)[i % num_eves], (*e_dsp)[i % num_dsps]}));
            }
            break;

        default:
            std::cout << "Layers groups can be either 1 or 2!" << std::endl;
            return false;
    }

    return true;
}

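// Allocate host memory for each EOP's network input and output buffers and
// register the buffers with the EOP via SetInputOutputBuffer(). The buffers
// are freed in the cleanup section of RunConfiguration().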
void AllocateMemory(const std::vector<ExecutionObjectPipeline*>& eops)
{
    for (auto eop : eops)
    {
        size_t in_size  = eop->GetInputBufferSizeInBytes();
        size_t out_size = eop->GetOutputBufferSizeInBytes();
        void*  in_ptr   = malloc(in_size);
        void*  out_ptr  = malloc(out_size);
        assert(in_ptr != nullptr && out_ptr != nullptr);

        ArgInfo in(in_ptr, in_size);
        ArgInfo out(out_ptr, out_size);
        eop->SetInputOutputBuffer(in, out);
    }
}

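// Create the OpenCV display windows: the main classification window (named
// after the number of EVE/DSP cores in use), optional per-ROI windows, the
// TIDL software-stack image, and the "ClassList" window listing the selected
// classes. Only active when LIVE_DISPLAY is defined.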
void SetupLiveDisplay(uint32_t num_eves, uint32_t num_dsps)
{
#ifdef LIVE_DISPLAY
    sprintf(imagenet_win, "Imagenet_EVEx%d_DSPx%d", num_eves, num_dsps);

    if (NUM_ROI > 1)
    {
        for (int i = 0; i < NUM_ROI; i++)
        {
            char tmp_string[80];
            sprintf(tmp_string, "ROI[%02d]", i);
            namedWindow(tmp_string, WINDOW_AUTOSIZE | CV_GUI_NORMAL);
        }
    }
    Mat sw_stack_image = imread(
        "/usr/share/ti/tidl/examples/classification/tidl-sw-stack-small.png",
        IMREAD_COLOR);  // Read the file
    if (sw_stack_image.empty())  // Check for invalid input
    {
        std::cout << "Could not open or find the tidl-sw-stack-small image"
                  << std::endl;
    }
    else
    {
        // Create a window for display and show the image inside it
        namedWindow("TIDL SW Stack", WINDOW_AUTOSIZE | CV_GUI_NORMAL);
        cv::imshow("TIDL SW Stack", sw_stack_image);
    }

    namedWindow("ClassList", WINDOW_AUTOSIZE | CV_GUI_NORMAL);
    namedWindow(imagenet_win, WINDOW_AUTOSIZE | CV_GUI_NORMAL);
    // Set the callback function for any mouse event
    setMouseCallback(imagenet_win, imagenetCallBackFunc, NULL);

    classlist_image = cv::Mat::zeros(40 + selected_items_size * 20, 220, CV_8UC3);
    // Erase the window
    classlist_image.setTo(Scalar::all(0));

    for (int i = 0; i < selected_items_size; i++)
    {
        sprintf(tmp_classwindow_string, "%2d) %12s", 1 + i,
                labels_classes[selected_items[i]].c_str());
        cv::putText(classlist_image, tmp_classwindow_string,
                    cv::Point(5, 40 + i * 20),
                    cv::FONT_HERSHEY_COMPLEX_SMALL,
                    0.75,
                    cv::Scalar(255, 255, 255), 1, 8);
    }
    cv::imshow("ClassList", classlist_image);
#endif
}

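// Open the video source: a live camera when live_input >= 0 (the value is
// passed to cv::VideoCapture as a camera index), otherwise the clip named in
// video_clip. Also computes the NUM_ROI crop rectangles and, when
// RMT_GST_STREAMER is defined, opens a gstreamer writer that H.264-encodes
// the output and streams it over UDP.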
bool SetupInput(VideoCapture& cap, VideoWriter& writer)
{
    if (live_input >= 0)
    {
        cap.open(live_input);

        const double fps    = cap.get(CAP_PROP_FPS);
        const int    width  = cap.get(CAP_PROP_FRAME_WIDTH);
        const int    height = cap.get(CAP_PROP_FRAME_HEIGHT);
        std::cout << "Capture camera with " << fps << " fps, " << width << "x"
                  << height << " px" << std::endl;

#ifdef RMT_GST_STREAMER
        writer.open(" appsrc ! videoconvert ! video/x-raw, format=(string)NV12, width=(int)640, height=(int)480, framerate=(fraction)30/1 ! \
                    ducatih264enc bitrate=2000 ! queue ! h264parse config-interval=1 ! \
                    mpegtsmux ! udpsink host=192.168.1.2 sync=false port=5000",
                    0, fps, Size(640, 480), true);

        if (!writer.isOpened())
        {
            cap.release();
            std::cerr << "Can't create gstreamer writer. "
                      << "Do you have the correct version installed?" << std::endl;
            std::cerr << "Print out OpenCV build information" << std::endl;
            std::cout << getBuildInformation() << std::endl;
            return false;
        }
#endif
    }
    else
    {
        std::cout << "Video input clip: " << video_clip << std::endl;
        cap.open(std::string(video_clip));
        const double fps    = cap.get(CAP_PROP_FPS);
        const int    width  = cap.get(CAP_PROP_FRAME_WIDTH);
        const int    height = cap.get(CAP_PROP_FRAME_HEIGHT);
        std::cout << "Clip with " << fps << " fps, " << width << "x"
                  << height << " px" << std::endl;
    }

    if (!cap.isOpened())
    {
        std::cout << "Video input not opened!" << std::endl;
        return false;
    }

    for (int y = 0; y < NUM_ROI_Y; y++)
    {
        for (int x = 0; x < NUM_ROI_X; x++)
        {
            rectCrop[y * NUM_ROI_X + x] = Rect(X_OFFSET + x * X_STEP,
                                               Y_OFFSET + y * Y_STEP, X_STEP, Y_STEP);
            std::cout << "Rect[" << X_OFFSET + x * X_STEP << ", "
                      << Y_OFFSET + y * Y_STEP << "]" << std::endl;
        }
    }

    return true;
}

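// Grab the next frame, crop/resize it to RES_X x RES_Y (the central square of
// the camera image, or a straight resize for clips), cut out the ROI for this
// frame index, and run the TIDL preprocessing into the EOP's input buffer.
// Returns true if a frame was queued for processing. When a clip reaches its
// end (live_input == -1), the capture is rewound to frame 0.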
bool ReadFrame(ExecutionObjectPipeline* eop, const Configuration& c,
               int frame_idx, VideoCapture &cap, VideoWriter& writer)
{
    if (cap.grab() && frame_idx < c.numFrames)
    {
        if (cap.retrieve(in_image))
        {
            if (live_input >= 0)
            {
                // Crop the central square portion of the camera frame
                int loc_xmin = (in_image.size().width - in_image.size().height) / 2;
                int loc_ymin = 0;
                int loc_w    = in_image.size().height;
                int loc_h    = in_image.size().height;

                cv::resize(in_image(Rect(loc_xmin, loc_ymin, loc_w, loc_h)),
                           image, Size(RES_X, RES_Y));
            }
            else
            {
                if ((in_image.size().width != RES_X) ||
                    (in_image.size().height != RES_Y))
                    cv::resize(in_image, image, Size(RES_X, RES_Y));
                else
                    image = in_image;  // already at the expected resolution
            }

            r_image = Mat(image, rectCrop[frame_idx % NUM_ROI]);

#ifdef LIVE_DISPLAY
            if (NUM_ROI > 1)
            {
                char tmp_string[80];
                sprintf(tmp_string, "ROI[%02d]", frame_idx % NUM_ROI);
                cv::imshow(tmp_string, r_image);
            }
#endif
            imgutil::PreprocessImage(r_image, eop->GetInputBufferPtr(), c);
            eop->SetFrameIndex(frame_idx);

#ifdef RMT_GST_STREAMER
            cv::resize(Mat(image, Rect(0, 32, 640, 448)), to_stream,
                       Size(640, 480));
            writer << to_stream;
#endif

#ifdef LIVE_DISPLAY
            //waitKey(2);
            image.copyTo(show_image);
#endif
            return true;
        }
    }
    else
    {
        if (live_input == -1)
        {
            // Rewind the clip
            cap.set(CAP_PROP_POS_FRAMES, 0);
        }
    }

    return false;
}

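// Post-process one completed frame: run tf_postprocess() on the EOP's output
// buffer, push the result into the per-ROI history, and if the same class was
// reported in at least two of the last three frames, overlay the class label
// and ROI rectangle on the display window, refresh the "ClassList" window and
// FPS counter, and (optionally) stream the annotated frame over gstreamer.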
void DisplayFrame(const ExecutionObjectPipeline* eop, VideoWriter& writer,
                  uint32_t frame_idx, uint32_t num_eops,
                  uint32_t num_eves, uint32_t num_dsps)
{
    int f_id      = eop->GetFrameIndex();
    int curr_roi  = f_id % NUM_ROI;
    int is_object = tf_postprocess((uchar*) eop->GetOutputBufferPtr(),
                                   IMAGE_CLASSES_NUM, curr_roi, frame_idx, f_id);
    selclass_history[curr_roi][2] = selclass_history[curr_roi][1];
    selclass_history[curr_roi][1] = selclass_history[curr_roi][0];
    selclass_history[curr_roi][0] = is_object;
    for (int r = 0; r < NUM_ROI; r++)
    {
        int rpt_id = ShowRegion(selclass_history[r]);
        if (rpt_id >= 0)
        {
            // Overlay the label on the display window if the class was seen
            // in at least two of the last three frames for this ROI
            cv::putText(show_image, labels_classes[rpt_id].c_str(),
                        cv::Point(rectCrop[r].x + 5, rectCrop[r].y + 32), // Coordinates
                        cv::FONT_HERSHEY_COMPLEX_SMALL,                   // Font
                        1.5,                                              // Scale (2.0 = 2x bigger)
                        cv::Scalar(0, 0, 0),                              // Color
                        1,                                                // Thickness
                        8);                                               // Line type
            cv::rectangle(show_image, rectCrop[r], Scalar(255, 0, 0), 3);
            std::cout << "ROI(" << r << ")(" << rpt_id << ")="
                      << labels_classes[rpt_id].c_str() << std::endl;

            classlist_image.setTo(Scalar::all(0));
            for (int k = 0; k < selected_items_size; k++)
            {
                sprintf(tmp_classwindow_string, "%2d) %12s", 1 + k,
                        labels_classes[selected_items[k]].c_str());
                cv::putText(classlist_image, tmp_classwindow_string,
                            cv::Point(5, 40 + k * 20),
                            cv::FONT_HERSHEY_COMPLEX_SMALL,
                            0.75,
                            selected_items[k] == rpt_id ? cv::Scalar(0, 0, 255) :
                                                          cv::Scalar(255, 255, 255), 1, 8);
            }

            double avg_fps = fps_window.UpdateAvgFPS();
            sprintf(tmp_classwindow_string, "FPS:%5.2lf", avg_fps);

#ifdef PERF_VERBOSE
            std::cout << "Device:" << eop->GetDeviceName() << " eops("
                      << num_eops << "), EVES(" << num_eves << ") DSPS("
                      << num_dsps << ") FPS:" << avg_fps << std::endl;
#endif
            cv::putText(classlist_image, tmp_classwindow_string,
                        cv::Point(5, 20),
                        cv::FONT_HERSHEY_COMPLEX_SMALL,
                        0.75,
                        cv::Scalar(0, 255, 0), 1, 8);
            cv::imshow("ClassList", classlist_image);
        }
    }

#ifdef LIVE_DISPLAY
    cv::imshow(imagenet_win, show_image);
#endif

#ifdef RMT_GST_STREAMER
    cv::resize(show_image, to_stream, cv::Size(640, 480));
    writer << to_stream;
#endif

#ifdef LIVE_DISPLAY
    waitKey(2);
#endif
}

// Function to process all command line arguments
void ProcessArgs(int argc, char *argv[], std::string& config_file,
                 uint32_t& num_dsps, uint32_t& num_eves, int& num_layers_groups)
{
    const struct option long_options[] =
    {
        {"labels_classes_file",   required_argument, 0, 'l'},
        {"selected_classes_file", required_argument, 0, 's'},
        {"config_file",           required_argument, 0, 'c'},
        {"num_dsps",              required_argument, 0, 'd'},
        {"num_eves",              required_argument, 0, 'e'},
        {"num_layers_groups",     required_argument, 0, 'g'},
        {"help",                  no_argument,       0, 'h'},
        {"verbose",               no_argument,       0, 'v'},
        {0, 0, 0, 0}
    };

    int option_index = 0;

    while (true)
    {
        int c = getopt_long(argc, argv, "l:c:s:i:d:e:g:hv", long_options, &option_index);

        if (c == -1)
            break;

        switch (c)
        {
            case 'l': populate_labels(optarg);
                      break;

            case 's': populate_selected_items(optarg);
                      break;

            case 'i': if (strlen(optarg) == 1)
                      {
                          // Single character: camera index
                          live_input = atoi(optarg);
                      }
                      else
                      {
                          // Otherwise: path to a video clip
                          live_input = -1;
                          strcpy(video_clip, optarg);
                      }
                      break;

            case 'c': config_file = optarg;
                      break;

            case 'g': num_layers_groups = atoi(optarg);
                      assert(num_layers_groups >= 1 && num_layers_groups <= 2);
                      break;

            case 'd': num_dsps = atoi(optarg);
                      assert(num_dsps <= 2);
                      break;

            case 'e': num_eves = atoi(optarg);
                      assert(num_eves <= 4);
                      break;

            case 'v': verbose = true;
                      break;

            case 'h': DisplayHelp();
                      exit(EXIT_SUCCESS);
                      break;

            case '?': // Error in getopt_long
                      exit(EXIT_FAILURE);
                      break;

            default:
                      std::cerr << "Unsupported option: " << c << std::endl;
                      break;
        }
    }

    // If no EVEs are available, we can only run the full net as one layer group
    if (num_eves == 0) num_layers_groups = 1;
}

void DisplayHelp()
{
    std::cout << "Usage: tidl_classification\n"
                 "  Runs the network specified by a configuration file;"
                 " use -c to select it.\n"
                 "Arguments:\n"
                 " -c                        Path to the configuration file\n"
                 " -d <number of DSP cores>  Number of DSP cores to use (0 - 2)\n"
                 " -e <number of EVE cores>  Number of EVE cores to use (0 - 4)\n"
                 " -g <1|2>                  Number of layer groups\n"
                 " -l                        List of label strings (of all classes in model)\n"
                 " -s                        List of strings with selected classes\n"
                 " -i                        Video input (camera index 0,1 or path to a video clip)\n"
                 " -v                        Verbose output during execution\n"
                 " -h                        Help\n";
}

// Function to filter all the reported decisions
bool tf_expected_id(int id)
{
    // Filter out unexpected IDs
    for (int i = 0; i < selected_items_size; i++)
    {
        if (id == selected_items[i]) return true;
    }
    return false;
}

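// Find the TOP_CANDIDATES highest-scoring outputs using a min-heap of size k:
// the heap keeps the k largest values seen so far, and any new value larger
// than the current minimum replaces it. Among the top k, the highest-scoring
// class that passes tf_expected_id() is reported; returns -1 if none qualifies.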
int tf_postprocess(uchar *in, int size, int roi_idx, int frame_idx, int f_id)
{
    // prob_i = exp(TIDL_Lib_output_i) / sum(exp(TIDL_Lib_output))
    // Sort and get the k largest values and corresponding indices
    const int k = TOP_CANDIDATES;
    int rpt_id = -1;

    typedef std::pair<uchar, int> val_index;
    auto cmp = [](val_index &left, val_index &right) { return left.first > right.first; };
    std::priority_queue<val_index, std::vector<val_index>, decltype(cmp)> queue(cmp);
    // Initialize the priority queue with the smallest value on top
    for (int i = 0; i < k; i++) {
        queue.push(val_index(in[i], i));
    }
    // For the rest of the input, if a value is larger than the current
    // minimum, pop the minimum and push the new value
    for (int i = k; i < size; i++)
    {
        if (in[i] > queue.top().first)
        {
            queue.pop();
            queue.push(val_index(in[i], i));
        }
    }

    // Drain the queue: values come out smallest first, so 'sorted' holds the
    // top k in ascending order (the largest value is last)
    std::vector<val_index> sorted;
    while (!queue.empty())
    {
        sorted.push_back(queue.top());
        queue.pop();
    }

    for (int i = 0; i < k; i++)
    {
        int id = sorted[i].second;

        if (tf_expected_id(id))
        {
            std::cout << "Frame:" << frame_idx << "," << f_id
                      << " ROI[" << roi_idx << "]: rank=" << k - i
                      << ", outval=" << (float)sorted[i].first / 255 << ", "
                      << labels_classes[sorted[i].second] << std::endl;
            rpt_id = id;
        }
    }
    return rpt_id;
}

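// Simple temporal filter over the last three classifications of a ROI
// (most recent first): report a class only if it appears in at least two of
// the three entries, otherwise report -1 (nothing to show).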
int ShowRegion(int roi_history[])
{
    if ((roi_history[0] >= 0) && (roi_history[0] == roi_history[1])) return roi_history[0];
    if ((roi_history[0] >= 0) && (roi_history[0] == roi_history[2])) return roi_history[0];
    if ((roi_history[1] >= 0) && (roi_history[1] == roi_history[2])) return roi_history[1];
    return -1;
}