75fc89a1daf382252993b07616e1df8d0fff1a82
1 /******************************************************************************
2 * Copyright (c) 2018, Texas Instruments Incorporated - http://www.ti.com/
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 * * Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * * Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 * * Neither the name of Texas Instruments Incorporated nor the
13 * names of its contributors may be used to endorse or promote products
14 * derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
17 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
20 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
23 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
24 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
25 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
26 * THE POSSIBILITY OF SUCH DAMAGE.
27 *****************************************************************************/
28 #include <signal.h>
29 #include <getopt.h>
30 #include <iostream>
31 #include <iomanip>
32 #include <fstream>
33 #include <cassert>
34 #include <string>
35 #include <functional>
36 #include <queue>
37 #include <algorithm>
38 #include <time.h>
39 #include <memory.h>
40 #include <string.h>
42 #include "executor.h"
43 #include "execution_object.h"
44 #include "execution_object_pipeline.h"
45 #include "configuration.h"
47 #include "opencv2/core.hpp"
48 #include "opencv2/imgproc.hpp"
49 #include "opencv2/highgui.hpp"
50 #include "opencv2/videoio.hpp"
52 //#define TWO_ROIs
53 #define LIVE_DISPLAY
54 #define PERF_VERBOSE
55 //#define RMT_GST_STREAMER
57 #define MAX_NUM_ROI 4
// Video source selector: >= 0 selects a live camera index (cv::VideoCapture),
// -1 selects playback of the clip named in video_clip (set by the -i option).
int live_input = 1;
// Path of the video clip used when live_input == -1 (filled from -i optarg).
char video_clip[320];
62 #ifdef TWO_ROIs
63 #define RES_X 400
64 #define RES_Y 300
65 #define NUM_ROI_X 2
66 #define NUM_ROI_Y 1
67 #define X_OFFSET 0
68 #define X_STEP 176
69 #define Y_OFFSET 52
70 #define Y_STEP 224
71 #else
72 #define RES_X 244
73 #define RES_Y 244
74 #define NUM_ROI_X 1
75 #define NUM_ROI_Y 1
76 #define X_OFFSET 10
77 #define X_STEP 224
78 #define Y_OFFSET 10
79 #define Y_STEP 224
80 #endif
// Total number of regions of interest cropped out of each captured frame
// (grid of NUM_ROI_X by NUM_ROI_Y crops; see rectCrop setup in RunConfiguration).
int NUM_ROI = NUM_ROI_X * NUM_ROI_Y;

// Temporal averaging: number of top-scoring candidates examined per frame
// in tf_postprocess.
int TOP_CANDIDATES = 3;
87 using namespace tidl;
88 using namespace cv;
90 #ifdef LIVE_DISPLAY
91 void imagenetCallBackFunc(int event, int x, int y, int flags, void* userdata)
92 {
93 if ( event == EVENT_RBUTTONDOWN )
94 {
95 std::cout << "Right button of the mouse is clicked - position (" << x << ", " << y << ")" << " ... prepare to exit!" << std::endl;
96 exit(0);
97 }
98 }
99 #endif
// Post-process one raw network output buffer; returns a selected class id or -1.
static int tf_postprocess(uchar *in, int size, int roi_idx, int frame_idx, int f_id);
// Copy (and optionally normalize) one image plane into the network input buffer.
static void tf_preprocess(uchar *out, uchar *in, int size);
// Majority-of-three vote over a per-ROI detection history.
static int ShowRegion(int roi_history[]);
static int selclass_history[MAX_NUM_ROI][3]; // from most recent to oldest at top indices

// Verbose-trace flag toggled by the -v command line option.
bool __TI_show_debug_ = false;

// Run the full capture -> TIDL -> display pipeline for one configuration file.
bool RunConfiguration(const std::string& config_file, int num_layers_groups,
                      uint32_t num_dsps, uint32_t num_eves);

// Fill an EOP input buffer with one frame of raw data from input_file.
bool ReadFrame(ExecutionObjectPipeline& ep,
               int frame_idx,
               const Configuration& configuration,
               std::istream& input_file);

// Parse the command line (see DisplayHelp for the option list).
static void ProcessArgs(int argc, char *argv[],
                        std::string& config_file,
                        uint32_t & num_dsps, uint32_t &num_eves,
                        int & num_layers_groups);

static void DisplayHelp();

// Label/selection tables defined in a sibling translation unit.
extern std::string labels_classes[];
extern int IMAGE_CLASSES_NUM;
extern int selected_items_size;
extern int selected_items[];
extern int populate_selected_items (char *filename);
extern void populate_labels (char *filename);
/// Print per-frame timing: device time, host time, and the API overhead
/// (percentage of host wall time NOT spent on the device).
///
/// @param frame_index    frame number used in the log line
/// @param device_name    name of the device that processed the frame
/// @param elapsed_host   host-side elapsed time in milliseconds
/// @param elapsed_device device-side elapsed time in milliseconds
///
/// Fixes vs. original: device_name is taken by const reference (no copy per
/// frame), and std::cout's format state is restored afterwards —
/// std::setprecision is sticky and previously leaked into later output.
void ReportTime(int frame_index, const std::string& device_name, double elapsed_host,
                double elapsed_device)
{
    double overhead = 100 - (elapsed_device/elapsed_host*100);

    // Save sticky stream format state so this function has no side effect
    // on subsequent std::cout users.
    const std::ios_base::fmtflags saved_flags = std::cout.flags();
    const std::streamsize saved_precision = std::cout.precision();

    std::cout << "frame[" << frame_index << "]: "
              << "Time on " << device_name << ": "
              << std::setw(6) << std::setprecision(4)
              << elapsed_device << "ms, "
              << "host: "
              << std::setw(6) << std::setprecision(4)
              << elapsed_host << "ms ";
    std::cout << "API overhead: "
              << std::setw(6) << std::setprecision(3)
              << overhead << " %" << std::endl;

    std::cout.flags(saved_flags);
    std::cout.precision(saved_precision);
}
145 int main(int argc, char *argv[])
146 {
147 // Catch ctrl-c to ensure a clean exit
148 signal(SIGABRT, exit);
149 signal(SIGTERM, exit);
151 // If there are no devices capable of offloading TIDL on the SoC, exit
152 uint32_t num_eves = Executor::GetNumDevices(DeviceType::EVE);
153 uint32_t num_dsps = Executor::GetNumDevices(DeviceType::DSP);
154 int num_layers_groups = 1;
156 if (num_eves == 0 && num_dsps == 0)
157 {
158 std::cout << "TI DL not supported on this SoC." << std::endl;
159 return EXIT_SUCCESS;
160 }
162 // Process arguments
163 std::string config_file;
164 ProcessArgs(argc, argv, config_file, num_dsps, num_eves, num_layers_groups);
166 bool status = true;
167 if (!config_file.empty()) {
168 std::cout << "Run single configuration: " << config_file << std::endl;
169 status = RunConfiguration(config_file, num_layers_groups, num_dsps, num_eves);
170 } else
171 {
172 status = false;
173 }
175 if (!status)
176 {
177 std::cout << "tidl FAILED" << std::endl;
178 return EXIT_FAILURE;
179 }
181 std::cout << "tidl PASSED" << std::endl;
182 return EXIT_SUCCESS;
183 }
// Run the full demo pipeline for one configuration file:
// create Executors/ExecutionObjectPipelines on the requested EVE/DSP cores,
// capture frames (camera or clip), crop ROIs, run TIDL inference in a
// software-pipelined loop, and overlay/report the classification results.
// Returns true on success, false on configuration or capture failure.
bool RunConfiguration(const std::string& config_file, int num_layers_groups, uint32_t num_dsps, uint32_t num_eves)
{
    char imagenet_win[160];
    DeviceIds ids_eve, ids_dsp;

    // Build the device-id sets used to construct the Executors below.
    for (uint32_t i = 0; i < num_eves; i++)
        ids_eve.insert(static_cast<DeviceId>(i));
    for (uint32_t i = 0; i < num_dsps; i++)
        ids_dsp.insert(static_cast<DeviceId>(i));

    // Read the TI DL configuration file
    Configuration configuration;
    bool status = configuration.ReadFromFile(config_file);
    if (!status)
    {
        std::cerr << "Error in configuration file: " << config_file
            << std::endl;
        return false;
    }

    std::ifstream input_data_file(configuration.inData, std::ios::binary);
    std::ofstream output_data_file(configuration.outData, std::ios::binary);
    assert (input_data_file.good());
    assert (output_data_file.good());

    // Display-window title encodes the device mix.
    // NOTE(review): num_eves/num_dsps are uint32_t printed with %d; %u (or
    // PRIu32) is the matching conversion — confirm and fix.
    sprintf(imagenet_win, "Imagenet_EVEx%d_DSPx%d", num_eves, num_dsps);

    try
    {
        Executor *exe_eve = NULL;
        Executor *exe_dsp = NULL;
        int num_eps = 0;
        std::vector<ExecutionObjectPipeline *> eps;
        switch(num_layers_groups)
        {
        case 1: // Single layers group
            exe_eve = num_eves > 0 ? new Executor(DeviceType::EVE, ids_eve, configuration) : NULL;
            exe_dsp = num_dsps > 0 ? new Executor(DeviceType::DSP, ids_dsp, configuration) : NULL;
            num_eps = num_eves + num_dsps;

            // Construct ExecutionObjectPipeline with single Execution Object to
            // process each frame. This is parallel processing of frames with as many
            // DSP and EVE cores that we have on hand.
            for (uint32_t i = 0; i < num_eves; i++)
                eps.push_back(new ExecutionObjectPipeline({(*exe_eve)[i]}));

            for (uint32_t i = 0; i < num_dsps; i++)
                eps.push_back(new ExecutionObjectPipeline({(*exe_dsp)[i]}));

            break;

        case 2: // Two layers group
            // JacintoNet11 specific : specify only layers that will be in layers group 2
            // ...by default all other layers are in group 1.
            configuration.layerIndex2LayerGroupId = { {12, 2}, {13, 2}, {14, 2} };

            // Create a executor with the approriate core type, number of cores
            // and configuration specified
            // EVE will run layersGroupId 1 in the network, while
            // DSP will run layersGroupId 2 in the network
            exe_eve = num_eves > 0 ? new Executor(DeviceType::EVE, ids_eve, configuration, 1) : NULL;
            exe_dsp = num_dsps > 0 ? new Executor(DeviceType::DSP, ids_dsp, configuration, 2) : NULL;

            // Construct ExecutionObjectPipeline that utilizes multiple
            // ExecutionObjects to process a single frame, each ExecutionObject
            // processes one layerGroup of the network
            // NOTE(review): i%num_eves and i%num_dsps divide by zero when
            // either device count is 0 — this path assumes both EVE and DSP
            // cores are available; verify callers guarantee that.
            num_eps = std::max(num_eves, num_dsps);
            for (int i = 0; i < num_eps; i++)
                eps.push_back(new ExecutionObjectPipeline({(*exe_eve)[i%num_eves],
                                                           (*exe_dsp)[i%num_dsps]}));
            break;

        default:
            std::cout << "Layers groups can be either 1 or 2!" << std::endl;
            return false;
        }

        // Allocate input/output memory for each EOP.
        // NOTE(review): on a tidl::Exception these malloc'd buffers and the
        // eps/Executors leak (cleanup happens only on the non-throwing path).
        std::vector<void *> buffers;
        for (auto ep : eps)
        {
            size_t in_size = ep->GetInputBufferSizeInBytes();
            size_t out_size = ep->GetOutputBufferSizeInBytes();
            void* in_ptr = malloc(in_size);
            void* out_ptr = malloc(out_size);
            assert(in_ptr != nullptr && out_ptr != nullptr);
            buffers.push_back(in_ptr);
            buffers.push_back(out_ptr);

            ArgInfo in(in_ptr, in_size);
            ArgInfo out(out_ptr, out_size);
            ep->SetInputOutputBuffer(in, out);
        }

#ifdef LIVE_DISPLAY
        // One preview window per ROI when more than one region is cropped.
        if(NUM_ROI > 1)
        {
            for(int i = 0; i < NUM_ROI; i ++) {
                char tmp_string[80];
                sprintf(tmp_string, "ROI[%02d]", i);
                namedWindow(tmp_string, WINDOW_AUTOSIZE | CV_GUI_NORMAL);
            }
        }
        Mat sw_stack_image = imread("/usr/share/ti/tidl/examples/classification/tidl-sw-stack-small.png", IMREAD_COLOR); // Read the file
        if( sw_stack_image.empty() )                      // Check for invalid input
        {
            std::cout << "Could not open or find the tidl-sw-stack-small image" << std::endl ;
        } else {
            namedWindow( "TIDL SW Stack", WINDOW_AUTOSIZE | CV_GUI_NORMAL ); // Create a window for display.
            cv::imshow( "TIDL SW Stack", sw_stack_image ); // Show our image inside it.
        }

        namedWindow("ClassList", WINDOW_AUTOSIZE | CV_GUI_NORMAL);
        namedWindow(imagenet_win, WINDOW_AUTOSIZE | CV_GUI_NORMAL);
        //set the callback function for any mouse event
        setMouseCallback(imagenet_win, imagenetCallBackFunc, NULL);

        // Side window listing all user-selected classes (20 px per row).
        Mat classlist_image = cv::Mat::zeros(40 + selected_items_size * 20, 220, CV_8UC3);
        char tmp_classwindow_string[160];
        //Erase window
        classlist_image.setTo(Scalar::all(0));

        for (int i = 0; i < selected_items_size; i ++)
        {
            sprintf(tmp_classwindow_string, "%2d) %12s", 1+i, labels_classes[selected_items[i]].c_str());
            cv::putText(classlist_image, tmp_classwindow_string,
                        cv::Point(5, 40 + i * 20),
                        cv::FONT_HERSHEY_COMPLEX_SMALL,
                        0.75,
                        cv::Scalar(255,255,255), 1, 8);
        }
        cv::imshow("ClassList", classlist_image);
#endif
        Mat r_frame, r_mframe, r_blend;
        Mat to_stream;
        VideoCapture cap;
        double avg_fps = 0.0;

        if(live_input >= 0)
        {
            // Live camera input.
            cap.open(live_input);
            VideoWriter writer;  // gstreamer

            const double fps = cap.get(CAP_PROP_FPS);
            const int width  = cap.get(CAP_PROP_FRAME_WIDTH);
            const int height = cap.get(CAP_PROP_FRAME_HEIGHT);
            std::cout << "Capture camera with " << fps << " fps, " << width << "x" << height << " px" << std::endl;

#ifdef RMT_GST_STREAMER
            // Stream H.264 over UDP to a remote viewer via gstreamer.
            writer.open(" appsrc ! videoconvert ! video/x-raw, format=(string)NV12, width=(int)640, height=(int)480, framerate=(fraction)30/1 ! \
                ducatih264enc bitrate=2000 ! queue ! h264parse config-interval=1 ! \
                mpegtsmux ! udpsink host=192.168.1.2 sync=false port=5000",
                0,fps,Size(640,480),true);

            if (!writer.isOpened()) {
                cap.release();
                std::cerr << "Can't create gstreamer writer. Do you have the correct version installed?" << std::endl;
                std::cerr << "Print out OpenCV build information" << std::endl;
                std::cout << getBuildInformation() << std::endl;
                return false;
            }
#endif
        } else {
            // Clip playback input.
            std::cout << "Video input clip: " << video_clip << std::endl;
            cap.open(std::string(video_clip));
            const double fps = cap.get(CAP_PROP_FPS);
            const int width  = cap.get(CAP_PROP_FRAME_WIDTH);
            const int height = cap.get(CAP_PROP_FRAME_HEIGHT);
            std::cout << "Clip with " << fps << " fps, " << width << "x" << height << " px" << std::endl;
        }
        std::cout << "About to start ProcessFrame loop!!" << std::endl;

        // Fixed 224x224 crop rectangles laid out on a NUM_ROI_X x NUM_ROI_Y grid.
        // NOTE(review): NUM_ROI is a non-const global, so this is a VLA
        // (GNU extension, not standard C++) — confirm toolchain support.
        Rect rectCrop[NUM_ROI];
        for (int y = 0; y < NUM_ROI_Y; y ++) {
            for (int x = 0; x < NUM_ROI_X; x ++) {
                rectCrop[y * NUM_ROI_X + x] = Rect(X_OFFSET + x * X_STEP, Y_OFFSET + y * Y_STEP, 224, 224);
                std::cout << "Rect[" << X_OFFSET + x * X_STEP << ", " << Y_OFFSET + y * Y_STEP << "]" << std::endl;
            }
        }
        int num_frames = 99999;

        if (!cap.isOpened()) {
            std::cout << "Video input not opened!" << std::endl;
            return false;
        }
        Mat in_image, image, r_image, show_image, bgr_frames[3];
        int is_object;
        // Reset per-ROI detection history (-1 = no detection yet).
        for(int k = 0; k < NUM_ROI; k++) {
            for(int i = 0; i < 3; i ++) selclass_history[k][i] = -1;
        }

        // Process frames with available execution objects in a pipelined manner
        // additional num_eps iterations to flush the pipeline (epilogue)
        for (int frame_idx = 0; frame_idx < configuration.numFrames + num_eps; frame_idx++)
        {
            // Round-robin over the pipelines.
            ExecutionObjectPipeline* ep = eps[frame_idx % num_eps];

            // Wait for previous frame on the same eo to finish processing
            if (ep->ProcessFrameWait())
            {
                double elapsed_host = ep->GetHostProcessTimeInMilliSeconds();
                /* Exponential averaging */
                avg_fps = 0.1 * ((double)num_eps * 1000.0 / ((double)NUM_ROI * elapsed_host)) + 0.9 * avg_fps;
#ifdef PERF_VERBOSE
                ReportTime(ep->GetFrameIndex(), ep->GetDeviceName(),
                           ep->GetHostProcessTimeInMilliSeconds(),
                           ep->GetProcessTimeInMilliSeconds());
#endif
                int f_id = ep->GetFrameIndex();
                int curr_roi = f_id % NUM_ROI;
                is_object = tf_postprocess((uchar*) ep->GetOutputBufferPtr(), IMAGE_CLASSES_NUM, curr_roi, frame_idx, f_id);
                // Shift the 3-deep history (most recent at index 0).
                selclass_history[curr_roi][2] = selclass_history[curr_roi][1];
                selclass_history[curr_roi][1] = selclass_history[curr_roi][0];
                selclass_history[curr_roi][0] = is_object;
                for (int r = 0; r < NUM_ROI; r ++)
                {
                    int rpt_id = ShowRegion(selclass_history[r]);
                    if(rpt_id >= 0)
                    {
                        // overlay the display window, if ball seen during last two times
                        cv::putText(show_image, labels_classes[rpt_id].c_str(),
                                    cv::Point(rectCrop[r].x + 5,rectCrop[r].y + 20), // Coordinates
                                    cv::FONT_HERSHEY_COMPLEX_SMALL, // Font
                                    1.0, // Scale. 2.0 = 2x bigger
                                    cv::Scalar(0,0,255), // Color
                                    1, // Thickness
                                    8); // Line type
                        cv::rectangle(show_image, rectCrop[r], Scalar(255,0,0), 3);
                        std::cout << "ROI(" << r << ")(" << rpt_id << ")=" << labels_classes[rpt_id].c_str() << std::endl;

                        // Redraw the class list, highlighting the detected class.
                        classlist_image.setTo(Scalar::all(0));
                        for (int k = 0; k < selected_items_size; k ++)
                        {
                            sprintf(tmp_classwindow_string, "%2d) %12s", 1+k, labels_classes[selected_items[k]].c_str());
                            cv::putText(classlist_image, tmp_classwindow_string,
                                        cv::Point(5, 40 + k * 20),
                                        cv::FONT_HERSHEY_COMPLEX_SMALL,
                                        0.75,
                                        selected_items[k] == rpt_id ? cv::Scalar(0,0,255) : cv::Scalar(255,255,255), 1, 8);
                        }
                        sprintf(tmp_classwindow_string, "FPS:%5.2lf", avg_fps );

#ifdef PERF_VERBOSE
                        std::cout << "Device:" << ep->GetDeviceName() << " eps(" << num_eps << "), EVES(" << num_eves <<
                            ") DSPS(" << num_dsps << ") FPS:" << avg_fps << std::endl;
#endif
                        cv::putText(classlist_image, tmp_classwindow_string,
                                    cv::Point(5, 20),
                                    cv::FONT_HERSHEY_COMPLEX_SMALL,
                                    0.75,
                                    cv::Scalar(0,255,0), 1, 8);
                        cv::imshow("ClassList", classlist_image);
                    }
                }
#ifdef LIVE_DISPLAY
                cv::imshow(imagenet_win, show_image);
#endif

#ifdef RMT_GST_STREAMER
                cv::resize(show_image, to_stream, cv::Size(640,480));
                writer << to_stream;
#endif

#ifdef LIVE_DISPLAY
                waitKey(2);
#endif
            }

            // Feed the next frame into the pipeline (prologue/steady state).
            if (cap.grab() && frame_idx < num_frames)
            {
                if (cap.retrieve(in_image))
                {
                    cv::resize(in_image, image, Size(RES_X,RES_Y));
                    r_image = Mat(image, rectCrop[frame_idx % NUM_ROI]);

#ifdef LIVE_DISPLAY
                    if(NUM_ROI > 1)
                    {
                        char tmp_string[80];
                        sprintf(tmp_string, "ROI[%02d]", frame_idx % NUM_ROI);
                        cv::imshow(tmp_string, r_image);
                    }
#endif
                    //Convert from BGR pixel interleaved to BGR plane interleaved!
                    cv::split(r_image, bgr_frames);
                    tf_preprocess((uchar*) ep->GetInputBufferPtr(), bgr_frames[0].ptr(), 224*224);
                    tf_preprocess((uchar*) ep->GetInputBufferPtr()+224*224, bgr_frames[1].ptr(), 224*224);
                    tf_preprocess((uchar*) ep->GetInputBufferPtr()+2*224*224, bgr_frames[2].ptr(), 224*224);
                    ep->SetFrameIndex(frame_idx);
                    ep->ProcessFrameStartAsync();

#ifdef RMT_GST_STREAMER
                    cv::resize(Mat(image, Rect(0,32,640,448)), to_stream, Size(640,480));
                    writer << to_stream;
#endif

#ifdef LIVE_DISPLAY
                    //waitKey(2);
                    image.copyTo(show_image);
#endif
                }
            } else {
                if(live_input == -1) {
                    //Rewind!
                    cap.release();
                    cap.open(std::string(video_clip));
                }
            }
        }

        // Non-throwing path cleanup: pipelines, buffers, then executors.
        for (auto ep : eps) delete ep;
        for (auto b : buffers) free(b);
        if(num_dsps) delete exe_dsp;
        if(num_eves) delete exe_eve;
    }
    catch (tidl::Exception &e)
    {
        std::cerr << e.what() << std::endl;
        status = false;
    }

    input_data_file.close();
    output_data_file.close();

    return status;
}
516 bool ReadFrame(ExecutionObjectPipeline &ep, int frame_idx,
517 const Configuration& configuration,
518 std::istream& input_file)
519 {
520 if (frame_idx >= configuration.numFrames)
521 return false;
523 char* frame_buffer = ep.GetInputBufferPtr();
524 assert (frame_buffer != nullptr);
526 memset (frame_buffer, 0, ep.GetInputBufferSizeInBytes());
527 input_file.read(frame_buffer, ep.GetInputBufferSizeInBytes() / (configuration.inNumChannels == 1 ? 2 : 1));
529 if (input_file.eof())
530 return false;
532 assert (input_file.good());
534 // Set the frame index being processed by the EO. This is used to
535 // sort the frames before they are output
536 ep.SetFrameIndex(frame_idx);
538 if (input_file.good())
539 return true;
541 return false;
542 }
544 // Function to process all command line arguments
545 void ProcessArgs(int argc, char *argv[], std::string& config_file,
546 uint32_t & num_dsps, uint32_t & num_eves, int & num_layers_groups )
547 {
548 const struct option long_options[] =
549 {
550 {"labels_classes_file", required_argument, 0, 'l'},
551 {"selected_classes_file", required_argument, 0, 's'},
552 {"config_file", required_argument, 0, 'c'},
553 {"num_dsps", required_argument, 0, 'd'},
554 {"num_eves", required_argument, 0, 'e'},
555 {"num_layers_groups", required_argument, 0, 'g'},
556 {"help", no_argument, 0, 'h'},
557 {"verbose", no_argument, 0, 'v'},
558 {0, 0, 0, 0}
559 };
561 int option_index = 0;
563 while (true)
564 {
565 int c = getopt_long(argc, argv, "l:c:s:i:d:e:g:hv", long_options, &option_index);
567 if (c == -1)
568 break;
570 switch (c)
571 {
572 case 'l': populate_labels(optarg);
573 break;
575 case 's': populate_selected_items(optarg);
576 break;
578 case 'i': if(strlen(optarg) == 1)
579 {
580 live_input = atoi(optarg);
581 } else {
582 live_input = -1;
583 strcpy(video_clip, optarg);
584 }
585 break;
587 case 'c': config_file = optarg;
588 break;
590 case 'g': num_layers_groups = atoi(optarg);
591 assert(num_layers_groups >= 1 && num_layers_groups <= 2);
592 break;
594 case 'd': num_dsps = atoi(optarg);
595 assert (num_dsps >= 0 && num_dsps <= 2);
596 break;
598 case 'e': num_eves = atoi(optarg);
599 assert (num_eves >= 0 && num_eves <= 2);
600 break;
602 case 'v': __TI_show_debug_ = true;
603 break;
605 case 'h': DisplayHelp();
606 exit(EXIT_SUCCESS);
607 break;
609 case '?': // Error in getopt_long
610 exit(EXIT_FAILURE);
611 break;
613 default:
614 std::cerr << "Unsupported option: " << c << std::endl;
615 break;
616 }
617 }
618 }
// Print command-line usage for the classification demo to stdout.
void DisplayHelp()
{
    static const char usage_text[] =
        "Usage: tidl_classification\n"
        " Will run all available networks if tidl is invoked without"
        " any arguments.\n Use -c to run a single network.\n"
        "Optional arguments:\n"
        " -c Path to the configuration file\n"
        " -d <number of DSP cores> Number of DSP cores to use (0 - 2)\n"
        " -e <number of EVE cores> Number of EVE cores to use (0 - 2)\n"
        " -g <1|2> Number of layer groups\n"
        " -l List of label strings (of all classes in model)\n"
        " -s List of strings with selected classes\n"
        " -i Video input (for camera:0,1 or video clip)\n"
        " -v Verbose output during execution\n"
        " -h Help\n";
    std::cout << usage_text;
}
637 // Function to filter all the reported decisions
638 bool tf_expected_id(int id)
639 {
640 // Filter out unexpected IDs
641 for (int i = 0; i < selected_items_size; i ++)
642 {
643 if(id == selected_items[i]) return true;
644 }
645 return false;
646 }
// Select the top-k scores from the raw network output and log every one of
// them that belongs to the user-selected class set.
//   in        : network output buffer, one uchar score per class
//   size      : number of classes in the buffer (IMAGE_CLASSES_NUM at call site)
//   roi_idx   : region-of-interest index, for logging only
//   frame_idx : pipeline loop index, for logging only
//   f_id      : frame index recorded in the EOP, for logging only
// Returns the id of the LAST selected class encountered while walking the
// top-k candidates from rank 1 (largest score) down to rank k — i.e. the
// lowest-ranked matching class — or -1 when no candidate is selected.
int tf_postprocess(uchar *in, int size, int roi_idx, int frame_idx, int f_id)
{
    //prob_i = exp(TIDL_Lib_output_i) / sum(exp(TIDL_Lib_output))
    // sort and get k largest values and corresponding indices
    const int k = TOP_CANDIDATES;
    int rpt_id = -1;

    // (score, class-index) pairs kept in a size-k min-heap on the score.
    typedef std::pair<uchar, int> val_index;
    auto constexpr cmp = [](val_index &left, val_index &right) { return left.first > right.first; };
    std::priority_queue<val_index, std::vector<val_index>, decltype(cmp)> queue(cmp);
    // initialize priority queue with smallest value on top
    for (int i = 0; i < k; i++) {
        queue.push(val_index(in[i], i));
    }
    // for rest input, if larger than current minimum, pop mininum, push new val
    for (int i = k; i < size; i++)
    {
        if (in[i] > queue.top().first)
        {
            queue.pop();
            queue.push(val_index(in[i], i));
        }
    }

    // output top k values in reverse order: largest val first
    // (popping the min-heap yields ascending scores, so sorted[k-1] is the max)
    std::vector<val_index> sorted;
    while (! queue.empty())
    {
        sorted.push_back(queue.top());
        queue.pop();
    }

    // Walk candidates from rank 1 (i = k-1, largest) to rank k (i = 0);
    // rpt_id ends up holding the last selected class seen.
    for (int i = k-1; i >= 0; i--)
    {
        int id = sorted[i].second;

        if (tf_expected_id(id))
        {
            // Score is normalized to [0,1) for display by dividing by 255.
            std::cout << "Frame:" << frame_idx << "," << f_id << " ROI[" << roi_idx << "]: rank="
                << k-i << ", outval=" << (float)sorted[i].first / 255 << ", "
                << labels_classes[sorted[i].second] << std::endl;
            rpt_id = id;
        }
    }
    return rpt_id;
}
695 void tf_preprocess(uchar *out, uchar *in, int size)
696 {
697 for (int i = 0; i < size; i++)
698 {
699 out[i] = (uchar) (in[i] /*- 128*/);
700 }
701 }
// Two-out-of-three vote over a 3-deep detection history (index 0 most
// recent, -1 means "no detection"). Returns the agreed class id when any
// two entries match and are valid, preferring the most recent entry;
// returns -1 when there is no agreement.
int ShowRegion(int roi_history[])
{
    const int newest = roi_history[0];
    const int middle = roi_history[1];
    const int oldest = roi_history[2];

    if (newest >= 0 && (newest == middle || newest == oldest))
        return newest;
    if (middle >= 0 && middle == oldest)
        return middle;
    return -1;
}