1 /******************************************************************************
2 * Copyright (c) 2018, Texas Instruments Incorporated - http://www.ti.com/
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 * * Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * * Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 * * Neither the name of Texas Instruments Incorporated nor the
13 * names of its contributors may be used to endorse or promote products
14 * derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
17 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
20 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
23 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
24 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
25 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
26 * THE POSSIBILITY OF SUCH DAMAGE.
27 *****************************************************************************/
28 #include <signal.h>
29 #include <getopt.h>
30 #include <iostream>
31 #include <iomanip>
32 #include <fstream>
33 #include <cassert>
34 #include <string>
35 #include <functional>
36 #include <queue>
37 #include <algorithm>
38 #include <time.h>
39 #include <memory.h>
40 #include <string.h>
42 #include "executor.h"
43 #include "execution_object.h"
44 #include "configuration.h"
46 #include "opencv2/core.hpp"
47 #include "opencv2/imgproc.hpp"
48 #include "opencv2/highgui.hpp"
49 #include "opencv2/videoio.hpp"
// Build-time feature switches for this classification demo.
//#define TWO_ROIs         // when defined: 2x1 grid of ROIs (see geometry macros below)
#define LIVE_DISPLAY       // show OpenCV windows with live classification overlay
//#define PERF_VERBOSE     // print per-frame host/device timing in the frame loop

//#define RMT_GST_STREAMER // stream annotated frames out through a gstreamer VideoWriter

#define MAX_NUM_ROI 4      // upper bound used to size selclass_history

int live_input = 1;        // camera index (>= 0), or -1 to read from video_clip (set by -i)
char video_clip[320];      // path of the input clip used when live_input == -1
// ROI geometry: each captured frame is resized to RES_X x RES_Y, then
// NUM_ROI_X x NUM_ROI_Y fixed 224x224 crops (the network input size used in
// RunConfiguration) are taken starting at (X_OFFSET, Y_OFFSET) with strides
// X_STEP / Y_STEP.
#ifdef TWO_ROIs
#define RES_X 400
#define RES_Y 300
#define NUM_ROI_X 2
#define NUM_ROI_Y 1
#define X_OFFSET 0
#define X_STEP 176
#define Y_OFFSET 52
#define Y_STEP 224
#else
#define RES_X 244
#define RES_Y 244
#define NUM_ROI_X 1
#define NUM_ROI_Y 1
#define X_OFFSET 10
#define X_STEP 224
#define Y_OFFSET 10
#define Y_STEP 224
#endif

int NUM_ROI = NUM_ROI_X * NUM_ROI_Y; // total crops classified, round-robin across frames

//Temporal averaging
int TOP_CANDIDATES = 2;              // top-k classes examined by tf_postprocess

using namespace tidl;
using namespace cv;
#ifdef LIVE_DISPLAY
// Mouse callback installed on the main display window:
// a right-button click terminates the demo immediately.
void imagenetCallBackFunc(int event, int x, int y, int flags, void* userdata)
{
    if (event != EVENT_RBUTTONDOWN)
        return;

    std::cout << "Right button of the mouse is clicked - position (" << x << ", " << y << ")" << " ... prepare to exit!" << std::endl;
    exit(0);
}
#endif
// ---- Forward declarations and shared state -------------------------------

// Pick the top-k classes from a raw score buffer; returns a selected class id or -1.
static int tf_postprocess(uchar *in, int size, int roi_idx, int frame_idx, int f_id);
// Copy one image plane into an execution-object input buffer.
static void tf_preprocess(uchar *out, uchar *in, int size);
// Temporal filter: report a class only if seen in 2 of the last 3 results.
static int ShowRegion(int roi_history[]);
// Per-ROI history of the last 3 classification results; index 0 is the newest.
static int selclass_history[MAX_NUM_ROI][3]; // from most recent to oldest at top indices

bool __TI_show_debug_ = false;  // enabled by -v

bool RunMultipleExecutors(const std::string& config_file_1,
                          const std::string& config_file_2,
                          uint32_t num_devices_available);

bool RunConfiguration(const std::string& config_file, int num_devices,
                      DeviceType device_type);
bool RunAllConfigurations(int32_t num_devices, DeviceType device_type);

bool ReadFrame(ExecutionObject& eo,
               int frame_idx,
               const Configuration& configuration,
               std::istream& input_file);

bool WriteFrame(const ExecutionObject &eo,
                std::ostream& output_file);

static void ProcessArgs(int argc, char *argv[],
                        std::string& config_file,
                        int& num_devices,
                        DeviceType& device_type);

static void DisplayHelp();

// Class label data, populated from files passed via -l / -s (see imagenet_classes TU).
extern std::string labels_classes[];
extern int IMAGE_CLASSES_NUM;
extern int selected_items_size;
extern int selected_items[];
extern int populate_selected_items (char *filename);
extern void populate_labels (char *filename);
// Elapsed time t1 - t0 expressed in milliseconds.
static double ms_diff(struct timespec &t0, struct timespec &t1)
{
    double whole_ms = (t1.tv_sec - t0.tv_sec) * 1e3;
    double frac_ms  = (t1.tv_nsec - t0.tv_nsec) / 1e6;
    return whole_ms + frac_ms;
}
141 int main(int argc, char *argv[])
142 {
143 // Catch ctrl-c to ensure a clean exit
144 signal(SIGABRT, exit);
145 signal(SIGTERM, exit);
147 // If there are no devices capable of offloading TIDL on the SoC, exit
148 uint32_t num_dla =
149 Executor::GetNumDevices(DeviceType::EVE);
150 uint32_t num_dsp =
151 Executor::GetNumDevices(DeviceType::DSP);
152 if (num_dla == 0 && num_dsp == 0)
153 {
154 std::cout << "TI DL not supported on this SoC." << std::endl;
155 return EXIT_SUCCESS;
156 }
158 // Process arguments
159 std::string config_file;
160 int num_devices = 1;
161 DeviceType device_type = DeviceType::EVE;
162 ProcessArgs(argc, argv, config_file, num_devices, device_type);
164 bool status = true;
165 if (!config_file.empty()) {
166 std::cout << "Run single configuration: " << config_file << std::endl;
167 status = RunConfiguration(config_file, num_devices, device_type);
168 } else
169 {
170 status = false;
171 }
173 if (!status)
174 {
175 std::cout << "tidl FAILED" << std::endl;
176 return EXIT_FAILURE;
177 }
179 std::cout << "tidl PASSED" << std::endl;
180 return EXIT_SUCCESS;
181 }
// Run one network configuration: create an Executor on `num_devices` cores of
// `device_type`, then pipeline frames from a camera or video clip through the
// ExecutionObjects, classifying one 224x224 ROI per frame and overlaying the
// temporally-filtered results in OpenCV windows.
// Returns false on configuration/input errors or a caught tidl::Exception.
bool RunConfiguration(const std::string& config_file, int num_devices,
                      DeviceType device_type)
{
    DeviceIds ids;
    char imagenet_win[160];  // title of the main display window
    for (int i = 0; i < num_devices; i++)
        ids.insert(static_cast<DeviceId>(i));

    // Read the TI DL configuration file
    Configuration configuration;
    bool status = configuration.ReadFromFile(config_file);
    if (!status)
    {
        std::cerr << "Error in configuration file: " << config_file
                  << std::endl;
        return false;
    }

    // NOTE(review): these two streams are opened and asserted good but never
    // read from or written to anywhere in this function — dead setup?
    std::ifstream input_data_file(configuration.inData, std::ios::binary);
    std::ofstream output_data_file(configuration.outData, std::ios::binary);
    assert (input_data_file.good());
    assert (output_data_file.good());

    sprintf(imagenet_win, "Imagenet_%sx%d", (device_type == DeviceType::EVE) ? "EVE" : "DSP", num_devices);

    // Determine input frame size from configuration
    // NOTE(review): the trailing ternary is 1 on both branches, i.e. a no-op;
    // if a different multiplier was intended for 1-channel input, fix it here.
    size_t frame_sz_in = configuration.inWidth * configuration.inHeight *
        configuration.inNumChannels * (configuration.inNumChannels == 1 ? 1 : 1);
    size_t frame_sz_out = configuration.inWidth * configuration.inHeight * 3;

    try
    {
        // Create a executor with the approriate core type, number of cores
        // and configuration specified
        Executor executor(device_type, ids, configuration);

        // Query Executor for set of ExecutionObjects created
        const ExecutionObjects& execution_objects =
            executor.GetExecutionObjects();
        int num_eos = execution_objects.size();

        // Allocate input and output buffers for each execution object
        std::vector<void *> buffers;
        for (auto &eo : execution_objects)
        {
            ArgInfo in = { ArgInfo(malloc_ddr<char>(frame_sz_in), frame_sz_in)};
            ArgInfo out = { ArgInfo(malloc_ddr<char>(frame_sz_out), frame_sz_out)};
            eo->SetInputOutputBuffer(in, out);

            buffers.push_back(in.ptr());
            buffers.push_back(out.ptr());
        }

#ifdef LIVE_DISPLAY
        // One preview window per ROI when classifying multiple crops.
        if(NUM_ROI > 1)
        {
            for(int i = 0; i < NUM_ROI; i ++) {
                char tmp_string[80];
                sprintf(tmp_string, "ROI[%02d]", i);
                namedWindow(tmp_string, WINDOW_AUTOSIZE | CV_GUI_NORMAL);
            }
        }
        Mat sw_stack_image = imread("/usr/share/ti/tidl/examples/classification/tidl-sw-stack-small.png", IMREAD_COLOR); // Read the file
        if( sw_stack_image.empty() )                      // Check for invalid input
        {
            std::cout << "Could not open or find the tidl-sw-stack-small image" << std::endl ;
        } else {
            namedWindow( "TIDL SW Stack", WINDOW_AUTOSIZE | CV_GUI_NORMAL ); // Create a window for display.
            cv::imshow( "TIDL SW Stack", sw_stack_image ); // Show our image inside it.
        }

        namedWindow("ClassList", WINDOW_AUTOSIZE | CV_GUI_NORMAL);
        namedWindow(imagenet_win, WINDOW_AUTOSIZE | CV_GUI_NORMAL);
        //set the callback function for any mouse event
        setMouseCallback(imagenet_win, imagenetCallBackFunc, NULL);

        // Legend window: one line per user-selected class (populated via -s).
        Mat classlist_image = cv::Mat::zeros(40 + selected_items_size * 20, 220, CV_8UC3);
        char tmp_classwindow_string[160];
        //Erase window
        classlist_image.setTo(Scalar::all(0));

        for (int i = 0; i < selected_items_size; i ++)
        {
            sprintf(tmp_classwindow_string, "%2d) %12s", 1+i, labels_classes[selected_items[i]].c_str());
            cv::putText(classlist_image, tmp_classwindow_string,
                        cv::Point(5, 40 + i * 20),
                        cv::FONT_HERSHEY_COMPLEX_SMALL,
                        0.75,
                        cv::Scalar(255,255,255), 1, 8);
        }
        cv::imshow("ClassList", classlist_image);
#endif

        Mat r_frame, r_mframe, r_blend;
        Mat to_stream;
        VideoCapture cap;

        if(live_input >= 0)
        {
            // Camera input.
            cap.open(live_input);
            // NOTE(review): `writer` goes out of scope at the end of this if
            // block, but the RMT_GST_STREAMER code inside the frame loop below
            // references it — that build configuration cannot compile as-is.
            VideoWriter writer; // gstreamer

            const double fps = cap.get(CAP_PROP_FPS);
            const int width = cap.get(CAP_PROP_FRAME_WIDTH);
            const int height = cap.get(CAP_PROP_FRAME_HEIGHT);
            std::cout << "Capture camera with " << fps << " fps, " << width << "x" << height << " px" << std::endl;

#ifdef RMT_GST_STREAMER
            writer.open(" appsrc ! videoconvert ! video/x-raw, format=(string)NV12, width=(int)640, height=(int)480, framerate=(fraction)30/1 ! \
ducatih264enc bitrate=2000 ! queue ! h264parse config-interval=1 ! \
mpegtsmux ! udpsink host=158.218.102.235 sync=false port=5000",
                        0,fps,Size(640,480),true);

            if (!writer.isOpened()) {
                cap.release();
                std::cerr << "Can't create gstreamer writer. Do you have the correct version installed?" << std::endl;
                std::cerr << "Print out OpenCV build information" << std::endl;
                std::cout << getBuildInformation() << std::endl;
                return false;
            }
#endif
        } else {
            // Clip input; rewound on EOF further below.
            std::cout << "Video input clip: " << video_clip << std::endl;
            cap.open(std::string(video_clip));
            const double fps = cap.get(CAP_PROP_FPS);
            const int width = cap.get(CAP_PROP_FRAME_WIDTH);
            const int height = cap.get(CAP_PROP_FRAME_HEIGHT);
            std::cout << "Clip with " << fps << " fps, " << width << "x" << height << " px" << std::endl;
        }
        std::cout << "About to start ProcessFrame loop!!" << std::endl;

        // Fixed 224x224 crop rectangles laid out on the resized input frame.
        // NOTE(review): NUM_ROI is a non-const global, so this is a variable-
        // length array — a compiler extension, not standard C++.
        Rect rectCrop[NUM_ROI];
        for (int y = 0; y < NUM_ROI_Y; y ++) {
            for (int x = 0; x < NUM_ROI_X; x ++) {
                rectCrop[y * NUM_ROI_X + x] = Rect(X_OFFSET + x * X_STEP, Y_OFFSET + y * Y_STEP, 224, 224);
                std::cout << "Rect[" << X_OFFSET + x * X_STEP << ", " << Y_OFFSET + y * Y_STEP << "]" << std::endl;
            }
        }
        int num_frames = 99999;

        if (!cap.isOpened()) {
            std::cout << "Video input not opened!" << std::endl;
            return false;
        }

        Mat in_image, image, r_image, show_image, bgr_frames[3];
        int is_object;
        // Reset the per-ROI temporal history (-1 == nothing recognized yet).
        for(int k = 0; k < NUM_ROI; k++) {
            for(int i = 0; i < 3; i ++) selclass_history[k][i] = -1;
        }

#define MAX_NUM_EOS 4
        struct timespec t0[MAX_NUM_EOS], t1;  // per-EO submission timestamps

        // Process frames with available execution objects in a pipelined manner
        // additional num_eos iterations to flush the pipeline (epilogue)
        for (int frame_idx = 0;
             frame_idx < configuration.numFrames + num_eos; frame_idx++)
        {
            ExecutionObject* eo = execution_objects[frame_idx % num_eos].get();

            // Wait for previous frame on the same eo to finish processing
            if (eo->ProcessFrameWait())
            {
                clock_gettime(CLOCK_MONOTONIC, &t1);
                double elapsed_host =
                    ms_diff(t0[eo->GetFrameIndex() % num_eos], t1);
                double elapsed_device = eo->GetProcessTimeInMilliSeconds();
#ifdef PERF_VERBOSE
                // NOTE(review): `overhead` is not declared anywhere in this
                // file — this block fails to compile when PERF_VERBOSE is on.
                std::cout << "frame[" << eo->GetFrameIndex() << "]: "
                          << "Time on device: "
                          << std::setw(6) << std::setprecision(4)
                          << elapsed_device << "ms, "
                          << "host: "
                          << std::setw(6) << std::setprecision(4)
                          << elapsed_host << "ms ";
                std::cout << "API overhead: "
                          << std::setw(6) << std::setprecision(3)
                          << overhead << " %" << std::endl;
#endif
                // Classify the finished output buffer and push the result into
                // the 3-deep per-ROI history used by ShowRegion's filtering.
                int f_id = eo->GetFrameIndex();
                int curr_roi = f_id % NUM_ROI;
                is_object = tf_postprocess((uchar*) eo->GetOutputBufferPtr(), IMAGE_CLASSES_NUM, curr_roi, frame_idx, f_id);
                selclass_history[curr_roi][2] = selclass_history[curr_roi][1];
                selclass_history[curr_roi][1] = selclass_history[curr_roi][0];
                selclass_history[curr_roi][0] = is_object;

                if(is_object >= 0) {
                    std::cout << "frame[" << eo->GetFrameIndex() << "]: "
                              << "Time on device: "
                              << std::setw(6) << std::setprecision(4)
                              << elapsed_device << "ms, "
                              << "host: "
                              << std::setw(6) << std::setprecision(4)
                              << elapsed_host << "ms ";
                }

                for (int r = 0; r < NUM_ROI; r ++)
                {
                    int rpt_id = ShowRegion(selclass_history[r]);
                    if(rpt_id >= 0)
                    {
                        // overlay the display window, if ball seen during last two times
                        cv::putText(show_image, labels_classes[rpt_id].c_str(),
                                    cv::Point(rectCrop[r].x + 5,rectCrop[r].y + 20), // Coordinates
                                    cv::FONT_HERSHEY_COMPLEX_SMALL, // Font
                                    1.0, // Scale. 2.0 = 2x bigger
                                    cv::Scalar(0,0,255), // Color
                                    1, // Thickness
                                    8); // Line type
                        cv::rectangle(show_image, rectCrop[r], Scalar(255,0,0), 3);
                        std::cout << "ROI(" << r << ")(" << rpt_id << ")=" << labels_classes[rpt_id].c_str() << std::endl;

                        // Redraw the legend with the recognized class in red.
                        classlist_image.setTo(Scalar::all(0));
                        for (int k = 0; k < selected_items_size; k ++)
                        {
                            sprintf(tmp_classwindow_string, "%2d) %12s", 1+k, labels_classes[selected_items[k]].c_str());
                            cv::putText(classlist_image, tmp_classwindow_string,
                                        cv::Point(5, 40 + k * 20),
                                        cv::FONT_HERSHEY_COMPLEX_SMALL,
                                        0.75,
                                        selected_items[k] == rpt_id ? cv::Scalar(0,0,255) : cv::Scalar(255,255,255), 1, 8);
                        }
                        sprintf(tmp_classwindow_string, "FPS:%5.2lf", (double)num_devices * 1000.0 / elapsed_host );
                        cv::putText(classlist_image, tmp_classwindow_string,
                                    cv::Point(5, 20),
                                    cv::FONT_HERSHEY_COMPLEX_SMALL,
                                    0.75,
                                    cv::Scalar(0,255,0), 1, 8);
                        cv::imshow("ClassList", classlist_image);
                    }
                }
#ifdef LIVE_DISPLAY
                cv::imshow(imagenet_win, show_image);
#endif

#ifdef RMT_GST_STREAMER
                cv::resize(show_image, to_stream, cv::Size(640,480));
                writer << to_stream;
#endif

#ifdef LIVE_DISPLAY
                waitKey(2);
#endif
            }

            // Acquire the next frame, crop the round-robin ROI, split it into
            // B/G/R planes, and submit it asynchronously to this EO.
            if (cap.grab() && frame_idx < num_frames)
            {
                if (cap.retrieve(in_image))
                {
                    cv::resize(in_image, image, Size(RES_X,RES_Y));
                    r_image = Mat(image, rectCrop[frame_idx % NUM_ROI]);

#ifdef LIVE_DISPLAY
                    if(NUM_ROI > 1)
                    {
                        char tmp_string[80];
                        sprintf(tmp_string, "ROI[%02d]", frame_idx % NUM_ROI);
                        cv::imshow(tmp_string, r_image);
                    }
#endif
                    //Convert from BGR pixel interleaved to BGR plane interleaved!
                    cv::split(r_image, bgr_frames);
                    tf_preprocess((uchar*) eo->GetInputBufferPtr(), bgr_frames[0].ptr(), 224*224);
                    tf_preprocess((uchar*) eo->GetInputBufferPtr()+224*224, bgr_frames[1].ptr(), 224*224);
                    tf_preprocess((uchar*) eo->GetInputBufferPtr()+2*224*224, bgr_frames[2].ptr(), 224*224);
                    eo->SetFrameIndex(frame_idx);
                    clock_gettime(CLOCK_MONOTONIC, &t0[frame_idx % num_eos]);
                    eo->ProcessFrameStartAsync();

#ifdef RMT_GST_STREAMER
                    cv::resize(Mat(image, Rect(0,32,640,448)), to_stream, Size(640,480));
                    writer << to_stream;
#endif

#ifdef LIVE_DISPLAY
                    //waitKey(2);
                    image.copyTo(show_image);
#endif
                }
            } else {
                if(live_input == -1) {
                    //Rewind!
                    cap.release();
                    cap.open(std::string(video_clip));
                }
            }
        }

        // Release the DDR buffers allocated for the execution objects.
        for (auto b : buffers)
            __free_ddr(b);
    }
    catch (tidl::Exception &e)
    {
        std::cerr << e.what() << std::endl;
        status = false;
    }

    input_data_file.close();
    output_data_file.close();

    return status;
}
495 bool ReadFrame(ExecutionObject &eo, int frame_idx,
496 const Configuration& configuration,
497 std::istream& input_file)
498 {
499 if (frame_idx >= configuration.numFrames)
500 return false;
502 char* frame_buffer = eo.GetInputBufferPtr();
503 assert (frame_buffer != nullptr);
505 memset (frame_buffer, 0, eo.GetInputBufferSizeInBytes());
506 input_file.read(frame_buffer, eo.GetInputBufferSizeInBytes() / (configuration.inNumChannels == 1 ? 2 : 1));
508 if (input_file.eof())
509 return false;
511 assert (input_file.good());
513 // Set the frame index being processed by the EO. This is used to
514 // sort the frames before they are output
515 eo.SetFrameIndex(frame_idx);
517 if (input_file.good())
518 return true;
520 return false;
521 }
523 bool WriteFrame(const ExecutionObject &eo, std::ostream& output_file)
524 {
525 output_file.write(
526 eo.GetOutputBufferPtr(), eo.GetOutputBufferSizeInBytes());
527 assert(output_file.good() == true);
529 if (output_file.good())
530 return true;
532 return false;
533 }
535 void ProcessArgs(int argc, char *argv[], std::string& config_file,
536 int& num_devices, DeviceType& device_type)
537 {
538 const struct option long_options[] =
539 {
540 {"labels_classes_file", required_argument, 0, 'l'},
541 {"selected_classes_file", required_argument, 0, 's'},
542 {"config_file", required_argument, 0, 'c'},
543 {"num_devices", required_argument, 0, 'n'},
544 {"device_type", required_argument, 0, 't'},
545 {"help", no_argument, 0, 'h'},
546 {"verbose", no_argument, 0, 'v'},
547 {0, 0, 0, 0}
548 };
550 int option_index = 0;
552 while (true)
553 {
554 int c = getopt_long(argc, argv, "l:c:s:i:n:t:hv", long_options, &option_index);
556 if (c == -1)
557 break;
559 switch (c)
560 {
561 case 'l': populate_labels(optarg);
562 break;
564 case 's': populate_selected_items(optarg);
565 break;
567 case 'i': if(strlen(optarg) == 1)
568 {
569 live_input = atoi(optarg);
570 } else {
571 live_input = -1;
572 strcpy(video_clip, optarg);
573 }
574 break;
576 case 'c': config_file = optarg;
577 break;
579 case 'n': num_devices = atoi(optarg);
580 assert (num_devices > 0 && num_devices <= 4);
581 break;
583 case 't': if (*optarg == 'e')
584 device_type = DeviceType::EVE;
585 else if (*optarg == 'd')
586 device_type = DeviceType::DSP;
587 else
588 {
589 std::cerr << "Invalid argument to -t, only e or d"
590 " allowed" << std::endl;
591 exit(EXIT_FAILURE);
592 }
593 break;
595 case 'v': __TI_show_debug_ = true;
596 break;
598 case 'h': DisplayHelp();
599 exit(EXIT_SUCCESS);
600 break;
602 case '?': // Error in getopt_long
603 exit(EXIT_FAILURE);
604 break;
606 default:
607 std::cerr << "Unsupported option: " << c << std::endl;
608 break;
609 }
610 }
611 }
// Print command-line usage.
// BUGFIX: the previous text claimed the program "will run all available
// networks" when invoked without arguments, but main() fails with
// "tidl FAILED" unless -c is supplied — the text now matches the behavior.
void DisplayHelp()
{
    std::cout << "Usage: tidl\n"
                 " Use -c to specify the network configuration to run;\n"
                 " without it the program exits with an error.\n"
                 "Arguments:\n"
                 " -c Path to the configuration file (required)\n"
                 " -n <number of cores> Number of cores to use (1 - 4)\n"
                 " -t <d|e> Type of core. d -> DSP, e -> EVE\n"
                 " -l List of label strings (of all classes in model)\n"
                 " -s List of strings with selected classes\n"
                 " -i Video input (for camera:0,1 or video clip)\n"
                 " -v Verbose output during execution\n"
                 " -h Help\n";
}
631 bool tf_expected_id(int id)
632 {
633 // Filter out unexpected IDs
634 for (int i = 0; i < selected_items_size; i ++)
635 {
636 if(id == selected_items[i]) return true;
637 }
638 return false;
639 }
641 int tf_postprocess(uchar *in, int size, int roi_idx, int frame_idx, int f_id)
642 {
643 // sort and get k largest values and corresponding indices
644 const int k = TOP_CANDIDATES;
645 int accum_in = 0;
646 int rpt_id = -1;
648 typedef std::pair<uchar, int> val_index;
649 auto constexpr cmp = [](val_index &left, val_index &right) { return left.first > right.first; };
650 std::priority_queue<val_index, std::vector<val_index>, decltype(cmp)> queue(cmp);
651 // initialize priority queue with smallest value on top
652 for (int i = 0; i < k; i++) {
653 queue.push(val_index(in[i], i));
654 accum_in += (int)in[i];
655 }
656 // for rest input, if larger than current minimum, pop mininum, push new val
657 for (int i = k; i < size; i++)
658 {
659 if (in[i] > queue.top().first)
660 {
661 queue.pop();
662 queue.push(val_index(in[i], i));
663 }
664 accum_in += (int)in[i];
665 }
667 // output top k values in reverse order: largest val first
668 std::vector<val_index> sorted;
669 while (! queue.empty())
670 {
671 sorted.push_back(queue.top());
672 queue.pop();
673 }
675 for (int i = k-1; i >= 0; i--)
676 {
677 int id = sorted[i].second;
679 if (tf_expected_id(id))
680 {
681 std::cout << "Frame:" << frame_idx << "," << f_id << " ROI[" << roi_idx << "]: rank="
682 << k-i << ", prob=" << (float) sorted[i].first / 255 << ", "
683 << labels_classes[sorted[i].second] << " accum_in=" << accum_in << std::endl;
684 rpt_id = id;
685 }
686 }
687 return rpt_id;
688 }
690 void tf_preprocess(uchar *out, uchar *in, int size)
691 {
692 for (int i = 0; i < size; i++)
693 {
694 out[i] = (uchar) (in[i] /*- 128*/);
695 }
696 }
// Temporal filter over the last three classification results for one ROI
// (index 0 newest). A class is reported only if the same non-negative id
// appears in at least two of the three slots; otherwise returns -1.
int ShowRegion(int roi_history[])
{
    const int newest = roi_history[0];
    const int middle = roi_history[1];
    const int oldest = roi_history[2];

    if (newest >= 0 && (newest == middle || newest == oldest))
        return newest;
    if (middle >= 0 && middle == oldest)
        return middle;
    return -1;
}