/******************************************************************************
 * Copyright (c) 2018, Texas Instruments Incorporated - http://www.ti.com/
 *   All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of Texas Instruments Incorporated nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 *****************************************************************************/
#include <signal.h>
#include <getopt.h>
#include <iostream>
#include <iomanip>
#include <fstream>
#include <cassert>
#include <string>
#include <functional>
#include <algorithm>
#include <time.h>
#include <unistd.h>

#include <queue>
#include <vector>
#include <cstdio>

#include "executor.h"
#include "execution_object.h"
#include "configuration.h"
#include "../segmentation/object_classes.h"

#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/videoio.hpp"

#define NUM_VIDEO_FRAMES 100
#define DEFAULT_CONFIG "jdetnet"
#define DEFAULT_INPUT "../test/testvecs/input/preproc_0_768x320.y"

bool __TI_show_debug_ = false;
bool is_default_input = false;
bool is_preprocessed_input = false;
bool is_camera_input = false;
bool is_partitioned = true;
int orig_width;
int orig_height;
object_class_table_t *object_class_table;

using namespace tinn;
using namespace cv;

bool RunConfiguration(const std::string& config_file, uint32_t num_devices,
                      DeviceType device_type, std::string& input_file);
bool ReadFrame(ExecutionObject& eo, int frame_idx,
               const Configuration& configuration, int num_frames,
               std::string& image_file, VideoCapture &cap);
bool WriteFrameOutput(const ExecutionObject &eo_in,
                      const ExecutionObject &eo_out,
                      const Configuration& configuration);

void ReportTime(int frame_index, std::string device_name, double elapsed_host,
                double elapsed_device);

static void ProcessArgs(int argc, char *argv[],
                        std::string& config,
                        uint32_t& num_devices,
                        DeviceType& device_type,
                        std::string& input_file);

static void DisplayHelp();
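
// Return the elapsed time from t0 to t1 in milliseconds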
static double ms_diff(struct timespec &t0, struct timespec &t1)
{ return (t1.tv_sec - t0.tv_sec) * 1e3 + (t1.tv_nsec - t0.tv_nsec) / 1e6; }

int main(int argc, char *argv[])
{
    // Catch SIGABRT and SIGTERM to ensure a clean exit
    signal(SIGABRT, exit);
    signal(SIGTERM, exit);

    // If there are no devices capable of offloading TIDL on the SoC, exit
    uint32_t num_dla = Executor::GetNumDevices(DeviceType::DLA);
    uint32_t num_dsp = Executor::GetNumDevices(DeviceType::DSP);
    if (num_dla == 0 && num_dsp == 0)
    {
        std::cout << "TI DL not supported on this SoC." << std::endl;
        return EXIT_SUCCESS;
    }

    // Process arguments
    std::string config = DEFAULT_CONFIG;
    std::string input_file = DEFAULT_INPUT;
    uint32_t num_devices = 1;
    DeviceType device_type = DeviceType::DLA;
    ProcessArgs(argc, argv, config, num_devices, device_type, input_file);
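
    // Partitioned execution pairs a DLA (EVE) with a DSP per pipeline, so
    // limit the device count to the number of available pairs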
    if (is_partitioned)
        num_devices = std::min(num_devices, std::min(num_dla, num_dsp));
    if (num_devices == 0)
    {
        std::cout << "Partitioned execution requires at least 1 DLA and 1 DSP."
                  << std::endl;
        return EXIT_FAILURE;
    }
    if ((object_class_table = GetObjectClassTable(config)) == nullptr)
    {
        std::cout << "No object classes defined for this config." << std::endl;
        return EXIT_FAILURE;
    }

    if (input_file == DEFAULT_INPUT) is_default_input = true;
    if (input_file == "camera") is_camera_input = true;
    if (input_file.length() > 2 &&
        input_file.compare(input_file.length() - 2, 2, ".y") == 0)
        is_preprocessed_input = true;
    std::cout << "Input: " << input_file << std::endl;
    std::string config_file = "../test/testvecs/config/infer/tidl_config_"
                              + config + ".txt";
    bool status = RunConfiguration(config_file, num_devices, device_type,
                                   input_file);

    if (!status)
    {
        std::cout << "ssd_multibox FAILED" << std::endl;
        return EXIT_FAILURE;
    }

    std::cout << "ssd_multibox PASSED" << std::endl;
    return EXIT_SUCCESS;
}

bool RunConfiguration(const std::string& config_file, uint32_t num_devices,
                      DeviceType device_type, std::string& input_file)
{
    DeviceIds ids;
    for (int i = 0; i < num_devices; i++)
        ids.insert(static_cast<DeviceId>(i));

    // Read the TI DL configuration file
    Configuration configuration;
    bool status = configuration.ReadFromFile(config_file);
    if (!status)
    {
        std::cerr << "Error in configuration file: " << config_file
                  << std::endl;
        return false;
    }
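    // Run the full network on a single device type unless execution is
    // partitioned across layer groups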
    configuration.runFullNet = is_partitioned ? 0 : 1;

    // setup input
    int num_frames = is_default_input ? 3 : 1;
    VideoCapture cap;
    std::string image_file;
    if (is_camera_input)
    {
        cap = VideoCapture(1);  // cap = VideoCapture("test.mp4");
        if (! cap.isOpened())
        {
            std::cerr << "Cannot open camera input." << std::endl;
            return false;
        }
        num_frames = NUM_VIDEO_FRAMES;
        namedWindow("SSD_Multibox", WINDOW_AUTOSIZE | CV_GUI_NORMAL);
    }
    else
    {
        image_file = input_file;
    }

    try
    {
        // Create an Executor with the appropriate core type, number of cores
        // and configuration specified
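        // Layer group 1 runs on the first Executor (DLA by default); when
        // partitioned, the remaining layers (group 2) run on a second
        // Executor on the DSP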
        configuration.layersGroupId = 1;
        Executor *executor_1 = new Executor(device_type, ids, configuration);
        Executor *executor_2 = nullptr;
        if (is_partitioned)
        {
            configuration.layersGroupId = 2;
            configuration.enableInternalInput = 1;  // 0 is also valid
            executor_2 = new Executor(DeviceType::DSP, ids, configuration);
        }

        // Query Executor for set of ExecutionObjects created
        const ExecutionObjects *execution_objects_1, *execution_objects_2;
        execution_objects_1 = & executor_1->GetExecutionObjects();
        int num_eos = execution_objects_1->size();
        if (is_partitioned)
            execution_objects_2 = & executor_2->GetExecutionObjects();

        // Allocate input and output buffers for each execution object
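        // In partitioned mode the first stage's output ArgInfo is passed to
        // the second stage as its input, so the DSP consumes the DLA result
        // directly; with enableInternalInput set, that intermediate buffer is
        // managed internally (null pointer, zero size)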
        std::vector<void *> buffers;
        for (int i = 0; i < num_eos; i++)
        {
            ExecutionObject *eo1 = execution_objects_1->at(i).get();
            size_t in_size = eo1->GetInputBufferSizeInBytes();
            size_t out_size = eo1->GetOutputBufferSizeInBytes();
            ArgInfo in = { ArgInfo(malloc(in_size), in_size) };
            ArgInfo out = { ArgInfo(nullptr, 0) };
            if (configuration.enableInternalInput == 0)
                out = ArgInfo(malloc(out_size), out_size);
            eo1->SetInputOutputBuffer(in, out);

            buffers.push_back(in.ptr());
            buffers.push_back(out.ptr());

            if (is_partitioned)
            {
                ExecutionObject *eo2 = execution_objects_2->at(i).get();
                size_t out2_size = eo2->GetOutputBufferSizeInBytes();
                ArgInfo out2 = { ArgInfo(malloc(out2_size), out2_size) };
                eo2->SetInputOutputBuffer(out, out2);
                buffers.push_back(out2.ptr());
            }
        }
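
        // t0[i] records the host-side submission time of the frame currently
        // in flight on execution object i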
#define MAX_NUM_EOS 4
        struct timespec t0[MAX_NUM_EOS], t1, tloop0, tloop1;
        clock_gettime(CLOCK_MONOTONIC, &tloop0);

        // Process frames with available execution objects in a pipelined
        // manner; run num_eos additional iterations to flush the pipeline
        // (epilogue)
        ExecutionObject *eo1, *eo2, *eo_wait, *eo_input;
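        // Each iteration first waits for the frame previously issued on this
        // EO (reporting its timing and writing its output), then reads the
        // next frame and starts processing it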
        for (int frame_idx = 0;
             frame_idx < num_frames + num_eos; frame_idx++)
        {
            eo1 = execution_objects_1->at(frame_idx % num_eos).get();
            eo_wait = eo1;
            if (is_partitioned)
            {
                eo2 = execution_objects_2->at(frame_idx % num_eos).get();
                eo_wait = eo2;
            }

            // Wait for previous frame on the same eo to finish processing
            if (eo_wait->ProcessFrameWait())
            {
                int finished_idx = eo_wait->GetFrameIndex();
                clock_gettime(CLOCK_MONOTONIC, &t1);
                ReportTime(finished_idx,
                           (is_partitioned || device_type == DeviceType::DSP) ?
                           "DSP" : "DLA",
                           ms_diff(t0[finished_idx % num_eos], t1),
                           eo_wait->GetProcessTimeInMilliSeconds());

                eo_input = execution_objects_1->at(finished_idx % num_eos).get();
                WriteFrameOutput(*eo_input, *eo_wait, configuration);
            }

            // Read a frame and start processing it with current eo
            if (ReadFrame(*eo1, frame_idx, configuration, num_frames,
                          image_file, cap))
            {
                clock_gettime(CLOCK_MONOTONIC, &t0[frame_idx % num_eos]);
                eo1->ProcessFrameStartAsync();

                if (is_partitioned && eo1->ProcessFrameWait())
                {
                    clock_gettime(CLOCK_MONOTONIC, &t1);
                    ReportTime(frame_idx, "DLA",
                               ms_diff(t0[frame_idx % num_eos], t1),
                               eo1->GetProcessTimeInMilliSeconds());

                    clock_gettime(CLOCK_MONOTONIC, &t0[frame_idx % num_eos]);
                    eo2->ProcessFrameStartAsync();
                }
            }
        }

        clock_gettime(CLOCK_MONOTONIC, &tloop1);
        std::cout << "Loop total time (including read/write/print/etc): "
                  << std::setw(6) << std::setprecision(4)
                  << ms_diff(tloop0, tloop1) << "ms" << std::endl;

        delete executor_1;
        delete executor_2;
        for (auto b : buffers)
            free(b);
    }
    catch (tinn::Exception &e)
    {
        std::cerr << e.what() << std::endl;
        status = false;
    }

    return status;
}

void ReportTime(int frame_index, std::string device_name, double elapsed_host,
                double elapsed_device)
{
    double overhead = 100 - (elapsed_device/elapsed_host*100);
    std::cout << "frame[" << frame_index << "]: "
              << "Time on " << device_name << ": "
              << std::setw(6) << std::setprecision(4)
              << elapsed_device << "ms, "
              << "host: "
              << std::setw(6) << std::setprecision(4)
              << elapsed_host << "ms ";
    std::cout << "API overhead: "
              << std::setw(6) << std::setprecision(3)
              << overhead << " %" << std::endl;
}

bool ReadFrame(ExecutionObject &eo, int frame_idx,
               const Configuration& configuration, int num_frames,
               std::string& image_file, VideoCapture &cap)
{
    if (frame_idx >= num_frames)
        return false;
    eo.SetFrameIndex(frame_idx);

    char* frame_buffer = eo.GetInputBufferPtr();
    assert (frame_buffer != nullptr);
    int channel_size = configuration.inWidth * configuration.inHeight;

    Mat image;
    if (! image_file.empty())
    {
        if (is_preprocessed_input)
        {
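            // Preprocessed .y input is already planar BGR at the network
            // resolution (one byte per pixel per channel), so read one frame's
            // worth of data straight into the input buffer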
            std::ifstream ifs(image_file, std::ios::binary);
            ifs.seekg(frame_idx * channel_size * 3);
            ifs.read(frame_buffer, channel_size * 3);
            bool ifs_status = ifs.good();
            ifs.close();
            orig_width = configuration.inWidth;
            orig_height = configuration.inHeight;
            return ifs_status;  // already PreProc-ed
        }
        else
        {
            image = cv::imread(image_file, CV_LOAD_IMAGE_COLOR);
            if (image.empty())
            {
                std::cerr << "Unable to read from: " << image_file << std::endl;
                return false;
            }
        }
    }
    else
    {
        // 640x480 camera input, process one in every 5 frames,
        // can adjust number of skipped frames to match real time processing
        if (! cap.grab()) return false;
        if (! cap.grab()) return false;
        if (! cap.grab()) return false;
        if (! cap.grab()) return false;
        if (! cap.grab()) return false;
        if (! cap.retrieve(image)) return false;
    }

    // scale to network input size
    Mat s_image, bgr_frames[3];
    orig_width = image.cols;
    orig_height = image.rows;
    cv::resize(image, s_image,
               Size(configuration.inWidth, configuration.inHeight),
               0, 0, cv::INTER_AREA);
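    // The network expects planar (channel-major) BGR, so split the interleaved
    // image and copy each channel plane into the input buffer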
    cv::split(s_image, bgr_frames);
    memcpy(frame_buffer, bgr_frames[0].ptr(), channel_size);
    memcpy(frame_buffer+1*channel_size, bgr_frames[1].ptr(), channel_size);
    memcpy(frame_buffer+2*channel_size, bgr_frames[2].ptr(), channel_size);
    return true;
}

// Create frame with boxes drawn around classified objects
bool WriteFrameOutput(const ExecutionObject &eo_in,
                      const ExecutionObject &eo_out,
                      const Configuration& configuration)
{
    // Assemble the original frame from the planar input buffer
    int width = configuration.inWidth;
    int height = configuration.inHeight;
    int channel_size = width * height;
    Mat frame, r_frame, bgr[3];

    unsigned char *in = (unsigned char *) eo_in.GetInputBufferPtr();
    bgr[0] = Mat(height, width, CV_8UC(1), in);
    bgr[1] = Mat(height, width, CV_8UC(1), in + channel_size);
    bgr[2] = Mat(height, width, CV_8UC(1), in + channel_size*2);
    cv::merge(bgr, 3, frame);

    int frame_index = eo_in.GetFrameIndex();
    char outfile_name[64];
    if (! is_camera_input && is_preprocessed_input)
    {
        snprintf(outfile_name, 64, "frame_%d.png", frame_index);
        cv::imwrite(outfile_name, frame);
        printf("Saving frame %d to: %s\n", frame_index, outfile_name);
    }

    // Draw boxes around classified objects
    float *out = (float *) eo_out.GetOutputBufferPtr();
    int num_floats = eo_out.GetOutputBufferSizeInBytes() / sizeof(float);
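    // Each detection occupies 7 floats: [image index, class label, score,
    // xmin, ymin, xmax, ymax], with coordinates normalized to [0, 1];
    // a negative image index marks the end of the valid detections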
    for (int i = 0; i < num_floats / 7; i++)
    {
        int index = (int) out[i * 7 + 0];
        if (index < 0) break;

        int label = (int) out[i * 7 + 1];
        float score = out[i * 7 + 2];
        int xmin = (int) (out[i * 7 + 3] * width);
        int ymin = (int) (out[i * 7 + 4] * height);
        int xmax = (int) (out[i * 7 + 5] * width);
        int ymax = (int) (out[i * 7 + 6] * height);

        object_class_t *object_class = GetObjectClass(object_class_table,
                                                      label);
        if (object_class == nullptr) continue;

#if 0
        printf("(%d, %d) -> (%d, %d): %s, score=%f\n",
               xmin, ymin, xmax, ymax, object_class->label, score);
#endif

        cv::rectangle(frame, Point(xmin, ymin), Point(xmax, ymax),
                      Scalar(object_class->color.blue,
                             object_class->color.green,
                             object_class->color.red), 2);
    }

    // output
    cv::resize(frame, r_frame, Size(orig_width, orig_height));
    if (is_camera_input)
    {
        cv::imshow("SSD_Multibox", r_frame);
        waitKey(1);
    }
    else
    {
        snprintf(outfile_name, 64, "multibox_%d.png", frame_index);
        cv::imwrite(outfile_name, r_frame);
        printf("Saving frame %d with SSD multiboxes to: %s\n",
               frame_index, outfile_name);
    }

    return true;
}

void ProcessArgs(int argc, char *argv[], std::string& config,
                 uint32_t& num_devices, DeviceType& device_type,
                 std::string& input_file)
{
    const struct option long_options[] =
    {
        {"config",      required_argument, 0, 'c'},
        {"num_devices", required_argument, 0, 'n'},
        {"device_type", required_argument, 0, 't'},
        {"image_file",  required_argument, 0, 'i'},
        {"help",        no_argument,       0, 'h'},
        {"verbose",     no_argument,       0, 'v'},
        {0, 0, 0, 0}
    };

    int option_index = 0;

    while (true)
    {
        int c = getopt_long(argc, argv, "c:n:t:i:hv", long_options, &option_index);

        if (c == -1)
            break;

        switch (c)
        {
            case 'c': config = optarg;
                      break;

            case 'n': num_devices = atoi(optarg);
                      assert (num_devices > 0 && num_devices <= 4);
                      break;

            case 't': if (*optarg == 'e')
                      {
                          device_type = DeviceType::DLA;
                          is_partitioned = false;
                      }
#if 0
                      else if (*optarg == 'd')
                      {
                          device_type = DeviceType::DSP;
                          is_partitioned = false;
                      }
#endif
                      else
                      {
                          //std::cerr << "Invalid argument to -t, only e or d"
                          std::cerr << "Invalid argument to -t, only e"
                                       " allowed" << std::endl;
                          exit(EXIT_FAILURE);
                      }
                      break;

            case 'i': input_file = optarg;
                      break;

            case 'v': __TI_show_debug_ = true;
                      break;

            case 'h': DisplayHelp();
                      exit(EXIT_SUCCESS);
                      break;

            case '?': // Error in getopt_long
                      exit(EXIT_FAILURE);
                      break;

            default:
                      std::cerr << "Unsupported option: " << c << std::endl;
                      break;
        }
    }
}

void DisplayHelp()
{
    std::cout << "Usage: ssd_multibox\n"
                 "  Will run the ssd_multibox network to perform multi-object"
                 " detection and classification.\n  Use -c to run a different"
                 " object detection network. Default is jdetnet.\n"
                 "Optional arguments:\n"
                 " -c <config>          Valid configs: jdetnet, jdetnet_512x256\n"
                 " -n <number of cores> Number of cores to use (1 - 4)\n"
                 " -t <d|e>             Type of core. d -> DSP, e -> DLA\n"
                 "                      DSP not supported at this time\n"
                 "                      Defaults to partitioned execution:\n"
                 "                      part 1 on DLA, part 2 on DSP\n"
                 " -i <image>           Path to the image file\n"
                 "                      Default is 1 frame in testvecs\n"
                 " -i camera            Use camera as input\n"
                 " -v                   Verbose output during execution\n"
                 " -h                   Help\n";
}