1 /******************************************************************************
2 * Copyright (c) 2018, Texas Instruments Incorporated - http://www.ti.com/
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 * * Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * * Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 * * Neither the name of Texas Instruments Incorporated nor the
13 * names of its contributors may be used to endorse or promote products
14 * derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
17 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
20 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
23 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
24 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
25 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
26 * THE POSSIBILITY OF SUCH DAMAGE.
27 *****************************************************************************/
28 #include <signal.h>
29 #include <getopt.h>
30 #include <iostream>
31 #include <iomanip>
32 #include <fstream>
33 #include <cassert>
34 #include <string>
35 #include <functional>
36 #include <algorithm>
37 #include <time.h>
38 #include <unistd.h>
40 #include <queue>
41 #include <vector>
42 #include <cstdio>
44 #include "executor.h"
45 #include "execution_object.h"
46 #include "configuration.h"
47 #include "../segmentation/object_classes.h"
49 #include "opencv2/core.hpp"
50 #include "opencv2/imgproc.hpp"
51 #include "opencv2/highgui.hpp"
52 #include "opencv2/videoio.hpp"
54 #define NUM_VIDEO_FRAMES 100
55 #define DEFAULT_CONFIG "jdetnet"
56 #define DEFAULT_INPUT "../test/testvecs/input/preproc_0_768x320.y"
58 bool __TI_show_debug_ = false;
59 bool is_default_input = false;
60 bool is_preprocessed_input = false;
61 bool is_camera_input = false;
62 int orig_width;
63 int orig_height;
64 object_class_table_t *object_class_table;
66 using namespace tinn;
67 using namespace cv;
70 bool RunConfiguration(const std::string& config_file, int num_devices,
71 DeviceType device_type, std::string& input_file);
72 bool RunAllConfigurations(int32_t num_devices, DeviceType device_type);
74 bool ReadFrame(ExecutionObject& eo, int frame_idx,
75 const Configuration& configuration, int num_frames,
76 std::string& image_file, VideoCapture &cap);
77 bool WriteFrameOutput(const ExecutionObject &eo,
78 const Configuration& configuration);
80 static void ProcessArgs(int argc, char *argv[],
81 std::string& config,
82 int& num_devices,
83 DeviceType& device_type,
84 std::string& input_file);
86 static void DisplayHelp();
// Elapsed wall-clock time from t0 to t1, expressed in milliseconds.
static double ms_diff(struct timespec &t0, struct timespec &t1)
{
    double whole_ms    = static_cast<double>(t1.tv_sec  - t0.tv_sec)  * 1e3;
    double fraction_ms = static_cast<double>(t1.tv_nsec - t0.tv_nsec) / 1e6;
    return whole_ms + fraction_ms;
}
92 int main(int argc, char *argv[])
93 {
94 // Catch ctrl-c to ensure a clean exit
95 signal(SIGABRT, exit);
96 signal(SIGTERM, exit);
98 // If there are no devices capable of offloading TIDL on the SoC, exit
99 uint32_t num_dla = Executor::GetNumDevices(DeviceType::DLA);
100 uint32_t num_dsp = Executor::GetNumDevices(DeviceType::DSP);
101 if (num_dla == 0 && num_dsp == 0)
102 {
103 std::cout << "TI DL not supported on this SoC." << std::endl;
104 return EXIT_SUCCESS;
105 }
107 // Process arguments
108 std::string config = DEFAULT_CONFIG;
109 std::string input_file = DEFAULT_INPUT;
110 int num_devices = 1;
111 DeviceType device_type = DeviceType::DLA;
112 ProcessArgs(argc, argv, config, num_devices, device_type, input_file);
114 if ((object_class_table = GetObjectClassTable(config)) == nullptr)
115 {
116 std::cout << "No object classes defined for this config." << std::endl;
117 return EXIT_FAILURE;
118 }
120 if (input_file == DEFAULT_INPUT) is_default_input = true;
121 if (input_file == "camera") is_camera_input = true;
122 if (input_file.length() > 2 &&
123 input_file.compare(input_file.length() - 2, 2, ".y") == 0)
124 is_preprocessed_input = true;
125 std::cout << "Input: " << input_file << std::endl;
126 std::string config_file = "../test/testvecs/config/infer/tidl_config_"
127 + config + ".txt";
128 bool status = RunConfiguration(config_file, num_devices, device_type,
129 input_file);
131 if (!status)
132 {
133 std::cout << "ssd_multibox FAILED" << std::endl;
134 return EXIT_FAILURE;
135 }
137 std::cout << "ssd_multibox PASSED" << std::endl;
138 return EXIT_SUCCESS;
139 }
// Run the detection network described by config_file on num_devices cores
// of the given device_type, reading frames from input_file (or the camera).
// Returns true when every frame was processed without a TIDL exception.
bool RunConfiguration(const std::string& config_file, int num_devices,
                      DeviceType device_type, std::string& input_file)
{
    // Build the set of device ids [0, num_devices) handed to the Executor.
    DeviceIds ids;
    for (int i = 0; i < num_devices; i++)
        ids.insert(static_cast<DeviceId>(i));

    // Read the TI DL configuration file
    Configuration configuration;
    bool status = configuration.ReadFromFile(config_file);
    if (!status)
    {
        std::cerr << "Error in configuration file: " << config_file
                  << std::endl;
        return false;
    }

    // NOTE(review): this condition covers both device types used by this
    // program, so runFullNet appears to always be set — confirm intent.
    if (device_type == DeviceType::DLA || device_type == DeviceType::DSP)
        configuration.runFullNet = 1;

    // setup input: the default test vector is replayed for 3 frames, a
    // user-supplied file for 1 frame, the camera for NUM_VIDEO_FRAMES.
    int num_frames = is_default_input ? 3 : 1;
    VideoCapture cap;
    std::string image_file;
    if (is_camera_input)
    {
        cap = VideoCapture(1); // cap = VideoCapture("test.mp4");
        if (! cap.isOpened())
        {
            std::cerr << "Cannot open camera input." << std::endl;
            return false;
        }
        num_frames = NUM_VIDEO_FRAMES;
        namedWindow("SSD_Multibox", WINDOW_AUTOSIZE | CV_GUI_NORMAL);
    }
    else
    {
        image_file = input_file;
    }

    // Determine input frame size from configuration
    size_t frame_sz = configuration.inWidth * configuration.inHeight *
                      configuration.inNumChannels;

    try
    {
        // Create a executor with the approriate core type, number of cores
        // and configuration specified
        Executor executor(device_type, ids, configuration);

        // Query Executor for set of ExecutionObjects created
        const ExecutionObjects& execution_objects =
                                                executor.GetExecutionObjects();
        int num_eos = execution_objects.size();

        // Allocate input and output buffers for each execution object.
        // NOTE(review): malloc results are not nullptr-checked, and the
        // buffers are freed only on the normal path — they leak if the
        // loop below throws. Consider RAII-managed buffers.
        std::vector<void *> buffers;
        for (auto &eo : execution_objects)
        {
            ArgInfo in = { ArgInfo(malloc(frame_sz), frame_sz)};
            ArgInfo out = { ArgInfo(malloc(frame_sz), frame_sz)};
            eo->SetInputOutputBuffer(in, out);

            buffers.push_back(in.ptr());
            buffers.push_back(out.ptr());
        }

#define MAX_NUM_EOS 4
        // Per-EO host-side start timestamps; t1 is reused as the stop time.
        struct timespec t0[MAX_NUM_EOS], t1;

        // Process frames with available execution objects in a pipelined manner
        // additional num_eos iterations to flush the pipeline (epilogue)
        for (int frame_idx = 0;
             frame_idx < num_frames + num_eos; frame_idx++)
        {
            // Round-robin assignment of frames to execution objects.
            ExecutionObject* eo = execution_objects[frame_idx % num_eos].get();

            // Wait for previous frame on the same eo to finish processing
            if (eo->ProcessFrameWait())
            {
                // Report device time vs host wall-clock time, and the
                // resulting API overhead, for the frame that just finished.
                clock_gettime(CLOCK_MONOTONIC, &t1);
                double elapsed_host =
                                ms_diff(t0[eo->GetFrameIndex() % num_eos], t1);
                double elapsed_device = eo->GetProcessTimeInMilliSeconds();
                double overhead = 100 - (elapsed_device/elapsed_host*100);

                std::cout << "frame[" << eo->GetFrameIndex() << "]: "
                          << "Time on device: "
                          << std::setw(6) << std::setprecision(4)
                          << elapsed_device << "ms, "
                          << "host: "
                          << std::setw(6) << std::setprecision(4)
                          << elapsed_host << "ms ";
                std::cout << "API overhead: "
                          << std::setw(6) << std::setprecision(3)
                          << overhead << " %" << std::endl;

                WriteFrameOutput(*eo, configuration);
            }

            // Read a frame and start processing it with current eo
            if (ReadFrame(*eo, frame_idx, configuration, num_frames,
                          image_file, cap))
            {
                clock_gettime(CLOCK_MONOTONIC, &t0[frame_idx % num_eos]);
                eo->ProcessFrameStartAsync();
            }
        }

        // Release the input/output buffers allocated above.
        for (auto b : buffers)
            free(b);
    }
    catch (tinn::Exception &e)
    {
        std::cerr << e.what() << std::endl;
        status = false;
    }

    return status;
}
// Fill eo's input buffer with frame number frame_idx, taken from either a
// pre-processed raw file, an image file, or the camera. Returns false when
// frame_idx is past the last frame (pipeline drain) or on a read failure.
// Side effect: records the source dimensions in the orig_width/orig_height
// globals so WriteFrameOutput can scale results back for display.
bool ReadFrame(ExecutionObject &eo, int frame_idx,
               const Configuration& configuration, int num_frames,
               std::string& image_file, VideoCapture &cap)
{
    // Past the last real frame: nothing left to feed the pipeline.
    if (frame_idx >= num_frames)
        return false;
    eo.SetFrameIndex(frame_idx);

    char* frame_buffer = eo.GetInputBufferPtr();
    assert (frame_buffer != nullptr);
    // Byte size of one color plane at the network's input resolution.
    int channel_size = configuration.inWidth * configuration.inHeight;

    Mat image;
    if (! image_file.empty())
    {
        if (is_preprocessed_input)
        {
            // Raw planar data already at network resolution: read the
            // 3-plane frame straight into the input buffer, seeking to
            // the requested frame within a multi-frame file.
            std::ifstream ifs(image_file, std::ios::binary);
            ifs.seekg(frame_idx * channel_size * 3);
            ifs.read(frame_buffer, channel_size * 3);
            bool ifs_status = ifs.good();
            ifs.close();
            orig_width = configuration.inWidth;
            orig_height = configuration.inHeight;
            return ifs_status; // already PreProc-ed
        }
        else
        {
            // NOTE(review): CV_LOAD_IMAGE_COLOR is the legacy OpenCV 2.x
            // constant; newer OpenCV uses cv::IMREAD_COLOR.
            image = cv::imread(image_file, CV_LOAD_IMAGE_COLOR);
            if (image.empty())
            {
                std::cerr << "Unable to read from: " << image_file << std::endl;
                return false;
            }
        }
    }
    else
    {
        // 640x480 camera input, process one in every 5 frames,
        // can adjust number of skipped frames to match real time processing
        if (! cap.grab()) return false;
        if (! cap.grab()) return false;
        if (! cap.grab()) return false;
        if (! cap.grab()) return false;
        if (! cap.grab()) return false;
        if (! cap.retrieve(image)) return false;
    }

    // scale to network input size, then split the interleaved BGR image
    // into the planar layout (B plane, G plane, R plane) the net expects
    Mat s_image, bgr_frames[3];
    orig_width = image.cols;
    orig_height = image.rows;
    cv::resize(image, s_image,
               Size(configuration.inWidth, configuration.inHeight),
               0, 0, cv::INTER_AREA);
    cv::split(s_image, bgr_frames);
    memcpy(frame_buffer, bgr_frames[0].ptr(), channel_size);
    memcpy(frame_buffer+1*channel_size, bgr_frames[1].ptr(), channel_size);
    memcpy(frame_buffer+2*channel_size, bgr_frames[2].ptr(), channel_size);
    return true;
}
// Create frame with boxes drawn around classified objects.
// Reassembles the planar input buffer into a displayable image, overlays a
// colored rectangle for each detection in the network output, then either
// shows the frame (camera mode) or writes it to a numbered PNG file.
bool WriteFrameOutput(const ExecutionObject &eo,
                      const Configuration& configuration)
{
    // Assemble original frame: merge the three planar channels from the
    // input buffer back into an interleaved BGR image.
    int width = configuration.inWidth;
    int height = configuration.inHeight;
    int channel_size = width * height;
    Mat frame, r_frame, bgr[3];

    unsigned char *in = (unsigned char *) eo.GetInputBufferPtr();
    bgr[0] = Mat(height, width, CV_8UC(1), in);
    bgr[1] = Mat(height, width, CV_8UC(1), in + channel_size);
    bgr[2] = Mat(height, width, CV_8UC(1), in + channel_size*2);
    cv::merge(bgr, 3, frame);

    int frame_index = eo.GetFrameIndex();
    char outfile_name[64];
    // For pre-processed file input, also save the raw (un-annotated) frame.
    if (! is_camera_input && is_preprocessed_input)
    {
        snprintf(outfile_name, 64, "frame_%d.png", frame_index);
        cv::imwrite(outfile_name, frame);
        printf("Saving frame %d to: %s\n", frame_index, outfile_name);
    }

    // Draw boxes around classified objects. Each detection is a 7-float
    // record: [index, label, score, xmin, ymin, xmax, ymax], with box
    // coordinates normalized to [0, 1]; a negative index ends the list.
    float *out = (float *) eo.GetOutputBufferPtr();
    int num_floats = eo.GetOutputBufferSizeInBytes() / sizeof(float);
    for (int i = 0; i < num_floats / 7; i++)
    {
        int index = (int) out[i * 7 + 0];
        if (index < 0) break;

        int label = (int) out[i * 7 + 1];
        float score = out[i * 7 + 2];
        int xmin = (int) (out[i * 7 + 3] * width);
        int ymin = (int) (out[i * 7 + 4] * height);
        int xmax = (int) (out[i * 7 + 5] * width);
        int ymax = (int) (out[i * 7 + 6] * height);

        // Skip labels this config's class table does not define.
        object_class_t *object_class = GetObjectClass(object_class_table,
                                                      label);
        if (object_class == nullptr) continue;

#if 0
        printf("(%d, %d) -> (%d, %d): %s, score=%f\n",
               xmin, ymin, xmax, ymax, object_class->label, score);
#endif

        cv::rectangle(frame, Point(xmin, ymin), Point(xmax, ymax),
                      Scalar(object_class->color.blue,
                             object_class->color.green,
                             object_class->color.red), 2);
    }

    // output: scale back to the original source size, then display
    // (camera) or save to a numbered PNG (file input).
    cv::resize(frame, r_frame, Size(orig_width, orig_height));
    if (is_camera_input)
    {
        cv::imshow("SSD_Multibox", r_frame);
        waitKey(1);
    }
    else
    {
        snprintf(outfile_name, 64, "multibox_%d.png", frame_index);
        cv::imwrite(outfile_name, r_frame);
        printf("Saving frame %d with SSD multiboxes to: %s\n",
               frame_index, outfile_name);
    }

    return true;
}
399 void ProcessArgs(int argc, char *argv[], std::string& config,
400 int& num_devices, DeviceType& device_type,
401 std::string& input_file)
402 {
403 const struct option long_options[] =
404 {
405 {"config", required_argument, 0, 'c'},
406 {"num_devices", required_argument, 0, 'n'},
407 {"device_type", required_argument, 0, 't'},
408 {"image_file", required_argument, 0, 'i'},
409 {"help", no_argument, 0, 'h'},
410 {"verbose", no_argument, 0, 'v'},
411 {0, 0, 0, 0}
412 };
414 int option_index = 0;
416 while (true)
417 {
418 int c = getopt_long(argc, argv, "c:n:t:i:hv", long_options, &option_index);
420 if (c == -1)
421 break;
423 switch (c)
424 {
425 case 'c': config = optarg;
426 break;
428 case 'n': num_devices = atoi(optarg);
429 assert (num_devices > 0 && num_devices <= 4);
430 break;
432 case 't': if (*optarg == 'e')
433 device_type = DeviceType::DLA;
434 #if 0
435 else if (*optarg == 'd')
436 device_type = DeviceType::DSP;
437 #endif
438 else
439 {
440 //std::cerr << "Invalid argument to -t, only e or d"
441 std::cerr << "Invalid argument to -t, only e"
442 " allowed" << std::endl;
443 exit(EXIT_FAILURE);
444 }
445 break;
447 case 'i': input_file = optarg;
448 break;
450 case 'v': __TI_show_debug_ = true;
451 break;
453 case 'h': DisplayHelp();
454 exit(EXIT_SUCCESS);
455 break;
457 case '?': // Error in getopt_long
458 exit(EXIT_FAILURE);
459 break;
461 default:
462 std::cerr << "Unsupported option: " << c << std::endl;
463 break;
464 }
465 }
466 }
// Print usage information for the ssd_multibox example to stdout.
void DisplayHelp()
{
    std::cout << "Usage: ssd_multibox\n";
    std::cout << " Will run ssd_multibox network to perform multi-objects"
                 " classification.\n Use -c to run a different"
                 " segmentation network. Default is jdetnet.\n";
    std::cout << "Optional arguments:\n";
    std::cout << " -c <config> Valid configs: jdetnet, jdetnet_512x256\n";
    std::cout << " -n <number of cores> Number of cores to use (1 - 4)\n";
    std::cout << " -t <d|e> Type of core. d -> DSP, e -> DLA\n";
    std::cout << " Only support DLA for now\n";
    std::cout << " -i <image> Path to the image file\n";
    std::cout << " Default is 1 frame in testvecs\n";
    std::cout << " -i camera Use camera as input\n";
    std::cout << " -v Verbose output during execution\n";
    std::cout << " -h Help\n";
}