/******************************************************************************
 * Copyright (c) 2018, Texas Instruments Incorporated - http://www.ti.com/
 *   All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of Texas Instruments Incorporated nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 *****************************************************************************/
#include <signal.h>
#include <iostream>
#include <iomanip>
#include <fstream>
#include <cassert>
#include <string>
#include <functional>
#include <algorithm>
#include <memory>       // std::unique_ptr
#include <time.h>
#include <unistd.h>

#include <queue>
#include <vector>
#include <chrono>

#include "executor.h"
#include "execution_object.h"
#include "execution_object_pipeline.h"
#include "configuration.h"
#include "../common/object_classes.h"
#include "imgutil.h"
#include "../common/video_utils.h"

#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/videoio.hpp"

using namespace std;
using namespace tidl;
using namespace cv;

#define NUM_VIDEO_FRAMES                 300
#define DEFAULT_CONFIG                   "j11_v2"
#define NUM_DEFAULT_INPUTS               1
#define DEFAULT_OBJECT_CLASSES_LIST_FILE "imagenet_objects.json"

const char *default_inputs[NUM_DEFAULT_INPUTS] =
{
    "../test/testvecs/input/objects/cat-pet-animal-domestic-104827.jpeg"
};
std::unique_ptr<ObjectClasses> object_classes;

Executor* CreateExecutor(DeviceType dt, uint32_t num, const Configuration& c);
bool RunConfiguration(cmdline_opts_t& opts);
bool ReadFrame(ExecutionObjectPipeline& eop,
               uint32_t frame_idx, const Configuration& c,
               const cmdline_opts_t& opts, VideoCapture &cap);
bool WriteFrameOutput(const ExecutionObjectPipeline &eop);
void DisplayHelp();

int main(int argc, char *argv[])
{
    // Catch termination signals (SIGABRT/SIGTERM) to ensure a clean exit
    signal(SIGABRT, exit);
    signal(SIGTERM, exit);

    // If there are no devices capable of offloading TIDL on the SoC, exit
    uint32_t num_eves = Executor::GetNumDevices(DeviceType::EVE);
    uint32_t num_dsps = Executor::GetNumDevices(DeviceType::DSP);
    if (num_eves == 0 && num_dsps == 0)
    {
        cout << "TI DL not supported on this SoC." << endl;
        return EXIT_SUCCESS;
    }

    // Process arguments
    cmdline_opts_t opts;
    opts.config = DEFAULT_CONFIG;
    opts.object_classes_list_file = DEFAULT_OBJECT_CLASSES_LIST_FILE;
    if (num_eves != 0) { opts.num_eves = 1; opts.num_dsps = 0; }
    else               { opts.num_eves = 0; opts.num_dsps = 1; }
    if (! ProcessArgs(argc, argv, opts))
    {
        DisplayHelp();
        exit(EXIT_SUCCESS);
    }
    assert(opts.num_dsps != 0 || opts.num_eves != 0);
    if (opts.num_frames == 0)
        opts.num_frames = (opts.is_camera_input || opts.is_video_input) ?
                          NUM_VIDEO_FRAMES : 1;
    if (opts.input_file.empty())
        cout << "Input: " << default_inputs[0] << endl;
    else
        cout << "Input: " << opts.input_file << endl;

    // Get object classes list
    object_classes = std::unique_ptr<ObjectClasses>(
                           new ObjectClasses(opts.object_classes_list_file));
    if (object_classes->GetNumClasses() == 0)
    {
        cout << "No object classes defined for this config." << endl;
        return EXIT_FAILURE;
    }

    // Run network
    bool status = RunConfiguration(opts);
    if (!status)
    {
        cout << "imagenet FAILED" << endl;
        return EXIT_FAILURE;
    }

    cout << "imagenet PASSED" << endl;
    return EXIT_SUCCESS;
}

bool RunConfiguration(cmdline_opts_t& opts)
{
    // Read the TI DL configuration file
    Configuration c;
    string config_file = "../test/testvecs/config/infer/tidl_config_"
                         + opts.config + ".txt";
    bool status = c.ReadFromFile(config_file);
    if (!status)
    {
        cerr << "Error in configuration file: " << config_file << endl;
        return false;
    }
    c.enableApiTrace = opts.verbose;

    // Set up camera/video input/output
    VideoCapture cap;
    if (! SetVideoInputOutput(cap, opts, "ImageNet"))  return false;

    try
    {
        // Create Executors with the appropriate core type, number of cores
        // and configuration specified
        Executor* e_eve = CreateExecutor(DeviceType::EVE, opts.num_eves, c);
        Executor* e_dsp = CreateExecutor(DeviceType::DSP, opts.num_dsps, c);

        // Get ExecutionObjects from Executors
        vector<ExecutionObject*> eos;
        for (uint32_t i = 0; i < opts.num_eves; i++) eos.push_back((*e_eve)[i]);
        for (uint32_t i = 0; i < opts.num_dsps; i++) eos.push_back((*e_dsp)[i]);
        uint32_t num_eos = eos.size();

        // Use duplicate EOPs to do double buffering on frame input/output,
        //    because each EOP has its own set of input/output buffers,
        //    so host-side ReadFrame() can be overlapped with device processing.
        // Using one EO as an example, different buffer_factor values give
        //    different execution behavior:
        // If buffer_factor is set to 1 -> single buffering
        //    we create one EOP: eop0 (eo0)
        //    pipelined execution of multiple frames over time is as follows:
        //    --------------------- time ------------------->
        //    eop0: [RF][eo0.....][WF]
        //    eop0:                   [RF][eo0.....][WF]
        //    eop0:                                     [RF][eo0.....][WF]
        // If buffer_factor is set to 2 -> double buffering
        //    we create two EOPs: eop0 (eo0), eop1 (eo0)
        //    pipelined execution of multiple frames over time is as follows:
        //    --------------------- time ------------------->
        //    eop0: [RF][eo0.....][WF]
        //    eop1:     [RF]      [eo0.....][WF]
        //    eop0:         [RF]            [eo0.....][WF]
        //    eop1:             [RF]                  [eo0.....][WF]
        vector<ExecutionObjectPipeline *> eops;
        uint32_t buffer_factor = 2;  // set to 1 for single buffering
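        // Note: a larger buffer_factor simply creates more EOPs (and buffers)
        // per EO, allowing deeper read-ahead at the cost of extra memory;
        // 2 is sufficient to overlap frame I/O with device processing.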
        for (uint32_t j = 0; j < buffer_factor; j++)
            for (uint32_t i = 0; i < num_eos; i++)
                eops.push_back(new ExecutionObjectPipeline({eos[i]}));
        uint32_t num_eops = eops.size();

        // Allocate input and output buffers for each EOP
        AllocateMemory(eops);

        chrono::time_point<chrono::steady_clock> tloop0, tloop1;
        tloop0 = chrono::steady_clock::now();

        // Process frames with available eops in a pipelined manner,
        // with num_eops additional iterations to flush the pipeline (epilogue)
        for (uint32_t frame_idx = 0;
             frame_idx < opts.num_frames + num_eops; frame_idx++)
        {
            ExecutionObjectPipeline* eop = eops[frame_idx % num_eops];
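
            // During the final num_eops iterations, ReadFrame() below returns
            // false (frame_idx >= opts.num_frames), so no new frame is queued;
            // the wait below simply drains frames that are still in flight.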
            // Wait for previous frame on the same eop to finish processing
            if (eop->ProcessFrameWait())
            {
                WriteFrameOutput(*eop);
            }

            // Read a frame and start processing it with current eop
            if (ReadFrame(*eop, frame_idx, c, opts, cap))
                eop->ProcessFrameStartAsync();
        }

        tloop1 = chrono::steady_clock::now();
        chrono::duration<float> elapsed = tloop1 - tloop0;
        cout << "Loop total time (including read/write/opencv/print/etc): "
             << setw(6) << setprecision(4)
             << (elapsed.count() * 1000) << "ms" << endl;

        FreeMemory(eops);
        for (auto eop : eops)  delete eop;
        delete e_eve;
        delete e_dsp;
    }
    catch (tidl::Exception &e)
    {
        cerr << e.what() << endl;
        status = false;
    }

    return status;
}

// Create an Executor with the specified type and number of EOs
Executor* CreateExecutor(DeviceType dt, uint32_t num, const Configuration& c)
{
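    // Return nullptr when no cores of this type are requested; the caller can
    // then delete e_eve/e_dsp unconditionally, since deleting a null pointer
    // is a no-op.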
    if (num == 0) return nullptr;

    DeviceIds ids;
    for (uint32_t i = 0; i < num; i++)
        ids.insert(static_cast<DeviceId>(i));

    return new Executor(dt, ids, c);
}

bool ReadFrame(ExecutionObjectPipeline &eop,
               uint32_t frame_idx, const Configuration& c,
               const cmdline_opts_t& opts, VideoCapture &cap)
{
    if (frame_idx >= opts.num_frames)
        return false;

    eop.SetFrameIndex(frame_idx);

    char* frame_buffer = eop.GetInputBufferPtr();
    assert (frame_buffer != nullptr);
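
    // frame_buffer is this EOP's own input buffer; PreprocessImage() at the
    // end of this function writes the network-ready pixel data into it.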

    Mat image;
    if (! opts.is_camera_input && ! opts.is_video_input)
    {
        if (opts.input_file.empty())
            image = cv::imread(default_inputs[frame_idx % NUM_DEFAULT_INPUTS],
                               cv::IMREAD_COLOR);
        else
            image = cv::imread(opts.input_file, cv::IMREAD_COLOR);
        if (image.empty())
        {
            cerr << "Unable to read input image" << endl;
            return false;
        }
    }
    else
    {
        Mat v_image;
        if (! cap.grab())            return false;
        if (! cap.retrieve(v_image)) return false;
        int orig_width  = v_image.cols;
        int orig_height = v_image.rows;
        // Crop camera/video input to the center 256x256 region
        if (orig_width > 256 && orig_height > 256)
        {
            image = Mat(v_image, Rect((orig_width-256)/2, (orig_height-256)/2,
                                      256, 256));
        }
        else
            image = v_image;
        cv::imshow("ImageNet", image);
        waitKey(2);
    }

    // TI DL image preprocessing, into frame_buffer
    return imgutil::PreprocessImage(image, frame_buffer, c);
}

// Display top 5 classified imagenet classes with probabilities
bool WriteFrameOutput(const ExecutionObjectPipeline &eop)
{
    const int k = 5;
    unsigned char *out = (unsigned char *) eop.GetOutputBufferPtr();
    int out_size = eop.GetOutputBufferSizeInBytes();
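
    // Each output byte is the network's score for one class; the byte's index
    // is the class index used to look up the label in object_classes.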

    // Sort and get the k largest values and their corresponding indices
    typedef pair<unsigned char, int> val_index;
    auto cmp = [](const val_index &left, const val_index &right)
                   { return left.first > right.first; };
    priority_queue<val_index, vector<val_index>, decltype(cmp)> queue(cmp);

    // Initialize the priority queue with the smallest value on top
    for (int i = 0; i < k; i++)
        queue.push(val_index(out[i], i));

    // For the rest of the output, if a value is larger than the current min,
    // pop the min and push the new value
    for (int i = k; i < out_size; i++)
    {
        if (out[i] > queue.top().first)
        {
            queue.pop();
            queue.push(val_index(out[i], i));
        }
    }

    // Output the top k values in reverse order: largest value first
    vector<val_index> sorted;
    while (! queue.empty())
    {
        sorted.push_back(queue.top());
        queue.pop();
    }

    for (int i = k - 1; i >= 0; i--)
        cout << k-i << ": "
             << object_classes->At(sorted[i].second).label << endl;

    return true;
}

void DisplayHelp()
{
    cout <<
    "Usage: imagenet\n"
    "  Will run imagenet network to predict top 5 object"
    " classes for the input.\n  Use -c to run a"
    " different imagenet network. Default is j11_v2.\n"
    "Optional arguments:\n"
    " -c <config>              Valid configs: j11_bn, j11_prelu, j11_v2\n"
    " -d <number>              Number of dsp cores to use\n"
    " -e <number>              Number of eve cores to use\n"
    " -i <image>               Path to the image file as input\n"
    " -i camera<number>        Use camera as input\n"
    "                          video input port: /dev/video<number>\n"
    " -i <name>.{mp4,mov,avi}  Use video file as input\n"
    " -l <objects_list>        Path to the object classes list file\n"
    " -f <number>              Number of frames to process\n"
    " -v                       Verbose output during execution\n"
    " -h                       Help\n";
}