#ifndef YOLO_V2_CLASS_HPP
#define YOLO_V2_CLASS_HPP

#ifndef LIB_API
#ifdef LIB_EXPORTS
#if defined(_MSC_VER)
#define LIB_API __declspec(dllexport)
#else
#define LIB_API __attribute__((visibility("default")))
#endif
#else
#if defined(_MSC_VER)
#define LIB_API
#else
#define LIB_API
#endif
#endif
#endif

#define C_SHARP_MAX_OBJECTS 1000
struct bbox_t {
    unsigned int x, y, w, h;       // (x,y) - top-left corner, (w,h) - width & height of the bounding box
    float prob;                    // confidence - probability that the object was found correctly
    unsigned int obj_id;           // class of object - from range [0, classes-1]
    unsigned int track_id;         // tracking id for video (0 - untracked, 1..inf - tracked object)
    unsigned int frames_counter;   // counter of frames on which the object was detected
    float x_3d, y_3d, z_3d;        // center of object (in meters) if a ZED 3D camera is used
};

struct image_t {
    int h;                         // height
    int w;                         // width
    int c;                         // number of channels (3 - for RGB)
    float *data;                   // pointer to the image data
};

struct bbox_t_container {
    bbox_t candidates[C_SHARP_MAX_OBJECTS];
};
#ifdef __cplusplus
#include <memory>
#include <vector>
#include <deque>
#include <algorithm>
#include <chrono>
#include <string>
#include <sstream>
#include <iostream>
#include <cmath>

#ifdef OPENCV
#include <opencv2/opencv.hpp>            // C++
#include <opencv2/highgui/highgui_c.h>   // C
#include <opencv2/imgproc/imgproc_c.h>   // C
#endif

extern "C" LIB_API int init(const char *configurationFilename, const char *weightsFilename, int gpu);
extern "C" LIB_API int detect_image(const char *filename, bbox_t_container &container);
extern "C" LIB_API int detect_mat(const uint8_t* data, const size_t data_length, bbox_t_container &container);
extern "C" LIB_API int dispose();
extern "C" LIB_API int get_device_count();
extern "C" LIB_API int get_device_name(int gpu, char* deviceName);
extern "C" LIB_API bool built_with_cuda();
extern "C" LIB_API bool built_with_cudnn();
extern "C" LIB_API bool built_with_opencv();
extern "C" LIB_API void send_json_custom(char const* send_buf, int port, int timeout);
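
// Hedged usage sketch (added comment, not part of the original API contract): one assumed way
// to drive the exported C functions above from plain C or a C# wrapper. The file names are
// placeholders, and the int returned by detect_image() is assumed here to be the number of
// boxes written into the container.
//
//     bbox_t_container container;
//     init("yolov4.cfg", "yolov4.weights", 0 /* gpu id */);
//     int n = detect_image("dog.jpg", container);
//     for (int i = 0; i < n; ++i) {
//         bbox_t b = container.candidates[i];
//         printf("obj_id=%u prob=%f box=%u,%u %ux%u\n", b.obj_id, b.prob, b.x, b.y, b.w, b.h);
//     }
//     dispose();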
class Detector {
    std::shared_ptr<void> detector_gpu_ptr;
    std::deque<std::vector<bbox_t>> prev_bbox_vec_deque;
    std::string _cfg_filename, _weight_filename;
public:
    const int cur_gpu_id;
    float nms = .4;
    bool wait_stream;

    LIB_API Detector(std::string cfg_filename, std::string weight_filename, int gpu_id = 0);
    LIB_API ~Detector();

    LIB_API std::vector<bbox_t> detect(std::string image_filename, float thresh = 0.2, bool use_mean = false);
    LIB_API std::vector<bbox_t> detect(image_t img, float thresh = 0.2, bool use_mean = false);
    static LIB_API image_t load_image(std::string image_filename);
    static LIB_API void free_image(image_t m);
    LIB_API int get_net_width() const;
    LIB_API int get_net_height() const;
    LIB_API int get_net_color_depth() const;

    LIB_API std::vector<bbox_t> tracking_id(std::vector<bbox_t> cur_bbox_vec, bool const change_history = true,
                                            int const frames_story = 5, int const max_dist = 40);

    LIB_API void *get_cuda_context();

    //LIB_API bool send_json_http(std::vector<bbox_t> cur_bbox_vec, std::vector<std::string> obj_names, int frame_id,
    //    std::string filename = std::string(), int timeout = 400000, int port = 8070);

    std::vector<bbox_t> detect_resized(image_t img, int init_w, int init_h, float thresh = 0.2, bool use_mean = false)
    {
        if (img.data == NULL)
            throw std::runtime_error("Image is empty");
        auto detection_boxes = detect(img, thresh, use_mean);
        float wk = (float)init_w / img.w, hk = (float)init_h / img.h;
        for (auto &i : detection_boxes) i.x *= wk, i.w *= wk, i.y *= hk, i.h *= hk;
        return detection_boxes;
    }

#ifdef OPENCV
    std::vector<bbox_t> detect(cv::Mat mat, float thresh = 0.2, bool use_mean = false)
    {
        if (mat.data == NULL)
            throw std::runtime_error("Image is empty");
        auto image_ptr = mat_to_image_resize(mat);
        return detect_resized(*image_ptr, mat.cols, mat.rows, thresh, use_mean);
    }

    std::shared_ptr<image_t> mat_to_image_resize(cv::Mat mat) const
    {
        if (mat.data == NULL) return std::shared_ptr<image_t>(NULL);

        cv::Size network_size = cv::Size(get_net_width(), get_net_height());
        cv::Mat det_mat;
        if (mat.size() != network_size)
            cv::resize(mat, det_mat, network_size);
        else
            det_mat = mat;  // only reference is copied

        return mat_to_image(det_mat);
    }

    static std::shared_ptr<image_t> mat_to_image(cv::Mat img_src)
    {
        cv::Mat img;
        if (img_src.channels() == 4) cv::cvtColor(img_src, img, cv::COLOR_RGBA2BGR);
        else if (img_src.channels() == 3) cv::cvtColor(img_src, img, cv::COLOR_RGB2BGR);
        else if (img_src.channels() == 1) cv::cvtColor(img_src, img, cv::COLOR_GRAY2BGR);
        else std::cerr << " Warning: img_src.channels() is not 1, 3 or 4. It is = " << img_src.channels() << std::endl;
        std::shared_ptr<image_t> image_ptr(new image_t, [](image_t *img) { free_image(*img); delete img; });
        *image_ptr = mat_to_image_custom(img);
        return image_ptr;
    }

private:
    static image_t mat_to_image_custom(cv::Mat mat)
    {
        int w = mat.cols;
        int h = mat.rows;
        int c = mat.channels();
        image_t im = make_image_custom(w, h, c);
        unsigned char *data = (unsigned char *)mat.data;
        int step = mat.step;
        for (int y = 0; y < h; ++y) {
            for (int k = 0; k < c; ++k) {
                for (int x = 0; x < w; ++x) {
                    im.data[k*w*h + y*w + x] = data[y*step + x*c + k] / 255.0f;
                }
            }
        }
        return im;
    }

    static image_t make_empty_image(int w, int h, int c)
    {
        image_t out;
        out.data = 0;
        out.h = h;
        out.w = w;
        out.c = c;
        return out;
    }

    static image_t make_image_custom(int w, int h, int c)
    {
        image_t out = make_empty_image(w, h, c);
        out.data = (float *)calloc(h*w*c, sizeof(float));
        return out;
    }
#endif  // OPENCV

public:
    bool send_json_http(std::vector<bbox_t> cur_bbox_vec, std::vector<std::string> obj_names, int frame_id,
        std::string filename = std::string(), int timeout = 400000, int port = 8070)
    {
        std::string send_str;

        char *tmp_buf = (char *)calloc(1024, sizeof(char));
        if (!filename.empty()) {
            sprintf(tmp_buf, "{\n \"frame_id\":%d, \n \"filename\":\"%s\", \n \"objects\": [ \n", frame_id, filename.c_str());
        }
        else {
            sprintf(tmp_buf, "{\n \"frame_id\":%d, \n \"objects\": [ \n", frame_id);
        }
        send_str = tmp_buf;
        free(tmp_buf);

        for (auto & i : cur_bbox_vec) {
            char *buf = (char *)calloc(2048, sizeof(char));
            sprintf(buf, " {\"class_id\":%d, \"name\":\"%s\", \"absolute_coordinates\":{\"center_x\":%d, \"center_y\":%d, \"width\":%d, \"height\":%d}, \"confidence\":%f",
                i.obj_id, obj_names[i.obj_id].c_str(), i.x, i.y, i.w, i.h, i.prob);
            //sprintf(buf, " {\"class_id\":%d, \"name\":\"%s\", \"relative_coordinates\":{\"center_x\":%f, \"center_y\":%f, \"width\":%f, \"height\":%f}, \"confidence\":%f",
            //    i.obj_id, obj_names[i.obj_id], i.x, i.y, i.w, i.h, i.prob);
            send_str += buf;

            if (!std::isnan(i.z_3d)) {
                sprintf(buf, "\n , \"coordinates_in_meters\":{\"x_3d\":%.2f, \"y_3d\":%.2f, \"z_3d\":%.2f}",
                    i.x_3d, i.y_3d, i.z_3d);
                send_str += buf;
            }

            send_str += "}\n";
            free(buf);
        }

        //send_str += "\n ] \n}, \n";
        send_str += "\n ] \n}";

        send_json_custom(send_str.c_str(), port, timeout);
        return true;
    }
};
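
// Hedged usage sketch (added comment): one way to drive the Detector class above from a .cpp
// file compiled with OPENCV defined. "yolov4.cfg", "yolov4.weights" and "dog.jpg" are
// placeholder file names.
//
//     #include "yolo_v2_class.hpp"
//     #include <opencv2/opencv.hpp>
//
//     int main() {
//         Detector detector("yolov4.cfg", "yolov4.weights", 0 /* gpu_id */);
//         cv::Mat frame = cv::imread("dog.jpg");
//         std::vector<bbox_t> boxes = detector.detect(frame, 0.2f /* thresh */);
//         for (auto &b : boxes)
//             std::cout << "obj_id=" << b.obj_id << " prob=" << b.prob
//                       << " box=" << b.x << "," << b.y << " " << b.w << "x" << b.h << "\n";
//         return 0;
//     }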
// --------------------------------------------------------------------------------

#if defined(TRACK_OPTFLOW) && defined(OPENCV) && defined(GPU)

#include <opencv2/cudaoptflow.hpp>
#include <opencv2/cudaimgproc.hpp>
#include <opencv2/cudaarithm.hpp>
#include <opencv2/core/cuda.hpp>

class Tracker_optflow {
public:
    const int gpu_count;
    const int gpu_id;
    const int flow_error;

    Tracker_optflow(int _gpu_id = 0, int win_size = 15, int max_level = 3, int iterations = 8000, int _flow_error = -1) :
        gpu_count(cv::cuda::getCudaEnabledDeviceCount()), gpu_id(std::min(_gpu_id, gpu_count-1)),
        flow_error((_flow_error > 0)? _flow_error:(win_size*4))
    {
        int const old_gpu_id = cv::cuda::getDevice();
        cv::cuda::setDevice(gpu_id);

        stream = cv::cuda::Stream();

        sync_PyrLKOpticalFlow_gpu = cv::cuda::SparsePyrLKOpticalFlow::create();
        sync_PyrLKOpticalFlow_gpu->setWinSize(cv::Size(win_size, win_size));   // 9, 15, 21, 31
        sync_PyrLKOpticalFlow_gpu->setMaxLevel(max_level);                     // +- 3 pt
        sync_PyrLKOpticalFlow_gpu->setNumIters(iterations);                    // 2000, def: 30

        cv::cuda::setDevice(old_gpu_id);
    }

    // just to avoid extra allocations
    cv::cuda::GpuMat src_mat_gpu;
    cv::cuda::GpuMat dst_mat_gpu, dst_grey_gpu;
    cv::cuda::GpuMat prev_pts_flow_gpu, cur_pts_flow_gpu;
    cv::cuda::GpuMat status_gpu, err_gpu;

    cv::cuda::GpuMat src_grey_gpu;  // used in both functions
    cv::Ptr<cv::cuda::SparsePyrLKOpticalFlow> sync_PyrLKOpticalFlow_gpu;
    cv::cuda::Stream stream;

    std::vector<bbox_t> cur_bbox_vec;
    std::vector<bool> good_bbox_vec_flags;
    cv::Mat prev_pts_flow_cpu;

    void update_cur_bbox_vec(std::vector<bbox_t> _cur_bbox_vec)
    {
        cur_bbox_vec = _cur_bbox_vec;
        good_bbox_vec_flags = std::vector<bool>(cur_bbox_vec.size(), true);
        cv::Mat prev_pts, cur_pts_flow_cpu;

        for (auto &i : cur_bbox_vec) {
            float x_center = (i.x + i.w / 2.0F);
            float y_center = (i.y + i.h / 2.0F);
            prev_pts.push_back(cv::Point2f(x_center, y_center));
        }

        if (prev_pts.rows == 0)
            prev_pts_flow_cpu = cv::Mat();
        else
            cv::transpose(prev_pts, prev_pts_flow_cpu);

        if (prev_pts_flow_gpu.cols < prev_pts_flow_cpu.cols) {
            prev_pts_flow_gpu = cv::cuda::GpuMat(prev_pts_flow_cpu.size(), prev_pts_flow_cpu.type());
            cur_pts_flow_gpu = cv::cuda::GpuMat(prev_pts_flow_cpu.size(), prev_pts_flow_cpu.type());

            status_gpu = cv::cuda::GpuMat(prev_pts_flow_cpu.size(), CV_8UC1);
            err_gpu = cv::cuda::GpuMat(prev_pts_flow_cpu.size(), CV_32FC1);
        }

        prev_pts_flow_gpu.upload(cv::Mat(prev_pts_flow_cpu), stream);
    }

    void update_tracking_flow(cv::Mat src_mat, std::vector<bbox_t> _cur_bbox_vec)
    {
        int const old_gpu_id = cv::cuda::getDevice();
        if (old_gpu_id != gpu_id)
            cv::cuda::setDevice(gpu_id);

        if (src_mat.channels() == 1 || src_mat.channels() == 3 || src_mat.channels() == 4) {
            if (src_mat_gpu.cols == 0) {
                src_mat_gpu = cv::cuda::GpuMat(src_mat.size(), src_mat.type());
                src_grey_gpu = cv::cuda::GpuMat(src_mat.size(), CV_8UC1);
            }

            if (src_mat.channels() == 1) {
                src_mat_gpu.upload(src_mat, stream);
                src_mat_gpu.copyTo(src_grey_gpu);
            }
            else if (src_mat.channels() == 3) {
                src_mat_gpu.upload(src_mat, stream);
                cv::cuda::cvtColor(src_mat_gpu, src_grey_gpu, CV_BGR2GRAY, 1, stream);
            }
            else if (src_mat.channels() == 4) {
                src_mat_gpu.upload(src_mat, stream);
                cv::cuda::cvtColor(src_mat_gpu, src_grey_gpu, CV_BGRA2GRAY, 1, stream);
            }
            else {
                std::cerr << " Warning: src_mat.channels() is not 1, 3 or 4. It is = " << src_mat.channels() << " \n";
                return;
            }
        }
        update_cur_bbox_vec(_cur_bbox_vec);

        if (old_gpu_id != gpu_id)
            cv::cuda::setDevice(old_gpu_id);
    }

    std::vector<bbox_t> tracking_flow(cv::Mat dst_mat, bool check_error = true)
    {
        if (sync_PyrLKOpticalFlow_gpu.empty()) {
            std::cout << "sync_PyrLKOpticalFlow_gpu isn't initialized \n";
            return cur_bbox_vec;
        }

        int const old_gpu_id = cv::cuda::getDevice();
        if (old_gpu_id != gpu_id)
            cv::cuda::setDevice(gpu_id);

        if (dst_mat_gpu.cols == 0) {
            dst_mat_gpu = cv::cuda::GpuMat(dst_mat.size(), dst_mat.type());
            dst_grey_gpu = cv::cuda::GpuMat(dst_mat.size(), CV_8UC1);
        }

        //dst_grey_gpu.upload(dst_mat, stream);  // use BGR
        dst_mat_gpu.upload(dst_mat, stream);
        cv::cuda::cvtColor(dst_mat_gpu, dst_grey_gpu, CV_BGR2GRAY, 1, stream);

        if (src_grey_gpu.rows != dst_grey_gpu.rows || src_grey_gpu.cols != dst_grey_gpu.cols) {
            stream.waitForCompletion();
            src_grey_gpu = dst_grey_gpu.clone();
            cv::cuda::setDevice(old_gpu_id);
            return cur_bbox_vec;
        }

        ////sync_PyrLKOpticalFlow_gpu.sparse(src_grey_gpu, dst_grey_gpu, prev_pts_flow_gpu, cur_pts_flow_gpu, status_gpu, &err_gpu);  // OpenCV 2.4.x
        sync_PyrLKOpticalFlow_gpu->calc(src_grey_gpu, dst_grey_gpu, prev_pts_flow_gpu, cur_pts_flow_gpu, status_gpu, err_gpu, stream);  // OpenCV 3.x

        cv::Mat cur_pts_flow_cpu;
        cur_pts_flow_gpu.download(cur_pts_flow_cpu, stream);

        dst_grey_gpu.copyTo(src_grey_gpu, stream);

        cv::Mat err_cpu, status_cpu;
        err_gpu.download(err_cpu, stream);
        status_gpu.download(status_cpu, stream);

        stream.waitForCompletion();

        std::vector<bbox_t> result_bbox_vec;

        if (err_cpu.cols == cur_bbox_vec.size() && status_cpu.cols == cur_bbox_vec.size())
        {
            for (size_t i = 0; i < cur_bbox_vec.size(); ++i)
            {
                cv::Point2f cur_key_pt = cur_pts_flow_cpu.at<cv::Point2f>(0, i);
                cv::Point2f prev_key_pt = prev_pts_flow_cpu.at<cv::Point2f>(0, i);

                float moved_x = cur_key_pt.x - prev_key_pt.x;
                float moved_y = cur_key_pt.y - prev_key_pt.y;

                if (std::abs(moved_x) < 100 && std::abs(moved_y) < 100 && good_bbox_vec_flags[i])
                    if (err_cpu.at<float>(0, i) < flow_error && status_cpu.at<unsigned char>(0, i) != 0 &&
                        ((float)cur_bbox_vec[i].x + moved_x) > 0 && ((float)cur_bbox_vec[i].y + moved_y) > 0)
                    {
                        cur_bbox_vec[i].x += moved_x + 0.5;
                        cur_bbox_vec[i].y += moved_y + 0.5;
                        result_bbox_vec.push_back(cur_bbox_vec[i]);
                    }
                    else good_bbox_vec_flags[i] = false;
                else good_bbox_vec_flags[i] = false;

                //if(!check_error && !good_bbox_vec_flags[i]) result_bbox_vec.push_back(cur_bbox_vec[i]);
            }
        }

        cur_pts_flow_gpu.swap(prev_pts_flow_gpu);
        cur_pts_flow_cpu.copyTo(prev_pts_flow_cpu);

        if (old_gpu_id != gpu_id)
            cv::cuda::setDevice(old_gpu_id);

        return result_bbox_vec;
    }

};
#elif defined(TRACK_OPTFLOW) && defined(OPENCV)

//#include <opencv2/optflow.hpp>
#include <opencv2/video/tracking.hpp>

class Tracker_optflow {
public:
    const int flow_error;

    Tracker_optflow(int win_size = 15, int max_level = 3, int iterations = 8000, int _flow_error = -1) :
        flow_error((_flow_error > 0)? _flow_error:(win_size*4))
    {
        sync_PyrLKOpticalFlow = cv::SparsePyrLKOpticalFlow::create();
        sync_PyrLKOpticalFlow->setWinSize(cv::Size(win_size, win_size));   // 9, 15, 21, 31
        sync_PyrLKOpticalFlow->setMaxLevel(max_level);                     // +- 3 pt
    }

    // just to avoid extra allocations
    cv::Mat dst_grey;
    cv::Mat prev_pts_flow, cur_pts_flow;
    cv::Mat status, err;

    cv::Mat src_grey;  // used in both functions
    cv::Ptr<cv::SparsePyrLKOpticalFlow> sync_PyrLKOpticalFlow;

    std::vector<bbox_t> cur_bbox_vec;
    std::vector<bool> good_bbox_vec_flags;

    void update_cur_bbox_vec(std::vector<bbox_t> _cur_bbox_vec)
    {
        cur_bbox_vec = _cur_bbox_vec;
        good_bbox_vec_flags = std::vector<bool>(cur_bbox_vec.size(), true);
        cv::Mat prev_pts, cur_pts_flow;

        for (auto &i : cur_bbox_vec) {
            float x_center = (i.x + i.w / 2.0F);
            float y_center = (i.y + i.h / 2.0F);
            prev_pts.push_back(cv::Point2f(x_center, y_center));
        }

        if (prev_pts.rows == 0)
            prev_pts_flow = cv::Mat();
        else
            cv::transpose(prev_pts, prev_pts_flow);
    }

    void update_tracking_flow(cv::Mat new_src_mat, std::vector<bbox_t> _cur_bbox_vec)
    {
        if (new_src_mat.channels() == 1) {
            src_grey = new_src_mat.clone();
        }
        else if (new_src_mat.channels() == 3) {
            cv::cvtColor(new_src_mat, src_grey, CV_BGR2GRAY, 1);
        }
        else if (new_src_mat.channels() == 4) {
            cv::cvtColor(new_src_mat, src_grey, CV_BGRA2GRAY, 1);
        }
        else {
            std::cerr << " Warning: new_src_mat.channels() is not 1, 3 or 4. It is = " << new_src_mat.channels() << " \n";
            return;
        }
        update_cur_bbox_vec(_cur_bbox_vec);
    }

    std::vector<bbox_t> tracking_flow(cv::Mat new_dst_mat, bool check_error = true)
    {
        if (sync_PyrLKOpticalFlow.empty()) {
            std::cout << "sync_PyrLKOpticalFlow isn't initialized \n";
            return cur_bbox_vec;
        }

        cv::cvtColor(new_dst_mat, dst_grey, CV_BGR2GRAY, 1);

        if (src_grey.rows != dst_grey.rows || src_grey.cols != dst_grey.cols) {
            src_grey = dst_grey.clone();
            //std::cerr << " Warning: src_grey.rows != dst_grey.rows || src_grey.cols != dst_grey.cols \n";
            return cur_bbox_vec;
        }

        if (prev_pts_flow.cols < 1) {
            return cur_bbox_vec;
        }

        ////sync_PyrLKOpticalFlow_gpu.sparse(src_grey_gpu, dst_grey_gpu, prev_pts_flow_gpu, cur_pts_flow_gpu, status_gpu, &err_gpu);  // OpenCV 2.4.x
        sync_PyrLKOpticalFlow->calc(src_grey, dst_grey, prev_pts_flow, cur_pts_flow, status, err);  // OpenCV 3.x

        dst_grey.copyTo(src_grey);

        std::vector<bbox_t> result_bbox_vec;

        if (err.rows == cur_bbox_vec.size() && status.rows == cur_bbox_vec.size())
        {
            for (size_t i = 0; i < cur_bbox_vec.size(); ++i)
            {
                cv::Point2f cur_key_pt = cur_pts_flow.at<cv::Point2f>(0, i);
                cv::Point2f prev_key_pt = prev_pts_flow.at<cv::Point2f>(0, i);

                float moved_x = cur_key_pt.x - prev_key_pt.x;
                float moved_y = cur_key_pt.y - prev_key_pt.y;

                if (std::abs(moved_x) < 100 && std::abs(moved_y) < 100 && good_bbox_vec_flags[i])
                    if (err.at<float>(0, i) < flow_error && status.at<unsigned char>(0, i) != 0 &&
                        ((float)cur_bbox_vec[i].x + moved_x) > 0 && ((float)cur_bbox_vec[i].y + moved_y) > 0)
                    {
                        cur_bbox_vec[i].x += moved_x + 0.5;
                        cur_bbox_vec[i].y += moved_y + 0.5;
                        result_bbox_vec.push_back(cur_bbox_vec[i]);
                    }
                    else good_bbox_vec_flags[i] = false;
                else good_bbox_vec_flags[i] = false;

                //if(!check_error && !good_bbox_vec_flags[i]) result_bbox_vec.push_back(cur_bbox_vec[i]);
            }
        }

        prev_pts_flow = cur_pts_flow.clone();

        return result_bbox_vec;
    }

};
#else

class Tracker_optflow {};

#endif  // defined(TRACK_OPTFLOW) && defined(OPENCV)
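
// Hedged usage sketch (added comment): Tracker_optflow propagates the most recent detections
// across intermediate video frames with sparse pyramidal Lucas-Kanade optical flow. A typical
// loop, as assumed here, re-seeds the tracker whenever the detector runs:
//
//     Tracker_optflow tracker;
//     // on a frame where the detector produced result_vec:
//     tracker.update_tracking_flow(frame, result_vec);
//     // on later frames without a fresh detection:
//     result_vec = tracker.tracking_flow(next_frame, true /* check_error */);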
#ifdef OPENCV

static cv::Scalar obj_id_to_color(int obj_id) {
    int const colors[6][3] = { { 1,0,1 },{ 0,0,1 },{ 0,1,1 },{ 0,1,0 },{ 1,1,0 },{ 1,0,0 } };
    int const offset = obj_id * 123457 % 6;
    int const color_scale = 150 + (obj_id * 123457) % 100;
    cv::Scalar color(colors[offset][0], colors[offset][1], colors[offset][2]);
    color *= color_scale;
    return color;
}

class preview_boxes_t {
    enum { frames_history = 30 };  // how long to keep the history saved

    struct preview_box_track_t {
        unsigned int track_id, obj_id, last_showed_frames_ago;
        bool current_detection;
        bbox_t bbox;
        cv::Mat mat_obj, mat_resized_obj;
        preview_box_track_t() : track_id(0), obj_id(0), last_showed_frames_ago(frames_history), current_detection(false) {}
    };
    std::vector<preview_box_track_t> preview_box_track_id;
    size_t const preview_box_size, bottom_offset;
    bool const one_off_detections;
public:
    preview_boxes_t(size_t _preview_box_size = 100, size_t _bottom_offset = 100, bool _one_off_detections = false) :
        preview_box_size(_preview_box_size), bottom_offset(_bottom_offset), one_off_detections(_one_off_detections)
    {}

    void set(cv::Mat src_mat, std::vector<bbox_t> result_vec)
    {
        size_t const count_preview_boxes = src_mat.cols / preview_box_size;
        if (preview_box_track_id.size() != count_preview_boxes) preview_box_track_id.resize(count_preview_boxes);

        // increment frames history
        for (auto &i : preview_box_track_id)
            i.last_showed_frames_ago = std::min((unsigned)frames_history, i.last_showed_frames_ago + 1);

        // occupy empty boxes
        for (auto &k : result_vec) {
            bool found = false;
            // find the same (track_id)
            for (auto &i : preview_box_track_id) {
                if (i.track_id == k.track_id) {
                    if (!one_off_detections) i.last_showed_frames_ago = 0;  // for tracked objects
                    found = true;
                    break;
                }
            }
            if (!found) {
                // find an empty box
                for (auto &i : preview_box_track_id) {
                    if (i.last_showed_frames_ago == frames_history) {
                        if (!one_off_detections && k.frames_counter == 0) break;  // don't show if obj isn't tracked yet
                        i.track_id = k.track_id;
                        i.obj_id = k.obj_id;
                        i.bbox = k;
                        i.last_showed_frames_ago = 0;
                        break;
                    }
                }
            }
        }

        // draw preview box (from old or current frame)
        for (size_t i = 0; i < preview_box_track_id.size(); ++i)
        {
            // get object image
            cv::Mat dst = preview_box_track_id[i].mat_resized_obj;
            preview_box_track_id[i].current_detection = false;

            for (auto &k : result_vec) {
                if (preview_box_track_id[i].track_id == k.track_id) {
                    if (one_off_detections && preview_box_track_id[i].last_showed_frames_ago > 0) {
                        preview_box_track_id[i].last_showed_frames_ago = frames_history; break;
                    }
                    bbox_t b = k;
                    cv::Rect r(b.x, b.y, b.w, b.h);
                    cv::Rect img_rect(cv::Point2i(0, 0), src_mat.size());
                    cv::Rect rect_roi = r & img_rect;
                    if (rect_roi.width > 1 || rect_roi.height > 1) {
                        cv::Mat roi = src_mat(rect_roi);
                        cv::resize(roi, dst, cv::Size(preview_box_size, preview_box_size), 0, 0, cv::INTER_NEAREST);
                        preview_box_track_id[i].mat_obj = roi.clone();
                        preview_box_track_id[i].mat_resized_obj = dst.clone();
                        preview_box_track_id[i].current_detection = true;
                        preview_box_track_id[i].bbox = k;
                    }
                    break;
                }
            }
        }
    }

    void draw(cv::Mat draw_mat, bool show_small_boxes = false)
    {
        // draw preview box (from old or current frame)
        for (size_t i = 0; i < preview_box_track_id.size(); ++i)
        {
            auto &prev_box = preview_box_track_id[i];

            // draw object image
            cv::Mat dst = prev_box.mat_resized_obj;
            if (prev_box.last_showed_frames_ago < frames_history &&
                dst.size() == cv::Size(preview_box_size, preview_box_size))
            {
                cv::Rect dst_rect_roi(cv::Point2i(i * preview_box_size, draw_mat.rows - bottom_offset), dst.size());
                cv::Mat dst_roi = draw_mat(dst_rect_roi);
                dst.copyTo(dst_roi);

                cv::Scalar color = obj_id_to_color(prev_box.obj_id);
                int thickness = (prev_box.current_detection) ? 5 : 1;
                cv::rectangle(draw_mat, dst_rect_roi, color, thickness);

                unsigned int const track_id = prev_box.track_id;
                std::string track_id_str = (track_id > 0) ? std::to_string(track_id) : "";
                putText(draw_mat, track_id_str, dst_rect_roi.tl() - cv::Point2i(-4, 5), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.9, cv::Scalar(0, 0, 0), 2);

                std::string size_str = std::to_string(prev_box.bbox.w) + "x" + std::to_string(prev_box.bbox.h);
                putText(draw_mat, size_str, dst_rect_roi.tl() + cv::Point2i(0, 12), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0, 0, 0), 1);

                if (!one_off_detections && prev_box.current_detection) {
                    cv::line(draw_mat, dst_rect_roi.tl() + cv::Point2i(preview_box_size, 0),
                        cv::Point2i(prev_box.bbox.x, prev_box.bbox.y + prev_box.bbox.h),
                        color);
                }

                if (one_off_detections && show_small_boxes) {
                    cv::Rect src_rect_roi(cv::Point2i(prev_box.bbox.x, prev_box.bbox.y),
                        cv::Size(prev_box.bbox.w, prev_box.bbox.h));
                    unsigned int const color_history = (255 * prev_box.last_showed_frames_ago) / frames_history;
                    color = cv::Scalar(255 - 3 * color_history, 255 - 2 * color_history, 255 - 1 * color_history);
                    if (prev_box.mat_obj.size() == src_rect_roi.size()) {
                        prev_box.mat_obj.copyTo(draw_mat(src_rect_roi));
                    }
                    cv::rectangle(draw_mat, src_rect_roi, color, thickness);
                    putText(draw_mat, track_id_str, src_rect_roi.tl() - cv::Point2i(0, 10), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0, 0, 0), 1);
                }
            }
        }
    }
};
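
// Hedged usage sketch (added comment): preview_boxes_t keeps small crops of tracked objects
// and paints them as a strip of thumbnails near the bottom of the output frame.
//
//     preview_boxes_t large_preview;           // defaults: 100 px boxes, 100 px bottom offset
//     large_preview.set(frame, result_vec);    // update thumbnails from the current frame
//     large_preview.draw(draw_frame);          // render the thumbnail strip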
class track_kalman_t
{
    int track_id_counter;
    std::chrono::steady_clock::time_point global_last_time;
    float dT;

public:
    int max_objects;        // max objects for tracking
    int min_frames;         // min frames to consider an object as detected
    const float max_dist;   // max distance (in px) to track with the same ID
    cv::Size img_size;      // max value of x,y,w,h

    struct tst_t {
        int track_id;
        int state_id;
        std::chrono::steady_clock::time_point last_time;
        int detection_count;
        tst_t() : track_id(-1), state_id(-1) {}
    };
    std::vector<tst_t> track_id_state_id_time;
    std::vector<bbox_t> result_vec_pred;

    struct one_kalman_t;
    std::vector<one_kalman_t> kalman_vec;

    struct one_kalman_t
    {
        cv::KalmanFilter kf;
        cv::Mat state;
        cv::Mat meas;
        int measSize, stateSize, contrSize;

        void set_delta_time(float dT) {
            kf.transitionMatrix.at<float>(2) = dT;
            kf.transitionMatrix.at<float>(9) = dT;
        }

        void set(bbox_t box)
        {
            initialize_kalman();

            kf.errorCovPre.at<float>(0) = 1;   // px
            kf.errorCovPre.at<float>(7) = 1;   // px
            kf.errorCovPre.at<float>(14) = 1;
            kf.errorCovPre.at<float>(21) = 1;
            kf.errorCovPre.at<float>(28) = 1;  // px
            kf.errorCovPre.at<float>(35) = 1;  // px

            state.at<float>(0) = box.x;
            state.at<float>(1) = box.y;
            state.at<float>(2) = 0;
            state.at<float>(3) = 0;
            state.at<float>(4) = box.w;
            state.at<float>(5) = box.h;
            // <<<< Initialization

            kf.statePost = state;
        }

        // Kalman.correct() calculates: statePost = statePre + gain * (z(k) - measurementMatrix*statePre);
        // corrected state (x(k)): x(k) = x'(k) + K(k)*(z(k) - H*x'(k))
        void correct(bbox_t box) {
            meas.at<float>(0) = box.x;
            meas.at<float>(1) = box.y;
            meas.at<float>(2) = box.w;
            meas.at<float>(3) = box.h;

            kf.correct(meas);

            bbox_t new_box = predict();
            if (new_box.w == 0 || new_box.h == 0) {
                set(box);
                //std::cerr << " force set(): track_id = " << box.track_id <<
                //    ", x = " << box.x << ", y = " << box.y << ", w = " << box.w << ", h = " << box.h << std::endl;
            }
        }

        // Kalman.predict() calculates: statePre = TransitionMatrix * statePost;
        // predicted state (x'(k)): x'(k) = A*x(k-1) + B*u(k)
        bbox_t predict() {
            bbox_t box;
            state = kf.predict();

            box.x = state.at<float>(0);
            box.y = state.at<float>(1);
            box.w = state.at<float>(4);
            box.h = state.at<float>(5);
            return box;
        }

        void initialize_kalman()
        {
            kf = cv::KalmanFilter(stateSize, measSize, contrSize, CV_32F);

            // Transition State Matrix A
            // Note: set dT at each processing step!
            // [ 1 0 dT 0  0 0 ]
            // [ 0 1 0  dT 0 0 ]
            // [ 0 0 1  0  0 0 ]
            // [ 0 0 0  1  0 0 ]
            // [ 0 0 0  0  1 0 ]
            // [ 0 0 0  0  0 1 ]
            cv::setIdentity(kf.transitionMatrix);

            // Measure Matrix H
            // [ 1 0 0 0 0 0 ]
            // [ 0 1 0 0 0 0 ]
            // [ 0 0 0 0 1 0 ]
            // [ 0 0 0 0 0 1 ]
            kf.measurementMatrix = cv::Mat::zeros(measSize, stateSize, CV_32F);
            kf.measurementMatrix.at<float>(0) = 1.0f;
            kf.measurementMatrix.at<float>(7) = 1.0f;
            kf.measurementMatrix.at<float>(16) = 1.0f;
            kf.measurementMatrix.at<float>(23) = 1.0f;

            // Process Noise Covariance Matrix Q - result smoother with lower values (1e-2)
            // [ Ex 0  0    0    0  0  ]
            // [ 0  Ey 0    0    0  0  ]
            // [ 0  0  Ev_x 0    0  0  ]
            // [ 0  0  0    Ev_y 0  0  ]
            // [ 0  0  0    0    Ew 0  ]
            // [ 0  0  0    0    0  Eh ]
            //cv::setIdentity(kf.processNoiseCov, cv::Scalar(1e-3));
            kf.processNoiseCov.at<float>(0) = 1e-2;
            kf.processNoiseCov.at<float>(7) = 1e-2;
            kf.processNoiseCov.at<float>(14) = 1e-2;  // 5.0f;
            kf.processNoiseCov.at<float>(21) = 1e-2;  // 5.0f;
            kf.processNoiseCov.at<float>(28) = 5e-3;
            kf.processNoiseCov.at<float>(35) = 5e-3;

            // Measures Noise Covariance Matrix R - result smoother with higher values (1e-1)
            cv::setIdentity(kf.measurementNoiseCov, cv::Scalar(1e-1));
            //cv::setIdentity(kf.errorCovPost, cv::Scalar::all(1e-2));
            // <<<< Kalman Filter

            set_delta_time(0);
        }

        one_kalman_t(int _stateSize = 6, int _measSize = 4, int _contrSize = 0) :
            kf(_stateSize, _measSize, _contrSize, CV_32F), measSize(_measSize), stateSize(_stateSize), contrSize(_contrSize)
        {
            state = cv::Mat(stateSize, 1, CV_32F);  // [x, y, v_x, v_y, w, h]
            meas = cv::Mat(measSize, 1, CV_32F);    // [z_x, z_y, z_w, z_h]
            //cv::Mat procNoise(stateSize, 1, type)
            // [E_x, E_y, E_v_x, E_v_y, E_w, E_h]

            initialize_kalman();
        }
    };
    // ------------------------------------------

    track_kalman_t(int _max_objects = 1000, int _min_frames = 3, float _max_dist = 40, cv::Size _img_size = cv::Size(10000, 10000)) :
        max_objects(_max_objects), min_frames(_min_frames), max_dist(_max_dist), img_size(_img_size),
        track_id_counter(0)
    {
        kalman_vec.resize(max_objects);
        track_id_state_id_time.resize(max_objects);
        result_vec_pred.resize(max_objects);
    }

    float calc_dt() {
        dT = std::chrono::duration<double>(std::chrono::steady_clock::now() - global_last_time).count();
        return dT;
    }

    static float get_distance(float src_x, float src_y, float dst_x, float dst_y) {
        return sqrtf((src_x - dst_x)*(src_x - dst_x) + (src_y - dst_y)*(src_y - dst_y));
    }

    void clear_old_states() {
        // clear old bboxes
        for (size_t state_id = 0; state_id < track_id_state_id_time.size(); ++state_id)
        {
            float time_sec = std::chrono::duration<double>(std::chrono::steady_clock::now() - track_id_state_id_time[state_id].last_time).count();
            float time_wait = 0.5;  // 0.5 second
            if (track_id_state_id_time[state_id].track_id > -1)
            {
                if ((result_vec_pred[state_id].x > img_size.width) ||
                    (result_vec_pred[state_id].y > img_size.height))
                {
                    track_id_state_id_time[state_id].track_id = -1;
                }

                if (time_sec >= time_wait || track_id_state_id_time[state_id].detection_count < 0) {
                    //std::cerr << " remove track_id = " << track_id_state_id_time[state_id].track_id << ", state_id = " << state_id << std::endl;
                    track_id_state_id_time[state_id].track_id = -1;  // remove bbox
                }
            }
        }
    }

    tst_t get_state_id(bbox_t find_box, std::vector<bool> &busy_vec)
    {
        tst_t tst;
        tst.state_id = -1;

        float min_dist = std::numeric_limits<float>::max();

        for (size_t i = 0; i < max_objects; ++i)
        {
            if (track_id_state_id_time[i].track_id > -1 && result_vec_pred[i].obj_id == find_box.obj_id && busy_vec[i] == false)
            {
                bbox_t pred_box = result_vec_pred[i];

                float dist = get_distance(pred_box.x, pred_box.y, find_box.x, find_box.y);

                float movement_dist = std::max(max_dist, static_cast<float>(std::max(pred_box.w, pred_box.h)));

                if ((dist < movement_dist) && (dist < min_dist)) {
                    min_dist = dist;
                    tst.state_id = i;
                }
            }
        }

        if (tst.state_id > -1) {
            track_id_state_id_time[tst.state_id].last_time = std::chrono::steady_clock::now();
            track_id_state_id_time[tst.state_id].detection_count = std::max(track_id_state_id_time[tst.state_id].detection_count + 2, 10);
            tst = track_id_state_id_time[tst.state_id];
            busy_vec[tst.state_id] = true;
        }
        else {
            //std::cerr << " Didn't find: obj_id = " << find_box.obj_id << ", x = " << find_box.x << ", y = " << find_box.y <<
            //    ", track_id_counter = " << track_id_counter << std::endl;
        }

        return tst;
    }

    tst_t new_state_id(std::vector<bool> &busy_vec)
    {
        tst_t tst;
        // find empty cell to add new track_id
        auto it = std::find_if(track_id_state_id_time.begin(), track_id_state_id_time.end(), [&](tst_t &v) { return v.track_id == -1; });
        if (it != track_id_state_id_time.end()) {
            it->state_id = it - track_id_state_id_time.begin();
            //it->track_id = track_id_counter++;
            it->track_id = 0;
            it->last_time = std::chrono::steady_clock::now();
            it->detection_count = 1;
            tst = *it;
            busy_vec[it->state_id] = true;
        }

        return tst;
    }

    std::vector<tst_t> find_state_ids(std::vector<bbox_t> result_vec)
    {
        std::vector<tst_t> tst_vec(result_vec.size());
        std::vector<bool> busy_vec(max_objects, false);

        for (size_t i = 0; i < result_vec.size(); ++i)
        {
            tst_t tst = get_state_id(result_vec[i], busy_vec);
            int state_id = tst.state_id;
            int track_id = tst.track_id;

            // if new state_id
            if (state_id < 0) {
                tst = new_state_id(busy_vec);
                state_id = tst.state_id;
                track_id = tst.track_id;
                if (state_id > -1) {
                    kalman_vec[state_id].set(result_vec[i]);
                    //std::cerr << " post: ";
                }
            }

            //std::cerr << " track_id = " << track_id << ", state_id = " << state_id <<
            //    ", x = " << result_vec[i].x << ", det_count = " << tst.detection_count << std::endl;

            if (state_id > -1) {
                tst_vec[i] = tst;
                result_vec_pred[state_id] = result_vec[i];
                result_vec_pred[state_id].track_id = track_id;
            }
        }

        return tst_vec;
    }

    std::vector<bbox_t> predict()
    {
        clear_old_states();
        std::vector<bbox_t> result_vec;

        for (size_t i = 0; i < max_objects; ++i)
        {
            tst_t tst = track_id_state_id_time[i];
            if (tst.track_id > -1) {
                bbox_t box = kalman_vec[i].predict();

                result_vec_pred[i].x = box.x;
                result_vec_pred[i].y = box.y;
                result_vec_pred[i].w = box.w;
                result_vec_pred[i].h = box.h;

                if (tst.detection_count >= min_frames)
                {
                    if (track_id_state_id_time[i].track_id == 0) {
                        track_id_state_id_time[i].track_id = ++track_id_counter;
                        result_vec_pred[i].track_id = track_id_counter;
                    }

                    result_vec.push_back(result_vec_pred[i]);
                }
            }
        }
        //std::cerr << " result_vec.size() = " << result_vec.size() << std::endl;

        //global_last_time = std::chrono::steady_clock::now();

        return result_vec;
    }

    std::vector<bbox_t> correct(std::vector<bbox_t> result_vec)
    {
        calc_dt();
        clear_old_states();

        for (size_t i = 0; i < max_objects; ++i)
            track_id_state_id_time[i].detection_count--;

        std::vector<tst_t> tst_vec = find_state_ids(result_vec);

        for (size_t i = 0; i < tst_vec.size(); ++i) {
            tst_t tst = tst_vec[i];
            int state_id = tst.state_id;
            if (state_id > -1)
            {
                kalman_vec[state_id].set_delta_time(dT);
                kalman_vec[state_id].correct(result_vec_pred[state_id]);
            }
        }

        result_vec = predict();

        global_last_time = std::chrono::steady_clock::now();

        return result_vec;
    }

};
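
// Hedged usage sketch (added comment): track_kalman_t assigns track_id values by matching new
// detections against Kalman-predicted boxes. One assumed way to use it per frame:
//
//     track_kalman_t kalman_tracker(1000 /* max objects */, 3 /* min frames */);
//     // on frames with fresh detections:
//     result_vec = kalman_tracker.correct(result_vec);
//     // on frames without detections - advance the filters only:
//     result_vec = kalman_tracker.predict();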
// ----------------------------------------------
#endif  // OPENCV

#endif  // __cplusplus

#endif  // YOLO_V2_CLASS_HPP