SLProject  4.2.000
A platform independent 3D computer graphics framework for desktop OS, Android, iOS and online in web browsers
CVCapture.cpp
Go to the documentation of this file.
1 /**
2  * \file CVCapture.cpp
3  * \brief OpenCV Capture Device
4  * \authors Michael Goettlicher, Marcus Hudritsch, Jan Dellsperger
5  * \date Winter 2016
7  * \copyright http://opensource.org/licenses/GPL-3.0
8  * \remarks Please use clangformat to format the code. See more code style on
9  * https://github.com/cpvrlab/SLProject4/wiki/SLProject-Coding-Style
10  */
11 
12 /*
13 The OpenCV library version 3.4 or newer with the extra modules must be present.
14 If the application captures the live video stream with OpenCV you have
15 to define in addition the constant APP_USES_CVCAPTURE.
16 All classes that use OpenCV begin with CV.
17 See also the class docs for CVCapture, CVCalibration and CVTracked
18 for a good top down information.
19 */
20 
21 #include <CVCamera.h>
22 #include <algorithm> // std::max
23 #include <CVCapture.h>
24 #include <CVImage.h>
25 #include <Utils.h>
26 #include <FtpUtils.h>
27 #include <AppCommon.h>
28 #include <Profiler.h>
29 #include <opencv2/core/utils/logger.hpp>
30 
31 //-----------------------------------------------------------------------------
33 //-----------------------------------------------------------------------------
34 //! Private constructor that initializes the singleton's default state
// NOTE(review): file line 35 (the CVCapture::CVCapture() signature) was a
// hyperlink in this export and is missing here — restore from the repository.
36  : mainCam(CVCameraType::FRONTFACING),
37  scndCam(CVCameraType::BACKFACING),
38  videoFileCam(CVCameraType::VIDEOFILE)
// NOTE(review): mainCam is FRONTFACING while scndCam is BACKFACING — confirm
// this matches the intended device mapping on mobile targets.
39 {
40  startCaptureTimeMS = 0.0f;
41 #ifdef APP_USES_CVCAPTURE
// Desktop builds capture with OpenCV and have no secondary (selfie) camera
42  hasSecondaryCamera = false;
43 #else
44  hasSecondaryCamera = true;
45 #endif
46  videoFilename = "";
47  videoLoops = true; // video files restart from frame 0 at the end by default
48  fps = 1;
49  frameCount = 0;
50  activeCamSizeIndex = -1; // -1 = no camera size selected yet
51  activeCamera = nullptr;
52  _captureTimesMS.init(60, 0); // 60-sample averaging window for capture-time stats
53 
54  // Silences OpenCV debug logging
55  cv::utils::logging::setLogLevel(cv::utils::logging::LogLevel::LOG_LEVEL_SILENT);
56 }
57 //-----------------------------------------------------------------------------
58 //! Private destructor that releases the capture device and deletes the singleton
// NOTE(review): file line 59 (the destructor signature) and file line 62
// (presumably an `if (_instance)` guard around the delete) were hyperlinks in
// this export and are missing here — restore from the repository.
60 {
61  release(); // close any open capture device or video file
63  {
64  delete CVCapture::_instance;
65  CVCapture::_instance = nullptr;
66  }
67 }
68 //-----------------------------------------------------------------------------
69 //! Opens the capture device and returns the frame size
70 /* This so far called in start if a scene uses a live video by
71 setting the CVCapture::videoType to VT_MAIN. On desktop systems the webcam
72 is the only and main camera.
73 */
74 CVSize2i CVCapture::open(int deviceNum)
75 {
76 #ifndef SL_EMSCRIPTEN
77  try
78  {
79  _captureDevice.open(deviceNum);
80 
81  if (!_captureDevice.isOpened())
82  return CVSize2i(0, 0);
83 # if _DEBUG
84  Utils::log("SLProject", "CVCapture::open : Capture devices created.");
85 # endif
86  //_captureDevice.set(cv::CAP_PROP_FRAME_WIDTH, 1440);
87  //_captureDevice.set(cv::CAP_PROP_FRAME_HEIGHT, 1080);
88  int w = (int)_captureDevice.get(cv::CAP_PROP_FRAME_WIDTH);
89  int h = (int)_captureDevice.get(cv::CAP_PROP_FRAME_HEIGHT);
90  // Utils::log("SLProject", "CV_CAP_PROP_FRAME_WIDTH : %d", w);
91  // Utils::log("SLProject", "CV_CAP_PROP_FRAME_HEIGHT: %d", h);
92 
93  hasSecondaryCamera = false;
94  fps = (float)_captureDevice.get(cv::CAP_PROP_FPS);
95  frameCount = 0;
96 
97  // Set one camera size entry
98  CVCapture::camSizes.clear();
99  CVCapture::camSizes.push_back(CVSize(w, h));
// NOTE(review): file line 100 (a hyperlink, presumably 'activeCamSizeIndex = 0;')
// is missing from this export — restore from the repository.
101 
102  return CVSize2i(w, h);
103  }
104  catch (exception& e)
105  {
106  Utils::log("SLProject", "Exception during OpenCV video capture creation: %s", e.what());
107  }
108 #else
// NOTE(review): file line 109 (presumably the declaration of 'facing', e.g.
// 'WebCameraFacing facing = WebCameraFacing::FRONT;') is missing from this
// export — 'facing' is used below without a visible declaration.
110  if (_videoType == VT_MAIN)
111  facing = WebCameraFacing::BACK;
112  else if (_videoType == VT_SCND)
113  facing = WebCameraFacing::FRONT;
114  _webCamera.open(facing);
115 
116  // We can't query the actual resolution of the camera because that is considered a security risk.
117  // Therefore, we list some common resolutions. If the camera doesn't support the requested resolution,
118  // the browser will simply switch to a supported one.
119  camSizes = {CVSize2i(640, 480),
120  CVSize2i(1280, 720),
121  CVSize2i(1920, 1080)};
122  activeCamSizeIndex = 0;
123 #endif
124 
// Reached on the Emscripten path (actual size is unknown) and on exceptions
125  return CVSize2i(0, 0);
126 }
127 //-----------------------------------------------------------------------------
128 //! Opens the video file instead of a camera feed.
129 /* This so far called in CVCapture::start if a scene uses a video by
130 setting the CVCapture::videoType to VT_FILE.
131 */
// NOTE(review): file line 132 (the signature, presumably
// 'CVSize2i CVCapture::openFile()'), line 137 (presumably a
// 'if (!Utils::fileExists(videoFilename))' check) and line 143 (presumably
// '_captureDevice.open(videoFilename);') were hyperlinks in this export and
// are missing here — restore from the repository.
133 {
134 #ifndef SL_EMSCRIPTEN
135  try
136  { // Load the file directly
138  {
139  string msg = "CVCapture::openFile: File not found: " + videoFilename;
140  Utils::exitMsg("SLProject", msg.c_str(), __LINE__, __FILE__);
141  }
142 
144 
145  if (!_captureDevice.isOpened())
146  {
147  Utils::log("SLProject", "CVCapture::openFile: Failed to open video file.");
148  return CVSize2i(0, 0);
149  }
150 
151  // Utils::log("SLProject", "Capture devices created with video.");
152 
153  int w = (int)_captureDevice.get(cv::CAP_PROP_FRAME_WIDTH);
154  int h = (int)_captureDevice.get(cv::CAP_PROP_FRAME_HEIGHT);
155  // Utils::log("SLProject", "CV_CAP_PROP_FRAME_WIDTH : %d", w);
156  // Utils::log("SLProject", "CV_CAP_PROP_FRAME_HEIGHT: %d", h);
157 
158  hasSecondaryCamera = false;
159  fps = (float)_captureDevice.get(cv::CAP_PROP_FPS);
160  frameCount = (int)_captureDevice.get(cv::CAP_PROP_FRAME_COUNT);
161 
162  return CVSize2i(w, h);
163  }
164  catch (exception& e)
165  {
166  Utils::log("SLProject", "CVCapture::openFile: Exception during OpenCV video capture creation with video file: %s", e.what());
167  }
168 #endif
169 
170  return CVSize2i(0, 0);
171 }
172 //-----------------------------------------------------------------------------
173 //! starts the video capturing
174 void CVCapture::start(float viewportWdivH)
175 {
176 #if defined(SL_EMSCRIPTEN)
177  open(VT_MAIN);
178 #elif defined(APP_USES_CVCAPTURE)
179  if (_videoType != VT_NONE)
180  {
181  if (!isOpened())
182  {
183  CVSize2i videoSize;
184  if (_videoType == VT_FILE && !videoFilename.empty())
185  videoSize = openFile();
186  else
187  videoSize = open(0);
188 
189  if (videoSize != CVSize2i(0, 0))
190  {
191  grabAndAdjustForSL(viewportWdivH);
192  }
193  }
194  }
195 #else
196  if (_videoType == VT_FILE && !videoFilename.empty())
197  {
198  if (!isOpened())
199  {
200  CVSize2i videoSize = openFile();
201  }
202  }
203 #endif
204 }
205 //-----------------------------------------------------------------------------
//! Returns true if the OpenCV capture device (or the web camera on
//! Emscripten) is currently open.
// NOTE(review): file line 206 (the signature, presumably
// 'bool CVCapture::isOpened()') was a hyperlink in this export and is missing
// here — restore from the repository.
207 {
208 #ifndef SL_EMSCRIPTEN
209  return _captureDevice.isOpened();
210 #else
211  return _webCamera.isOpened();
212 #endif
213 }
214 //-----------------------------------------------------------------------------
//! Releases the capture device / web camera and clears the video filename.
// NOTE(review): file line 215 (the signature, presumably
// 'void CVCapture::release()') was a hyperlink in this export and is missing
// here — restore from the repository.
216 {
217 #ifndef SL_EMSCRIPTEN
218  if (_captureDevice.isOpened())
219  _captureDevice.release();
220 #else
221  if (_webCamera.isOpened())
222  _webCamera.close();
223 #endif
224 
225  videoFilename = "";
226 }
227 //-----------------------------------------------------------------------------
228 /*! Grabs a new frame from the OpenCV capture device or video file and calls
229 CVCapture::adjustForSL. This function can also be called by Android or iOS
230 app for grabbing a frame of a video file. Android and iOS use their own
231 capture functionality.
232 If viewportWdivH is negative the viewport aspect will be adapted to the video
233 aspect ratio.
234 Returns true if a frame was grabbed and adjusted successfully.
235 */
236 bool CVCapture::grabAndAdjustForSL(float viewportWdivH)
236 {
// NOTE(review): file lines 237 and 239 were hyperlinks in this export and are
// missing here (presumably PROFILE_FUNCTION() and the start-time capture for
// the statistics) — restore from the repository.
238 
240 
241 #ifndef SL_EMSCRIPTEN
242  try
243  {
244  if (_captureDevice.isOpened())
245  {
246  if (!_captureDevice.read(lastFrame))
247  {
248  // Try to loop the video
249  if (!videoFilename.empty() && videoLoops)
250  {
251  _captureDevice.set(cv::CAP_PROP_POS_FRAMES, 0);
252  if (!_captureDevice.read(lastFrame))
253  return false;
254  }
255  else
256  return false;
257  }
258 # if defined(ANDROID)
259  // Convert BGR to RGB on mobile phones
260  cvtColor(CVCapture::lastFrame, CVCapture::lastFrame, cv::COLOR_BGR2RGB, 3);
261 # endif
262  adjustForSL(viewportWdivH);
263  }
264  else
265  {
266  static bool logOnce = true;
267  if (logOnce)
268  {
269  Utils::log("SLProject", "OpenCV: Capture device or video file is not open!");
270  logOnce = false;
271  return false;
272  }
// NOTE(review): once the warning above was logged, subsequent calls with a
// closed device fall through to the final 'return true' below without
// grabbing a frame — looks like a bug; confirm and return false here too.
273  }
274  }
275  catch (exception& e)
276  {
277  Utils::log("SLProject", "Exception during OpenCV video capture creation: %s", e.what());
278  return false;
279  }
280 #else
281  if (!_webCamera.isOpened())
282  {
283  SL_LOG("Web camera is not open!");
284  return false;
285  }
286 
287  if (activeCamera->camSizeIndex() != -1)
288  _webCamera.setSize(camSizes[activeCamera->camSizeIndex()]);
289 
290  lastFrame = _webCamera.read();
291  adjustForSL(viewportWdivH);
292 #endif
293 
294  return true;
295 }
296 //-----------------------------------------------------------------------------
297 /*! This method is called by iOS and Android projects that capture their video
298 cameras on their own. We only adjust the color space. See the app_demo_slproject/ios and
299 app_demo_slproject/android projects for the usage.
300 */
301 void CVCapture::loadIntoLastFrame(const float viewportWdivH,
302  const int width,
303  const int height,
304  const CVPixelFormatGL newFormat,
305  const uchar* data,
306  const bool isContinuous)
307 {
309 
310  // treat Android YUV to RGB conversion special
311  if (newFormat == PF_yuv_420_888)
312  {
313  CVMat yuv(height + height / 2, width, CV_8UC1, (void*)data);
314 
315  // Android image copy loop #1
316  cvtColor(yuv, CVCapture::lastFrame, cv::COLOR_YUV2RGB_NV21, 3);
317  }
318  // convert 4 channel images to 3 channel
319  else if (newFormat == PF_bgra || format == PF_rgba)
320  {
321  CVMat rgba(height, width, CV_8UC4, (void*)data);
322  cvtColor(rgba, CVCapture::lastFrame, cv::COLOR_RGBA2RGB, 3);
323  }
324  else
325  {
326  // Set the according OpenCV format
327  int cvType = 0, bpp = 0;
328 
329  switch (newFormat)
330  {
331  case PF_luminance:
332  {
333  cvType = CV_8UC1;
334  bpp = 1;
335  break;
336  }
337  case PF_bgr:
338  case PF_rgb:
339  {
340  cvType = CV_8UC3;
341  bpp = 3;
342  break;
343  }
344  case PF_bgra:
345  case PF_rgba:
346  {
347  cvType = CV_8UC4;
348  bpp = 4;
349  break;
350  }
351  default: Utils::exitMsg("SLProject", "Pixel format not supported", __LINE__, __FILE__);
352  }
353 
354  // calculate padding NO. of bgrRowOffset bytes (= step in OpenCV terminology)
355  size_t destStride = 0;
356  if (!isContinuous)
357  {
358  int bitsPerPixel = bpp * 8;
359  int bpl = ((width * bitsPerPixel + 31) / 32) * 4;
360  destStride = (size_t)(bpl - width * bpp);
361  }
362 
363  CVCapture::lastFrame = CVMat(height, width, cvType, (void*)data, destStride);
364  }
365 
366  adjustForSL(viewportWdivH);
367 }
368 //-----------------------------------------------------------------------------
369 //! Does all adjustments needed for the gVideoTexture
370 /*! CVCapture::adjustForSL processes the following adjustments for all input
371 images no matter with what they where captured:
372 \n
373 1) Crops the input image if it doesn't match the screens aspect ratio. The
374 input image mostly does't fit the aspect of the output screen aspect. If the
375 input image is too high we crop it on top and bottom, if it is too wide we
376 crop it on the sides.
377 If viewportWdivH is negative the viewport aspect will be adapted to the video
378 aspect ratio. No cropping will be applied.
379 \n
380 2) Some cameras toward a face mirror the image and some do not. If a input
381 image should be mirrored or not is stored in CVCalibration::_isMirroredH
382 (H for horizontal) and CVCalibration::_isMirroredV (V for vertical).
383 \n
384 3) Many of the further processing steps are faster done on grayscale images.
385 We therefore create a copy that is grayscale converted.
386 */
387 void CVCapture::adjustForSL(float viewportWdivH)
388 {
// NOTE(review): file lines 389 and 391 were hyperlinks in this export and are
// missing here (presumably PROFILE_FUNCTION() and the format assignment or
// start-time capture) — restore from the repository.
390 
392 
393  //////////////////////////////////////
394  // 1) Check if capture size changed //
395  //////////////////////////////////////
396 
397  // Get capture size before cropping
398  captureSize = lastFrame.size();
399 
400  // Determine active size index if unset or changed
401  if (!camSizes.empty())
402  {
403  CVSize activeSize(0, 0);
404 
405  if (activeCamSizeIndex >= 0 && activeCamSizeIndex < (int)camSizes.size())
406  activeSize = camSizes[(uint)activeCamSizeIndex];
407 
408  if (activeCamSizeIndex == -1 || captureSize != activeSize)
409  {
410  for (unsigned long i = 0; i < camSizes.size(); ++i)
411  {
412  if (camSizes[i] == captureSize)
413  {
414  activeCamSizeIndex = (int)i;
415  break;
416  }
417  }
418  }
419  }
420 
421  //////////////////////////////////////////////////////////////////
422  // 2) Crop Video image to the aspect ratio of OpenGL background //
423  //////////////////////////////////////////////////////////////////
424 
425  // Cropping is done almost always.
426  // So this is Android image copy loop #2
427 
428  float inWdivH = (float)lastFrame.cols / (float)lastFrame.rows;
429  // viewportWdivH is negative the viewport aspect will be the same
430  float outWdivH = viewportWdivH < 0.0f ? inWdivH : viewportWdivH;
431 
432  if (Utils::abs(inWdivH - outWdivH) > 0.01f)
433  {
434  int width = 0; // width in pixels of the destination image
435  int height = 0; // height in pixels of the destination image
436  int cropH = 0; // crop height in pixels of the source image
437  int cropW = 0; // crop width in pixels of the source image
438  int wModulo4;
439  int hModulo4;
440 
441  if (inWdivH > outWdivH) // crop input image left & right
442  {
443  width = (int)((float)lastFrame.rows * outWdivH);
444  height = lastFrame.rows;
445  cropW = (int)((float)(lastFrame.cols - width) * 0.5f);
446 
447  // Width must be devidable by 4
448  wModulo4 = width % 4;
449  if (wModulo4 == 1) width--;
450  if (wModulo4 == 2)
451  {
452  cropW++;
453  width -= 2;
454  }
455  if (wModulo4 == 3) width++;
456  }
457  else // crop input image at top & bottom
458  {
459  width = lastFrame.cols;
460  height = (int)((float)lastFrame.cols / outWdivH);
461  cropH = (int)((float)(lastFrame.rows - height) * 0.5f);
462 
463  // Height must be dividable by 4
464  hModulo4 = height % 4;
465  if (hModulo4 == 1) height--;
466  if (hModulo4 == 2)
467  {
468  cropH++;
469  height -= 2;
470  }
471  if (hModulo4 == 3) height++;
472  }
473 
474  lastFrame(CVRect(cropW, cropH, width, height)).copyTo(lastFrame);
475  // imwrite("AfterCropping.bmp", lastFrame);
476  }
477 
478  //////////////////
479  // 3) Mirroring //
480  //////////////////
481 
482  // Mirroring is done for most selfie cameras.
483  // So this is Android image copy loop #3
484 
// NOTE(review): file lines 485 and 488 were hyperlinks in this export and are
// missing here (presumably 'if (activeCamera->mirrorH())' and an inner
// 'if (activeCamera->mirrorV())' choosing flip code -1 vs. 1) — restore.
486  {
487  CVMat mirrored;
489  cv::flip(lastFrame, mirrored, -1);
490  else
491  cv::flip(lastFrame, mirrored, 1);
492  lastFrame = mirrored;
493  }
// NOTE(review): file lines 494 and 497 were hyperlinks in this export and are
// missing here (presumably the analogous mirrorV condition pair) — restore.
495  {
496  CVMat mirrored;
498  cv::flip(lastFrame, mirrored, -1);
499  else
500  cv::flip(lastFrame, mirrored, 0);
501  lastFrame = mirrored;
502  }
503 
504  /////////////////////////
505  // 4) Create grayscale //
506  /////////////////////////
507 
508  // Creating a grayscale version from an YUV input source is stupid.
509  // We just could take the Y channel.
510  // Android image copy loop #4
511 
512  if (!lastFrame.empty())
513  cv::cvtColor(lastFrame, lastFrameGray, cv::COLOR_BGR2GRAY);
514 
515 #ifndef SL_EMSCRIPTEN
516  // Reset calibrated image size
517  if (lastFrame.size() != activeCamera->calibration.imageSize())
518  {
// NOTE(review): file line 519 was a hyperlink in this export and is missing
// here (presumably 'activeCamera->calibration.adaptForNewResolution(...)').
520  }
521 #endif
522 
// NOTE(review): file line 523 was a hyperlink in this export and is missing
// here (presumably the capture-time statistics update) — restore.
524 }
525 //-----------------------------------------------------------------------------
526 //! YUV to RGB image infos. Offset value can be negative for mirrored copy.
527 inline void
528 yuv2rbg(uchar y, uchar u, uchar v, uchar& r, uchar& g, uchar& b)
529 {
530  // Conversion from:
531  // https://de.wikipedia.org/wiki/YUV-Farbmodell
532  // float c = 1.164f*(float)(yVal-16);
533  // float d = (float)(uVal-128);
534  // float e = (float)(vVal-128);
535  // r = clipFToUInt8(c + 1.596f*e);
536  // g = clipFToUInt8(c - 0.391f*d - 0.813f*e);
537  // b = clipFToUInt8(c + 2.018f*d);
538 
539  // Conversion from:
540  // http://www.wordsaretoys.com/2013/10/18/making-yuv-conversion-a-little-faster
541  // I've multiplied each floating point constant by 1024 and truncated it.
542  // Now I can add/subtract the scaled integers, and apply a bit shift right to
543  // divide each result by 1024
544  int e = v - 128;
545  int d = u - 128;
546  int a0 = 1192 * (y - 16);
547  int a1 = 1634 * e;
548  int a2 = 832 * e;
549  int a3 = 400 * d;
550  int a4 = 2066 * d;
551  r = (uchar)Utils::clamp((a0 + a1) >> 10, 0, 255);
552  g = (uchar)Utils::clamp((a0 - a2 - a3) >> 10, 0, 255);
553  b = (uchar)Utils::clamp((a0 + a4) >> 10, 0, 255);
554 }
555 //-----------------------------------------------------------------------------
556 //! Single pixel in OpenCV's BGR channel order (3 bytes, no padding)
557 struct colorBGR
558 {
559  uchar b, g, r;
560 };
561 //-----------------------------------------------------------------------------
562 //! YUV to RGB image infos. Offset value can be negative for mirrored copy.
// NOTE(review): file line 563 (the 'struct YUV2RGB_ImageInfo' declaration line)
// was a hyperlink in this export and is missing here — restore from the repo.
// The members below keep their historical 'Offest' spelling because
// convertYUV2RGB and copyYUVPlanes reference them by these exact names.
564 {
565  int bgrColOffest; //!< offset in bytes to the next bgr pixel (column)
566  int grayColOffest; //!< offset in bytes to the next gray pixel (column)
567  int yColOffest; //!< offset in bytes to the next y pixel (column)
568  int uColOffest; //!< offset in bytes to the next u pixel (column)
569  int vColOffset; //!< offset in bytes to the next v pixel (column)
570  int bgrRowOffset; //!< offset in bytes to the next bgr row
571  int grayRowOffset; //!< offset in bytes to the next grayscale row
572  int yRowOffset; //!< offset in bytes to the y value of the next row
573  int uRowOffset; //!< offset in bytes to the u value of the next row
574  int vRowOffest; //!< offset in bytes to the v value of the next row
575 };
576 //-----------------------------------------------------------------------------
577 //! YUV to RGB image block infos that are different per thread
// NOTE(review): file line 578 (the 'struct YUV2RGB_BlockInfo' declaration line)
// was a hyperlink in this export and is missing here — restore from the repo.
579 {
580  YUV2RGB_ImageInfo* imageInfo; //!< Pointer to the image info
581  int rowCount; //!< Num. of rows in block
582  int colCount; //!< Num. of columns in block
583  uchar* bgrRow; //!< Pointer to the bgr row
584  uchar* grayRow; //!< Pointer to the grayscale row
585  uchar* yRow; //!< Pointer to the y value row
586  uchar* uRow; //!< Pointer to the u value row
587  uchar* vRow; //!< Pointer to the v value row
588 };
589 //-----------------------------------------------------------------------------
590 //! YUV to RGB conversion function called by multiple threads
591 /*!
592 \param block image block information struct with thread specific information
593 */
// NOTE(review): file line 594 (the signature) was a hyperlink in this export
// and is missing here; per the generated index it reads
// 'void* convertYUV2RGB(YUV2RGB_BlockInfo* block)' — restore from the repo.
595 {
596  YUV2RGB_ImageInfo* image = block->imageInfo;
597 
598  for (int row = 0; row < block->rowCount; ++row)
599  {
600  colorBGR* bgrCol = (colorBGR*)block->bgrRow;
601  uchar* grayCol = block->grayRow;
602  uchar* yCol = block->yRow;
603  uchar* uCol = block->uRow;
604  uchar* vCol = block->vRow;
605 
606  // convert 2 pixels in the inner loop
// NOTE(review): bgrCol is a colorBGR* so 'bgrCol += bgrColOffest' advances in
// 3-byte elements, not bytes as the struct comment claims — the ±1 values set
// by copyYUVPlanes make this work, but confirm the unit before changing it.
607  for (int col = 0; col < block->colCount; col += 2)
608  {
609  yuv2rbg(*yCol, *uCol, *vCol, bgrCol->r, bgrCol->g, bgrCol->b);
610  *grayCol = *yCol;
611 
612  bgrCol += image->bgrColOffest;
613  grayCol += image->grayColOffest;
614  yCol += image->yColOffest;
615 
616  yuv2rbg(*yCol, *uCol, *vCol, bgrCol->r, bgrCol->g, bgrCol->b);
617  *grayCol = *yCol;
618 
619  bgrCol += image->bgrColOffest;
620  grayCol += image->grayColOffest;
621  yCol += image->yColOffest;
622 
// U and V are subsampled: advance only once per 2 converted pixels
623  uCol += image->uColOffest;
624  vCol += image->vColOffset;
625  }
626 
627  block->bgrRow += image->bgrRowOffset;
628  block->grayRow += image->grayRowOffset;
629  block->yRow += image->yRowOffset;
630 
631  // if odd row
632  if (row & 1)
633  {
634  block->uRow += image->uRowOffset;
635  block->vRow += image->vRowOffest;
636  }
637  }
638 
639  return nullptr;
640 }
641 //------------------------------------------------------------------------------
642 //! Copies and converts the video image in YUV_420 format to RGB and Grayscale
643 /*! CVCapture::copyYUVPlanes copies and converts the video image in YUV_420
644 format to the RGB image in CVCapture::lastFrame and the Y channel the grayscale
645 image in CVCapture::lastFrameGray.\n
646 In the YUV_420 format only the luminosity channel Y has the full resolution
647 (one byte per pixel). The color channels U and V are subsampled and have only
648 one byte per 4 pixel. See also https://en.wikipedia.org/wiki/Chroma_subsampling
649 \n
650 In addition the routine crops and mirrors the image if needed. So the following
651 processing steps should be done hopefully in a single loop:
652 \n
653 1) Crops the input image if it doesn't match the screens aspect ratio. The
654 input image mostly does't fit the aspect of the output screen aspect. If the
655 input image is too high we crop it on top and bottom, if it is too wide we
656 crop it on the sides.
657 \n
658 2) Some cameras toward a face mirror the image and some do not. If a input
659 image should be mirrored or not is stored in CVCalibration::_isMirroredH
660 (H for horizontal) and CVCalibration::_isMirroredV (V for vertical).
661 \n
662 3) The most expensive part of course is the color space conversion from the
663 YUV to RGB conversion. According to Wikipedia the conversion is defined as:
664 \n
665 - C = 1.164*(Y-16); D = U-128; E = V-128
666 - R = clip(round(C + 1.596*E))
667 - G = clip(round(C - 0.391*D - 0.813*E))
668 - B = clip(round(C + 2.018*D))
669 \n
670 A faster integer version with bit shifting is:\n
671 - C = 298*(Y-16)+128; D = U-128; E = V-128
672 - R = clip((C + 409*E) >> 8)
673 - G = clip((C - 100*D - 208*E) >> 8)
674 - B = clip((C + 516*D) >> 8)
675 \n
676 4) Many of the image processing tasks are faster done on grayscale images.
677 We therefore create a copy of the y-channel into CVCapture::lastFrameGray.
678 \n
679 @param scrWdivH aspect ratio width / height
680 @param srcW Source image width in pixel
681 @param srcH Source image height in pixel
682 @param y Pointer to first byte of the top left pixel of the y-plane
683 @param yBytes Size in bytes of the y-plane (must be srcW x srcH)
684 @param yColOffset Offset in bytes to the next pixel in the y-plane
685 @param yRowOffset Offset in bytes to the next line in the y-plane
686 @param u Pointer to first byte of the top left pixel of the u-plane
687 @param uBytes Size in bytes of the u-plane
688 @param uColOffset Offset in bytes to the next pixel in the u-plane
689 @param uRowOffset Offset in bytes to the next line in the u-plane
690 @param v Pointer to first byte of the top left pixel of the v-plane
691 @param vBytes Size in bytes of the v-plane
692 @param vColOffset Offset in bytes to the next pixel in the v-plane
693 @param vRowOffset Offset in bytes to the next line in the v-plane
694 */
695 void CVCapture::copyYUVPlanes(float scrWdivH,
696  int srcW,
697  int srcH,
698  uchar* y,
699  int yBytes,
700  int yColOffset,
701  int yRowOffset,
702  uchar* u,
703  int uBytes,
704  int uColOffset,
705  int uRowOffset,
706  uchar* v,
707  int vBytes,
708  int vColOffset,
709  int vRowOffset)
710 {
711  // Set the start time to measure the MS for the whole conversion
// NOTE(review): file lines 712, 746 and 856 were hyperlinks in this export and
// are missing here (712 presumably captures the start time, 746 presumably
// sets captureSize, 856 presumably records the elapsed capture time) —
// restore from the repository.
713 
714  // input image aspect ratio
715  float imgWdivH = (float)srcW / (float)srcH;
716 
717  int dstW = srcW; // width in pixels of the destination image
718  int dstH = srcH; // height in pixels of the destination image
719  int cropH = 0; // crop height in pixels of the source image
720  int cropW = 0; // crop width in pixels of the source image
721 
722  // Crop image if source and destination aspect is not the same
723  if (Utils::abs(imgWdivH - scrWdivH) > 0.01f)
724  {
725  if (imgWdivH > scrWdivH) // crop input image left & right
726  {
727  dstW = (int)((float)srcH * scrWdivH);
728  dstH = srcH;
729  cropW = (int)((float)(srcW - dstW) * 0.5f);
730  }
731  else // crop input image at top & bottom
732  {
733  dstW = srcW;
734  dstH = (int)((float)srcW / scrWdivH);
735  cropH = (int)((float)(srcH - dstH) * 0.5f);
736  }
737  }
738 
739  // Get the infos if the destination image must be mirrored
740  bool mirrorH = CVCapture::activeCamera->mirrorH();
741  bool mirrorV = CVCapture::activeCamera->mirrorV();
742 
743  // Create output color (BGR) and grayscale images
744  lastFrame = CVMat(dstH, dstW, CV_8UC(3));
745  lastFrameGray = CVMat(dstH, dstW, CV_8UC(1));
747 
748  // Bugfix on some devices with wrong pixel offsets
749  if (yRowOffset == uRowOffset && uColOffset == 1)
750  {
751  uColOffset = 2;
752  vColOffset = 2;
753  }
754 
755  uchar* bgrRow = lastFrame.data;
756  uchar* grayRow = lastFrameGray.data;
757 
758  int bgrColBytes = 3;
759  int bgrRowBytes = dstW * bgrColBytes;
760  int grayColBytes = 1;
761  int grayRowBytes = dstW * grayColBytes;
762 
763  // Adjust the offsets depending on the horizontal mirroring
764  int bgrRowOffset = dstW * bgrColBytes;
765  int grayRowOffset = dstW;
766  if (mirrorH)
767  {
// Start writing at the last row and step backwards through the destination
768  bgrRow += (dstH - 1) * bgrRowBytes;
769  grayRow += (dstH - 1) * grayRowBytes;
770  bgrRowOffset *= -1;
771  grayRowOffset *= -1;
772  }
773 
774  // Adjust the offsets depending on the vertical mirroring
775  int bgrColOffset = 1;
776  int grayColOffset = grayColBytes;
777  if (mirrorV)
778  {
// Start writing at the last column and step backwards within each row
779  bgrRow += (bgrRowBytes - bgrColBytes);
780  grayRow += (grayRowBytes - grayColBytes);
781  bgrColOffset *= -1;
782  grayColOffset *= -1;
783  }
784 
785  // Set source buffer pointers
786  int halfCropH = cropH / 2;
787  int halfCropW = cropW / 2;
788  uchar* yRow = y + cropH * yRowOffset + cropW * yColOffset;
789  uchar* uRow = u + halfCropH * uRowOffset + halfCropW * uColOffset;
790  uchar* vRow = v + halfCropH * vRowOffset + halfCropW * vColOffset;
791 
792  // Set the information common for all thread blocks
793  YUV2RGB_ImageInfo imageInfo{};
794  imageInfo.bgrColOffest = bgrColOffset;
795  imageInfo.grayColOffest = grayColOffset;
796  imageInfo.yColOffest = yColOffset;
797  imageInfo.uColOffest = uColOffset;
798  imageInfo.vColOffset = vColOffset;
799  imageInfo.bgrRowOffset = bgrRowOffset;
800  imageInfo.grayRowOffset = grayRowOffset;
801  imageInfo.yRowOffset = yRowOffset;
802  imageInfo.uRowOffset = uRowOffset;
803  imageInfo.vRowOffest = vRowOffset;
804 
805  // Prepare the threads (hyperthreads seam to be unefficient on ARM)
806  const int threadNum = 4; // std::max(thread::hardware_concurrency(), 1U);
807  vector<thread> threads;
808  YUV2RGB_BlockInfo threadInfos[threadNum];
809  int rowsPerThread = dstH / (threadNum + 1);
810  int halfRowsPerThread = (int)((float)rowsPerThread * 0.5f);
811  int rowsHandled = 0;
812 
813  // Launch threadNum-1 threads on different blocks of the image
814  for (int i = 0; i < threadNum - 1; i++)
815  {
816  YUV2RGB_BlockInfo* info = threadInfos + i;
817  info->imageInfo = &imageInfo;
818  info->bgrRow = bgrRow;
819  info->grayRow = grayRow;
820  info->yRow = yRow;
821  info->uRow = uRow;
822  info->vRow = vRow;
823  info->rowCount = rowsPerThread;
824  info->colCount = dstW;
825 
826  ////////////////////////////////////////////////
827  threads.emplace_back(thread(convertYUV2RGB, info));
828  ////////////////////////////////////////////////
829 
830  rowsHandled += rowsPerThread;
831 
// Advance all row pointers past the block just handed to the thread.
// U/V advance only half as far because of the 4:2:0 subsampling.
832  bgrRow += bgrRowOffset * rowsPerThread;
833  grayRow += grayRowOffset * rowsPerThread;
834  yRow += yRowOffset * rowsPerThread;
835  uRow += uRowOffset * halfRowsPerThread;
836  vRow += vRowOffset * halfRowsPerThread;
837  }
838  // Launch the last block on the main thread
839  YUV2RGB_BlockInfo infoMain{};
840  infoMain.imageInfo = &imageInfo;
841  infoMain.bgrRow = bgrRow;
842  infoMain.grayRow = grayRow;
843  infoMain.yRow = yRow;
844  infoMain.uRow = uRow;
845  infoMain.vRow = vRow;
846  infoMain.rowCount = (dstH - rowsHandled);
847  infoMain.colCount = dstW;
848 
849  convertYUV2RGB(&infoMain);
850 
851  // Join all threads to continue single threaded
852  for (auto& thread : threads)
853  thread.join();
854 
855  // Stop the capture time displayed in the statistics info
857 }
858 //-----------------------------------------------------------------------------
859 //! Setter for video type also sets the active calibration
860 /*! The CVCapture instance has up to three video camera calibrations, one
861 for a main camera (CVCapture::mainCam), one for the selfie camera on
862 mobile devices (CVCapture::scndCam) and one for video file simulation
863 (CVCapture::videoFileCam). The member CVCapture::activeCamera
864 references the active one.
865 */
// NOTE(review): file line 866 (the signature, presumably
// 'void CVCapture::videoType(CVVideoType vt)') and file lines 874, 877-878,
// 882 and 885 (presumably the activeCamera assignments to &scndCam, the
// mainCam fallback, &videoFileCam and &mainCam respectively) were hyperlinks
// in this export and are missing here — restore from the repository.
867 {
868  _videoType = vt;
869  _captureTimesMS.set(0.0f); // reset the capture-time statistics
870 
871  if (vt == VT_SCND)
872  {
873  if (hasSecondaryCamera)
875  else // fallback if there is no secondary camera we use main setup
876  {
879  }
880  }
881  else if (vt == VT_FILE)
883  else
884  {
886  if (vt == VT_NONE)
887  {
888  release();
889  _captureTimesMS.init(60, 0.0f);
890  }
891  }
892 }
893 //-----------------------------------------------------------------------------
//! Loads the camera calibration files for the main and secondary cameras
/*! Builds the per-computer calibration filenames and loads them from
configPath. The commented-out FTP download fallbacks are kept for a future
refactoring.
\param computerInfo Unique computer identifier used in the calibration filenames
\param configPath   Directory that contains the calibration xml files
*/
894 void CVCapture::loadCalibrations(const string& computerInfo,
895  const string& configPath)
896 {
897  string mainCalibFilename = "camCalib_" + computerInfo + "_main.xml";
898  string scndCalibFilename = "camCalib_" + computerInfo + "_scnd.xml";
899 
900  // load opencv camera calibration for main and secondary camera
901 #if defined(APP_USES_CVCAPTURE)
902 
903  // try to download from ftp if no calibration exists locally
904  string fullPathAndFilename = Utils::unifySlashes(configPath) + mainCalibFilename;
905  if (Utils::fileExists(fullPathAndFilename))
906  {
907  if (!mainCam.calibration.load(configPath, mainCalibFilename, true))
908  {
909  // instantiate a guessed calibration
910  // mainCam.calibration = CVCalibration()
911  }
912  }
913  else
914  {
915  /*
916  //todo: move this download call out of cvcaputure (during refactoring of this class)
917  string errorMsg;
918  if (!FtpUtils::downloadFileLatestVersion(AppCommon::calibFilePath,
919  mainCalibFilename,
920  AppCommon::CALIB_FTP_HOST,
921  AppCommon::CALIB_FTP_USER,
922  AppCommon::CALIB_FTP_PWD,
923  AppCommon::CALIB_FTP_DIR,
924  errorMsg))
925  {
926  Utils::log("SLProject", errorMsg.c_str());
927  }
928  */
929  }
930 
// NOTE(review): file line 931 (presumably 'activeCamera = &mainCam;') was a
// hyperlink in this export and is missing here — restore from the repository.
932  hasSecondaryCamera = false;
933 #else
934  /*
935  //todo: move this download call out of cvcaputure (during refactoring of this class)
936  string errorMsg;
937  if (!FtpUtils::downloadFile(AppCommon::calibFilePath,
938  mainCalibFilename,
939  AppCommon::CALIB_FTP_HOST,
940  AppCommon::CALIB_FTP_USER,
941  AppCommon::CALIB_FTP_PWD,
942  AppCommon::CALIB_FTP_DIR,
943  errorMsg))
944  {
945  Utils::log("SLProject", errorMsg.c_str());
946  }
947  //todo: move this download call out of cvcaputure (during refactoring of this class)
948  if (!FtpUtils::downloadFile(AppCommon::calibFilePath,
949  scndCalibFilename,
950  AppCommon::CALIB_FTP_HOST,
951  AppCommon::CALIB_FTP_USER,
952  AppCommon::CALIB_FTP_PWD,
953  AppCommon::CALIB_FTP_DIR,
954  errorMsg))
955  {
956  Utils::log("SLProject", errorMsg.c_str());
957  }
958  */
959  mainCam.calibration.load(configPath, mainCalibFilename, true);
960  scndCam.calibration.load(configPath, scndCalibFilename, true);
// NOTE(review): file line 961 (presumably 'activeCamera = &mainCam;') was a
// hyperlink in this export and is missing here — restore from the repository.
962  hasSecondaryCamera = true;
963 #endif
964 }
965 //-----------------------------------------------------------------------------
966 /*! Sets the with and height of a camera size at index sizeIndex.
967 If sizeIndexMax changes the vector in CVCapture gets cleared and resized.
968 */
969 void CVCapture::setCameraSize(int sizeIndex,
970  int sizeIndexMax,
971  int width,
972  int height)
973 {
974  if ((uint)sizeIndexMax != camSizes.size())
975  {
976  camSizes.clear();
977  camSizes.resize((uint)sizeIndexMax);
978  }
979  camSizes[(uint)sizeIndex].width = width;
980  camSizes[(uint)sizeIndex].height = height;
981 }
982 //-----------------------------------------------------------------------------
983 //! Moves the current frame position in a video file.
// NOTE(review): file line 984 (the signature, presumably
// 'void CVCapture::moveCapturePosition(int n)' given the use of 'n' below)
// was a hyperlink in this export and is missing here — restore from the repo.
985 {
986 #ifndef SL_EMSCRIPTEN
987  if (_videoType != VT_FILE) return;
988 
989  int frameIndex = (int)_captureDevice.get(cv::CAP_PROP_POS_FRAMES);
990  frameIndex += n;
991 
// Clamp the new position to the valid frame range [0, frameCount]
992  if (frameIndex < 0) frameIndex = 0;
993  if (frameIndex > frameCount) frameIndex = frameCount;
994 
995  _captureDevice.set(cv::CAP_PROP_POS_FRAMES, frameIndex);
996 #endif
997 }
998 //-----------------------------------------------------------------------------
999 //! Returns the next frame index number
// NOTE(review): file line 1000 (the signature) was a hyperlink in this export
// and is missing here — restore from the repository.
1001 {
1002 #ifndef SL_EMSCRIPTEN
1003  int result = 0;
1004 
// Only meaningful for video files; 0 is returned for live cameras
1005  if (_videoType == VT_FILE)
1006  result = (int)_captureDevice.get(cv::CAP_PROP_POS_FRAMES);
1007 
1008  return result;
1009 #else
1010  return 0;
1011 #endif
1012 }
1013 //-----------------------------------------------------------------------------
//! Returns the total frame count of the opened video file (0 otherwise)
// NOTE(review): file line 1014 (the signature) was a hyperlink in this export
// and is missing here — restore the exact method name from the repository.
1015 {
1016 #ifndef SL_EMSCRIPTEN
1017  int result = 0;
1018 
1019  if (_videoType == VT_FILE)
1020  result = (int)_captureDevice.get(cv::CAP_PROP_FRAME_COUNT);
1021 
1022  return result;
1023 #else
1024  return 0;
1025 #endif
1026 }
1027 //------------------------------------------------------------------------------
The AppCommon class holds the top-level instances of the app-demo.
void yuv2rbg(uchar y, uchar u, uchar v, uchar &r, uchar &g, uchar &b)
YUV to RGB image infos. Offset value can be negative for mirrored copy.
Definition: CVCapture.cpp:528
void * convertYUV2RGB(YUV2RGB_BlockInfo *block)
YUV to RGB conversion function called by multiple threads.
Definition: CVCapture.cpp:594
CVVideoType
Video type if multiple exist on mobile devices.
Definition: CVCapture.h:40
@ VT_SCND
Selfie camera on mobile devices.
Definition: CVCapture.h:43
@ VT_FILE
Loads a video from file with OpenCV.
Definition: CVCapture.h:44
@ VT_NONE
No camera needed.
Definition: CVCapture.h:41
@ VT_MAIN
Main camera on all devices.
Definition: CVCapture.h:42
CVPixelFormatGL
Pixel format according to OpenGL pixel format defines.
Definition: CVImage.h:24
@ PF_luminance
Definition: CVImage.h:28
@ PF_rgb
Definition: CVImage.h:36
@ PF_bgra
Definition: CVImage.h:39
@ PF_rgba
Definition: CVImage.h:37
@ PF_yuv_420_888
Definition: CVImage.h:26
@ PF_bgr
Definition: CVImage.h:38
cv::Size2i CVSize2i
Definition: CVTypedefs.h:56
cv::Rect CVRect
Definition: CVTypedefs.h:39
cv::Size CVSize
Definition: CVTypedefs.h:55
cv::Mat CVMat
Definition: CVTypedefs.h:38
CVCameraType
Definition: CVTypes.h:62
#define PROFILE_FUNCTION()
Definition: Instrumentor.h:41
#define SL_LOG(...)
Definition: SL.h:233
WebCameraFacing
Facing modes for the camera.
Definition: WebCamera.h:19
CVSize imageSize() const
bool isMirroredH()
bool isMirroredV()
void adaptForNewResolution(const CVSize &newSize, bool calcUndistortionMaps)
Adapts an already calibrated camera to a new resolution (cropping and scaling)
bool load(const string &calibDir, const string &calibFileName, bool calcUndistortionMaps)
Loads the calibration information from the config file.
bool mirrorH()
Definition: CVCamera.h:22
int camSizeIndex()
Definition: CVCamera.h:27
CVCalibration calibration
Definition: CVCamera.h:36
bool mirrorV()
Definition: CVCamera.h:23
Encapsulation of the OpenCV Capture Device and holder of the last frame.
Definition: CVCapture.h:63
CVCamera scndCam
camera representation for secondary video camera
Definition: CVCapture.h:138
int frameCount
Definition: CVCapture.h:129
CVVideoCapture _captureDevice
OpenCV capture device.
Definition: CVCapture.h:147
void moveCapturePosition(int n)
Moves the current frame position in a video file.
Definition: CVCapture.cpp:984
bool hasSecondaryCamera
flag if device has secondary camera
Definition: CVCapture.h:125
CVVideoType videoType()
Definition: CVCapture.h:106
~CVCapture()
Private constructor.
Definition: CVCapture.cpp:59
void loadCalibrations(const string &computerInfo, const string &configPath)
Definition: CVCapture.cpp:894
float fps
Definition: CVCapture.h:128
CVVSize camSizes
All possible camera sizes.
Definition: CVCapture.h:133
void release()
Definition: CVCapture.cpp:215
CVCamera * activeCamera
Pointer to the active camera.
Definition: CVCapture.h:136
void copyYUVPlanes(float scrWdivH, int srcW, int srcH, uchar *y, int ySize, int yPixStride, int yLineStride, uchar *u, int uSize, int uPixStride, int uLineStride, uchar *v, int vSize, int vPixStride, int vLineStride)
Copies and converts the video image in YUV_420 format to RGB and Grayscale.
Definition: CVCapture.cpp:695
CVPixelFormatGL format
GL pixel format.
Definition: CVCapture.h:122
CVSize captureSize
size of captured frame
Definition: CVCapture.h:123
void setCameraSize(int sizeIndex, int sizeIndexMax, int width, int height)
Definition: CVCapture.cpp:969
void adjustForSL(float viewportWdivH)
Does all adjustments needed for the gVideoTexture.
Definition: CVCapture.cpp:387
int videoLength()
Definition: CVCapture.cpp:1014
CVSize2i open(int deviceNum)
Opens the capture device and returns the frame size.
Definition: CVCapture.cpp:74
void start(float viewportWdivH)
starts the video capturing
Definition: CVCapture.cpp:174
CVCapture()
private onetime constructor
Definition: CVCapture.cpp:35
CVMat lastFrame
last frame grabbed in BGR
Definition: CVCapture.h:119
string videoFilename
video filename to load
Definition: CVCapture.h:126
CVVideoType _videoType
Flag for using the live video image.
Definition: CVCapture.h:152
CVMat lastFrameGray
last frame in grayscale
Definition: CVCapture.h:121
CVSize2i openFile()
Opens the video file instead of a camera feed.
Definition: CVCapture.cpp:132
int activeCamSizeIndex
Currently active camera size index.
Definition: CVCapture.h:134
CVCamera videoFileCam
camera representation for simulation using a video file
Definition: CVCapture.h:139
CVCamera mainCam
camera representation for main video camera
Definition: CVCapture.h:137
int nextFrameIndex()
Returns the next frame index number.
Definition: CVCapture.cpp:1000
bool isOpened()
Definition: CVCapture.cpp:206
void loadIntoLastFrame(float vieportWdivH, int camWidth, int camHeight, CVPixelFormatGL srcPixelFormat, const uchar *data, bool isContinuous)
Definition: CVCapture.cpp:301
AvgFloat _captureTimesMS
Averaged time for video capturing in ms.
Definition: CVCapture.h:153
HighResTimer _timer
High resolution timer.
Definition: CVCapture.h:154
bool videoLoops
flag if video should loop
Definition: CVCapture.h:127
float startCaptureTimeMS
start time of capturing in ms
Definition: CVCapture.h:124
static CVCapture * _instance
global singleton object
Definition: CVCapture.h:144
bool grabAndAdjustForSL(float viewportWdivH)
Definition: CVCapture.cpp:235
static CVPixelFormatGL cvType2glPixelFormat(int cvType)
Converts OpenCV mat type to OpenGL pixel format.
Definition: CVImage.cpp:522
float elapsedTimeInMilliSec()
Definition: HighResTimer.h:38
void init(int numValues, T initValue)
Initializes the average value array to a given value.
Definition: Averaged.h:41
void set(T value)
Sets the current value in the value array and builds the average.
Definition: Averaged.h:53
string unifySlashes(const string &inputDir, bool withTrailingSlash)
Returns the inputDir string with unified forward slashes, e.g.: "dirA/dirB/".
Definition: Utils.cpp:368
bool fileExists(const string &pathfilename)
Returns true if a file exists.
Definition: Utils.cpp:897
T abs(T a)
Definition: Utils.h:249
T clamp(T a, T min, T max)
Definition: Utils.h:253
void exitMsg(const char *tag, const char *msg, const int line, const char *file)
Terminates the application with a message. No leak checking.
Definition: Utils.cpp:1135
void log(const char *tag, const char *format,...)
logs a formatted string platform independently
Definition: Utils.cpp:1103
YUV to RGB image block infos that are different per thread.
Definition: CVCapture.cpp:579
uchar * bgrRow
Pointer to the bgr row.
Definition: CVCapture.cpp:583
int rowCount
Num. of rows in block.
Definition: CVCapture.cpp:581
uchar * vRow
Pointer to the v value row.
Definition: CVCapture.cpp:587
uchar * yRow
Pointer to the y value row.
Definition: CVCapture.cpp:585
YUV2RGB_ImageInfo * imageInfo
Pointer to the image info.
Definition: CVCapture.cpp:580
uchar * uRow
Pointer to the u value row.
Definition: CVCapture.cpp:586
int colCount
Num. of columns in block.
Definition: CVCapture.cpp:582
uchar * grayRow
Pointer to the grayscale row.
Definition: CVCapture.cpp:584
YUV to RGB image infos. Offset value can be negative for mirrored copy.
Definition: CVCapture.cpp:564
int uRowOffset
offset in bytes to the u value of the next row
Definition: CVCapture.cpp:573
int vColOffset
offset in bytes to the next v pixel (column)
Definition: CVCapture.cpp:569
int grayColOffest
offset in bytes to the next gray pixel (column)
Definition: CVCapture.cpp:566
int bgrRowOffset
offset in bytes to the next bgr row
Definition: CVCapture.cpp:570
int yRowOffset
offset in bytes to the y value of the next row
Definition: CVCapture.cpp:572
int grayRowOffset
offset in bytes to the next grayscale row
Definition: CVCapture.cpp:571
int vRowOffest
offset in bytes to the v value of the next row
Definition: CVCapture.cpp:574
int bgrColOffest
offset in bytes to the next bgr pixel (column)
Definition: CVCapture.cpp:565
int uColOffest
offset in bytes to the next u pixel (column)
Definition: CVCapture.cpp:568
int yColOffest
offset in bytes to the next y pixel (column)
Definition: CVCapture.cpp:567
YUV to RGB image infos. Offset value can be negative for mirrored copy.
Definition: CVCapture.cpp:558
uchar r
Definition: CVCapture.cpp:559
uchar g
Definition: CVCapture.cpp:559
uchar b
Definition: CVCapture.cpp:559