29 #include <opencv2/core/utils/logger.hpp> 
   41 #ifdef APP_USES_CVCAPTURE 
   55     cv::utils::logging::setLogLevel(cv::utils::logging::LogLevel::LOG_LEVEL_SILENT);
 
   84         Utils::log("SLProject", "CVCapture::open  : Capture devices created.");
 
  106         Utils::log("SLProject", "Exception during OpenCV video capture creation: %s", e.what());
 
  114     _webCamera.open(facing);
 
  134 #ifndef SL_EMSCRIPTEN 
  139             string msg = "CVCapture::openFile: File not found: " + videoFilename;
 
  147             Utils::log("SLProject", "CVCapture::openFile: Failed to open video file.");
 
  166         Utils::log("SLProject", "CVCapture::openFile: Exception during OpenCV video capture creation with video file: %s", e.what());
 
  176 #if defined(SL_EMSCRIPTEN) 
  178 #elif defined(APP_USES_CVCAPTURE) 
  208 #ifndef SL_EMSCRIPTEN 
  211     return _webCamera.isOpened();
 
  217 #ifndef SL_EMSCRIPTEN 
  221     if (_webCamera.isOpened())
 
  241 #ifndef SL_EMSCRIPTEN 
  258 #    if defined(ANDROID) 
  266             static bool logOnce = true;
 
  269                 Utils::log("SLProject", "OpenCV: Capture device or video file is not open!");
 
  277         Utils::log("SLProject", "Exception during OpenCV video capture creation: %s", e.what());
 
  281     if (!_webCamera.isOpened())
 
  283         SL_LOG("Web camera is not open!");
 
  306                                   const bool            isContinuous)
 
  313         CVMat yuv(height + height / 2, width, CV_8UC1, (void*)data);
 
  321         CVMat rgba(height, width, CV_8UC4, (void*)data);
 
  327         int cvType = 0, bpp = 0;
 
  351             default: Utils::exitMsg("SLProject", "Pixel format not supported", __LINE__, __FILE__);
 
  355         size_t destStride = 0;
 
  358             int bitsPerPixel = bpp * 8;
 
  359             int bpl          = ((width * bitsPerPixel + 31) / 32) * 4;
 
  360             destStride       = (size_t)(bpl - width * bpp);
 
  410             for (unsigned long i = 0; i < camSizes.size(); ++i)
 
  430     float outWdivH = viewportWdivH < 0.0f ? inWdivH : viewportWdivH;
 
  441         if (inWdivH > outWdivH) 
 
  443             width  = (int)((float)lastFrame.rows * outWdivH);
 
  445             cropW  = (int)((float)(lastFrame.cols - width) * 0.5f);
 
  448             wModulo4 = width % 4;
 
  449             if (wModulo4 == 1) width--;
 
  455             if (wModulo4 == 3) width++;
 
  460             height = (int)((float)lastFrame.cols / outWdivH);
 
  461             cropH  = (int)((float)(lastFrame.rows - height) * 0.5f);
 
  464             hModulo4 = height % 4;
 
  465             if (hModulo4 == 1) height--;
 
  471             if (hModulo4 == 3) height++;
 
  515 #ifndef SL_EMSCRIPTEN 
  528 yuv2rbg(uchar y, uchar u, uchar v, uchar& r, uchar& g, uchar& b)
 
  546     int a0 = 1192 * (y - 16);
 
  598     for (int row = 0; row < block->rowCount; ++row)
 
  601         uchar*    grayCol = block->grayRow;
 
  602         uchar*    yCol    = block->yRow;
 
  603         uchar*    uCol    = block->uRow;
 
  604         uchar*    vCol    = block->vRow;
 
  607         for (int col = 0; col < block->colCount; col += 2)
 
  609             yuv2rbg(*yCol, *uCol, *vCol, bgrCol->r, bgrCol->g, bgrCol->b);
 
  616             yuv2rbg(*yCol, *uCol, *vCol, bgrCol->r, bgrCol->g, bgrCol->b);
 
  715     float imgWdivH = (float)srcW / (float)srcH;
 
  725         if (imgWdivH > scrWdivH) 
 
  727             dstW  = (int)((float)srcH * scrWdivH);
 
  729             cropW = (int)((float)(srcW - dstW) * 0.5f);
 
  734             dstH  = (int)((float)srcW / scrWdivH);
 
  735             cropH = (int)((float)(srcH - dstH) * 0.5f);
 
  749     if (yRowOffset == uRowOffset && uColOffset == 1)
 
  759     int bgrRowBytes  = dstW * bgrColBytes;
 
  760     int grayColBytes = 1;
 
  761     int grayRowBytes = dstW * grayColBytes;
 
  764     int bgrRowOffset  = dstW * bgrColBytes;
 
  765     int grayRowOffset = dstW;
 
  768         bgrRow += (dstH - 1) * bgrRowBytes;
 
  769         grayRow += (dstH - 1) * grayRowBytes;
 
  775     int bgrColOffset  = 1;
 
  776     int grayColOffset = grayColBytes;
 
  779         bgrRow += (bgrRowBytes - bgrColBytes);
 
  780         grayRow += (grayRowBytes - grayColBytes);
 
  786     int    halfCropH = cropH / 2;
 
  787     int    halfCropW = cropW / 2;
 
  788     uchar* yRow      = y + cropH * yRowOffset + cropW * yColOffset;
 
  789     uchar* uRow      = u + halfCropH * uRowOffset + halfCropW * uColOffset;
 
  790     uchar* vRow      = v + halfCropH * vRowOffset + halfCropW * vColOffset;
 
  795     imageInfo.grayColOffest = grayColOffset;
 
  796     imageInfo.yColOffest    = yColOffset;
 
  797     imageInfo.uColOffest    = uColOffset;
 
  798     imageInfo.vColOffset    = vColOffset;
 
  799     imageInfo.bgrRowOffset  = bgrRowOffset;
 
  800     imageInfo.grayRowOffset = grayRowOffset;
 
  801     imageInfo.yRowOffset    = yRowOffset;
 
  802     imageInfo.uRowOffset    = uRowOffset;
 
  803     imageInfo.vRowOffest    = vRowOffset;
 
  806     const int         threadNum = 4; 
 
  807     vector<thread>    threads;
 
  809     int               rowsPerThread     = dstH / (threadNum + 1);
 
  810     int               halfRowsPerThread = (int)((float)rowsPerThread * 0.5f);
 
  814     for (int i = 0; i < threadNum - 1; i++)
 
  830         rowsHandled += rowsPerThread;
 
  832         bgrRow += bgrRowOffset * rowsPerThread;
 
  833         grayRow += grayRowOffset * rowsPerThread;
 
  834         yRow += yRowOffset * rowsPerThread;
 
  835         uRow += uRowOffset * halfRowsPerThread;
 
  836         vRow += vRowOffset * halfRowsPerThread;
 
  841     infoMain.bgrRow    = bgrRow;
 
  842     infoMain.grayRow   = grayRow;
 
  843     infoMain.yRow      = yRow;
 
  844     infoMain.uRow      = uRow;
 
  845     infoMain.vRow      = vRow;
 
  846     infoMain.rowCount  = (dstH - rowsHandled);
 
  847     infoMain.colCount  = dstW;
 
  852     for (auto& thread : threads)
 
  895                                  const string& configPath)
 
  897     string mainCalibFilename = "camCalib_" + computerInfo + "_main.xml";
 
  898     string scndCalibFilename = "camCalib_" + computerInfo + "_scnd.xml";
 
  901 #if defined(APP_USES_CVCAPTURE) 
  974     if ((uint)sizeIndexMax != camSizes.size())
 
  977         camSizes.resize((uint)sizeIndexMax);
 
  979     camSizes[(uint)sizeIndex].width  = width;
 
  980     camSizes[(uint)sizeIndex].height = height;
 
  986 #ifndef SL_EMSCRIPTEN 
  989     int frameIndex = (int)_captureDevice.get(cv::CAP_PROP_POS_FRAMES);
 
  992     if (frameIndex < 0) frameIndex = 0;
 
 1002 #ifndef SL_EMSCRIPTEN 
 1016 #ifndef SL_EMSCRIPTEN 
The AppCommon class holds the top-level instances of the app-demo.
 
void yuv2rbg(uchar y, uchar u, uchar v, uchar &r, uchar &g, uchar &b)
Converts a single YUV pixel to its RGB value.
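
The fragment at line 546 (int a0 = 1192 * (y - 16)) points to a fixed-point BT.601-style conversion (1192 ≈ 1.164 * 1024). Below is a minimal, self-contained sketch of such an integer conversion; the remaining coefficients and the clamping are standard values assumed here, not taken from the source.

    #include <algorithm>

    typedef unsigned char uchar;

    // Fixed-point BT.601 YUV -> RGB, scaled by 1024 (hence the >> 10).
    inline void yuv2rgbSketch(uchar y, uchar u, uchar v,
                              uchar& r, uchar& g, uchar& b)
    {
        int yy = 1192 * ((int)y - 16);             // 1.164 * 1024
        int uu = (int)u - 128;
        int vv = (int)v - 128;

        int rr = (yy + 1634 * vv) >> 10;           // 1.596 * 1024
        int gg = (yy - 833 * vv - 400 * uu) >> 10; // 0.813 / 0.391 * 1024
        int bb = (yy + 2066 * uu) >> 10;           // 2.018 * 1024

        r = (uchar)std::clamp(rr, 0, 255);
        g = (uchar)std::clamp(gg, 0, 255);
        b = (uchar)std::clamp(bb, 0, 255);
    }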
 
void * convertYUV2RGB(YUV2RGB_BlockInfo *block)
YUV to RGB conversion function called by multiple threads.
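
The fragments around lines 806-852 show the work being split into horizontal bands handled by a few std::thread workers plus the calling thread. Below is a simplified sketch of that fan-out pattern; the band sizes and thread bookkeeping in the source differ slightly, and convertRows() is a hypothetical stand-in for the per-block worker.

    #include <cstdio>
    #include <thread>
    #include <vector>

    // Hypothetical per-band worker; the real convertYUV2RGB() receives a
    // YUV2RGB_BlockInfo* carrying row pointers and byte offsets instead.
    static void convertRows(int firstRow, int rowCount)
    {
        std::printf("converting rows %d..%d\n", firstRow, firstRow + rowCount - 1);
    }

    static void convertAllRows(int dstH)
    {
        const int                threadNum     = 4;
        std::vector<std::thread> threads;
        int                      rowsPerThread = dstH / (threadNum + 1);
        int                      rowsHandled   = 0;

        // Launch worker threads, each converting one band of rows.
        for (int i = 0; i < threadNum; i++)
        {
            threads.emplace_back(convertRows, rowsHandled, rowsPerThread);
            rowsHandled += rowsPerThread;
        }

        // The calling thread converts the remaining rows itself.
        convertRows(rowsHandled, dstH - rowsHandled);

        for (auto& t : threads)
            t.join();
    }

    int main() { convertAllRows(480); }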
 
CVVideoType
Video type if multiple exist on mobile devices.
 
@ VT_SCND
Selfie camera on mobile devices.
 
@ VT_FILE
Loads a video from file with OpenCV.
 
@ VT_NONE
No camera needed.
 
@ VT_MAIN
Main camera on all devices.
 
CVPixelFormatGL
Pixel format according to OpenGL pixel format defines.
 
#define PROFILE_FUNCTION()
 
WebCameraFacing
Facing modes for the camera.
 
void adaptForNewResolution(const CVSize &newSize, bool calcUndistortionMaps)
Adapts an already calibrated camera to a new resolution (cropping and scaling)
 
bool load(const string &calibDir, const string &calibFileName, bool calcUndistortionMaps)
Loads the calibration information from the config file.
 
CVCalibration calibration
 
Encapsulation of the OpenCV Capture Device and holder of the last frame.
 
CVCamera scndCam
camera representation for secondary video camera
 
CVVideoCapture _captureDevice
OpenCV capture device.
 
void moveCapturePosition(int n)
Moves the current frame position in a video file.
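
Assuming the call goes through the cv::VideoCapture held in _captureDevice (see line 989), moving within a video file amounts to reading and writing cv::CAP_PROP_POS_FRAMES. A minimal standalone sketch:

    #include <opencv2/videoio.hpp>

    // Jump n frames forward or backward in an already opened video file.
    // 'cap' stands in for the _captureDevice member; clamping is assumed.
    void moveCapturePositionSketch(cv::VideoCapture& cap, int n)
    {
        int frameIndex = (int)cap.get(cv::CAP_PROP_POS_FRAMES) + n;
        int frameCount = (int)cap.get(cv::CAP_PROP_FRAME_COUNT);

        if (frameIndex < 0) frameIndex = 0;
        if (frameIndex >= frameCount) frameIndex = frameCount - 1;

        cap.set(cv::CAP_PROP_POS_FRAMES, frameIndex);
    }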
 
bool hasSecondaryCamera
flag if device has secondary camera
 
~CVCapture()
Private destructor.
 
void loadCalibrations(const string &computerInfo, const string &configPath)
 
CVVSize camSizes
All possible camera sizes.
 
CVCamera * activeCamera
Pointer to the active camera.
 
void copyYUVPlanes(float scrWdivH, int srcW, int srcH, uchar *y, int ySize, int yPixStride, int yLineStride, uchar *u, int uSize, int uPixStride, int uLineStride, uchar *v, int vSize, int vPixStride, int vLineStride)
Copies and converts the video image in YUV_420 format to RGB and Grayscale.
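
The "offset value can be negative for mirrored copy" idea behind the row/column offsets (see lines 768-780) can be illustrated with a plain 8-bit plane copy: to mirror vertically, start the destination pointer at the last row and step with a negative row offset. The names below are illustrative, not from the source.

    #include <cstring>

    // Copy a srcW x srcH 8-bit plane, optionally mirrored vertically,
    // by walking the destination with a signed row offset.
    void copyPlaneSketch(const unsigned char* src, unsigned char* dst,
                         int srcW, int srcH, bool mirrorV)
    {
        int            dstRowOffset = srcW; // bytes to the next destination row
        unsigned char* dstRow       = dst;

        if (mirrorV)
        {
            dstRow += (srcH - 1) * srcW; // start at the last row ...
            dstRowOffset = -srcW;        // ... and walk upwards
        }

        for (int row = 0; row < srcH; ++row)
        {
            std::memcpy(dstRow, src + row * srcW, (size_t)srcW);
            dstRow += dstRowOffset;
        }
    }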
 
CVPixelFormatGL format
GL pixel format.
 
CVSize captureSize
size of captured frame
 
void setCameraSize(int sizeIndex, int sizeIndexMax, int width, int height)
 
void adjustForSL(float viewportWdivH)
Does all adjustments needed for the gVideoTexture.
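
The fragments around lines 430-471 crop lastFrame to the viewport aspect ratio and then nudge the cropped width or height towards a multiple of 4 (convenient for tight OpenGL texture uploads). A standalone approximation of that computation follows; only the modulo-1 and modulo-3 adjustments visible in the listing are reproduced.

    // Compute a centered crop of an inW x inH image that matches outWdivH
    // and whose cropped width/height is (nearly) divisible by 4.
    void cropForAspectSketch(int inW, int inH, float outWdivH,
                             int& width, int& height, int& cropW, int& cropH)
    {
        float inWdivH = (float)inW / (float)inH;
        width  = inW;
        height = inH;
        cropW  = 0;
        cropH  = 0;

        if (inWdivH > outWdivH) // input is wider: crop left and right
        {
            width = (int)((float)inH * outWdivH);
            cropW = (int)((float)(inW - width) * 0.5f);

            int wModulo4 = width % 4;
            if (wModulo4 == 1) width--;
            if (wModulo4 == 3) width++;
        }
        else if (inWdivH < outWdivH) // input is taller: crop top and bottom
        {
            height = (int)((float)inW / outWdivH);
            cropH  = (int)((float)(inH - height) * 0.5f);

            int hModulo4 = height % 4;
            if (hModulo4 == 1) height--;
            if (hModulo4 == 3) height++;
        }
    }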
 
CVSize2i open(int deviceNum)
Opens the capture device and returns the frame size.
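
Outside the Emscripten path this is backed by a cv::VideoCapture; a hedged sketch of opening a device by index and returning its frame size ('cap' stands in for _captureDevice):

    #include <opencv2/videoio.hpp>

    // Open capture device 'deviceNum' and return its frame size (0x0 on failure).
    cv::Size2i openDeviceSketch(cv::VideoCapture& cap, int deviceNum)
    {
        cap.open(deviceNum);
        if (!cap.isOpened())
            return cv::Size2i(0, 0);

        int w = (int)cap.get(cv::CAP_PROP_FRAME_WIDTH);
        int h = (int)cap.get(cv::CAP_PROP_FRAME_HEIGHT);
        return cv::Size2i(w, h);
    }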
 
void start(float viewportWdivH)
starts the video capturing
 
CVCapture()
private one-time constructor
 
CVMat lastFrame
last frame grabbed in BGR
 
string videoFilename
video filename to load
 
CVVideoType _videoType
Type of video source in use (main or secondary camera, video file, or none).
 
CVMat lastFrameGray
last frame in grayscale
 
CVSize2i openFile()
Opens the video file instead of a camera feed.
 
int activeCamSizeIndex
Currently active camera size index.
 
CVCamera videoFileCam
camera representation for simulation using a video file
 
CVCamera mainCam
camera representation for main video camera
 
int nextFrameIndex()
Returns the next frame index number.
 
void loadIntoLastFrame(float vieportWdivH, int camWidth, int camHeight, CVPixelFormatGL srcPixelFormat, const uchar *data, bool isContinuous)
 
AvgFloat _captureTimesMS
Averaged time for video capturing in ms.
 
HighResTimer _timer
High resolution timer.
 
bool videoLoops
flag if video should loop
 
float startCaptureTimeMS
start time of capturing in ms
 
static CVCapture * _instance
global singleton object
 
bool grabAndAdjustForSL(float viewportWdivH)
 
static CVPixelFormatGL cvType2glPixelFormat(int cvType)
Converts OpenCV mat type to OpenGL pixel format.
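
For the common 8-bit mat types the mapping is essentially a switch on the channel count; the enumerator names below are placeholders, not the actual CVPixelFormatGL values:

    #include <opencv2/core.hpp>

    // Placeholder pixel-format enum; the real CVPixelFormatGL enumerators may differ.
    enum PixelFormatSketch { PF_unknown, PF_luminance, PF_bgr, PF_bgra };

    PixelFormatSketch cvType2glPixelFormatSketch(int cvType)
    {
        switch (cvType)
        {
            case CV_8UC1: return PF_luminance; // single channel (grayscale)
            case CV_8UC3: return PF_bgr;       // OpenCV stores color as BGR
            case CV_8UC4: return PF_bgra;
            default: return PF_unknown;
        }
    }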
 
float elapsedTimeInMilliSec()
 
void init(int numValues, T initValue)
Initializes the average value array to a given value.
 
void set(T value)
Sets the current value in the value array and builds the average.
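
AvgFloat backs _captureTimesMS; a minimal ring-buffer moving average with a similar init/set interface could look like this (a sketch, not the actual Utils implementation):

    #include <vector>

    class AvgFloatSketch
    {
    public:
        void init(int numValues, float initValue)
        {
            _values.assign((std::size_t)numValues, initValue);
            _sum     = initValue * (float)numValues;
            _nextIdx = 0;
        }

        // Replace the oldest value and keep a running sum for the average.
        void set(float value)
        {
            _sum -= _values[_nextIdx];
            _values[_nextIdx] = value;
            _sum += value;
            _nextIdx = (_nextIdx + 1) % _values.size();
        }

        float average() const { return _sum / (float)_values.size(); }

    private:
        std::vector<float> _values;
        float              _sum     = 0.0f;
        std::size_t        _nextIdx = 0;
    };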
 
string unifySlashes(const string &inputDir, bool withTrailingSlash)
Returns the inputDir string with unified forward slashes, e.g.: "dirA/dirB/".
 
bool fileExists(const string &pathfilename)
Returns true if a file exists.
 
T clamp(T a, T min, T max)
 
void exitMsg(const char *tag, const char *msg, const int line, const char *file)
Terminates the application with a message. No leak checking.
 
void log(const char *tag, const char *format,...)
logs a formatted string platform independently
 
YUV to RGB image block infos that are different per thread.
 
uchar * bgrRow
Pointer to the bgr row.
 
int rowCount
Num. of rows in block.
 
uchar * vRow
Pointer to the v value row.
 
uchar * yRow
Pointer to the y value row.
 
YUV2RGB_ImageInfo * imageInfo
Pointer to the image info.
 
uchar * uRow
Pointer to the u value row.
 
int colCount
Num. of columns in block.
 
uchar * grayRow
Pointer to the grayscale row.
 
YUV to RGB image infos. Offset value can be negative for mirrored copy.
 
int uRowOffset
offset in bytes to the u value of the next row
 
int vColOffset
offset in bytes to the next v pixel (column)
 
int grayColOffest
offset in bytes to the next gray pixel (column)
 
int bgrRowOffset
offset in bytes to the next bgr row
 
int yRowOffset
offset in bytes to the y value of the next row
 
int grayRowOffset
offset in bytes to the next grayscale row
 
int vRowOffest
offset in bytes to the v value of the next row
 
int bgrColOffest
offset in bytes to the next bgr pixel (column)
 
int uColOffest
offset in bytes to the next u pixel (column)
 
int yColOffest
offset in bytes to the next y pixel (column)
 