SLProject  4.2.000
A platform independent 3D computer graphics framework for desktop OS, Android, iOS and online in web browsers
ViewController.mm
Go to the documentation of this file.
1 /**
2  * \file ViewController.m
3  * \details Top level iOS view controller code that interfaces SLProject
4  * The demo application demonstrates most features of the SLProject
5  * framework. Implementation of the GUI with the GLFW3 framework
6  * that can create a window and receive system events on desktop OS
7  * such as Windows, MacOS and Linux.
8  * \date November 2017
9  * \authors Marcus Hudritsch
10  * \copyright http://opensource.org/licenses/GPL-3.0
11  * \remarks Please use clangformat to format the code. See more code style on
12  * https://github.com/cpvrlab/SLProject4/wiki/SLProject-Coding-Style
13 */
14 
15 // Objective C imports
16 #import <ViewController.h>
17 #import <CoreMotion/CoreMotion.h>
18 
19 // C++ includes for the SceneLibrary
20 #include <Utils.h>
21 #include <Utils_iOS.h>
22 #include <SLAssetManager.h>
23 #include <SLInterface.h>
24 #include <SLAssetLoader.h>
25 #include <CVCapture.h>
26 #include <AppCommon.h>
27 #include <App.h>
28 
29 #include <mach/mach_time.h>
30 #import <sys/utsname.h>
31 #import <mach-o/arch.h>
32 
33 //-----------------------------------------------------------------------------
34 // C-Prototypes
35 float GetSeconds();
37 //-----------------------------------------------------------------------------
38 // Global pointer to the GLView instance that can be accessed by onPaintRTGL
39 GLKView* myView = 0;
40 //-----------------------------------------------------------------------------
41 // Global SLSceneView handle
42 int svIndex = 0;
43 //-----------------------------------------------------------------------------
44 // Global screen scale (2.0 for retina, 1.0 else)
45 float screenScale = 1.0f;
46 
47 //-----------------------------------------------------------------------------
48 // C-Function used as C-function callback for raytracing update
// NOTE(review): the extraction dropped source line 49 here. Per the symbol
// index at the end of this dump it is the signature `SLbool onPaintRTGL()`.
// The body forces a redraw of the global GLKView and reports success.
50 {
51  [myView display];
52  return true;
53 }
54 
55 //-----------------------------------------------------------------------------
56 /*!
57  Returns the absolute time in seconds since the system started. It is based
58  on a CPU clock counter.
59  */
float GetSeconds()
{
    // mach_timebase_info() returns constant conversion factors for the
    // lifetime of the process. The previous code called it on every
    // invocation despite the static storage; query it only once.
    static mach_timebase_info_data_t info = {0, 0};
    if (info.denom == 0)
        mach_timebase_info(&info);

    // Convert the tick count to nanoseconds, then to seconds.
    // (Float precision degrades with long uptimes, as in the original.)
    uint64_t now = mach_absolute_time();
    now *= info.numer;
    now /= info.denom;
    double sec = (double)now / 1000000000.0;
    return (float)sec;
}
70 //-----------------------------------------------------------------------------
71 @interface ViewController ()<CLLocationManagerDelegate> {
72  SLfloat m_lastFrameTimeSec; //!< Timestamp for passing highres time
73  SLfloat m_lastTouchTimeSec; //!< Frame time of the last touch event
74  SLfloat m_lastTouchDownSec; //!< Time of last touch down
75  SLint m_touchDowns; //!< No. of finger touchdowns
76  CGPoint m_touchDownPos1; //!< Pos. of touch down for finger 1
77 
78  // Video stuff
79  AVCaptureSession* m_avSession; //!< Audio video session
80  NSString* m_avSessionPreset; //!< Session name
// NOTE(review): source line 81 was lost in extraction. It most likely declared
// `bool m_lastVideoImageIsConsumed;`, which is used throughout this file
// (drawInRect, captureOutput, setupVideo) but has no visible declaration here.
82  int m_lastVideoType; //! VT_NONE=0,VT_MAIN=1,VT_SCND=2
83  int m_lastVideoSizeIndex; //! 0=1920x1080, 1=1280x720 else 640x480
84  bool m_locationIsRunning; //! GPS is running
85 }
86 @property (strong, nonatomic) EAGLContext* context;
87 @property (strong, nonatomic) CMMotionManager* motionManager;
88 @property (strong, nonatomic) NSTimer* motionTimer;
89 @property (strong, nonatomic) CLLocationManager* locationManager;
90 @end
91 //-----------------------------------------------------------------------------
92 @implementation ViewController
93 @synthesize context = _context;
94 
95 - (void)dealloc
96 {
// Intentionally empty under ARC: release and [super dealloc] are forbidden
// and handled by the compiler. The commented-out lines are leftovers from
// the pre-ARC version of this file.
97  //[_context release];
98  //[super dealloc];
99 }
100 //-----------------------------------------------------------------------------
101 - (void)viewDidLoad
102 {
103  [super viewDidLoad];
104 
// Create the OpenGL ES context, preferring ES3 and falling back to ES2.
105  self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
106  if (!self.context)
107  {
108  NSLog(@"Failed to create ES3 context");
109  self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
110  if (!self.context) NSLog(@"Failed to create ES2 context");
111  }
112 
// Wire the storyboard's GLKView to the context and store it in the global
// myView so the C callback onPaintRTGL can trigger redraws.
113  myView = (GLKView*)self.view;
114  myView.context = self.context;
115  myView.drawableDepthFormat = GLKViewDrawableDepthFormat24;
116 
117  if ([UIDevice currentDevice].multitaskingSupported)
118  myView.drawableMultisample = GLKViewDrawableMultisample4X;
119 
120  self.preferredFramesPerSecond = 60;
121  self.view.multipleTouchEnabled = true;
122  m_touchDowns = 0;
123 
124  //[self setupGL];
125  [EAGLContext setCurrentContext:self.context];
126 
127  // determine device pixel ratio and dots per inch
128  screenScale = [UIScreen mainScreen].scale;
129  float dpi;
130  if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad)
131  dpi = 132 * screenScale;
132  else if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPhone)
133  dpi = 163 * screenScale;
134  else
135  dpi = 160 * screenScale;
136 
137  SLVstring cmdLineArgs;
// NOTE(review): source lines 138-140 were lost in extraction — presumably
// the Utils_iOS setup of AppCommon::exePath / configPath / externalPath
// used below. Recover them from version control.
141 
142  // Gather some computer information for Utils::ComputerInfos
143  struct utsname systemInfo;
144  uname(&systemInfo);
145  NSString* model = [NSString stringWithCString:systemInfo.machine encoding:NSUTF8StringEncoding];
146  NSString* osver = [[UIDevice currentDevice] systemVersion];
147  const NXArchInfo* archInfo = NXGetLocalArchInfo();
148  NSString* arch = [NSString stringWithUTF8String:archInfo->description];
149 
150  Utils::ComputerInfos::model = std::string([model UTF8String]);
151  Utils::ComputerInfos::osVer = std::string([osver UTF8String]);
152  Utils::ComputerInfos::arch = std::string([arch UTF8String]);
153 
154  AppCommon::calibIniPath = AppCommon::exePath + "data/calibrations/"; // for calibInitPath
// NOTE(review): source line 155 was lost in extraction — it started a call
// whose closing argument appears on the next line (AppCommon::configPath).
156  AppCommon::configPath); // for stored calibrations
157 
// Create the SLProject application with all asset root paths.
158  /////////////////////////////////////////////
159  slCreateApp(cmdLineArgs,
160  AppCommon::exePath + "data/",
161  AppCommon::exePath + "data/shaders/",
162  AppCommon::exePath + "data/models/",
163  AppCommon::exePath + "data/images/textures/",
164  AppCommon::exePath + "data/images/fonts/",
165  AppCommon::exePath + "data/videos/",
// NOTE(review): source line 166 was lost in extraction — per the
// slCreateApp signature this is the configPath argument.
167  "AppDemo_iOS");
168 
// NOTE(review): source line 169 was lost in extraction — per the symbol
// index it is likely the call `slLoadCoreAssetsSync();`.
170 
// Create the scene view sized in framebuffer pixels with the App callbacks.
171  ///////////////////////////////////////////////////////////////////////
// NOTE(review): source lines 172-173 were lost in extraction — per the
// slCreateSceneView signature, likely `svIndex = slCreateSceneView(...` with
// its first arguments.
174  self.view.bounds.size.width * screenScale,
175  self.view.bounds.size.height * screenScale,
176  dpi,
// NOTE(review): source line 177 (another argument, per the signature the
// initial scene id) was lost in extraction.
178  reinterpret_cast<void*>(onPaintRTGL),
179  0,
180  reinterpret_cast<void*>(App::config.onNewSceneView),
181  reinterpret_cast<void*>(App::config.onGuiBuild),
182  reinterpret_cast<void*>(App::config.onGuiLoadConfig),
183  reinterpret_cast<void*>(App::config.onGuiSaveConfig));
184  ///////////////////////////////////////////////////////////////////////
185 
186  [self setupMotionManager:1.0 / 20.0];
187  [self setupLocationManager];
188 
189  // Set the available capture resolutions
190 
191  CVCapture::instance()->setCameraSize(0, 3, 1920, 1080);
192  CVCapture::instance()->setCameraSize(1, 3, 1280, 720);
193  CVCapture::instance()->setCameraSize(2, 3, 640, 480);
194  m_lastVideoSizeIndex = -1; // default size index
195 }
196 //-----------------------------------------------------------------------------
//! Tears down the SLProject framework and releases the GL context on memory pressure.
- (void)didReceiveMemoryWarning
{
    printf("didReceiveMemoryWarning\n");

    [super didReceiveMemoryWarning];

    // Shut down the whole SLProject framework before dropping the GL context.
    slTerminate();

    // Clear the thread's current context only if it is actually ours.
    BOOL contextIsCurrent = ([EAGLContext currentContext] == self.context);
    if (contextIsCurrent)
        [EAGLContext setCurrentContext:nil];

    self.context = nil;
}
212 //-----------------------------------------------------------------------------
//! GLKViewController per-frame update: propagates the current view size
//! (in framebuffer pixels) to the SLProject scene view.
- (void)update
{
    CGSize size = self.view.bounds.size;
    slResize(svIndex, size.width * screenScale, size.height * screenScale);
}
217 //-----------------------------------------------------------------------------
218 - (void)glkView:(GLKView*)view drawInRect:(CGRect)rect
219 {
// Per-frame GLKView callback: drives video capture, location updates, scene
// loading and finally the SLProject paint of all scene views.
220  if (AppCommon::sceneViews.empty())
221  return;
222 
// NOTE(review): source line 223 was lost in extraction — presumably
// `SLSceneView* sv = AppCommon::sceneViews[svIndex];`, since `sv` is used
// below without any visible declaration. Recover from VCS.
224 
225  [self setVideoType:CVCapture::instance()->videoType()
226  videoSizeIndex:CVCapture::instance()->activeCamera->camSizeIndex()];
227 
228  if (slUsesLocation())
229  [self startLocationManager];
230  else
231  [self stopLocationManager];
232 
// NOTE(review): source line 233 (the `if` condition, presumably testing the
// optional AppCommon::sceneToLoad) and line 235 (presumably the
// slSwitchScene call) were lost in extraction.
234  {
236  AppCommon::sceneToLoad = {}; // sets optional to empty
237  }
238 
239  if (AppCommon::assetLoader->isLoading())
// NOTE(review): source line 240 (the body of the if above) was lost in
// extraction — likely a call polling the async asset loader.
241 
// Run the app update callback, parallel jobs and the actual paint.
242  ////////////////////////////////////////////////
243  SLbool appNeedsUpdate = App::config.onUpdate && App::config.onUpdate(sv);
244  SLbool jobIsRunning = slUpdateParallelJob();
245  SLbool isLoading = AppCommon::assetLoader->isLoading();
246  SLbool viewNeedsUpdate = slPaintAllViews();
247  ////////////////////////////////////////////////
248 
// Allow captureOutput: to copy the next camera frame.
249  m_lastVideoImageIsConsumed = true;
250 
251  if (slShouldClose())
252  {
253  slTerminate();
254  exit(0);
255  }
256 }
257 //-----------------------------------------------------------------------------
258 // touchesBegan receives the finger touch down events and maps them to
// SLProject mouse/touch events: one finger = left mouse, two fingers = touch2.
259 - (void)touchesBegan:(NSSet*)touches withEvent:(UIEvent*)event
260 {
261  NSArray* myTouches = [touches allObjects];
262  UITouch* touch1 = [myTouches objectAtIndex:0];
263  m_touchDownPos1 = [touch1 locationInView:touch1.view];
// Convert view points to framebuffer pixels (retina scale).
264  m_touchDownPos1.x *= screenScale;
265  m_touchDownPos1.y *= screenScale;
266  float touchDownNowSec = GetSeconds();
267 
268  // end touch actions on sequential finger touch downs
269  if (m_touchDowns > 0)
270  {
271  if (m_touchDowns == 1)
272  slMouseUp(svIndex, MB_left, m_touchDownPos1.x, m_touchDownPos1.y, K_none);
273  if (m_touchDowns == 2)
274  slTouch2Up(svIndex, 0, 0, 0, 0);
275 
276  // Reset touch counter if the last touch event is older than two seconds.
277  // This resolves the problem of losing track in touch counting e.g.
278  // when somebody touches with the flat hand.
279  if (m_lastTouchTimeSec < (m_lastFrameTimeSec - 2.0f))
280  m_touchDowns = 0;
281  }
282 
283  m_touchDowns += [touches count];
284  //printf("Begin tD: %d, touches count: %u\n", m_touchDowns, (SLuint)[touches count]);
285 
// A second tap within 0.3 s of the previous one is reported as a double click.
286  if (m_touchDowns == 1 && [touches count] == 1)
287  {
288  if (touchDownNowSec - m_lastTouchDownSec < 0.3f)
289  slDoubleClick(svIndex, MB_left, m_touchDownPos1.x, m_touchDownPos1.y, K_none);
290  else
291  slMouseDown(svIndex, MB_left, m_touchDownPos1.x, m_touchDownPos1.y, K_none);
292  }
293  else if (m_touchDowns == 2)
294  {
295  if ([touches count] == 2)
296  {
297  UITouch* touch2 = [myTouches objectAtIndex:1];
298  CGPoint pos2 = [touch2 locationInView:touch2.view];
299  pos2.x *= screenScale;
300  pos2.y *= screenScale;
301  slTouch2Down(svIndex, m_touchDownPos1.x, m_touchDownPos1.y, pos2.x, pos2.y);
302  }
303  else if ([touches count] == 1) // delayed 2nd finger touch
304  slTouch2Down(svIndex, 0, 0, 0, 0);
305  }
306 
307  m_lastTouchTimeSec = m_lastTouchDownSec = touchDownNowSec;
308 }
309 //-----------------------------------------------------------------------------
//! touchesMoved forwards one-finger moves as mouse moves and two-finger
//! moves as touch2 moves to SLProject (coordinates in framebuffer pixels).
- (void)touchesMoved:(NSSet*)touches withEvent:(UIEvent*)event
{
    NSArray* allTouches = [touches allObjects];
    UITouch* firstTouch = [allTouches objectAtIndex:0];
    CGPoint  firstPos   = [firstTouch locationInView:firstTouch.view];
    firstPos.x *= screenScale;
    firstPos.y *= screenScale;

    NSUInteger touchCount = [touches count];

    if (m_touchDowns == 1 && touchCount == 1)
        slMouseMove(svIndex, firstPos.x, firstPos.y);
    else if (m_touchDowns == 2 && touchCount == 2)
    {
        UITouch* secondTouch = [allTouches objectAtIndex:1];
        CGPoint  secondPos   = [secondTouch locationInView:secondTouch.view];
        secondPos.x *= screenScale;
        secondPos.y *= screenScale;
        slTouch2Move(svIndex, firstPos.x, firstPos.y, secondPos.x, secondPos.y);
    }

    m_lastTouchTimeSec = m_lastFrameTimeSec;
}
334 //-----------------------------------------------------------------------------
335 // touchesEnded receives the finger touch release events. A long still press
// is mapped to a right mouse click, otherwise a left mouse/touch2 release.
336 - (void)touchesEnded:(NSSet*)touches withEvent:(UIEvent*)event
337 {
338  NSArray* myTouches = [touches allObjects];
339  UITouch* touch1 = [myTouches objectAtIndex:0];
340  CGPoint pos1 = [touch1 locationInView:touch1.view];
341  pos1.x *= screenScale;
342  pos1.y *= screenScale;
343  float touchUpNowSec = GetSeconds();
// Finger travel in pixels and press duration since the touch down.
344  int dX = std::abs(m_touchDownPos1.x - pos1.x);
345  int dY = std::abs(m_touchDownPos1.y - pos1.y);
346  float dSec = touchUpNowSec - m_lastTouchDownSec;
347 
348  if (m_touchDowns == 1 || [touches count] == 1)
349  {
350  // A long (>0.8 s) press that moved less than 3 px acts as a right click.
351  if (dSec > 0.8f && dX < 3 && dY < 3)
352  {
353  slMouseDown(svIndex, MB_right, m_touchDownPos1.x, m_touchDownPos1.y, K_none);
354  slMouseUp(svIndex, MB_right, m_touchDownPos1.x, m_touchDownPos1.y, K_none);
355  }
356  else
357  slMouseUp(svIndex, MB_left, pos1.x, pos1.y, K_none);
358  }
359  else if (m_touchDowns == 2 && [touches count] == 2)
360  {
361  UITouch* touch2 = [myTouches objectAtIndex:1];
362  CGPoint pos2 = [touch2 locationInView:touch2.view];
363  pos2.x *= screenScale;
364  pos2.y *= screenScale;
365  slTouch2Up(svIndex, pos1.x, pos1.y, pos2.x, pos2.y);
366  }
367 
368  m_touchDowns = 0;
369 
370  //printf("End tD: %d, touches count: %u, dSec:%3.2f, dX:%d, dY:%d\n", m_touchDowns, (SLuint)[touches count], dSec,dX,dY);
371 
372  m_lastTouchTimeSec = m_lastFrameTimeSec;
373 }
374 //-----------------------------------------------------------------------------
// touchesCancelled receives the cancel event, e.g. on an incoming phone call.
/* The original method was named touchesCancle:withEvent: — a selector UIKit
 never calls, so touch cancellation silently never reached SLProject. The
 correctly spelled UIResponder override below fixes that. The old selector is
 kept as a forwarding alias in case any project code calls it directly. */
- (void)touchesCancelled:(NSSet*)touches withEvent:(UIEvent*)event
{
    NSArray* myTouches = [touches allObjects];
    UITouch* touch1    = [myTouches objectAtIndex:0];
    CGPoint  pos1      = [touch1 locationInView:touch1.view];

    // Scale to framebuffer pixels like every other touch handler in this file
    // (the original forgot this, sending view-point coordinates to SLProject).
    pos1.x *= screenScale;
    pos1.y *= screenScale;

    if (m_touchDowns == 1 || [touches count] == 1)
    {
        slMouseUp(svIndex, MB_left, pos1.x, pos1.y, K_none);
    }
    else if (m_touchDowns == 2 && [touches count] >= 2)
    {
        UITouch* touch2 = [myTouches objectAtIndex:1];
        CGPoint  pos2   = [touch2 locationInView:touch2.view];
        pos2.x *= screenScale;
        pos2.y *= screenScale;
        slTouch2Up(svIndex, pos1.x, pos1.y, pos2.x, pos2.y);
    }

    // Remove the cancelled touches from the down-counter, clamped at zero.
    m_touchDowns -= (int)[touches count];
    if (m_touchDowns < 0) m_touchDowns = 0;

    m_lastTouchTimeSec = m_lastFrameTimeSec;
}
//-----------------------------------------------------------------------------
//! Legacy misspelled selector kept for backward compatibility.
- (void)touchesCancle:(NSSet*)touches withEvent:(UIEvent*)event
{
    [self touchesCancelled:touches withEvent:event];
}
399 //-----------------------------------------------------------------------------
400 // Event handler for a new camera image (taken from the GLCameraRipple example)
// Copies the BGRA camera frame into CVCapture. Runs on the main queue (see
// setupVideo:). drawInRect sets m_lastVideoImageIsConsumed back to true.
401 - (void)captureOutput:(AVCaptureOutput*)captureOutput
402  didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
403  fromConnection:(AVCaptureConnection*)connection
404 {
405  // Don't copy the available image if the last wasn't consumed
406  if (!m_lastVideoImageIsConsumed) return;
407 
// NOTE(review): `err` is declared but never assigned or read in the visible code.
408  CVReturn err;
409  CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
410 
411  CVPixelBufferLockBaseAddress(pixelBuffer, 0);
412 
413  int imgWidth = (int)CVPixelBufferGetWidth(pixelBuffer);
414  int imgHeight = (int)CVPixelBufferGetHeight(pixelBuffer);
415  unsigned char* data = (unsigned char*)CVPixelBufferGetBaseAddress(pixelBuffer);
416 
417  if (!data)
418  {
419  NSLog(@"No pixel buffer data");
// NOTE(review): this early return leaves the pixel buffer locked — the
// matching CVPixelBufferUnlockBaseAddress call below is skipped on this path.
420  return;
421  }
422 
// NOTE(review): source line 423 was lost in extraction — presumably
// `SLSceneView* sv = AppCommon::sceneViews[svIndex];`, since `sv` is used below.
424  CVCapture* capture = CVCapture::instance();
425  float videoImgWdivH = (float)imgWidth / (float)imgHeight;
426 
427  if (sv->viewportSameAsVideo())
428  {
429  // If video aspect has changed we need to tell the new viewport to the sceneview
430  if (Utils::abs(videoImgWdivH - sv->viewportWdivH()) > 0.01f)
431  sv->setViewportFromRatio(SLVec2i(imgWidth, imgHeight), sv->viewportAlign(), true);
432  }
433 
// NOTE(review): source line 434 was lost in extraction — presumably the start
// of the CVCapture frame-load call that consumes the argument lines below.
435  imgWidth,
436  imgHeight,
437  PF_bgra,
438  data,
439  false);
440 
441  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
442 
// Mark the frame as pending until drawInRect has consumed it.
443  m_lastVideoImageIsConsumed = false;
444 }
445 //-----------------------------------------------------------------------------
446 - (void)onAccelerationData:(CMAcceleration)acceleration
447 {
// Debug hook for raw accelerometer data; intentionally left empty.
448  //SLVec3f acc(acceleration.x,acceleration.y,acceleration.z);
449  //acc.print("Acc:");
450 }
451 //-----------------------------------------------------------------------------
452 - (void)onGyroData:(CMRotationRate)rotation
453 {
// Debug hook for raw gyroscope data; intentionally left empty.
454  //SLVec3f rot(rotation.x,rotation.y,rotation.z);
455  //rot.print("Rot:");
456 }
457 //-----------------------------------------------------------------------------
//! Logs the device attitude per orientation while rotation sensing is active.
//! Only debug output — the actual rotation is sent in onDeviceMotionUpdate:.
- (void)onMotionData:(CMAttitude*)attitude
{
    if (!slUsesRotation())
        return;

    UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];

    switch (orientation)
    {
        case UIDeviceOrientationLandscapeLeft:
            NSLog(@"UIDeviceOrientationLandscapeLeft");
            break;

        case UIDeviceOrientationLandscapeRight:
        {
            // Remap the attitude Euler angles into the landscape-right frame.
            float pitch = attitude.roll - Utils::HALFPI;
            float yaw   = attitude.yaw - Utils::HALFPI;
            float roll  = attitude.pitch;
            SL_LOG("Pitch: %3.0f, Yaw: %3.0f, Roll: %3.0f\n",
                   pitch * Utils::RAD2DEG,
                   yaw * Utils::RAD2DEG,
                   roll * Utils::RAD2DEG);
            break;
        }

        case UIDeviceOrientationPortrait:
            NSLog(@"UIDeviceOrientationPortrait");
            break;

        case UIDeviceOrientationPortraitUpsideDown:
            NSLog(@"UIDeviceOrientationPortraitUpsideDown");
            break;

        default:
            break;
    }
}
486 //-----------------------------------------------------------------------------
//! Prepares the video capture (taken from the GLCameraRipple example)
/*! Creates and configures the AVCaptureSession in m_avSession for the
 requested camera and resolution preset. Frames are delivered as BGRA sample
 buffers to captureOutput:didOutputSampleBuffer:fromConnection: on the main
 queue so OpenGL can consume them directly.
 \param useFaceCamera If true the front (selfie) camera is used, else the back camera.
 \param sizeIndex 0=1920x1080, 1=1280x720, else 640x480
*/
- (void)setupVideo:(bool)useFaceCamera videoSizeIndex:(int)sizeIndex
{
    switch (sizeIndex)
    {
        case 0: m_avSessionPreset = AVCaptureSessionPreset1920x1080; break;
        case 1: m_avSessionPreset = AVCaptureSessionPreset1280x720; break;
        default: m_avSessionPreset = AVCaptureSessionPreset640x480;
    }

    //-- Setup the capture session with the preset size.
    m_avSession = [[AVCaptureSession alloc] init];
    [m_avSession beginConfiguration];
    [m_avSession setSessionPreset:m_avSessionPreset];

    //-- Create a video device for the requested camera position.
    AVCaptureDevicePosition position = useFaceCamera ? AVCaptureDevicePositionFront
                                                     : AVCaptureDevicePositionBack;
    AVCaptureDevice* videoDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
                                                                      mediaType:AVMediaTypeVideo
                                                                       position:position];
    if (videoDevice == nil)
        assert(0);

    //-- Create the input and add it to the session.
    // Per Cocoa convention, failure is signalled by the nil return value and
    // not by the NSError out-parameter, which may stay untouched on success
    // (the old code tested the error object instead of the return value).
    NSError* error = nil;
    AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if (input == nil)
        assert(0);

    [m_avSession addInput:input];

    //-- Create the output for the capture session.
    AVCaptureVideoDataOutput* dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [dataOutput setAlwaysDiscardsLateVideoFrames:YES]; // Probably want to set this to NO when recording

    //-- Request BGRA frames. CoreVideo only supports kCVPixelFormatType_32BGRA
    // and the two 420YpCbCr8BiPlanar variants; BGRA maps directly to PF_bgra.
    [dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                             forKey:(id)kCVPixelBufferPixelFormatTypeKey]];

    // Deliver the sample buffers on the main thread so OpenGL can use the data.
    [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    [m_avSession addOutput:dataOutput];
    [m_avSession commitConfiguration];

    m_lastVideoImageIsConsumed = true;
}
554 //-----------------------------------------------------------------------------
555 //! Sets the video according to the passed type (0=NONE, 1=Main, 2=Secondary)
556 /* The main camera on iOS is the back camera and the secondary is the front
557  camera that faces the face.
558 */
559 - (void)setVideoType:(int)videoType
560  videoSizeIndex:(int)sizeIndex
561 {
562  if (videoType == VT_NONE) // No video needed. Turn off any video
563  {
// NOTE(review): the condition below stops the session only when it is NOT
// running (the isRunning check is negated). This looks inverted — stopping
// should happen when the session IS running. The same pattern appears in the
// VT_FILE branch below. Confirm against the desktop/Android counterpart.
564  if (m_avSession != nil && ![m_avSession isRunning])
565  {
566  printf("Stopping AV Session\n");
567  [m_avSession stopRunning];
568  }
569  }
570  if (videoType == VT_FILE) // Turn off any live video
571  {
572  if (m_avSession != nil && ![m_avSession isRunning])
573  {
574  printf("Stopping AV Session\n");
575  [m_avSession stopRunning];
576  }
577 
// NOTE(review): source line 578 was lost in extraction — presumably
// `SLSceneView* sv = AppCommon::sceneViews[svIndex];`, since `sv` is used below.
579  CVCapture* capture = CVCapture::instance();
580 
581  // Get the current capture size of the videofile
582  CVSize videoSizeBefore = capture->captureSize;
583 
584  // If viewportWdivH is negative the viewport aspect will be adapted to the video
585  // aspect ratio. No cropping will be applied.
586  // iOS doesn't know the video file frame size before grab
587  float viewportWdivH = sv->viewportWdivH();
588  if (sv->viewportSameAsVideo())
589  viewportWdivH = -1;
590 
591  capture->grabAndAdjustForSL(viewportWdivH);
592 
593  // If video aspect has changed we need to tell the new viewport to the sceneview
594  CVSize videoSizeAfter = capture->captureSize;
595  if (sv->viewportSameAsVideo() && videoSizeBefore != videoSizeAfter)
596  sv->setViewportFromRatio(SLVec2i(videoSizeAfter.width, videoSizeAfter.height),
597  sv->viewportAlign(),
// NOTE(review): source line 598 (the last argument of the call above,
// presumably `true);`) was lost in extraction.
599  }
600  else if (videoType == VT_MAIN) // back facing video needed
601  {
602  if (m_avSession == nil)
603  {
// NOTE(review): this VT_MAIN branch logs "Front Camera" but calls
// setupVideo:false, which selects the BACK camera (and VT_SCND below logs
// "Back Camera" with setupVideo:true). The log strings appear swapped.
604  printf("Creating AV Session for Front Camera\n");
605  [self setupVideo:false videoSizeIndex:sizeIndex];
606  printf("Starting AV Session\n");
607  [m_avSession startRunning];
608  }
609  else if (m_lastVideoType == videoType)
610  {
// Same camera as last time: recreate on resolution change, else just
// make sure the session is running.
611  if (m_lastVideoSizeIndex != sizeIndex)
612  {
613  printf("Stopping AV Session for resolution change\n");
614  [m_avSession stopRunning];
615  m_avSession = nil;
616  }
617  else if (![m_avSession isRunning])
618  {
619  printf("Starting AV Session\n");
620  [m_avSession startRunning];
621  }
622  }
623  else
624  {
// Camera type changed: tear down the old session and build a new one.
// (setupVideo: reassigns m_avSession.)
625  if ([m_avSession isRunning])
626  {
627  printf("Deleting AV Session\n");
628  [m_avSession stopRunning];
629  m_avSession = nil;
630  }
631  printf("Creating AV Session for Front Camera\n");
632  [self setupVideo:false videoSizeIndex:sizeIndex];
633  printf("Starting AV Session\n");
634  [m_avSession startRunning];
635  }
636  }
637  else if (videoType == VT_SCND) // Video from selfie camera needed
638  {
639  if (m_avSession == nil)
640  {
641  printf("Creating AV Session for Back Camera\n");
642  [self setupVideo:true videoSizeIndex:sizeIndex];
643  printf("Starting AV Session\n");
644  [m_avSession startRunning];
645  }
646  else if (m_lastVideoType == videoType)
647  {
648  if (m_lastVideoSizeIndex != sizeIndex)
649  {
650  printf("Stopping AV Session for resolution change\n");
651  [m_avSession stopRunning];
652  m_avSession = nil;
653  }
654  else if (![m_avSession isRunning])
655  {
656  printf("Starting AV Session\n");
657  [m_avSession startRunning];
658  }
659  }
660  else
661  {
662  if ([m_avSession isRunning])
663  {
664  printf("Deleting AV Session\n");
665  [m_avSession stopRunning];
666  m_avSession = nil;
667  }
668  printf("Creating AV Session for Back Camera\n");
669  [self setupVideo:true videoSizeIndex:sizeIndex];
670  printf("Starting AV Session\n");
671  [m_avSession startRunning];
672  }
673  }
674 
// Remember the current settings so the next call can detect changes.
675  m_lastVideoType = videoType;
676  m_lastVideoSizeIndex = sizeIndex;
677 }
678 //-----------------------------------------------------------------------------
//! Starts the device motion updates with the given update interval.
/*! Creates the CMMotionManager and starts device motion updates relative to
 the X-magnetic-north/Z-vertical reference frame. Every update is forwarded
 synchronously to onDeviceMotionUpdate: on the main thread.
 \param intervalTimeSEC Motion update interval in seconds.
*/
- (void)setupMotionManager:(double)intervalTimeSEC
{
    // Init motion manager
    self.motionManager = [[CMMotionManager alloc] init];

    if (![self.motionManager isDeviceMotionAvailable])
    {
        [self.motionManager stopDeviceMotionUpdates];
        return;
    }

    self.motionManager.deviceMotionUpdateInterval = intervalTimeSEC;

    // Capture self weakly: the handler block is retained by motionManager,
    // which self owns. A strong capture would form a retain cycle that keeps
    // this view controller alive forever.
    __weak typeof(self) weakSelf = self;

    // See also: https://developer.apple.com/documentation/coremotion/getting_processed_device_motion_data/understanding_reference_frames_and_device_attitude?language=objc
    [self.motionManager startDeviceMotionUpdatesUsingReferenceFrame:CMAttitudeReferenceFrameXMagneticNorthZVertical
                                                            toQueue:[NSOperationQueue currentQueue]
                                                        withHandler:^(CMDeviceMotion* motion, NSError* error) {
                                                          __strong typeof(weakSelf) strongSelf = weakSelf;
                                                          if (!strongSelf) return;
                                                          [strongSelf performSelectorOnMainThread:@selector(onDeviceMotionUpdate:)
                                                                                       withObject:motion
                                                                                    waitUntilDone:YES];
                                                        }];
}
701 //-----------------------------------------------------------------------------
//! Forwards the current device attitude to SLProject as an ENU quaternion.
- (void)onDeviceMotionUpdate:(CMDeviceMotion*)motion
{
    if (!slUsesRotation())
        return;

    CMDeviceMotion* motionData = self.motionManager.deviceMotion;
    CMAttitude*     attitude   = motionData.attitude;

    // The sensor attitude quaternion describes a rotation relative to a
    // NWU frame (see: https://developer.apple.com/documentation/coremotion/getting_processed_device_motion_data/understanding_reference_frames_and_device_attitude)
    CMQuaternion  q    = attitude.quaternion;
    GLKQuaternion qNWU = GLKQuaternionMake(q.x, q.y, q.z, q.w);

    // Rotate 90 deg. around the z-axis to express the rotation in an
    // ENU frame (as on Android).
    GLKQuaternion qRot90Z = GLKQuaternionMakeWithAngleAndAxis(GLKMathDegreesToRadians(90), 0, 0, 1);
    GLKQuaternion qENU    = GLKQuaternionMultiply(qRot90Z, qNWU);

    // Hand the quaternion over to SLProject. See how it is used in:
    // SLScene::onRotationQUAT (offset calculation if _zeroYawAtStart is true)
    // SLCamera::setView (how the device rotation drives the camera's view)
    slRotationQUAT(qENU.q[0], qENU.q[1], qENU.q[2], qENU.q[3]);
}
727 //-----------------------------------------------------------------------------
//! Creates the location manager and requests when-in-use authorization.
- (void)setupLocationManager
{
    if (![CLLocationManager locationServicesEnabled])
    {
        // Location services are disabled. Appropriate action would be to
        // prompt the user to enable the location services.
        NSLog(@"Location services are not enabled");
    }
    else
    {
        // Init location manager
        self.locationManager                 = [[CLLocationManager alloc] init];
        self.locationManager.delegate        = self;
        self.locationManager.desiredAccuracy = kCLLocationAccuracyBest;
        //self.locationManager.distanceFilter = 1;

        // Since iOS 8 explicit user permission is required; "when-in-use"
        // grants access to the user's location. Important: Info.plist must
        // contain NSLocationWhenInUseUsageDescription with its explanation
        // string, or startUpdatingLocation will not work.
        if ([self.locationManager respondsToSelector:@selector(requestWhenInUseAuthorization)])
            [self.locationManager requestWhenInUseAuthorization];
    }

    m_locationIsRunning = false;
}
758 //-----------------------------------------------------------------------------
//! Starts the GPS location updates if they are not already running.
- (void)startLocationManager
{
    if (m_locationIsRunning)
        return;

    [self.locationManager startUpdatingLocation];
    m_locationIsRunning = true;
    printf("Starting Location Manager\n");
}
769 //-----------------------------------------------------------------------------
//! Stops the GPS location updates if they are currently running.
- (void)stopLocationManager
{
    if (!m_locationIsRunning)
        return;

    [self.locationManager stopUpdatingLocation];
    m_locationIsRunning = false;
    printf("Stopping Location Manager\n");
}
780 //-----------------------------------------------------------------------------
//! CLLocationManagerDelegate callback for location updates (iOS 6+).
/*! The original implemented only locationManager:didUpdateToLocation:fromLocation:,
 which CLLocationManager stopped calling in iOS 6 — so no GPS fix ever reached
 SLProject on modern systems. This modern callback fixes that; the legacy
 method is kept below and forwards here for backward compatibility. */
- (void)locationManager:(CLLocationManager*)manager didUpdateLocations:(NSArray<CLLocation*>*)locations
{
    CLLocation* newLocation = locations.lastObject;
    if (newLocation == nil)
        return;

    printf("horizontalAccuracy: %f\n", newLocation.horizontalAccuracy);

    // A negative horizontal accuracy means there is no valid location fix.
    if (newLocation.horizontalAccuracy > 0.0)
    {
        slLocationLatLonAlt(newLocation.coordinate.latitude,
                            newLocation.coordinate.longitude,
                            newLocation.altitude,
                            newLocation.horizontalAccuracy);
    }
}
//-----------------------------------------------------------------------------
//! Deprecated pre-iOS-6 delegate callback, kept and forwarded for compatibility.
- (void)locationManager:(CLLocationManager*)manager didUpdateToLocation:(CLLocation*)newLocation fromLocation:(CLLocation*)oldLocation
{
    if (newLocation != nil)
        [self locationManager:manager didUpdateLocations:@[ newLocation ]];
}
794 //-----------------------------------------------------------------------------
//! CLLocationManagerDelegate error callback.
- (void)locationManager:(CLLocationManager*)manager didFailWithError:(NSError*)error
{
    // kCLErrorLocationUnknown only means the manager is currently unable to
    // get a location. We can ignore it for the single-fix scenario because a
    // timeout already stops the location manager to save power.
    if ([error code] == kCLErrorLocationUnknown)
        return;

    printf("**** locationManager didFailWithError ****\n");
    [self stopLocationManager];
}
807 //-----------------------------------------------------------------------------
808 std::string getAppsDocumentsDir()
809 {
810  // Get the documents director
811  NSArray* paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
812  NSUserDomainMask,
813  YES);
814  NSString* documentsDirectory = [paths objectAtIndex:0];
815  string documentsDir = [documentsDirectory UTF8String];
816  documentsDir += "/SLProject";
817  NSString* documentsPath = [NSString stringWithUTF8String:documentsDir.c_str()];
818 
819  // Create if it does not exist
820  NSError* error;
821  if (![[NSFileManager defaultManager] fileExistsAtPath:documentsPath])
822  [[NSFileManager defaultManager] createDirectoryAtPath:documentsPath
823  withIntermediateDirectories:NO
824  attributes:nil
825  error:&error];
826 
827  return documentsDir + "/";
828 }
829 //-----------------------------------------------------------------------------
830 
831 @end
The App namespace declares the App::Config struct and the App::run function.
The AppCommon class holds the top-level instances of the app-demo.
static SLVec2i touch2
Last finger touch 2 position in pixels.
static SLint dpi
Dot per inch resolution of screen.
Definition: AppGLFW.cpp:41
@ VT_SCND
Selfie camera on mobile devices.
Definition: CVCapture.h:43
@ VT_FILE
Loads a video from file with OpenCV.
Definition: CVCapture.h:44
@ VT_NONE
No camera needed.
Definition: CVCapture.h:41
@ VT_MAIN
Main camera on all devices.
Definition: CVCapture.h:42
@ PF_bgra
Definition: CVImage.h:39
cv::Size CVSize
Definition: CVTypedefs.h:55
float SLfloat
Definition: SL.h:173
#define SL_LOG(...)
Definition: SL.h:233
bool SLbool
Definition: SL.h:175
vector< SLstring > SLVstring
Definition: SL.h:201
int SLint
Definition: SL.h:170
@ MB_left
Definition: SLEnums.h:100
@ MB_right
Definition: SLEnums.h:102
@ K_none
Definition: SLEnums.h:17
typedef void(SL_STDCALL *cbOnImGuiBuild)(SLScene *s
Callback function typedef for ImGui build function.
void slRotationQUAT(float quatX, float quatY, float quatZ, float quatW)
void slTouch2Move(int sceneViewIndex, int xpos1, int ypos1, int xpos2, int ypos2)
void slMouseDown(int sceneViewIndex, SLMouseButton button, int xpos, int ypos, SLKey modifier)
void slTouch2Down(int sceneViewIndex, int xpos1, int ypos1, int xpos2, int ypos2)
bool slUsesRotation()
void slMouseMove(int sceneViewIndex, int x, int y)
void slSwitchScene(SLSceneView *sv, SLSceneID sceneID)
bool slUsesLocation()
void slLocationLatLonAlt(double latitudeDEG, double longitudeDEG, double altitudeM, float accuracyM)
void slMouseUp(int sceneViewIndex, SLMouseButton button, int xpos, int ypos, SLKey modifier)
void slTouch2Up(int sceneViewIndex, int xpos1, int ypos1, int xpos2, int ypos2)
void slDoubleClick(int sceneViewIndex, SLMouseButton button, int xpos, int ypos, SLKey modifier)
void slLoadCoreAssetsSync()
void slResize(int sceneViewIndex, int width, int height)
bool slPaintAllViews()
void slTerminate()
bool slUpdateParallelJob()
void slCreateApp(SLVstring &cmdLineArgs, const SLstring &dataPath, const SLstring &shaderPath, const SLstring &modelPath, const SLstring &texturePath, const SLstring &fontPath, const SLstring &videoPath, const SLstring &configPath, const SLstring &applicationName)
Definition: SLInterface.cpp:57
SLint slCreateSceneView(SLAssetManager *am, SLScene *scene, int screenWidth, int screenHeight, int dotsPerInch, SLSceneID initScene, void *onWndUpdateCallback, void *onSelectNodeMeshCallback, void *onNewSceneViewCallback, void *onImGuiBuild, void *onImGuiLoadConfig, void *onImGuiSaveConfig)
bool slShouldClose()
Declaration of the main Scene Library C-Interface.
SLVec2< SLint > SLVec2i
Definition: SLVec2.h:140
GLKView * myView
float GetSeconds()
SLbool onPaintRTGL()
float screenScale
int svIndex
static SLstring configPath
Default path for calibration files.
Definition: AppCommon.h:81
static SLstring calibIniPath
That's where the data/calibrations folder is located.
Definition: AppCommon.h:108
static optional< SLSceneID > sceneToLoad
Scene id to load at start up.
Definition: AppCommon.h:90
static SLAssetManager * assetManager
The asset manager is the owner of all assets.
Definition: AppCommon.h:59
static SLstring exePath
executable root path
Definition: AppCommon.h:80
static SLVSceneView sceneViews
Vector of sceneview pointers.
Definition: AppCommon.h:62
static SLAssetLoader * assetLoader
Asset-loader for async asset loading.
Definition: AppCommon.h:60
static SLstring externalPath
Default path for external file storage.
Definition: AppCommon.h:82
static SLScene * scene
Pointer to the one and only SLScene instance.
Definition: AppCommon.h:61
Encapsulation of the OpenCV Capture Device and holder of the last frame.
Definition: CVCapture.h:63
void loadCalibrations(const string &computerInfo, const string &configPath)
Definition: CVCapture.cpp:894
CVSize captureSize
size of captured frame
Definition: CVCapture.h:123
void setCameraSize(int sizeIndex, int sizeIndexMax, int width, int height)
Definition: CVCapture.cpp:969
static CVCapture * instance()
Public static instance getter for singleton pattern.
Definition: CVCapture.h:65
void loadIntoLastFrame(float vieportWdivH, int camWidth, int camHeight, CVPixelFormatGL srcPixelFormat, const uchar *data, bool isContinuous)
Definition: CVCapture.cpp:301
bool grabAndAdjustForSL(float viewportWdivH)
Definition: CVCapture.cpp:235
void checkIfAsyncLoadingIsDone()
bool isLoading() const
Definition: SLAssetLoader.h:68
SceneView class represents a dynamic real time 3D view onto the scene.
Definition: SLSceneView.h:69
void viewportSameAsVideo(bool sameAsVideo)
Definition: SLSceneView.h:155
void setViewportFromRatio(const SLVec2i &vpRatio, SLViewportAlign vpAlignment, SLbool vpSameAsVideo)
Sets the viewport ratio and the viewport rectangle.
SLViewportAlign viewportAlign() const
Definition: SLSceneView.h:181
SLfloat viewportWdivH() const
Definition: SLSceneView.h:178
T x
Definition: SLVec2.h:30
static std::string model
Definition: Utils.h:293
static std::string get()
Definition: Utils.cpp:1261
static std::string osVer
Definition: Utils.h:295
static std::string arch
Definition: Utils.h:296
static std::string getCurrentWorkingDir()
Returns the working directory.
static std::string getAppsWritableDir()
Returns the writable configuration directory.
AVCaptureSession * m_avSession
SLfloat m_lastFrameTimeSec
SLfloat m_lastTouchDownSec
CGPoint m_touchDownPos1
NSString * m_avSessionPreset
bool m_lastVideoImageIsConsumed
SLfloat m_lastTouchTimeSec
EAGLContext * context
std::string getAppsDocumentsDir()
Config config
The configuration set in App::run.
Definition: AppAndroid.cpp:34
Utils provides utilities for string & file handling, logging and math functions.
Definition: Averaged.h:22
T abs(T a)
Definition: Utils.h:249
static const float RAD2DEG
Definition: Utils.h:238
OnGuiLoadConfigCallback onGuiLoadConfig
Definition: App.h:73
SLSceneID startSceneID
Definition: App.h:64
OnNewSceneViewCallback onNewSceneView
Definition: App.h:65
OnUpdateCallback onUpdate
Definition: App.h:71
OnGuiSaveConfigCallback onGuiSaveConfig
Definition: App.h:74
OnGuiBuildCallback onGuiBuild
Definition: App.h:72