// gl_view.mm
  1. /*************************************************************************/
  2. /* gl_view.mm */
  3. /*************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /*************************************************************************/
  8. /* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */
  9. /* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /*************************************************************************/
  30. #import "gl_view.h"
  31. #import "gl_view_gesture_recognizer.h"
  32. #include "core/os/keyboard.h"
  33. #include "core/project_settings.h"
  34. #include "os_iphone.h"
  35. #include "servers/audio_server.h"
  36. #import <OpenGLES/EAGLDrawable.h>
  37. #import <QuartzCore/QuartzCore.h>
  38. /*
  39. @interface GLView (private)
  40. - (id)initGLES;
  41. - (BOOL)createFramebuffer;
  42. - (void)destroyFramebuffer;
  43. @end
  44. */
// --- State shared between the C helper functions below and GLView ---
bool gles3_available = true; // cleared in initGLES when ES 3.0 context creation fails
int gl_view_base_fb; // framebuffer id published to the rest of the engine
static String keyboard_text; // mirror of the text edited via the iOS virtual keyboard
static GLView *_instance = NULL; // the single live view; set in -initWithFrame:
static bool video_found_error = false;
static bool video_playing = false;
static CMTime video_current_time; // playback position saved by _pause_video()
// C-linkage helpers called from the iOS OS layer (os_iphone.cpp).
void _show_keyboard(String);
void _hide_keyboard();
bool _play_video(String, float, String, String);
bool _is_video_playing();
void _pause_video();
void _focus_out_video();
void _unpause_video();
void _stop_video();
CGFloat _points_to_pixels(CGFloat);
  61. void _show_keyboard(String p_existing) {
  62. keyboard_text = p_existing;
  63. printf("instance on show is %p\n", _instance);
  64. [_instance open_keyboard];
  65. };
  66. void _hide_keyboard() {
  67. printf("instance on hide is %p\n", _instance);
  68. [_instance hide_keyboard];
  69. keyboard_text = "";
  70. };
  71. Rect2 _get_ios_window_safe_area(float p_window_width, float p_window_height) {
  72. UIEdgeInsets insets = UIEdgeInsetsZero;
  73. if (@available(iOS 11.0, *)) {
  74. insets = [_instance safeAreaInsets];
  75. }
  76. ERR_FAIL_COND_V(insets.left < 0 || insets.top < 0 || insets.right < 0 || insets.bottom < 0,
  77. Rect2(0, 0, p_window_width, p_window_height));
  78. UIEdgeInsets window_insets = UIEdgeInsetsMake(_points_to_pixels(insets.top), _points_to_pixels(insets.left), _points_to_pixels(insets.bottom), _points_to_pixels(insets.right));
  79. return Rect2(window_insets.left, window_insets.top, p_window_width - window_insets.right - window_insets.left, p_window_height - window_insets.bottom - window_insets.top);
  80. }
// Starts fullscreen playback of the movie at p_path through an AVPlayer
// layered on top of the GL view. Optionally selects an audio track (with
// p_volume applied via an audio mix) and a subtitle track, both matched by
// locale identifier. Always returns true; asynchronous failures surface
// through the KVO "status" observers installed below.
bool _play_video(String p_path, float p_volume, String p_audio_track, String p_subtitle_track) {
	p_path = ProjectSettings::get_singleton()->globalize_path(p_path);
	NSString *file_path = [[[NSString alloc] initWithUTF8String:p_path.utf8().get_data()] autorelease];
	_instance.avAsset = [AVAsset assetWithURL:[NSURL fileURLWithPath:file_path]];
	// NOTE(review): the alloc/init results below are assigned to properties
	// without a balancing release — under MRC this looks like one leak per
	// play; verify against the property attributes in gl_view.h.
	_instance.avPlayerItem = [[AVPlayerItem alloc] initWithAsset:_instance.avAsset];
	// Observe item and player status for failure handling (see
	// observeValueForKeyPath:) plus end-of-playback and rate changes.
	[_instance.avPlayerItem addObserver:_instance forKeyPath:@"status" options:0 context:nil];
	_instance.avPlayer = [[AVPlayer alloc] initWithPlayerItem:_instance.avPlayerItem];
	_instance.avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:_instance.avPlayer];
	[_instance.avPlayer addObserver:_instance forKeyPath:@"status" options:0 context:nil];
	[[NSNotificationCenter defaultCenter]
			addObserver:_instance
			   selector:@selector(playerItemDidReachEnd:)
				   name:AVPlayerItemDidPlayToEndTimeNotification
				 object:[_instance.avPlayer currentItem]];
	// "rate" drops to 0 when the system pauses playback; the observer nudges it back on.
	[_instance.avPlayer addObserver:_instance forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:0];
	[_instance.avPlayerLayer setFrame:_instance.bounds];
	[_instance.layer addSublayer:_instance.avPlayerLayer];
	[_instance.avPlayer play];
	// Select the requested audio track (matched by locale identifier) and
	// apply the requested volume to that track only.
	AVMediaSelectionGroup *audioGroup = [_instance.avAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
	NSMutableArray *allAudioParams = [NSMutableArray array];
	for (id track in audioGroup.options) {
		NSString *language = [[track locale] localeIdentifier];
		NSLog(@"subtitle lang: %@", language);
		if ([language isEqualToString:[NSString stringWithUTF8String:p_audio_track.utf8()]]) {
			AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
			[audioInputParams setVolume:p_volume atTime:kCMTimeZero];
			[audioInputParams setTrackID:[track trackID]];
			[allAudioParams addObject:audioInputParams];
			AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
			[audioMix setInputParameters:allAudioParams];
			[_instance.avPlayer.currentItem selectMediaOption:track inMediaSelectionGroup:audioGroup];
			[_instance.avPlayer.currentItem setAudioMix:audioMix];
			break;
		}
	}
	// Select the requested subtitle track, skipping forced-only subtitles.
	AVMediaSelectionGroup *subtitlesGroup = [_instance.avAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
	NSArray *useableTracks = [AVMediaSelectionGroup mediaSelectionOptionsFromArray:subtitlesGroup.options withoutMediaCharacteristics:[NSArray arrayWithObject:AVMediaCharacteristicContainsOnlyForcedSubtitles]];
	for (id track in useableTracks) {
		NSString *language = [[track locale] localeIdentifier];
		NSLog(@"subtitle lang: %@", language);
		if ([language isEqualToString:[NSString stringWithUTF8String:p_subtitle_track.utf8()]]) {
			[_instance.avPlayer.currentItem selectMediaOption:track inMediaSelectionGroup:subtitlesGroup];
			break;
		}
	}
	video_playing = true;
	return true;
}
  129. bool _is_video_playing() {
  130. if (_instance.avPlayer.error) {
  131. printf("Error during playback\n");
  132. }
  133. return (_instance.avPlayer.rate > 0 && !_instance.avPlayer.error);
  134. }
// Pauses playback, remembering the current position so it can (in
// principle) be restored later; see the "status" KVO handler.
void _pause_video() {
	video_current_time = _instance.avPlayer.currentTime;
	[_instance.avPlayer pause];
	video_playing = false;
}
// App lost focus: pause playback. video_playing is deliberately left true
// so startAnimation resumes the movie when focus returns.
void _focus_out_video() {
	printf("focus out pausing video\n");
	[_instance.avPlayer pause];
};
// Resumes playback after _pause_video() or a focus-out.
void _unpause_video() {
	[_instance.avPlayer play];
	video_playing = true;
};
// Tears down playback and removes the video layer from the view.
// NOTE(review): the KVO/notification observers added in _play_video() are
// not removed here — verify this cannot leave observers registered on a
// released player/item.
void _stop_video() {
	[_instance.avPlayer pause];
	[_instance.avPlayerLayer removeFromSuperlayer];
	_instance.avPlayer = nil;
	video_playing = false;
}
  154. CGFloat _points_to_pixels(CGFloat points) {
  155. float pixelPerInch;
  156. if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) {
  157. pixelPerInch = 132;
  158. } else if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPhone) {
  159. pixelPerInch = 163;
  160. } else {
  161. pixelPerInch = 160;
  162. }
  163. CGFloat pointsPerInch = 72.0;
  164. return (points / pointsPerInch * pixelPerInch);
  165. }
@implementation GLView

@synthesize animationInterval;

// Fixed-size table mapping live UITouch pointers to stable small integer
// ids, as expected by the engine's touch input API.
static const int max_touches = 8;
static UITouch *touches[max_touches];
  170. static void init_touches() {
  171. for (int i = 0; i < max_touches; i++) {
  172. touches[i] = NULL;
  173. };
  174. };
  175. static int get_touch_id(UITouch *p_touch) {
  176. int first = -1;
  177. for (int i = 0; i < max_touches; i++) {
  178. if (first == -1 && touches[i] == NULL) {
  179. first = i;
  180. continue;
  181. };
  182. if (touches[i] == p_touch)
  183. return i;
  184. };
  185. if (first != -1) {
  186. touches[first] = p_touch;
  187. return first;
  188. };
  189. return -1;
  190. };
  191. static int remove_touch(UITouch *p_touch) {
  192. int remaining = 0;
  193. for (int i = 0; i < max_touches; i++) {
  194. if (touches[i] == NULL)
  195. continue;
  196. if (touches[i] == p_touch)
  197. touches[i] = NULL;
  198. else
  199. ++remaining;
  200. };
  201. return remaining;
  202. };
  203. static void clear_touches() {
  204. for (int i = 0; i < max_touches; i++) {
  205. touches[i] = NULL;
  206. };
  207. };
// Implement this to override the default layer class (which is [CALayer class]).
// We do this so that our view will be backed by a layer that is capable of OpenGL ES rendering.
// createFramebuffer later shares renderbuffer storage with this layer.
+ (Class)layerClass {
	return [CAEAGLLayer class];
}
//The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder:
// NOTE(review): unlike -initWithFrame:, this path sets neither _instance
// nor init_touches() — verify whether the nib path is actually used.
- (id)initWithCoder:(NSCoder *)coder {
	active = FALSE;
	if ((self = [super initWithCoder:coder])) {
		self = [self initGLES];
		[self initGestureRecognizer];
	}
	return self;
}
// Shared GL setup used by both init paths: configures the CAEAGLLayer,
// creates an ES 3.0 or ES 2.0 context according to the project's driver
// setting (with optional 3.0→2.0 fallback), makes the context current and
// builds the framebuffer. Returns self on success, nil on failure.
- (id)initGLES {
	// Get our backing layer
	CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
	// Configure it so that it is opaque, does not retain the contents of the backbuffer when displayed, and uses RGBA8888 color.
	eaglLayer.opaque = YES;
	eaglLayer.drawableProperties = [NSDictionary
			dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:FALSE],
			kEAGLDrawablePropertyRetainedBacking,
			kEAGLColorFormatRGBA8,
			kEAGLDrawablePropertyColorFormat,
			nil];
	bool fallback_gl2 = false;
	// Create a GL ES 3 context based on the gl driver from project settings
	if (GLOBAL_GET("rendering/quality/driver/driver_name") == "GLES3") {
		context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
		NSLog(@"Setting up an OpenGL ES 3.0 context. Based on Project Settings \"rendering/quality/driver/driver_name\"");
		if (!context && GLOBAL_GET("rendering/quality/driver/fallback_to_gles2")) {
			// Record the downgrade so the engine selects the GLES2 renderer.
			gles3_available = false;
			fallback_gl2 = true;
			NSLog(@"Failed to create OpenGL ES 3.0 context. Falling back to OpenGL ES 2.0");
		}
	}
	// Create GL ES 2 context
	if (GLOBAL_GET("rendering/quality/driver/driver_name") == "GLES2" || fallback_gl2) {
		context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
		NSLog(@"Setting up an OpenGL ES 2.0 context.");
		if (!context) {
			NSLog(@"Failed to create OpenGL ES 2.0 context!");
			return nil;
		}
	}
	if (![EAGLContext setCurrentContext:context]) {
		NSLog(@"Failed to set EAGLContext!");
		return nil;
	}
	if (![self createFramebuffer]) {
		NSLog(@"Failed to create frame buffer!");
		return nil;
	}
	// Default the animation interval to 1/60th of a second.
	animationInterval = 1.0 / 60.0;
	return self;
}
  265. - (void)initGestureRecognizer {
  266. delayGestureRecognizer = [[GLViewGestureRecognizer alloc] init];
  267. [self addGestureRecognizer:delayGestureRecognizer];
  268. }
// Returns the current drawing delegate (set via setDelegate:).
- (id<GLViewDelegate>)delegate {
	return delegate;
}
// Update the delegate, and if it needs a -setupView: call, set our internal flag so that it will be called.
- (void)setDelegate:(id<GLViewDelegate>)d {
	delegate = d;
	// delegateSetup == YES means "no setup call needed"; drawView checks it.
	delegateSetup = ![delegate respondsToSelector:@selector(setupView:)];
}

@synthesize useCADisplayLink;
// If our view is resized, we'll be asked to layout subviews.
// This is the perfect opportunity to also update the framebuffer so that it is
// the same size as our display area.
- (void)layoutSubviews {
	[EAGLContext setCurrentContext:context];
	// Rebuild the GL framebuffer at the new drawable size, then redraw
	// immediately so no stale frame is shown.
	[self destroyFramebuffer];
	[self createFramebuffer];
	[self drawView];
}
// Builds the FBO backing this view: a color renderbuffer whose storage is
// shared with the CAEAGLLayer, plus a 16-bit depth renderbuffer, and then
// publishes the resulting size and framebuffer id to the engine.
- (BOOL)createFramebuffer {
	// Generate IDs for a framebuffer object and a color renderbuffer
	UIScreen *mainscr = [UIScreen mainScreen];
	printf("******** screen size %i, %i\n", (int)mainscr.currentMode.size.width, (int)mainscr.currentMode.size.height);
	// Render at native resolution (nativeScale also covers non-integer
	// display zoom factors).
	self.contentScaleFactor = mainscr.nativeScale;
	glGenFramebuffersOES(1, &viewFramebuffer);
	glGenRenderbuffersOES(1, &viewRenderbuffer);
	glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
	glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
	// This call associates the storage for the current render buffer with the EAGLDrawable (our CAEAGLLayer)
	// allowing us to draw into a buffer that will later be rendered to screen wherever the layer is (which corresponds with our view).
	[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(id<EAGLDrawable>)self.layer];
	glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);
	// Read back the actual drawable size chosen by the layer.
	glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
	glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
	// For this sample, we also need a depth buffer, so we'll create and attach one via another renderbuffer.
	glGenRenderbuffersOES(1, &depthRenderbuffer);
	glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
	glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
	glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
	if (glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
		NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
		return NO;
	}
	// Inform the engine of the new output size and base framebuffer.
	if (OS::get_singleton()) {
		OS::VideoMode vm;
		vm.fullscreen = true;
		vm.width = backingWidth;
		vm.height = backingHeight;
		vm.resizable = false;
		OS::get_singleton()->set_video_mode(vm);
		OSIPhone::get_singleton()->set_base_framebuffer(viewFramebuffer);
	};
	gl_view_base_fb = viewFramebuffer;
	return YES;
}
// Clean up any buffers we have allocated.
// Called from layoutSubviews before the framebuffer is rebuilt.
- (void)destroyFramebuffer {
	glDeleteFramebuffersOES(1, &viewFramebuffer);
	viewFramebuffer = 0;
	glDeleteRenderbuffersOES(1, &viewRenderbuffer);
	viewRenderbuffer = 0;
	if (depthRenderbuffer) {
		glDeleteRenderbuffersOES(1, &depthRenderbuffer);
		depthRenderbuffer = 0;
	}
}
// Begins the per-frame draw callback — a CADisplayLink when
// useCADisplayLink is set, an NSTimer otherwise — and resumes a video that
// was playing. No-op when already active.
- (void)startAnimation {
	if (active)
		return;
	active = TRUE;
	printf("start animation!\n");
	if (useCADisplayLink) {
		// Approximate frame rate
		// assumes device refreshes at 60 fps
		int frameInterval = (int)floor(animationInterval * 60.0f);
		displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(drawView)];
		[displayLink setFrameInterval:frameInterval];
		// Setup DisplayLink in main thread
		[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
	} else {
		animationTimer = [NSTimer scheduledTimerWithTimeInterval:animationInterval target:self selector:@selector(drawView) userInfo:nil repeats:YES];
	}
	if (video_playing) {
		_unpause_video();
	}
}
  354. - (void)stopAnimation {
  355. if (!active)
  356. return;
  357. active = FALSE;
  358. printf("******** stop animation!\n");
  359. if (useCADisplayLink) {
  360. [displayLink invalidate];
  361. displayLink = nil;
  362. } else {
  363. [animationTimer invalidate];
  364. animationTimer = nil;
  365. }
  366. clear_touches();
  367. if (video_playing) {
  368. // save position
  369. }
  370. }
  371. - (void)setAnimationInterval:(NSTimeInterval)interval {
  372. animationInterval = interval;
  373. if ((useCADisplayLink && displayLink) || (!useCADisplayLink && animationTimer)) {
  374. [self stopAnimation];
  375. [self startAnimation];
  376. }
  377. }
  378. // Updates the OpenGL view when the timer fires
  379. - (void)drawView {
  380. if (!active) {
  381. printf("draw view not active!\n");
  382. return;
  383. };
  384. if (useCADisplayLink) {
  385. // Pause the CADisplayLink to avoid recursion
  386. [displayLink setPaused:YES];
  387. // Process all input events
  388. while (CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.0, TRUE) == kCFRunLoopRunHandledSource)
  389. ;
  390. // We are good to go, resume the CADisplayLink
  391. [displayLink setPaused:NO];
  392. }
  393. // Make sure that you are drawing to the current context
  394. [EAGLContext setCurrentContext:context];
  395. // If our drawing delegate needs to have the view setup, then call -setupView: and flag that it won't need to be called again.
  396. if (!delegateSetup) {
  397. [delegate setupView:self];
  398. delegateSetup = YES;
  399. }
  400. glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
  401. [delegate drawView:self];
  402. glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
  403. [context presentRenderbuffer:GL_RENDERBUFFER_OES];
  404. #ifdef DEBUG_ENABLED
  405. GLenum err = glGetError();
  406. if (err)
  407. NSLog(@"DrawView: %x error", err);
  408. #endif
  409. }
  410. - (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
  411. NSArray *tlist = [[event allTouches] allObjects];
  412. for (unsigned int i = 0; i < [tlist count]; i++) {
  413. if ([touches containsObject:[tlist objectAtIndex:i]]) {
  414. UITouch *touch = [tlist objectAtIndex:i];
  415. int tid = get_touch_id(touch);
  416. ERR_FAIL_COND(tid == -1);
  417. CGPoint touchPoint = [touch locationInView:self];
  418. OSIPhone::get_singleton()->touch_press(tid, touchPoint.x * self.contentScaleFactor, touchPoint.y * self.contentScaleFactor, true, touch.tapCount > 1);
  419. };
  420. };
  421. }
  422. - (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
  423. NSArray *tlist = [[event allTouches] allObjects];
  424. for (unsigned int i = 0; i < [tlist count]; i++) {
  425. if ([touches containsObject:[tlist objectAtIndex:i]]) {
  426. UITouch *touch = [tlist objectAtIndex:i];
  427. int tid = get_touch_id(touch);
  428. ERR_FAIL_COND(tid == -1);
  429. CGPoint touchPoint = [touch locationInView:self];
  430. CGPoint prev_point = [touch previousLocationInView:self];
  431. OSIPhone::get_singleton()->touch_drag(tid, prev_point.x * self.contentScaleFactor, prev_point.y * self.contentScaleFactor, touchPoint.x * self.contentScaleFactor, touchPoint.y * self.contentScaleFactor);
  432. };
  433. };
  434. }
  435. - (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
  436. NSArray *tlist = [[event allTouches] allObjects];
  437. for (unsigned int i = 0; i < [tlist count]; i++) {
  438. if ([touches containsObject:[tlist objectAtIndex:i]]) {
  439. UITouch *touch = [tlist objectAtIndex:i];
  440. int tid = get_touch_id(touch);
  441. ERR_FAIL_COND(tid == -1);
  442. remove_touch(touch);
  443. CGPoint touchPoint = [touch locationInView:self];
  444. OSIPhone::get_singleton()->touch_press(tid, touchPoint.x * self.contentScaleFactor, touchPoint.y * self.contentScaleFactor, false, false);
  445. };
  446. };
  447. }
// The system cancelled the gesture (incoming call, gesture takeover, ...):
// notify the engine and drop all tracked touches.
- (void)touchesCancelled:(NSSet *)touches withEvent:(UIEvent *)event {
	OSIPhone::get_singleton()->touches_cancelled();
	clear_touches();
};
// Required so the view can become first responder and receive UIKeyInput
// events from the virtual keyboard.
- (BOOL)canBecomeFirstResponder {
	return YES;
};
// Raises the iOS virtual keyboard by making this view first responder.
- (void)open_keyboard {
	//keyboard_text = p_existing;
	[self becomeFirstResponder];
};
// Dismisses the iOS virtual keyboard by resigning first responder.
- (void)hide_keyboard {
	//keyboard_text = p_existing;
	[self resignFirstResponder];
};
  463. - (void)keyboardOnScreen:(NSNotification *)notification {
  464. NSDictionary *info = notification.userInfo;
  465. NSValue *value = info[UIKeyboardFrameEndUserInfoKey];
  466. CGRect rawFrame = [value CGRectValue];
  467. CGRect keyboardFrame = [self convertRect:rawFrame fromView:nil];
  468. OSIPhone::get_singleton()->set_virtual_keyboard_height(_points_to_pixels(keyboardFrame.size.height));
  469. }
// UIKeyboardDidHideNotification handler: the keyboard is gone, height is 0.
- (void)keyboardHidden:(NSNotification *)notification {
	OSIPhone::get_singleton()->set_virtual_keyboard_height(0);
}
  473. - (void)deleteBackward {
  474. if (keyboard_text.length())
  475. keyboard_text.erase(keyboard_text.length() - 1, 1);
  476. OSIPhone::get_singleton()->key(KEY_BACKSPACE, true);
  477. };
  478. - (BOOL)hasText {
  479. return keyboard_text.length() ? YES : NO;
  480. };
// UIKeyInput: text typed on the virtual keyboard. Appends to the mirrored
// buffer and forwards the first character as a key press (LF maps to Enter).
- (void)insertText:(NSString *)p_text {
	String character;
	character.parse_utf8([p_text UTF8String]);
	keyboard_text = keyboard_text + character;
	// NOTE(review): only character[0] is forwarded, so multi-character
	// inserts (e.g. paste) appear to drop everything after the first code
	// point — verify against the engine's expectations.
	OSIPhone::get_singleton()->key(character[0] == 10 ? KEY_ENTER : character[0], true);
	printf("inserting text with character %lc\n", (CharType)character[0]);
};
// Audio-route change handler (AVAudioSessionRouteChangeNotification): logs
// the reason and, when headphones are unplugged mid-playback (which makes
// the system pause AVPlayer), resumes the video shortly afterwards.
- (void)audioRouteChangeListenerCallback:(NSNotification *)notification {
	printf("*********** route changed!\n");
	NSDictionary *interuptionDict = notification.userInfo;
	NSInteger routeChangeReason = [[interuptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue];
	switch (routeChangeReason) {
		case AVAudioSessionRouteChangeReasonNewDeviceAvailable: {
			NSLog(@"AVAudioSessionRouteChangeReasonNewDeviceAvailable");
			NSLog(@"Headphone/Line plugged in");
		}; break;
		case AVAudioSessionRouteChangeReasonOldDeviceUnavailable: {
			NSLog(@"AVAudioSessionRouteChangeReasonOldDeviceUnavailable");
			NSLog(@"Headphone/Line was pulled. Resuming video play....");
			if (_is_video_playing()) {
				// Resume after a short delay; the system pause lands after
				// this callback runs.
				dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0.5f * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
					[_instance.avPlayer play]; // NOTE: change this line according your current player implementation
					NSLog(@"resumed play");
				});
			};
		}; break;
		case AVAudioSessionRouteChangeReasonCategoryChange: {
			// called at start - also when other audio wants to play
			NSLog(@"AVAudioSessionRouteChangeReasonCategoryChange");
		}; break;
	}
}
// When created via code however, we get initWithFrame
// Programmatic init path: records the _instance singleton, runs the shared
// GL setup, resets the touch table, and registers for audio-route and
// keyboard show/hide notifications.
- (id)initWithFrame:(CGRect)frame {
	self = [super initWithFrame:frame];
	_instance = self;
	printf("after init super %p\n", self);
	if (self != nil) {
		self = [self initGLES];
		printf("after init gles %p\n", self);
		[self initGestureRecognizer];
	}
	init_touches();
	self.multipleTouchEnabled = YES;
	// Raw key input: don't let iOS autocorrect what the engine receives.
	self.autocorrectionType = UITextAutocorrectionTypeNo;
	printf("******** adding observer for sound routing changes\n");
	[[NSNotificationCenter defaultCenter]
			addObserver:self
			   selector:@selector(audioRouteChangeListenerCallback:)
				   name:AVAudioSessionRouteChangeNotification
				 object:nil];
	printf("******** adding observer for keyboard show/hide\n");
	[[NSNotificationCenter defaultCenter]
			addObserver:self
			   selector:@selector(keyboardOnScreen:)
				   name:UIKeyboardDidShowNotification
				 object:nil];
	[[NSNotificationCenter defaultCenter]
			addObserver:self
			   selector:@selector(keyboardHidden:)
				   name:UIKeyboardDidHideNotification
				 object:nil];
	//self.autoresizesSubviews = YES;
	//[self setAutoresizingMask:UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleWidth];
	return self;
}
  547. //- (BOOL)automaticallyForwardAppearanceAndRotationMethodsToChildViewControllers {
  548. // return YES;
  549. //}
  550. //- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation{
  551. // return YES;
  552. //}
// Stop animating and release resources when they are no longer needed.
// (Manual retain/release: this file is compiled without ARC — see the
// explicit release and [super dealloc] below.)
- (void)dealloc {
	[self stopAnimation];
	if ([EAGLContext currentContext] == context) {
		[EAGLContext setCurrentContext:nil];
	}
	[context release];
	context = nil;
	// NOTE(review): notification-center observers added in -initWithFrame:
	// are not removed here — confirm the view outlives the process, or
	// remove them to avoid callbacks on a deallocated object.
	[super dealloc];
}
// KVO callback for the AVPlayer/AVPlayerItem "status" and "rate" keys
// registered in _play_video().
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
	if (object == _instance.avPlayerItem && [keyPath isEqualToString:@"status"]) {
		// Either the item or the player failing aborts playback.
		if (_instance.avPlayerItem.status == AVPlayerItemStatusFailed || _instance.avPlayer.status == AVPlayerStatusFailed) {
			_stop_video();
			video_found_error = true;
		}
		// NOTE(review): this seeks only when the saved time IS kCMTimeZero,
		// so a position stored by _pause_video() is never restored — the
		// comparison looks inverted; confirm intent before changing.
		if (_instance.avPlayer.status == AVPlayerStatusReadyToPlay &&
				_instance.avPlayerItem.status == AVPlayerItemStatusReadyToPlay &&
				CMTIME_COMPARE_INLINE(video_current_time, ==, kCMTimeZero)) {
			//NSLog(@"time: %@", video_current_time);
			[_instance.avPlayer seekToTime:video_current_time];
			video_current_time = kCMTimeZero;
		}
	}
	if (object == _instance.avPlayer && [keyPath isEqualToString:@"rate"]) {
		NSLog(@"Player playback rate changed: %.5f", _instance.avPlayer.rate);
		// NOTE(review): _is_video_playing() requires rate > 0, so the
		// conjunction with rate == 0.0 appears unsatisfiable (dead branch) —
		// verify before relying on this auto-resume path.
		if (_is_video_playing() && _instance.avPlayer.rate == 0.0 && !_instance.avPlayer.error) {
			dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0.5f * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
				[_instance.avPlayer play]; // NOTE: change this line according your current player implementation
				NSLog(@"resumed play");
			});
			NSLog(@" . . . PAUSED (or just started)");
		}
	}
	// NOTE(review): unhandled key paths are not forwarded to super, which
	// the KVO contract normally requires — verify no superclass observes.
}
// AVPlayerItemDidPlayToEndTimeNotification handler: the movie finished,
// tear playback down.
- (void)playerItemDidReachEnd:(NSNotification *)notification {
	_stop_video();
}

@end