(iOS) Camera - implement most of the code inside RAGameView -

totally untested - C camera driver is now a shim and will just
need to dial into the RAGameView camera driver parts
twinaphex 2013-12-01 19:31:00 +01:00
parent 224c0b4652
commit 37622eb48d
4 changed files with 176 additions and 89 deletions
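
The commit message describes the intended split: the C camera driver keeps its entry points but forwards each call to the Objective-C camera code now living in RAGameView. A minimal sketch of that glue, assuming the driver file is compiled as Objective-C and can see the RAGameView interface; the forwarding itself is still a TODO in the driver below:

// Editor sketch, not part of this commit: each C driver entry point
// dials into the matching RAGameView camera method added below.
static bool ios_camera_start(void *data)
{
   (void)data;
   [[RAGameView get] onCameraStart];
   return true;
}

static void ios_camera_stop(void *data)
{
   (void)data;
   [[RAGameView get] onCameraStop];
}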

View File

@@ -7,6 +7,8 @@
objects = {
/* Begin PBXBuildFile section */
501881EC184BAD6D006F665D /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 501881EB184BAD6D006F665D /* AVFoundation.framework */; };
501881EE184BB54C006F665D /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 501881ED184BB54C006F665D /* CoreMedia.framework */; };
509FC979183F9F18007A5A30 /* menu.m in Sources */ = {isa = PBXBuildFile; fileRef = 509FC978183F9F18007A5A30 /* menu.m */; };
50E7189F184B88AA001956CE /* CoreVideo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 50E7189E184B88AA001956CE /* CoreVideo.framework */; };
50F54A871848F9FC00E19EFD /* ic_pause.png in Resources */ = {isa = PBXBuildFile; fileRef = 50F54A861848F9FC00E19EFD /* ic_pause.png */; };
@@ -37,6 +39,8 @@
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
501881EB184BAD6D006F665D /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
501881ED184BB54C006F665D /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; };
509FC978183F9F18007A5A30 /* menu.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = menu.m; path = iOS/menu.m; sourceTree = SOURCE_ROOT; };
50E7189E184B88AA001956CE /* CoreVideo.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreVideo.framework; path = System/Library/Frameworks/CoreVideo.framework; sourceTree = SDKROOT; };
50F54A861848F9FC00E19EFD /* ic_pause.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = ic_pause.png; sourceTree = "<group>"; };
@@ -80,6 +84,8 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
501881EE184BB54C006F665D /* CoreMedia.framework in Frameworks */,
501881EC184BAD6D006F665D /* AVFoundation.framework in Frameworks */,
50E7189F184B88AA001956CE /* CoreVideo.framework in Frameworks */,
96366C5916C9ACF500D64A22 /* AudioToolbox.framework in Frameworks */,
96366C5516C9AC3300D64A22 /* CoreAudio.framework in Frameworks */,
@@ -135,6 +141,8 @@
96AFAE2816C1D4EA009DE44C /* Frameworks */ = {
isa = PBXGroup;
children = (
501881ED184BB54C006F665D /* CoreMedia.framework */,
501881EB184BAD6D006F665D /* AVFoundation.framework */,
50E7189E184B88AA001956CE /* CoreVideo.framework */,
96366C5816C9ACF500D64A22 /* AudioToolbox.framework */,
96366C5416C9AC3300D64A22 /* CoreAudio.framework */,

View File

@@ -22,7 +22,12 @@
// Define compatibility symbols and categories
#ifdef IOS
#include <COreVideo/CVOpenGLESTextureCache.h>
#include <AVFoundation/AVCaptureSession.h>
#include <AVFoundation/AVCaptureDevice.h>
#include <AVFoundation/AVCaptureOutput.h>
#include <AVFoundation/AVCaptureInput.h>
#include <AVFoundation/AVMediaFormat.h>
#include <CoreVideo/CVOpenGLESTextureCache.h>
#define APP_HAS_FOCUS ([UIApplication sharedApplication].applicationState == UIApplicationStateActive)
#define GLContextClass EAGLContext
@@ -67,6 +72,16 @@ static GLKView* g_view;
static UIView* g_pause_indicator_view;
static UITextField* g_text_hide;
// Camera
static AVCaptureSession *_session;
static NSString *_sessionPreset;
CVOpenGLESTextureCacheRef textureCache;
CFDictionaryRef empty;
CFMutableDictionaryRef attrs;
CVPixelBufferRef renderTarget;
CVOpenGLESTextureRef renderTexture;
GLuint renderFrameBuffer;
#elif defined(OSX)
#include "apple_input.h"
@@ -207,6 +222,131 @@ static bool g_is_syncing = true;
[self viewWillLayoutSubviews];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
int width, height;
CVReturn ret;
//FIXME - dehardcode
width = 640;
height = 480;
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Periodic texture cache flush every frame
CVOpenGLESTextureCacheFlush(textureCache, 0);
//TODO - rewrite all this
// create a texture from our render target.
// textureCache will be what you previously made with CVOpenGLESTextureCacheCreate
ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
textureCache, pixelBuffer, NULL, GL_TEXTURE_2D,
GL_RGBA, width, height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &renderTexture);
if (ret)
{
RARCH_ERR("ioscamera: CVOpenGLESTextureCacheCreateTextureFromImage failed.\n");
}
glBindTexture(CVOpenGLESTextureGetTarget(renderTexture),
CVOpenGLESTextureGetName(renderTexture));
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// bind the texture to the framebuffer you're going to render to
// (boilerplate code to make a framebuffer not shown)
glBindFramebuffer(GL_FRAMEBUFFER, renderFrameBuffer);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);
}
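
Two things stand out in this handler (editor notes, not part of the commit): CVOpenGLESTextureCacheCreateTextureFromImage hands back a new CVOpenGLESTextureRef every frame, so the previous one should be released before the next create or it leaks; and the capture session below is configured for kCVPixelFormatType_420YpCbCr8BiPlanarFullRange while this call reads the buffer as a single BGRA plane. A sketch of both fixes, the first at the top of captureOutput, the second replacing the setVideoSettings call in onCameraInit:

// Editor sketch (assumption): release last frame's texture before
// creating a new one.
if (renderTexture)
{
   CFRelease(renderTexture);
   renderTexture = NULL;
}

// Editor sketch (assumption): request BGRA from the session so the
// single-plane BGRA texture creation above matches the buffer layout.
[dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:
      [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
      forKey:(id)kCVPixelBufferPixelFormatTypeKey]];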
- (void) onCameraInit
{
CVReturn ret;
int width, height;
//FIXME - dehardcode this
width = 640;
height = 480;
empty = CFDictionaryCreate(kCFAllocatorDefault,
NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
attrs = CFDictionaryCreateMutable(kCFAllocatorDefault,
1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey,
empty);
ret = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
kCVPixelFormatType_32BGRA, attrs, &renderTarget);
if (ret)
{
RARCH_ERR("ioscamera: CVPixelBufferCreate failed.\n");
}
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
ret = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, g_context, NULL, &textureCache);
#else
ret = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)g_context, NULL, &textureCache);
#endif
//-- Setup Capture Session.
_session = [[AVCaptureSession alloc] init];
[_session beginConfiguration];
// TODO: dehardcode this based on device capabilities
_sessionPreset = AVCaptureSessionPreset640x480;
//-- Set preset session size.
[_session setSessionPreset:_sessionPreset];
//-- Create a video device and input from that device. Add the input to the capture session.
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if(videoDevice == nil)
assert(0);
//-- Add the device to the session.
NSError *error;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if(error)
assert(0);
[_session addInput:input];
//-- Create the output for the capture session.
AVCaptureVideoDataOutput * dataOutput = [[AVCaptureVideoDataOutput alloc] init];
[dataOutput setAlwaysDiscardsLateVideoFrames:YES]; // Probably want to set this to NO when recording
//-- Set to YUV420.
[dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange]
forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // Necessary for manual preview
// Set dispatch to be on the main thread so OpenGL can do things with the data
[dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
[_session addOutput:dataOutput];
[_session commitConfiguration];
}
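
For the preset TODO above, AVCaptureSession can be asked what it supports instead of hardcoding; a sketch that keeps the 640x480 target but degrades gracefully and guards the add calls:

// Editor sketch (assumption): query capabilities rather than hardcode.
if ([_session canSetSessionPreset:AVCaptureSessionPreset640x480])
   [_session setSessionPreset:AVCaptureSessionPreset640x480];
else
   [_session setSessionPreset:AVCaptureSessionPresetLow];

if ([_session canAddInput:input])
   [_session addInput:input];
if ([_session canAddOutput:dataOutput])
   [_session addOutput:dataOutput];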
- (void) onCameraStart
{
[_session startRunning];
}
- (void) onCameraStop
{
[_session stopRunning];
}
- (void) onCameraFree
{
CVPixelBufferRelease(renderTarget);
CVOpenGLESTextureCacheFlush(textureCache, 0);
CFRelease(textureCache);
}
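
onCameraFree releases the pixel buffer and texture cache but not the last per-frame texture or the session itself; a hedged teardown sketch (assumes ARC, which the __bridge cast above suggests is in use):

// Editor sketch (assumption): full teardown so the camera can be
// re-initialized later.
if (renderTexture)
{
   CFRelease(renderTexture);
   renderTexture = NULL;
}
[_session stopRunning];
_session = nil; // under ARC; send release explicitly otherwise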
#endif
@end
@@ -444,13 +584,4 @@ void apple_bind_game_view_fbo(void)
[g_view bindDrawable];
});
}
CVReturn texture_cache_create(CVOpenGLESTextureCacheRef *ref)
{
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
return CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, g_context, NULL, ref);
#else
return CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)g_context, NULL, ref);
#endif
}
#endif

View File

@@ -17,9 +17,10 @@
#ifndef __RARCH_IOS_PLATFORM_H
#define __RARCH_IOS_PLATFORM_H
#import <AVFoundation/AVCaptureOutput.h>
#include "views.h"
@interface RAGameView : UIViewController
@interface RAGameView : UIViewController<AVCaptureVideoDataOutputSampleBufferDelegate>
+ (RAGameView*)get;
- (void)iOS7SetiCadeMode:(bool)on;
@end

View File

@@ -20,26 +20,15 @@
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>
#include <CoreVideo/CVPixelBuffer.h>
#include <CoreVideo/CVOpenGLESTexture.h>
#include <CoreVideo/CVOpenGLESTextureCache.h>
#include "../driver.h"
extern CVReturn texture_cache_create(CVOpenGLESTextureCacheRef *ref);
typedef struct ios_camera
{
CFDictionaryRef empty;
CFMutableDictionaryRef attrs;
CVPixelBufferRef renderTarget;
CVOpenGLESTextureRef renderTexture;
CVOpenGLESTextureCacheRef textureCache;
GLuint renderFrameBuffer;
void *empty;
} ioscamera_t;
static void *ios_camera_init(const char *device, uint64_t caps, unsigned width, unsigned height)
{
CVReturn ret;
if ((caps & (1ULL << RETRO_CAMERA_BUFFER_OPENGL_TEXTURE)) == 0)
{
RARCH_ERR("ioscamera returns OpenGL texture.\n");
@@ -50,45 +39,7 @@ static void *ios_camera_init(const char *device, uint64_t caps, unsigned width,
if (!ioscamera)
return NULL;
ioscamera->empty = CFDictionaryCreate(kCFAllocatorDefault,
NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
ioscamera->attrs = CFDictionaryCreateMutable(kCFAllocatorDefault,
1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
CFDictionarySetValue(ioscamera->attrs, kCVPixelBufferIOSurfacePropertiesKey,
ioscamera->empty);
// TODO: for testing, image is 640x480 for now
//if (width > 640)
width = 640;
//if (height > 480)
height = 480;
ret = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
kCVPixelFormatType_32BGRA, ioscamera->attrs, &ioscamera->renderTarget);
if (ret)
{
RARCH_ERR("ioscamera: CVPixelBufferCreate failed.\n");
goto dealloc;
}
ret = texture_cache_create(&ioscamera->textureCache);
if (ret)
{
RARCH_ERR("ioscamera: texture_cache_create failed.\n");
goto dealloc;
}
// create a texture from our render target.
// textureCache will be what you previously made with CVOpenGLESTextureCacheCreate
ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
ioscamera->textureCache, ioscamera->renderTarget, NULL, GL_TEXTURE_2D,
GL_RGBA, width, height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &ioscamera->renderTexture);
if (ret)
{
RARCH_ERR("ioscamera: CVOpenGLESTextureCacheCreateTextureFromImage failed.\n");
goto dealloc;
}
// TODO - call onCameraInit from RAGameView
return ioscamera;
dealloc:
@@ -100,7 +51,7 @@ static void ios_camera_free(void *data)
{
ioscamera_t *ioscamera = (ioscamera_t*)data;
//TODO - anything to free here?
//TODO - call onCameraFree from RAGameView
if (ioscamera)
free(ioscamera);
@@ -109,20 +60,9 @@ static void ios_camera_free(void *data)
static bool ios_camera_start(void *data)
{
ioscamera_t *ioscamera = (ioscamera_t*)data;
(void)data;
glBindTexture(CVOpenGLESTextureGetTarget(ioscamera->renderTexture),
CVOpenGLESTextureGetName(ioscamera->renderTexture));
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// bind the texture to the framebuffer you're going to render to
// (boilerplate code to make a framebuffer not shown)
glBindFramebuffer(GL_FRAMEBUFFER, ioscamera->renderFrameBuffer);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
GL_TEXTURE_2D, CVOpenGLESTextureGetName(ioscamera->renderTexture), 0);
//TODO - call onCameraStart from RAGameView
return true;
}
@@ -132,27 +72,34 @@ static void ios_camera_stop(void *data)
ioscamera_t *ioscamera = (ioscamera_t*)data;
(void)ioscamera;
//TODO - anything to do here?
//TODO - call onCameraStop from RAGameView
}
static bool ios_camera_poll(void *data, retro_camera_frame_raw_framebuffer_t frame_raw_cb,
retro_camera_frame_opengl_texture_t frame_gl_cb)
{
ioscamera_t *ioscamera = (ioscamera_t*)data;
bool newFrame = false;
(void)data;
(void)frame_raw_cb;
// TODO - call onCameraPoll from RAGameView
if (frame_gl_cb && newFrame)
{
// FIXME: Identity for now. Use proper texture matrix as returned by iOS Camera (if at all?).
static const float affine[] = {
1.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f,
0.0f, 0.0f, 1.0f
};
if (frame_gl_cb)
(void)affine;
#if 0
frame_gl_cb(CVOpenGLESTextureGetName(ioscamera->renderTexture),
GL_TEXTURE_2D,
affine);
#endif
}
return true;
}
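
Once the RAGameView side can report a fresh frame, the #if 0 block above becomes the live path; a hypothetical shape for that wiring, where apple_camera_has_frame and apple_camera_get_texture are invented names for accessors the RAGameView part would need to export:

// Hypothetical wiring (editor sketch): the new-frame flag and texture name
// would come from the captureOutput delegate in RAGameView.
if (frame_gl_cb && apple_camera_has_frame())
   frame_gl_cb(apple_camera_get_texture(),
         GL_TEXTURE_2D,
         affine);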