mirror of https://github.com/libretro/RetroArch

(Apple) Split up iOS camera and location code into separate files

This commit is contained in:
twinaphex 2014-07-09 13:27:30 +02:00
parent 5d4d5eb589
commit ac18c07e74
3 changed files with 195 additions and 197 deletions

@@ -213,208 +213,13 @@ static bool g_is_syncing = true;
}
#ifdef HAVE_CAMERA
static AVCaptureSession *_session;
static NSString *_sessionPreset;
CVOpenGLESTextureCacheRef textureCache;
GLuint outputTexture;
static bool newFrame = false;
void event_process_camera_frame(void* pixelBufferPtr)
{
CVOpenGLESTextureRef renderTexture;
CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)pixelBufferPtr;
CVReturn ret;
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
//TODO - rewrite all this
// create a texture from our render target.
// textureCache will be what you previously made with CVOpenGLESTextureCacheCreate
ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
textureCache, pixelBuffer, NULL, GL_TEXTURE_2D,
GL_RGBA, (GLsizei)width, (GLsizei)height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &renderTexture);
if (!renderTexture || ret)
{
RARCH_ERR("ioscamera: CVOpenGLESTextureCacheCreateTextureFromImage failed.\n");
// Unlock and release the pixel buffer on the error path too, to avoid leaking it.
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CFRelease(pixelBuffer);
return;
}
outputTexture = CVOpenGLESTextureGetName(renderTexture);
glBindTexture(GL_TEXTURE_2D, outputTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
[[NSNotificationCenter defaultCenter] postNotificationName:@"NewCameraTextureReady" object:nil];
newFrame = true;
glBindTexture(GL_TEXTURE_2D, 0);
CVOpenGLESTextureCacheFlush(textureCache, 0);
CFRelease(renderTexture);
CFRelease(pixelBuffer);
pixelBuffer = 0;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
// TODO: Don't post if event queue is full
CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CVPixelBufferRetain(CMSampleBufferGetImageBuffer(sampleBuffer));
event_process_camera_frame(pixelBuffer);
}
- (void) onCameraInit
{
NSError *error;
AVCaptureVideoDataOutput * dataOutput;
AVCaptureDeviceInput *input;
AVCaptureDevice *videoDevice;
//FIXME - dehardcode this
int width = 640;
int height = 480;
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
CVReturn ret = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, g_context, NULL, &textureCache);
#else
CVReturn ret = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)g_context, NULL, &textureCache);
#endif
//-- Setup Capture Session.
_session = [[AVCaptureSession alloc] init];
[_session beginConfiguration];
// TODO: dehardcode this based on device capabilities
_sessionPreset = AVCaptureSessionPreset640x480;
//-- Set preset session size.
[_session setSessionPreset:_sessionPreset];
//-- Create a video device and an input from that device. Add the input to the capture session.
videoDevice = (AVCaptureDevice*)[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (videoDevice == nil)
assert(0);
//-- Add the device to the session.
input = (AVCaptureDeviceInput*)[AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
// Check the returned input rather than the NSError out-parameter,
// which is only guaranteed to be set on failure.
if (!input)
{
RARCH_ERR("video device input %s\n", error.localizedDescription.UTF8String);
assert(0);
}
[_session addInput:input];
//-- Create the output for the capture session.
dataOutput = (AVCaptureVideoDataOutput*)[[AVCaptureVideoDataOutput alloc] init];
[dataOutput setAlwaysDiscardsLateVideoFrames:NO]; // NO keeps late frames queued instead of dropping them (needed when recording)
[dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
// Deliver sample buffers on the main queue, where the OpenGL context is current
[dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
[_session addOutput:dataOutput];
[_session commitConfiguration];
}
- (void) onCameraStart
{
[_session startRunning];
}
- (void) onCameraStop
{
[_session stopRunning];
}
- (void) onCameraFree
{
CVOpenGLESTextureCacheFlush(textureCache, 0);
CFRelease(textureCache);
}
#include "contentview_camera_ios.m.inl"
#endif
#endif
#ifdef HAVE_LOCATION
#include <CoreLocation/CoreLocation.h>
static CLLocationManager *locationManager;
static bool locationChanged;
static CLLocationDegrees currentLatitude;
static CLLocationDegrees currentLongitude;
static CLLocationAccuracy currentHorizontalAccuracy;
static CLLocationAccuracy currentVerticalAccuracy;
- (bool)onLocationHasChanged
{
bool hasChanged = locationChanged;
if (hasChanged)
locationChanged = false;
return hasChanged;
}
- (void)locationManager:(CLLocationManager *)manager didUpdateToLocation:(CLLocation *)newLocation fromLocation:(CLLocation *)oldLocation
{
locationChanged = true;
currentLatitude = newLocation.coordinate.latitude;
currentLongitude = newLocation.coordinate.longitude;
currentHorizontalAccuracy = newLocation.horizontalAccuracy;
currentVerticalAccuracy = newLocation.verticalAccuracy;
RARCH_LOG("didUpdateToLocation - latitude %f, longitude %f\n", (float)currentLatitude, (float)currentLongitude);
}
- (void)locationManager:(CLLocationManager *)manager didUpdateLocations:(NSArray *)locations
{
CLLocation *location = (CLLocation*)[locations lastObject];
locationChanged = true;
currentLatitude = [location coordinate].latitude;
currentLongitude = [location coordinate].longitude;
currentHorizontalAccuracy = location.horizontalAccuracy;
currentVerticalAccuracy = location.verticalAccuracy;
RARCH_LOG("didUpdateLocations - latitude %f, longitude %f\n", (float)currentLatitude, (float)currentLongitude);
}
- (void)locationManager:(CLLocationManager *)manager didFailWithError:(NSError *)error
{
RARCH_LOG("didFailWithError - %s\n", [[error localizedDescription] UTF8String]);
}
- (void)locationManagerDidPauseLocationUpdates:(CLLocationManager *)manager
{
RARCH_LOG("didPauseLocationUpdates\n");
}
- (void)locationManagerDidResumeLocationUpdates:(CLLocationManager *)manager
{
RARCH_LOG("didResumeLocationUpdates\n");
}
- (void)onLocationInit
{
// Create the location manager if this object does not
// already have one.
if (locationManager == nil)
locationManager = [[CLLocationManager alloc] init];
locationManager.delegate = self;
locationManager.desiredAccuracy = kCLLocationAccuracyBest;
locationManager.distanceFilter = kCLDistanceFilterNone;
[locationManager startUpdatingLocation];
}
#include "contentview_location.m.inl"
#endif
@end
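
Taken together, the hunk above removes the inline camera and location implementations and pulls them back in as textual includes; the two new files in the next hunks are presumably contentview_camera_ios.m.inl and contentview_location.m.inl, matching those #include directives. Since the fragments contain bare Objective-C method definitions, they are only valid inside an @implementation block. A minimal sketch of the shape the including file ends up with (the class name RAGameView is an assumption for illustration):

/* Hypothetical sketch of the including file after the split. */
@implementation RAGameView /* class name assumed for illustration */

#ifdef HAVE_CAMERA
#include "contentview_camera_ios.m.inl"
#endif

#ifdef HAVE_LOCATION
#include "contentview_location.m.inl"
#endif

@end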

@@ -0,0 +1,125 @@
static AVCaptureSession *_session;
static NSString *_sessionPreset;
CVOpenGLESTextureCacheRef textureCache;
GLuint outputTexture;
static bool newFrame = false;
void event_process_camera_frame(void* pixelBufferPtr)
{
CVOpenGLESTextureRef renderTexture;
CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)pixelBufferPtr;
CVReturn ret;
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
//TODO - rewrite all this
// create a texture from our render target.
// textureCache will be what you previously made with CVOpenGLESTextureCacheCreate
ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
textureCache, pixelBuffer, NULL, GL_TEXTURE_2D,
GL_RGBA, (GLsizei)width, (GLsizei)height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &renderTexture);
if (!renderTexture || ret)
{
RARCH_ERR("ioscamera: CVOpenGLESTextureCacheCreateTextureFromImage failed.\n");
// Unlock and release the pixel buffer on the error path too, to avoid leaking it.
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CFRelease(pixelBuffer);
return;
}
outputTexture = CVOpenGLESTextureGetName(renderTexture);
glBindTexture(GL_TEXTURE_2D, outputTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
[[NSNotificationCenter defaultCenter] postNotificationName:@"NewCameraTextureReady" object:nil];
newFrame = true;
glBindTexture(GL_TEXTURE_2D, 0);
CVOpenGLESTextureCacheFlush(textureCache, 0);
CFRelease(renderTexture);
CFRelease(pixelBuffer);
pixelBuffer = 0;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
// TODO: Don't post if event queue is full
CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CVPixelBufferRetain(CMSampleBufferGetImageBuffer(sampleBuffer));
event_process_camera_frame(pixelBuffer);
}
- (void) onCameraInit
{
NSError *error;
AVCaptureVideoDataOutput * dataOutput;
AVCaptureDeviceInput *input;
AVCaptureDevice *videoDevice;
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
CVReturn ret = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, g_context, NULL, &textureCache);
#else
CVReturn ret = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)g_context, NULL, &textureCache);
#endif
(void)ret;
//-- Setup Capture Session.
_session = [[AVCaptureSession alloc] init];
[_session beginConfiguration];
// TODO: dehardcode this based on device capabilities
_sessionPreset = AVCaptureSessionPreset640x480;
//-- Set preset session size.
[_session setSessionPreset:_sessionPreset];
//-- Create a video device and an input from that device. Add the input to the capture session.
videoDevice = (AVCaptureDevice*)[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (videoDevice == nil)
assert(0);
//-- Add the device to the session.
input = (AVCaptureDeviceInput*)[AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
// Check the returned input rather than the NSError out-parameter,
// which is only guaranteed to be set on failure.
if (!input)
{
RARCH_ERR("video device input %s\n", error.localizedDescription.UTF8String);
assert(0);
}
[_session addInput:input];
//-- Create the output for the capture session.
dataOutput = (AVCaptureVideoDataOutput*)[[AVCaptureVideoDataOutput alloc] init];
[dataOutput setAlwaysDiscardsLateVideoFrames:NO]; // NO keeps late frames queued instead of dropping them (needed when recording)
[dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
// Deliver sample buffers on the main queue, where the OpenGL context is current
[dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
[_session addOutput:dataOutput];
[_session commitConfiguration];
}
- (void) onCameraStart
{
[_session startRunning];
}
- (void) onCameraStop
{
[_session stopRunning];
}
- (void) onCameraFree
{
CVOpenGLESTextureCacheFlush(textureCache, 0);
CFRelease(textureCache);
}
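
For orientation, a hedged sketch of how a renderer might consume what this file exposes: each processed frame publishes its GL texture name in outputTexture and posts a "NewCameraTextureReady" notification on the main thread, so a consumer only needs to observe that notification and bind the texture. The observer method onCameraFrame: is hypothetical:

/* Hypothetical consumer, assuming the outputTexture global and the
 * "NewCameraTextureReady" notification posted by the file above. */
extern GLuint outputTexture;

- (void)subscribeToCameraFrames
{
   [[NSNotificationCenter defaultCenter] addObserver:self
                                            selector:@selector(onCameraFrame:)
                                                name:@"NewCameraTextureReady"
                                              object:nil];
}

- (void)onCameraFrame:(NSNotification *)notification
{
   /* Delegate callbacks were dispatched to the main queue in onCameraInit,
    * so the GL context is current here. */
   glBindTexture(GL_TEXTURE_2D, outputTexture);
   /* ... draw the latest camera frame ... */
   glBindTexture(GL_TEXTURE_2D, 0);
}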

@@ -0,0 +1,68 @@
#include <CoreLocation/CoreLocation.h>
static CLLocationManager *locationManager;
static bool locationChanged;
static CLLocationDegrees currentLatitude;
static CLLocationDegrees currentLongitude;
static CLLocationAccuracy currentHorizontalAccuracy;
static CLLocationAccuracy currentVerticalAccuracy;
- (bool)onLocationHasChanged
{
bool hasChanged = locationChanged;
if (hasChanged)
locationChanged = false;
return hasChanged;
}
- (void)locationManager:(CLLocationManager *)manager didUpdateToLocation:(CLLocation *)newLocation fromLocation:(CLLocation *)oldLocation
{
locationChanged = true;
currentLatitude = newLocation.coordinate.latitude;
currentLongitude = newLocation.coordinate.longitude;
currentHorizontalAccuracy = newLocation.horizontalAccuracy;
currentVerticalAccuracy = newLocation.verticalAccuracy;
RARCH_LOG("didUpdateToLocation - latitude %f, longitude %f\n", (float)currentLatitude, (float)currentLongitude);
}
- (void)locationManager:(CLLocationManager *)manager didUpdateLocations:(NSArray *)locations
{
CLLocation *location = (CLLocation*)[locations lastObject];
locationChanged = true;
currentLatitude = [location coordinate].latitude;
currentLongitude = [location coordinate].longitude;
currentHorizontalAccuracy = location.horizontalAccuracy;
currentVerticalAccuracy = location.verticalAccuracy;
RARCH_LOG("didUpdateLocations - latitude %f, longitude %f\n", (float)currentLatitude, (float)currentLongitude);
}
- (void)locationManager:(CLLocationManager *)manager didFailWithError:(NSError *)error
{
RARCH_LOG("didFailWithError - %s\n", [[error localizedDescription] UTF8String]);
}
- (void)locationManagerDidPauseLocationUpdates:(CLLocationManager *)manager
{
RARCH_LOG("didPauseLocationUpdates\n");
}
- (void)locationManagerDidResumeLocationUpdates:(CLLocationManager *)manager
{
RARCH_LOG("didResumeLocationUpdates\n");
}
- (void)onLocationInit
{
// Create the location manager if this object does not
// already have one.
if (locationManager == nil)
locationManager = [[CLLocationManager alloc] init];
locationManager.delegate = self;
locationManager.desiredAccuracy = kCLLocationAccuracyBest;
locationManager.distanceFilter = kCLDistanceFilterNone;
[locationManager startUpdatingLocation];
}
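
Finally, a sketch of the polling pattern the location half supports: onLocationHasChanged reads and clears the dirty flag, after which the cached fix can be copied out, so each update is reported exactly once. The C wrapper and the apple_platform object below are illustrative assumptions, not part of this commit:

/* Hypothetical polling glue, assuming the statics above and an
 * apple_platform object whose class includes this .inl file. */
static bool apple_location_get_position(double *lat, double *lon,
      double *horiz_accuracy, double *vert_accuracy)
{
   if (![apple_platform onLocationHasChanged]) /* no new fix since last poll */
      return false;

   *lat            = currentLatitude;
   *lon            = currentLongitude;
   *horiz_accuracy = currentHorizontalAccuracy;
   *vert_accuracy  = currentVerticalAccuracy;
   return true;
}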