mirror of https://github.com/LizardByte/Sunshine.git
2b450839a1
This commit introduces initial support for macOS as the third major host platform. It relies on the VideoToolbox framework for audio and video processing, which enables hardware-accelerated processing of the stream on most platforms. Audio capture requires third-party tools, since macOS does not offer a way to record the audio output as the other platforms do. The commit enables most of the features offered by Sunshine on macOS, with the big exception of gamepad support. The patch set was tested by a few volunteers, which allowed some of the early bugs to be removed. However, several bugs, especially around corner cases, have probably not surfaced yet. Besides instructions on how to build from source, the commit also adds a Portfile that allows an easier installation. Once it is available on the release branch, a pull request for the Portfile in the MacPorts project is planned. Signed-off-by: Anselm Busse <anselm.busse@outlook.com>
121 lines · 4.2 KiB · Objective-C
#import "av_audio.h"
|
|
|
|
@implementation AVAudio

// Enumerate every audio capture device the system knows about, built-in and
// external alike.
+ (NSArray<AVCaptureDevice *> *)microphones {
  AVCaptureDeviceDiscoverySession *discoverySession =
    [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInMicrophone,
                                                                       AVCaptureDeviceTypeExternalUnknown]
                                                           mediaType:AVMediaTypeAudio
                                                            position:AVCaptureDevicePositionUnspecified];
  return discoverySession.devices;
}

// Map the available capture devices to their user-facing names.
+ (NSArray<NSString *> *)microphoneNames {
  NSMutableArray *result = [[NSMutableArray alloc] init];

  for(AVCaptureDevice *device in [AVAudio microphones]) {
    [result addObject:[device localizedName]];
  }

  // this file is compiled without ARC, so hand ownership to the autorelease
  // pool instead of leaking the array
  return [result autorelease];
}

// Resolve a device by its localized name; returns nil when no match exists.
+ (AVCaptureDevice *)findMicrophone:(NSString *)name {
  for(AVCaptureDevice *device in [AVAudio microphones]) {
    if([[device localizedName] isEqualToString:name]) {
      return device;
    }
  }

  return nil;
}

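// A minimal usage sketch for the three discovery helpers above (hypothetical
// caller code, not part of this file): list the capture devices by name and
// resolve the configured one back to a device. "BlackHole 2ch" is just an
// example of a third-party loopback driver a user might pick.
//
//   for(NSString *name in [AVAudio microphoneNames]) {
//     NSLog(@"Available audio capture device: %@", name);
//   }
//
//   AVCaptureDevice *mic = [AVAudio findMicrophone:@"BlackHole 2ch"];
//   if(mic == nil) {
//     // the configured device disappeared or was misspelled; fall back
//   }
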
- (void)dealloc {
  // make sure we don't process any further samples
  self.audioConnection = nil;

  // stop capturing and drop the retained session before tearing down the buffer
  [self.audioCaptureSession stopRunning];
  self.audioCaptureSession = nil;

  // make sure nothing gets stuck on this signal
  [self.samplesArrivedSignal signal];
  [self.samplesArrivedSignal release];

  TPCircularBufferCleanup(&audioSampleBuffer);

  [super dealloc];
}

- (int)setupMicrophone:(AVCaptureDevice *)device sampleRate:(UInt32)sampleRate frameSize:(UInt32)frameSize channels:(UInt8)channels {
  self.audioCaptureSession = [[AVCaptureSession alloc] init];

  NSError *error;
  // alloc/init (rather than the autoreleasing convenience constructor) so the
  // explicit release calls below are balanced under manual reference counting
  AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
  if(audioInput == nil) {
    return -1;
  }

  if([self.audioCaptureSession canAddInput:audioInput]) {
    [self.audioCaptureSession addInput:audioInput];
  }
  else {
    [audioInput release];
    return -1;
  }

  AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];

  // request 16-bit interleaved signed-integer PCM at the caller's sample rate
  // and channel count
  [audioOutput setAudioSettings:@{
    (NSString *)AVFormatIDKey: [NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM],
    (NSString *)AVSampleRateKey: [NSNumber numberWithUnsignedInt:sampleRate],
    (NSString *)AVNumberOfChannelsKey: [NSNumber numberWithUnsignedInt:channels],
    (NSString *)AVLinearPCMBitDepthKey: [NSNumber numberWithUnsignedInt:16],
    (NSString *)AVLinearPCMIsFloatKey: @NO,
    (NSString *)AVLinearPCMIsNonInterleaved: @NO
  }];

  // sample buffer delegates require a serial queue to deliver samples in
  // order; the relative priority must be <= 0, otherwise
  // dispatch_queue_attr_make_with_qos_class returns NULL
  dispatch_queue_attr_t qos = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL,
                                                                      QOS_CLASS_USER_INITIATED,
                                                                      0);
  dispatch_queue_t recordingQueue = dispatch_queue_create("audioSamplingQueue", qos);

  [audioOutput setSampleBufferDelegate:self queue:recordingQueue];

  if([self.audioCaptureSession canAddOutput:audioOutput]) {
    [self.audioCaptureSession addOutput:audioOutput];
  }
  else {
    [audioInput release];
    [audioOutput release];
    return -1;
  }

  self.audioConnection = [audioOutput connectionWithMediaType:AVMediaTypeAudio];

  [self.audioCaptureSession startRunning];

  // the session retains its inputs and outputs, so our references can go
  [audioInput release];
  [audioOutput release];

  self.samplesArrivedSignal = [[NSCondition alloc] init];
  TPCircularBufferInit(&self->audioSampleBuffer, kBufferLength * channels);

  return 0;
}

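// A sketch of how a caller might drive the setup above (assumed names and
// parameters, not part of this file): pick a device and open the session at
// the stream's audio format.
//
//   AVAudio *audio = [[AVAudio alloc] init];
//   AVCaptureDevice *mic = [AVAudio findMicrophone:@"BlackHole 2ch"];
//   if(mic == nil ||
//      [audio setupMicrophone:mic sampleRate:48000 frameSize:240 channels:2] != 0) {
//     // no such device, or the capture session could not be assembled
//   }
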
- (void)captureOutput:(AVCaptureOutput *)output
  didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
  if(connection == self.audioConnection) {
    AudioBufferList audioBufferList;
    CMBlockBufferRef blockBuffer;

    CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, 0, &blockBuffer);

    //NSAssert(audioBufferList.mNumberBuffers == 1, @"Expected interleaved PCM format but buffer contained %u streams", audioBufferList.mNumberBuffers);

    // this is safe, because an interleaved PCM stream has exactly one buffer
    // and we don't want to do sanity checks in a performance critical exec path
    AudioBuffer audioBuffer = audioBufferList.mBuffers[0];

    TPCircularBufferProduceBytes(&self->audioSampleBuffer, audioBuffer.mData, audioBuffer.mDataByteSize);
    [self.samplesArrivedSignal signal];

    // the block buffer comes back retained, so release it once the samples
    // have been copied into the circular buffer
    CFRelease(blockBuffer);
  }
}

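// The consumer side of the hand-off above, as a sketch: captureOutput:
// produces raw PCM into the circular buffer and pulses the condition, so a
// reader (Sunshine's audio thread lives elsewhere) can block until samples
// arrive and then drain whatever is available. This assumes audioSampleBuffer
// is visible to the caller, as declared in av_audio.h.
//
//   [audio.samplesArrivedSignal lock];
//   [audio.samplesArrivedSignal wait];
//   [audio.samplesArrivedSignal unlock];
//
//   uint32_t availableBytes;
//   void *samples = TPCircularBufferTail(&audio->audioSampleBuffer, &availableBytes);
//   // ... hand up to availableBytes of 16-bit interleaved PCM to the encoder ...
//   TPCircularBufferConsume(&audio->audioSampleBuffer, availableBytes);
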
@end