Replaced AudioQueue output with AUAudioUnit

parent c4c9a741ef
commit 72210c67e4

4 changed files with 127 additions and 247 deletions
@@ -240,7 +240,7 @@ static void upmix(float * buffer, int inchannels, int outchannels, size_t count)
 	}
 }

-static void scale_by_volume(float * buffer, size_t count, float volume)
+void scale_by_volume(float * buffer, size_t count, float volume)
 {
 	if ( volume != 1.0 )
 		for (size_t i = 0; i < count; ++i )
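
scale_by_volume loses its static linkage here so the rewritten output can call it directly; OutputCoreAudio.m declares it extern further down and applies it inside the new render block. A minimal sketch of what the body presumably does, assuming the usual in-place multiply; only the signature and the two guard lines above come from this diff:

#include <stddef.h>

// Hedged sketch: skip the pass entirely at unity gain, otherwise scale every
// interleaved float sample in place. The multiply itself is assumed, not shown here.
void scale_by_volume(float * buffer, size_t count, float volume)
{
	if ( volume != 1.0 )
		for (size_t i = 0; i < count; ++i )
			buffer[i] *= volume;
}
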
@@ -378,6 +378,7 @@ tryagain:

		amountReadFromFC = 0;

+		if (floatOffset == floatSize) // skip this step if there's still float buffered
		while (inpOffset == inpSize) {
			size_t samplesRead = 0;

@@ -91,6 +91,7 @@
 		832BEEF9278D4A32005E1BC4 /* retro_assert.h in Headers */ = {isa = PBXBuildFile; fileRef = 832BEED9278D4A32005E1BC4 /* retro_assert.h */; };
 		832BEEFA278D4A32005E1BC4 /* stdstring.h in Headers */ = {isa = PBXBuildFile; fileRef = 832BEEDB278D4A32005E1BC4 /* stdstring.h */; };
 		832BEEFB278D4A32005E1BC4 /* filters.h in Headers */ = {isa = PBXBuildFile; fileRef = 832BEEDC278D4A32005E1BC4 /* filters.h */; };
+		832BEF04278DD06D005E1BC4 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 832BEF03278DD06D005E1BC4 /* AVFoundation.framework */; };
 		8384912718080FF100E7332D /* Logging.h in Headers */ = {isa = PBXBuildFile; fileRef = 8384912618080FF100E7332D /* Logging.h */; };
 		839366671815923C006DD712 /* CogPluginMulti.h in Headers */ = {isa = PBXBuildFile; fileRef = 839366651815923C006DD712 /* CogPluginMulti.h */; };
 		839366681815923C006DD712 /* CogPluginMulti.m in Sources */ = {isa = PBXBuildFile; fileRef = 839366661815923C006DD712 /* CogPluginMulti.m */; };
@@ -204,6 +205,7 @@
 		832BEED9278D4A32005E1BC4 /* retro_assert.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = retro_assert.h; sourceTree = "<group>"; };
 		832BEEDB278D4A32005E1BC4 /* stdstring.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = stdstring.h; sourceTree = "<group>"; };
 		832BEEDC278D4A32005E1BC4 /* filters.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = filters.h; sourceTree = "<group>"; };
+		832BEF03278DD06D005E1BC4 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
 		8384912618080FF100E7332D /* Logging.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = Logging.h; path = ../../Utils/Logging.h; sourceTree = "<group>"; };
 		839366651815923C006DD712 /* CogPluginMulti.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CogPluginMulti.h; sourceTree = "<group>"; };
 		839366661815923C006DD712 /* CogPluginMulti.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CogPluginMulti.m; sourceTree = "<group>"; };
@@ -223,6 +225,7 @@
 			isa = PBXFrameworksBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				832BEF04278DD06D005E1BC4 /* AVFoundation.framework in Frameworks */,
 				8DC2EF570486A6940098B216 /* Cocoa.framework in Frameworks */,
 				17D21DAD0B8BE76800D1EBDE /* AudioToolbox.framework in Frameworks */,
 				17D21DAE0B8BE76800D1EBDE /* AudioUnit.framework in Frameworks */,
@@ -251,6 +254,7 @@
 				089C1665FE841158C02AAC07 /* Resources */,
 				0867D69AFE84028FC02AAC07 /* External Frameworks and Libraries */,
 				034768DFFF38A50411DB9C8B /* Products */,
+				832BEF02278DD06D005E1BC4 /* Frameworks */,
 			);
 			name = CogAudio;
 			sourceTree = "<group>";
@@ -558,6 +562,14 @@
 			path = string;
 			sourceTree = "<group>";
 		};
+		832BEF02278DD06D005E1BC4 /* Frameworks */ = {
+			isa = PBXGroup;
+			children = (
+				832BEF03278DD06D005E1BC4 /* AVFoundation.framework */,
+			);
+			name = Frameworks;
+			sourceTree = "<group>";
+		};
 /* End PBXGroup section */

 /* Begin PBXHeadersBuildPhase section */
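
The four project-file hunks above only register AVFoundation.framework: a new file reference, a Frameworks group, and membership in the frameworks build phase. It is linked because the rewritten output describes its render format with AVFoundation's wrapper classes instead of raw property calls. A hedged sketch of the two calls involved; both appear in the setup hunk further down, while the format values here are only examples:

#import <AVFoundation/AVFoundation.h>

static AVAudioFormat *ExampleRenderFormat(void) {
	// Example ASBD; -setup derives the real one from the output device's bus format.
	AudioStreamBasicDescription asbd = {
		.mSampleRate       = 44100.0,
		.mFormatID         = kAudioFormatLinearPCM,
		.mFormatFlags      = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked,
		.mBitsPerChannel   = 32,
		.mChannelsPerFrame = 2,
		.mFramesPerPacket  = 1,
		.mBytesPerFrame    = 8,
		.mBytesPerPacket   = 8,
	};
	AVAudioChannelLayout *layout =
		[[AVAudioChannelLayout alloc] initWithLayoutTag:kAudioChannelLayoutTag_Stereo];
	// Wrap the ASBD plus a channel layout in the value type AUAudioUnit busses expect.
	return [[AVAudioFormat alloc] initWithStreamDescription:&asbd channelLayout:layout];
}
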
@@ -12,13 +12,13 @@
 #import <CoreAudio/AudioHardware.h>
 #import <AudioToolbox/AudioToolbox.h>
 #import <AudioUnit/AudioUnit.h>
+#import <AVFoundation/AVFoundation.h>

 @class OutputNode;

 @interface OutputCoreAudio : NSObject {
 	OutputNode * outputController;

-	BOOL primed;
 	BOOL running;
 	BOOL stopping;
 	BOOL stopped;
@@ -30,13 +30,8 @@
 	AudioDeviceID outputDeviceID;
 	AudioStreamBasicDescription deviceFormat;	// info about the default device

-	AudioQueueRef audioQueue;
-	AudioQueueBufferRef *buffers;
-	UInt32 numberOfBuffers;
-	UInt32 bufferByteSize;
-
-	AudioUnit outputUnit;
-	AURenderCallbackStruct renderCallback;
+	AUAudioUnit *_au;
+	size_t _bufferSize;
 }

 - (id)initWithController:(OutputNode *)c;
@@ -11,9 +11,7 @@

 #import "Logging.h"

-@interface OutputCoreAudio (Private)
-- (void)prime;
-@end
+extern void scale_by_volume(float * buffer, size_t count, float volume);

 @implementation OutputCoreAudio

@@ -23,10 +21,8 @@
 	if (self)
 	{
 		outputController = c;
-		outputUnit = NULL;
-		audioQueue = NULL;
-		buffers = NULL;
-		numberOfBuffers = 0;
+		_au = nil;
+		_bufferSize = 0;
 		volume = 1.0;
 		outputDeviceID = -1;
 		listenerapplied = NO;
@@ -37,54 +33,6 @@
 	return self;
 }

-static void Sound_Renderer(void *userData, AudioQueueRef queue, AudioQueueBufferRef buffer)
-{
-	OutputCoreAudio *output = (__bridge OutputCoreAudio *)userData;
-	void *readPointer = buffer->mAudioData;
-
-	int amountToRead, amountRead;
-
-	int framesToRead = buffer->mAudioDataByteSize / (output->deviceFormat.mBytesPerPacket);
-
-	amountToRead = framesToRead * (output->deviceFormat.mBytesPerPacket);
-
-	if (output->stopping == YES)
-	{
-		output->stopped = YES;
-		return;
-	}
-
-	if ([output->outputController shouldContinue] == NO)
-	{
-		// [output stop];
-		memset(readPointer, 0, amountToRead);
-		buffer->mAudioDataByteSize = amountToRead;
-		AudioQueueEnqueueBuffer(queue, buffer, 0, NULL);
-		return;
-	}
-
-	amountRead = [output->outputController readData:(readPointer) amount:amountToRead];
-
-	if ((amountRead < amountToRead) && [output->outputController endOfStream] == NO) //Try one more time! for track changes!
-	{
-		int amountRead2; //Use this since return type of readdata isnt known...may want to fix then can do a simple += to readdata
-		amountRead2 = [output->outputController readData:(readPointer+amountRead) amount:amountToRead-amountRead];
-		amountRead += amountRead2;
-	}
-
-	if (amountRead < amountToRead)
-	{
-		// Either underrun, or no data at all. Caller output tends to just
-		// buffer loop if it doesn't get anything, so always produce a full
-		// buffer, and silence anything we couldn't supply.
-		memset(readPointer + amountRead, 0, amountToRead - amountRead);
-		amountRead = amountToRead;
-	}
-
-	buffer->mAudioDataByteSize = amountRead;
-	AudioQueueEnqueueBuffer(queue, buffer, 0, NULL);
-}
-
 static OSStatus
 default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const AudioObjectPropertyAddress *inAddresses, void *inUserData)
 {
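
Sound_Renderer is deleted rather than ported one for one; its pull-and-zero-pad logic reappears inside the AUAudioUnit output provider that -setup installs. For orientation, this is the shape of that block, condensed from the setup hunk below with the body elided:

_au.outputProvider = ^AUAudioUnitStatus(AudioUnitRenderActionFlags * actionFlags,
                                        const AudioTimeStamp * timestamp,
                                        AUAudioFrameCount frameCount,
                                        NSInteger inputBusNumber,
                                        AudioBufferList * inputData) {
	// Same idea as the old callback: pull bytes from outputController into
	// inputData->mBuffers[0], zero-fill any shortfall, then report the byte size.
	return 0;
};
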
@@ -107,7 +55,6 @@ default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const
 {
 	OSStatus err;
 	BOOL defaultDevice = NO;
-	UInt32 thePropSize;
 	AudioObjectPropertyAddress theAddress = {
 		.mSelector = kAudioHardwarePropertyDefaultOutputDevice,
 		.mScope = kAudioObjectPropertyScopeGlobal,
@@ -126,7 +73,7 @@ default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const
 		}
 	}

-	if (audioQueue) {
+	if (_au) {
 		AudioObjectPropertyAddress defaultDeviceAddress = theAddress;

 		if (listenerapplied && !defaultDevice) {
@@ -135,31 +82,14 @@ default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const
 		}

 		if (outputDeviceID != deviceID) {
-			printf("DEVICE: %i\n", deviceID);
+			DLog(@"Device: %i\n", deviceID);
 			outputDeviceID = deviceID;

-			CFStringRef theDeviceUID;
-			theAddress.mSelector = kAudioDevicePropertyDeviceUID;
-			theAddress.mScope = kAudioDevicePropertyScopeOutput;
-			thePropSize = sizeof(theDeviceUID);
-			err = AudioObjectGetPropertyData(outputDeviceID, &theAddress, 0, NULL, &thePropSize, &theDeviceUID);
-
-			if (err) {
-				DLog(@"Error getting device UID as string");
-				return err;
+			NSError *nserr;
+			[_au setDeviceID:outputDeviceID error:&nserr];
+			if (nserr != nil) {
+				return (OSErr)[nserr code];
 			}
-
-			err = AudioQueueStop(audioQueue, true);
-			if (err) {
-				DLog(@"Error stopping stream to set device");
-				CFRelease(theDeviceUID);
-				return err;
-			}
-			primed = NO;
-			err = AudioQueueSetProperty(audioQueue, kAudioQueueProperty_CurrentDevice, &theDeviceUID, sizeof(theDeviceUID));
-			CFRelease(theDeviceUID);
-			if (running)
-				[self start];
 		}

 		if (!listenerapplied && defaultDevice) {
@@ -167,15 +97,6 @@ default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const
 			listenerapplied = YES;
 		}
 	}
-	else if (outputUnit) {
-		err = AudioUnitSetProperty(outputUnit,
-					kAudioOutputUnitProperty_CurrentDevice,
-					kAudioUnitScope_Output,
-					0,
-					&deviceID,
-					sizeof(AudioDeviceID));
-
-	}
 	else {
 		err = noErr;
 	}
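
Device switching now goes through [_au setDeviceID:error:] instead of stopping the queue and pushing a device UID property onto it. The default_device_changed listener itself is unchanged by this commit; for reference, a hedged sketch of how such a listener is typically registered, since the registration call site is outside this diff (the mElement value and the helper are illustrative):

#import <CoreAudio/AudioHardware.h>

// Assumed registration pattern; the selector and scope come from the hunk above.
static OSStatus ExampleInstallListener(AudioObjectPropertyListenerProc listener, void *ctx) {
	AudioObjectPropertyAddress theAddress = {
		.mSelector = kAudioHardwarePropertyDefaultOutputDevice,
		.mScope    = kAudioObjectPropertyScopeGlobal,
		.mElement  = kAudioObjectPropertyElementMaster, // assumption, not shown in the diff
	};
	return AudioObjectAddPropertyListener(kAudioObjectSystemObject, &theAddress,
	                                      listener, ctx);
}
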
@@ -298,93 +219,29 @@ default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const

 - (BOOL)setup
 {
-	if (outputUnit || audioQueue)
+	if (_au)
 		[self stop];

 	stopping = NO;
 	stopped = NO;
 	outputDeviceID = -1;

+	AVAudioFormat *format, *renderFormat;
 	AudioComponentDescription desc;
-	OSStatus err;
+	NSError *err;

 	desc.componentType = kAudioUnitType_Output;
-	desc.componentSubType = kAudioUnitSubType_DefaultOutput;
+	desc.componentSubType = kAudioUnitSubType_HALOutput;
 	desc.componentManufacturer = kAudioUnitManufacturer_Apple;
 	desc.componentFlags = 0;
 	desc.componentFlagsMask = 0;

-	AudioComponent comp = AudioComponentFindNext(NULL, &desc);  //Finds an component that meets the desc spec's
-	if (comp == NULL)
-		return NO;
-
-	err = AudioComponentInstanceNew(comp, &outputUnit);  //gains access to the services provided by the component
-	if (err)
-		return NO;
-
-	// Initialize AudioUnit
-	err = AudioUnitInitialize(outputUnit);
-	if (err != noErr)
-		return NO;
-
-	// Setup the output device before mucking with settings
-	NSDictionary *device = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] objectForKey:@"outputDevice"];
-	if (device) {
-		BOOL ok = [self setOutputDeviceWithDeviceDict:device];
-		if (!ok) {
-			//Ruh roh.
-			[self setOutputDeviceWithDeviceDict:nil];
-
-			[[[NSUserDefaultsController sharedUserDefaultsController] defaults] removeObjectForKey:@"outputDevice"];
-		}
-	}
-	else {
-		[self setOutputDeviceWithDeviceDict:nil];
-	}
-
-	UInt32 size = sizeof (AudioStreamBasicDescription);
-	Boolean outWritable;
-	//Gets the size of the Stream Format Property and if it is writable
-	AudioUnitGetPropertyInfo(outputUnit,
-				kAudioUnitProperty_StreamFormat,
-				kAudioUnitScope_Output,
-				0,
-				&size,
-				&outWritable);
-	//Get the current stream format of the output
-	err = AudioUnitGetProperty (outputUnit,
-				kAudioUnitProperty_StreamFormat,
-				kAudioUnitScope_Output,
-				0,
-				&deviceFormat,
-				&size);
-
-	if (err != noErr)
-		return NO;
-
-	AudioUnitUninitialize (outputUnit);
-	AudioComponentInstanceDispose(outputUnit);
-	outputUnit = NULL;
-
-	///Seems some 3rd party devices return incorrect stuff...or I just don't like noninterleaved data.
-	deviceFormat.mFormatFlags &= ~kLinearPCMFormatFlagIsNonInterleaved;
-//	deviceFormat.mFormatFlags &= ~kLinearPCMFormatFlagIsFloat;
-//	deviceFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
-	if (@available(macOS 12.0, *)) {
-		// Let's enable surround upmixing, for surround and spatial output
-		deviceFormat.mChannelsPerFrame = 8;
-	}
-	// And force a default rate for crappy devices
-	if (deviceFormat.mSampleRate < 32000)
-		deviceFormat.mSampleRate = 48000;
-	deviceFormat.mBytesPerFrame = deviceFormat.mChannelsPerFrame*(deviceFormat.mBitsPerChannel/8);
-	deviceFormat.mBytesPerPacket = deviceFormat.mBytesPerFrame * deviceFormat.mFramesPerPacket;
-
-	err = AudioQueueNewOutput(&deviceFormat, Sound_Renderer, (__bridge void * _Nullable)(self), NULL, NULL, 0, &audioQueue);
-
-	if (err != noErr)
+	_au = [[AUAudioUnit alloc] initWithComponentDescription:desc error:&err];
+	if (err != nil)
 		return NO;

+	// Setup the output device before mucking with settings
+	NSDictionary *device = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] objectForKey:@"outputDevice"];
 	if (device) {
 		BOOL ok = [self setOutputDeviceWithDeviceDict:device];
 		if (!ok) {
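
The throwaway AudioUnit that existed only to sniff the device format is gone; the unit is now created once, up front, through the AUAudioUnit wrapper and queried for its output bus format in the next hunk. A standalone sketch of that creation step, assuming only what the hunk above shows (the failure log and helper name are illustrative, the diff simply returns NO):

#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>

static AUAudioUnit *ExampleCreateOutputUnit(void) {
	AudioComponentDescription desc = {
		.componentType         = kAudioUnitType_Output,
		.componentSubType      = kAudioUnitSubType_HALOutput, // was kAudioUnitSubType_DefaultOutput
		.componentManufacturer = kAudioUnitManufacturer_Apple,
	};
	NSError *err = nil;
	AUAudioUnit *au = [[AUAudioUnit alloc] initWithComponentDescription:desc error:&err];
	if (au == nil || err != nil) {
		NSLog(@"AUAudioUnit creation failed: %@", err); // illustrative; not in the diff
		return nil;
	}
	return au;
}
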
@@ -398,95 +255,128 @@ default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const
 		[self setOutputDeviceWithDeviceDict:nil];
 	}

+	format = _au.outputBusses[0].format;
+
+	deviceFormat = *(format.streamDescription);
+
+	///Seems some 3rd party devices return incorrect stuff...or I just don't like noninterleaved data.
+	deviceFormat.mFormatFlags &= ~kLinearPCMFormatFlagIsNonInterleaved;
+//	deviceFormat.mFormatFlags &= ~kLinearPCMFormatFlagIsFloat;
+//	deviceFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
+	if (@available(macOS 12.0, *)) {
+		// Let's enable surround upmixing, for surround and spatial output
+		deviceFormat.mChannelsPerFrame = 8;
+	}
+	// And force a default rate for crappy devices
+	if (deviceFormat.mSampleRate < 32000)
+		deviceFormat.mSampleRate = 48000;
+	deviceFormat.mBytesPerFrame = deviceFormat.mChannelsPerFrame*(deviceFormat.mBitsPerChannel/8);
+	deviceFormat.mBytesPerPacket = deviceFormat.mBytesPerFrame * deviceFormat.mFramesPerPacket;
+
 	/* Set the channel layout for the audio queue */
-	AudioChannelLayout layout = {0};
+	AudioChannelLayoutTag tag = 0;
 	switch (deviceFormat.mChannelsPerFrame) {
 		case 1:
-			layout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
+			tag = kAudioChannelLayoutTag_Mono;
 			break;
 		case 2:
-			layout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
+			tag = kAudioChannelLayoutTag_Stereo;
 			break;
 		case 3:
-			layout.mChannelLayoutTag = kAudioChannelLayoutTag_DVD_4;
+			tag = kAudioChannelLayoutTag_DVD_4;
 			break;
 		case 4:
-			layout.mChannelLayoutTag = kAudioChannelLayoutTag_Quadraphonic;
+			tag = kAudioChannelLayoutTag_Quadraphonic;
 			break;
 		case 5:
-			layout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_0_A;
+			tag = kAudioChannelLayoutTag_MPEG_5_0_A;
 			break;
 		case 6:
-			layout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_A;
+			tag = kAudioChannelLayoutTag_MPEG_5_1_A;
 			break;
 		case 7:
-			layout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_6_1_A;
+			tag = kAudioChannelLayoutTag_MPEG_6_1_A;
 			break;
 		case 8:
-			layout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_7_1_A;
+			tag = kAudioChannelLayoutTag_MPEG_7_1_A;
 			break;
 	}
-	if (layout.mChannelLayoutTag != 0) {
-		err = AudioQueueSetProperty(audioQueue, kAudioQueueProperty_ChannelLayout, &layout, sizeof(layout));
-		if (err != noErr) {
-			return NO;
-		}
-	}
-
-	numberOfBuffers = 4;
-	bufferByteSize = deviceFormat.mBytesPerPacket * 512;
-
-	buffers = calloc(sizeof(buffers[0]), numberOfBuffers);
-	if (!buffers)
-	{
-		AudioQueueDispose(audioQueue, true);
-		audioQueue = NULL;
-		return NO;
-	}
-
-	for (UInt32 i = 0; i < numberOfBuffers; ++i)
-	{
-		err = AudioQueueAllocateBuffer(audioQueue, bufferByteSize, buffers + i);
-		if (err != noErr || buffers[i] == NULL)
-		{
-			err = AudioQueueDispose(audioQueue, true);
-			audioQueue = NULL;
-			return NO;
-		}
-
-		buffers[i]->mAudioDataByteSize = bufferByteSize;
-	}
-
-	[self prime];
-
-	[outputController setFormat:&deviceFormat];
-
-	return (err == noErr);
-}
-
-- (void)prime
-{
-	for (UInt32 i = 0; i < numberOfBuffers; ++i)
-		Sound_Renderer((__bridge void * _Nullable)(self), audioQueue, buffers[i]);
-	primed = YES;
+
+	renderFormat = [[AVAudioFormat alloc] initWithStreamDescription:&deviceFormat channelLayout:[[AVAudioChannelLayout alloc] initWithLayoutTag:tag]];
+	[_au.inputBusses[0] setFormat:renderFormat error:&err];
+	if (err != nil)
+		return NO;
+
+	float * volume = &self->volume;
+
+	_au.outputProvider = ^AUAudioUnitStatus(AudioUnitRenderActionFlags * actionFlags, const AudioTimeStamp * timestamp, AUAudioFrameCount frameCount, NSInteger inputBusNumber, AudioBufferList * inputData)
+	{
+		void *readPointer = inputData->mBuffers[0].mData;
+
+		int amountToRead, amountRead;
+
+		int framesToRead = inputData->mBuffers[0].mDataByteSize / (self->deviceFormat.mBytesPerPacket);
+
+		amountToRead = framesToRead * (self->deviceFormat.mBytesPerPacket);
+
+		if (self->stopping == YES)
+		{
+			self->stopped = YES;
+			memset(readPointer, 0, amountToRead);
+			inputData->mBuffers[0].mDataByteSize = amountToRead;
+			return 0;
+		}
+
+		if ([self->outputController shouldContinue] == NO)
+		{
+			memset(readPointer, 0, amountToRead);
+			inputData->mBuffers[0].mDataByteSize = amountToRead;
+			return 0;
+		}
+
+		amountRead = [self->outputController readData:(readPointer) amount:amountToRead];
+
+		if ((amountRead < amountToRead) && [self->outputController endOfStream] == NO) //Try one more time! for track changes!
+		{
+			int amountRead2; //Use this since return type of readdata isnt known...may want to fix then can do a simple += to readdata
+			amountRead2 = [self->outputController readData:(readPointer+amountRead) amount:amountToRead-amountRead];
+			amountRead += amountRead2;
+		}
+
+		int framesRead = amountRead / sizeof(float);
+		scale_by_volume((float*)readPointer, framesRead, *volume);
+
+		if (amountRead < amountToRead)
+		{
+			// Either underrun, or no data at all. Caller output tends to just
+			// buffer loop if it doesn't get anything, so always produce a full
+			// buffer, and silence anything we couldn't supply.
+			memset(readPointer + amountRead, 0, amountToRead - amountRead);
+			amountRead = amountToRead;
+		}
+
+		inputData->mBuffers[0].mDataByteSize = amountRead;
+
+		return 0;
+	};
+
+	[_au allocateRenderResourcesAndReturnError:&err];
+
+	[outputController setFormat:&deviceFormat];
+
+	return (err == nil);
 }

 - (void)setVolume:(double)v
 {
 	volume = v * 0.01f;
-	AudioQueueSetParameter(audioQueue, kAudioQueueParam_VolumeRampTime, 0);
-	AudioQueueSetParameter(audioQueue, kAudioQueueParam_Volume, volume);
 }

 - (void)start
 {
-	AudioQueueSetParameter(audioQueue, kAudioQueueParam_VolumeRampTime, 0);
-	AudioQueueSetParameter(audioQueue, kAudioQueueParam_Volume, volume);
-	AudioQueueStart(audioQueue, NULL);
+	NSError *err;
+	[_au startHardwareAndReturnError:&err];
 	running = YES;
-	if (!primed)
-		[self prime];
 }

 - (void)stop
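
Note that the output provider still does its bookkeeping in bytes: the buffer's byte capacity is divided back into frames and multiplied out again, and the count handed to scale_by_volume is a count of float samples, that is frames times channels. A small worked example using the 8-channel float case configured above; the concrete numbers are illustrative:

#include <stddef.h>

// Worked example of the byte math inside the output provider.
static void example_render_math(void) {
	size_t bytesPerPacket = 8 * sizeof(float);                   // 32 bytes per interleaved frame
	size_t bufferBytes    = 512 * bytesPerPacket;                // one 512-frame pull = 16384 bytes
	int framesToRead      = (int)(bufferBytes / bytesPerPacket); // 512
	int amountToRead      = framesToRead * (int)bytesPerPacket;  // bytes requested from outputController
	int floatsToScale     = amountToRead / (int)sizeof(float);   // what scale_by_volume iterates over:
	(void)floatsToScale;                                         // frames * channels, not frames
}
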
@@ -501,27 +391,10 @@ default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const
 		AudioObjectRemovePropertyListener(kAudioObjectSystemObject, &theAddress, default_device_changed, (__bridge void * _Nullable)(self));
 		listenerapplied = NO;
 	}
-	if (outputUnit) {
-		AudioUnitUninitialize (outputUnit);
-		AudioComponentInstanceDispose(outputUnit);
-		outputUnit = NULL;
-	}
-	if (audioQueue && buffers) {
-		AudioQueuePause(audioQueue);
-		AudioQueueStop(audioQueue, true);
+	if (_au) {
+		[_au stopHardware];
 		running = NO;
-		for (UInt32 i = 0; i < numberOfBuffers; ++i) {
-			if (buffers[i])
-				AudioQueueFreeBuffer(audioQueue, buffers[i]);
-			buffers[i] = NULL;
-		}
-		free(buffers);
-		buffers = NULL;
-	}
-	if (audioQueue) {
-		AudioQueueDispose(audioQueue, true);
-		audioQueue = NULL;
+		_au = nil;
 	}
 }

@@ -534,16 +407,15 @@ default_device_changed(AudioObjectID inObjectID, UInt32 inNumberAddresses, const

 - (void)pause
 {
-	AudioQueuePause(audioQueue);
+	[_au stopHardware];
 	running = NO;
 }

 - (void)resume
 {
-	AudioQueueStart(audioQueue, NULL);
+	NSError *err;
+	[_au startHardwareAndReturnError:&err];
 	running = YES;
-	if (!primed)
-		[self prime];
 }

 @end
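
Taken together, the class keeps its old surface and only the backend changes: -setup builds the AUAudioUnit and installs the output provider, -start and -resume call startHardwareAndReturnError:, -pause and -stop call stopHardware, and -setVolume: now only updates the scalar that the render block reads. A hedged usage sketch of that lifecycle from the caller's side; the OutputNode instance and the surrounding plumbing are assumed, not shown in this diff:

// Usage sketch; assumes an existing OutputNode *node and ARC. Method names are
// the ones defined in this file.
static void ExamplePlaybackLifecycle(OutputNode *node) {
	OutputCoreAudio *output = [[OutputCoreAudio alloc] initWithController:node];
	if (![output setup])       // creates the AUAudioUnit and installs the outputProvider
		return;
	[output setVolume:100.0];  // stored as 1.0; applied by scale_by_volume in the render block
	[output start];            // startHardwareAndReturnError:
	// ... playback ...
	[output pause];            // stopHardware, running = NO
	[output resume];           // startHardwareAndReturnError: again
	[output stop];             // removes the device listener, stops the hardware, releases _au
}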