Audio: General fixes and improvements

Signed-off-by: Christopher Snowhill <kode54@gmail.com>

Author: Christopher Snowhill, 2025-02-13 06:34:21 -08:00
commit c3af7c3bdc (parent c48a52cda3)
11 changed files with 70 additions and 46 deletions

@@ -87,7 +87,7 @@ enum {
 - (id)init;
 - (id)initWithProperties:(NSDictionary *)properties;
-- (void)assignSamples:(const void *)data frameCount:(size_t)count;
+- (void)assignSamples:(const void *_Nonnull)data frameCount:(size_t)count;
 - (void)assignData:(NSData *)data;
 - (NSData *)removeSamples:(size_t)frameCount;

@@ -159,7 +159,7 @@ static const uint32_t AudioChannelConfigTable[] = {
 	channelConfig = config;
 }

-- (void)assignSamples:(const void *)data frameCount:(size_t)count {
+- (void)assignSamples:(const void *_Nonnull)data frameCount:(size_t)count {
 	if(formatAssigned) {
 		const size_t bytesPerPacket = format.mBytesPerPacket;
 		[chunkData appendBytes:data length:bytesPerPacket * count];

@@ -25,6 +25,8 @@ NS_ASSUME_NONNULL_BEGIN
 	BOOL inAdder;
 	BOOL inRemover;
 	BOOL inPeeker;
+	BOOL inMerger;
+	BOOL inConverter;
 	BOOL stopping;

 	// For format converter

@@ -384,6 +384,8 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
 	inAdder = NO;
 	inRemover = NO;
 	inPeeker = NO;
+	inMerger = NO;
+	inConverter = NO;
 	stopping = NO;
 	formatRead = NO;
@@ -407,7 +409,7 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
 - (void)dealloc {
 	stopping = YES;
-	while(inAdder || inRemover || inPeeker) {
+	while(inAdder || inRemover || inPeeker || inMerger || inConverter) {
 		usleep(500);
 	}
 	if(hdcd_decoder) {
@@ -456,8 +458,10 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
 }

 - (BOOL)isFull {
+	@synchronized (chunkList) {
 		return (maxDuration - listDuration) < 0.05;
 	}
+}

 - (void)addChunk:(AudioChunk *)chunk {
 	if(stopping) return;
@@ -553,6 +557,12 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
 }

 - (AudioChunk *)removeAndMergeSamples:(size_t)maxFrameCount {
+	if(stopping) {
+		return [[AudioChunk alloc] init];
+	}
+
+	inMerger = YES;
+
 	BOOL formatSet = NO;
 	AudioStreamBasicDescription currentFormat;
 	uint32_t currentChannelConfig = 0;
@ -560,6 +570,7 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
double streamTimestamp = 0.0; double streamTimestamp = 0.0;
double streamTimeRatio = 1.0; double streamTimeRatio = 1.0;
if (![self peekTimestamp:&streamTimestamp timeRatio:&streamTimeRatio]) { if (![self peekTimestamp:&streamTimestamp timeRatio:&streamTimeRatio]) {
inMerger = NO;
return [[AudioChunk alloc] init]; return [[AudioChunk alloc] init];
} }
@@ -570,11 +581,12 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
 	[outputChunk setStreamTimestamp:streamTimestamp];
 	[outputChunk setStreamTimeRatio:streamTimeRatio];

-	while(totalFrameCount < maxFrameCount) {
+	while(!stopping && totalFrameCount < maxFrameCount) {
 		AudioStreamBasicDescription newFormat;
 		uint32_t newChannelConfig;
 		if(![self peekFormat:&newFormat channelConfig:&newChannelConfig]) {
-			break;
+			usleep(500);
+			continue;
 		}
 		if(formatSet &&
 		   (memcmp(&newFormat, &currentFormat, sizeof(newFormat)) != 0 ||
@@ -589,8 +601,9 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
 		}

 		chunk = [self removeSamples:maxFrameCount - totalFrameCount];
-		if(![chunk duration]) {
-			break;
+		if(!chunk || ![chunk frameCount]) {
+			usleep(500);
+			continue;
 		}

 		if([chunk isHDCD]) {
@@ -606,9 +619,11 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
 	}

 	if(!totalFrameCount) {
+		inMerger = NO;
 		return [[AudioChunk alloc] init];
 	}

+	inMerger = NO;
 	return outputChunk;
 }
@@ -618,10 +633,15 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
 }

 - (AudioChunk *)convertChunk:(AudioChunk *)inChunk {
+	if(stopping) return [[AudioChunk alloc] init];
+
+	inConverter = YES;
+
 	AudioStreamBasicDescription chunkFormat = [inChunk format];
 	if(![inChunk duration] ||
 	   (chunkFormat.mFormatFlags == kAudioFormatFlagsNativeFloatPacked &&
 	    chunkFormat.mBitsPerChannel == 32)) {
+		inConverter = NO;
 		return inChunk;
 	}
@@ -635,8 +655,10 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
 	inputLossless = chunkLossless;

 	BOOL isFloat = !!(inputFormat.mFormatFlags & kAudioFormatFlagIsFloat);
-	if((!isFloat && !(inputFormat.mBitsPerChannel >= 1 && inputFormat.mBitsPerChannel <= 32)) || (isFloat && !(inputFormat.mBitsPerChannel == 32 || inputFormat.mBitsPerChannel == 64)))
+	if((!isFloat && !(inputFormat.mBitsPerChannel >= 1 && inputFormat.mBitsPerChannel <= 32)) || (isFloat && !(inputFormat.mBitsPerChannel == 32 || inputFormat.mBitsPerChannel == 64))) {
+		inConverter = NO;
 		return [[AudioChunk alloc] init];
+	}

 	// These are really placeholders, as we're doing everything internally now
 	if(inputLossless &&
@@ -684,6 +706,7 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
 	NSUInteger samplesRead = [inChunk frameCount];

 	if(!samplesRead) {
+		inConverter = NO;
 		return [[AudioChunk alloc] init];
 	}
@@ -865,34 +888,41 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
 	[outChunk assignSamples:inputBuffer frameCount:bytesReadFromInput / floatFormat.mBytesPerPacket];

+	inConverter = NO;
 	return outChunk;
 }

 - (BOOL)peekFormat:(AudioStreamBasicDescription *)format channelConfig:(uint32_t *)config {
 	if(stopping) return NO;
+	inPeeker = YES;
 	@synchronized(chunkList) {
 		if([chunkList count]) {
 			AudioChunk *chunk = [chunkList objectAtIndex:0];
 			*format = [chunk format];
 			*config = [chunk channelConfig];
+			inPeeker = NO;
 			return YES;
 		}
 	}

+	inPeeker = NO;
 	return NO;
 }

 - (BOOL)peekTimestamp:(double *)timestamp timeRatio:(double *)timeRatio {
 	if(stopping) return NO;
+	inPeeker = YES;
 	@synchronized (chunkList) {
 		if([chunkList count]) {
 			AudioChunk *chunk = [chunkList objectAtIndex:0];
 			*timestamp = [chunk streamTimestamp];
 			*timeRatio = [chunk streamTimeRatio];
+			inPeeker = NO;
 			return YES;
 		}
 	}

 	*timestamp = 0.0;
 	*timeRatio = 1.0;
+	inPeeker = NO;
 	return NO;
 }
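
The hunks above extend the existing inAdder/inRemover/inPeeker busy flags to the merge and convert paths, and -dealloc now spins until all of them clear. A minimal, self-contained sketch of that pattern; the BusyGuard class and its workBusy/doWork/shutdown names are illustrative, not from this codebase:

#import <Foundation/Foundation.h>
#include <unistd.h>

@interface BusyGuard : NSObject
- (void)doWork;
- (void)shutdown;
@end

@implementation BusyGuard {
	BOOL workBusy; // plays the role of inMerger / inConverter
	BOOL stopping;
}

- (void)doWork {
	if(stopping) return; // refuse new work once teardown has started
	workBusy = YES;
	// ... operate on the shared chunk list here ...
	workBusy = NO; // every return path clears the flag, as the hunks above do
}

- (void)shutdown {
	stopping = YES;
	while(workBusy) { // same spin-wait as -dealloc above
		usleep(500);
	}
}
@end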

@@ -101,7 +101,7 @@ void scale_by_volume(float *buffer, size_t count, float volume) {
 		@autoreleasepool {
 			AudioChunk *chunk = nil;
 			chunk = [self convert];
-			if(!chunk || ![chunk duration]) {
+			if(!chunk || ![chunk frameCount]) {
 				if([self endOfStream] == YES) {
 					break;
 				}

@@ -299,7 +299,7 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
 - (void)cleanUp {
 	stopping = YES;
 	while(processEntered) {
-		usleep(1000);
+		usleep(500);
 	}
 	[self fullShutdown];
 }
@@ -323,7 +323,7 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
 		@autoreleasepool {
 			AudioChunk *chunk = nil;
 			chunk = [self convert];
-			if(!chunk || ![chunk duration]) {
+			if(!chunk || ![chunk frameCount]) {
 				if([self endOfStream] == YES) {
 					break;
 				}
@@ -387,7 +387,7 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
 	size_t totalFrameCount = 0;
 	AudioChunk *chunk = [self readAndMergeChunksAsFloat32:4096];
-	if(![chunk duration]) {
+	if(!chunk || ![chunk frameCount]) {
 		processEntered = NO;
 		return nil;
 	}

@@ -22,7 +22,6 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
 @implementation DSPFSurroundNode {
 	BOOL enableFSurround;
 	BOOL FSurroundDelayRemoved;
-	BOOL resetStreamFormat;
 	FSurroundFilter *fsurround;

 	BOOL stopping, paused;
@@ -96,7 +95,6 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
 			outputChannelConfig = [fsurround channelConfig];

 			FSurroundDelayRemoved = NO;
-			resetStreamFormat = YES;
 		} else {
 			fsurround = nil;
 		}
@@ -118,7 +116,7 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
 - (void)cleanUp {
 	stopping = YES;
 	while(processEntered) {
-		usleep(1000);
+		usleep(500);
 	}
 	[self fullShutdown];
 }
@@ -142,7 +140,7 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
 		@autoreleasepool {
 			AudioChunk *chunk = nil;
 			chunk = [self convert];
-			if(!chunk || ![chunk duration]) {
+			if(!chunk || ![chunk frameCount]) {
 				if([self endOfStream] == YES) {
 					break;
 				}
@@ -204,18 +202,18 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
 		return [self readChunk:4096];
 	}

-	size_t totalRequestedSamples = resetStreamFormat ? 2048 : 4096;
+	size_t totalRequestedSamples = 4096;

 	size_t totalFrameCount = 0;
 	AudioChunk *chunk = [self readAndMergeChunksAsFloat32:totalRequestedSamples];
-	if(![chunk duration]) {
+	if(!chunk || ![chunk frameCount]) {
 		processEntered = NO;
 		return nil;
 	}

 	double streamTimestamp = [chunk streamTimestamp];

-	float *samplePtr = resetStreamFormat ? &inBuffer[2048 * 2] : &inBuffer[0];
+	float *samplePtr = &inBuffer[0];

 	size_t frameCount = [chunk frameCount];
 	NSData *sampleData = [chunk removeSamples:frameCount];
@@ -224,12 +222,6 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
 	totalFrameCount = frameCount;

-	if(resetStreamFormat) {
-		bzero(&inBuffer[0], 2048 * 2 * sizeof(float));
-		totalFrameCount += 2048;
-		resetStreamFormat = NO;
-	}
-
 	size_t countToProcess = totalFrameCount;
 	size_t samplesRendered;
 	if(countToProcess < 4096) {
@@ -237,8 +229,8 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
 		countToProcess = 4096;
 	}

-	[fsurround process:&inBuffer[0] output:&outBuffer[4096 * 6] count:(int)countToProcess];
-	samplePtr = &outBuffer[4096 * 6];
+	[fsurround process:&inBuffer[0] output:&outBuffer[0] count:(int)countToProcess];
+	samplePtr = &outBuffer[0];
 	samplesRendered = totalFrameCount;

 	if(totalFrameCount < 4096) {

@@ -241,7 +241,7 @@ static void unregisterMotionListener(void) {
 - (void)cleanUp {
 	stopping = YES;
 	while(processEntered) {
-		usleep(1000);
+		usleep(500);
 	}
 	[self fullShutdown];
 }
@@ -265,7 +265,7 @@ static void unregisterMotionListener(void) {
 		@autoreleasepool {
 			AudioChunk *chunk = nil;
 			chunk = [self convert];
-			if(!chunk || ![chunk duration]) {
+			if(!chunk || ![chunk frameCount]) {
 				if([self endOfStream] == YES) {
 					break;
 				}
@@ -328,7 +328,7 @@ static void unregisterMotionListener(void) {
 	}

 	AudioChunk *chunk = [self readChunkAsFloat32:4096];
-	if(!chunk || ![chunk duration]) {
+	if(!chunk || ![chunk frameCount]) {
 		processEntered = NO;
 		return nil;
 	}

@@ -322,7 +322,7 @@ static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
 - (void)cleanUp {
 	stopping = YES;
 	while(processEntered) {
-		usleep(1000);
+		usleep(500);
 	}
 	[self fullShutdown];
 }
@@ -346,7 +346,7 @@ static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
 		@autoreleasepool {
 			AudioChunk *chunk = nil;
 			chunk = [self convert];
-			if(!chunk || ![chunk duration]) {
+			if(!chunk || ![chunk frameCount]) {
 				if([self endOfStream] == YES) {
 					break;
 				}
@@ -417,7 +417,7 @@ static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
 		samplesToProcess = blockSize;

 	AudioChunk *chunk = [self readAndMergeChunksAsFloat32:samplesToProcess];
-	if(![chunk duration]) {
+	if(!chunk || ![chunk frameCount]) {
 		processEntered = NO;
 		return nil;
 	}

@@ -259,14 +259,14 @@
 		[[previousNode semaphore] signal];
 	}

-	[accessLock unlock];
-
 	AudioChunk *ret;
 	@autoreleasepool {
 		ret = [[previousNode buffer] removeAndMergeSamples:maxFrames];
 	}

+	[accessLock unlock];
+
 	if([ret frameCount]) {
 		[[previousNode semaphore] signal];
 	}
@@ -294,14 +294,14 @@
 		[[previousNode semaphore] signal];
 	}

-	[accessLock unlock];
-
 	AudioChunk *ret;
 	@autoreleasepool {
 		ret = [[previousNode buffer] removeAndMergeSamplesAsFloat32:maxFrames];
 	}

+	[accessLock unlock];
+
 	if([ret frameCount]) {
 		[[previousNode semaphore] signal];
 	}
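
Both hunks above move [accessLock unlock] so that the merge-and-remove on the shared buffer runs while the lock is still held. A minimal sketch of that ordering; GuardedReader, bufferLock, and sourceBuffer are illustrative stand-ins, not names from this codebase:

#import <Foundation/Foundation.h>

@interface GuardedReader : NSObject
- (NSData *)readMergedUpTo:(NSUInteger)maxBytes;
@end

@implementation GuardedReader {
	NSLock *bufferLock;                     // stands in for accessLock
	NSMutableArray<NSData *> *sourceBuffer; // stands in for [previousNode buffer]
}

- (instancetype)init {
	if((self = [super init])) {
		bufferLock = [[NSLock alloc] init];
		sourceBuffer = [[NSMutableArray alloc] init];
	}
	return self;
}

- (NSData *)readMergedUpTo:(NSUInteger)maxBytes {
	[bufferLock lock];
	NSMutableData *ret = [NSMutableData data];
	@autoreleasepool {
		// The merge removes entries from the shared buffer, so it runs while
		// the lock is still held; the old order released the lock first.
		while([sourceBuffer count] && [ret length] < maxBytes) {
			[ret appendData:[sourceBuffer objectAtIndex:0]];
			[sourceBuffer removeObjectAtIndex:0];
		}
	}
	[bufferLock unlock];
	return ret;
}
@end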

@@ -233,7 +233,7 @@ static VisualizationCollection *theCollection = nil;
 		}
 		AudioChunk *chunk = nil;
 		chunk = [self readAndMergeChunksAsFloat32:512];
-		if(!chunk || ![chunk duration]) {
+		if(!chunk || ![chunk frameCount]) {
 			if([self endOfStream] == YES) {
 				break;
 			}