Bug Fix: Greatly improve audio buffer handling

Buffers were being treated as empty before they had actually been
processed, due to a race between setting the current node's
end-of-stream marker and actually feeding the output buffer.
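
The fix applies one pattern across the node classes below: a node can
no longer trust its own endOfStream flag while upstream data may still
be in flight. Instead, it stops only when the previous node has both an
empty buffer and its own end-of-stream flag raised, and it raises its
own flag only after its processing loop has fully drained. A minimal
sketch of the corrected shape, reusing the names from the diffs below
(previousNode, buffer, endOfStream, stopping); pause, format-change,
and locking details are elided, and the real loops wait rather than
spin:

    - (void)process {
    	while([self shouldContinue] == YES) {
    		AudioChunk *chunk = [self convert];
    		if(!chunk || ![chunk frameCount]) {
    			// Stop only when upstream has truly finished; checking
    			// [self endOfStream] here raced with the feeder thread.
    			if([previousNode endOfStream] == YES) {
    				endOfStream = YES;
    				break;
    			}
    			continue; // upstream may still produce data
    		}
    		[self writeChunk:chunk];
    	}
    	// Raise the flag only after the drain loop exits, so readers
    	// never see a premature end-of-stream with data still queued.
    	endOfStream = YES;
    }

    - (AudioChunk *)convert {
    	// Bail out only if the previous node is drained AND finished.
    	if(stopping || ([[previousNode buffer] isEmpty] &&
    	                [previousNode endOfStream] == YES) ||
    	   [self shouldContinue] == NO) {
    		return nil;
    	}
    	/* ...pull and convert samples from previousNode... */
    	return nil; // placeholder: return the converted chunk
    }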

Signed-off-by: Christopher Snowhill <kode54@gmail.com>
Author: Christopher Snowhill <kode54@gmail.com>
Date:   2025-03-04 00:15:47 -08:00
Commit: bce00aff2e (parent 915e212ae5)

9 changed files with 30 additions and 25 deletions

File 1 of 9:

@@ -110,7 +110,8 @@ void scale_by_volume(float *buffer, size_t count, float volume) {
 			AudioChunk *chunk = nil;
 			chunk = [self convert];
 			if(!chunk || ![chunk frameCount]) {
-				if([self endOfStream] == YES) {
+				if([previousNode endOfStream] == YES) {
+					endOfStream = YES;
 					break;
 				}
 				if(paused || !streamFormatChanged) {
@@ -127,6 +128,7 @@ void scale_by_volume(float *buffer, size_t count, float volume) {
 			}
 		}
 	}
+	endOfStream = YES;
 }

 - (AudioChunk *)convert {
@@ -163,7 +165,7 @@ void scale_by_volume(float *buffer, size_t count, float volume) {
 	ssize_t bytesReadFromInput = 0;
-	while(bytesReadFromInput < amountToWrite && !stopping && !paused && !streamFormatChanged && [self shouldContinue] == YES && [self endOfStream] == NO) {
+	while(bytesReadFromInput < amountToWrite && !stopping && !paused && !streamFormatChanged && [self shouldContinue] == YES && !([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES)) {
 		AudioStreamBasicDescription inf;
 		uint32_t config;
 		if([self peekFormat:&inf channelConfig:&config]) {
@@ -203,7 +205,7 @@ void scale_by_volume(float *buffer, size_t count, float volume) {
 		return nil;
 	}
-	if(stopping || paused || streamFormatChanged || [self shouldContinue] == NO || [self endOfStream] == YES) {
+	if(stopping || paused || streamFormatChanged || [self shouldContinue] == NO || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES)) {
 		if(!skipResampler) {
 			if(!is_postextrapolated_) {
 				is_postextrapolated_ = 1;

File 2 of 9:

@@ -110,7 +110,7 @@
 			AudioChunk *chunk = nil;
 			chunk = [self convert];
 			if(!chunk || ![chunk frameCount]) {
-				if([self endOfStream] == YES) {
+				if([previousNode endOfStream] == YES) {
 					break;
 				}
 				if(paused) {
@@ -123,6 +123,7 @@
 			}
 		}
 	}
+	endOfStream = YES;
 }

 - (AudioChunk *)convert {
@@ -131,7 +132,7 @@
 	processEntered = YES;
-	if(stopping || [self endOfStream] == YES || [self shouldContinue] == NO) {
+	if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
 		processEntered = NO;
 		return nil;
 	}

File 3 of 9:

@@ -350,7 +350,7 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
 			AudioChunk *chunk = nil;
 			chunk = [self convert];
 			if(!chunk || ![chunk frameCount]) {
-				if([self endOfStream] == YES) {
+				if([previousNode endOfStream] == YES) {
 					break;
 				}
 				if(paused) {
@@ -366,6 +366,7 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
 			}
 		}
 	}
+	endOfStream = YES;
 }

 - (AudioChunk *)convert {
@@ -374,7 +375,7 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
 	processEntered = YES;
-	if(stopping || [self endOfStream] == YES || [self shouldContinue] == NO) {
+	if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
 		processEntered = NO;
 		return nil;
 	}

File 4 of 9:

@@ -146,7 +146,7 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
 			AudioChunk *chunk = nil;
 			chunk = [self convert];
 			if(!chunk || ![chunk frameCount]) {
-				if([self endOfStream] == YES) {
+				if([previousNode endOfStream] == YES) {
 					break;
 				}
 				if(paused) {
@@ -162,6 +162,7 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
 			}
 		}
 	}
+	endOfStream = YES;
 }

 - (AudioChunk *)convert {
@@ -170,7 +171,7 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
 	processEntered = YES;
-	if(stopping || [self endOfStream] == YES || [self shouldContinue] == NO) {
+	if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
 		processEntered = NO;
 		return nil;
 	}

File 5 of 9:

@@ -271,7 +271,7 @@ static void unregisterMotionListener(void) {
 			AudioChunk *chunk = nil;
 			chunk = [self convert];
 			if(!chunk || ![chunk frameCount]) {
-				if([self endOfStream] == YES) {
+				if([previousNode endOfStream] == YES) {
 					break;
 				}
 				if(paused) {
@@ -287,6 +287,7 @@ static void unregisterMotionListener(void) {
 			}
 		}
 	}
+	endOfStream = YES;
 }

 - (AudioChunk *)convert {
@@ -295,7 +296,7 @@ static void unregisterMotionListener(void) {
 	processEntered = YES;
-	if(stopping || [self endOfStream] == YES || [self shouldContinue] == NO) {
+	if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
 		processEntered = NO;
 		return nil;
 	}

File 6 of 9:

@@ -361,7 +361,6 @@ static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
 			chunk = [self convert];
 			if(!chunk || ![chunk frameCount]) {
 				if(flushed) {
-					endOfStream = YES;
 					break;
 				}
 				if(paused) {
@@ -381,6 +380,7 @@ static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
 			}
 		}
 	}
+	endOfStream = YES;
 }

 - (AudioChunk *)convert {
@@ -389,7 +389,7 @@ static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
 	processEntered = YES;
-	if(stopping || flushed || [self endOfStream] == YES || [self shouldContinue] == NO) {
+	if(stopping || flushed || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
 		processEntered = NO;
 		return nil;
 	}

File 7 of 9:

@@ -257,7 +257,6 @@ static uint64_t _Node_serial;
 	}
 	if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
-		endOfStream = YES;
 		[accessLock unlock];
 		inPeek = NO;
 		return NO;
@@ -296,7 +295,6 @@ static uint64_t _Node_serial;
 	}
 	if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
-		endOfStream = YES;
 		[accessLock unlock];
 		inPeek = NO;
 		return NO;
@@ -338,7 +336,6 @@ static uint64_t _Node_serial;
 	}
 	if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
-		endOfStream = YES;
 		[accessLock unlock];
 		inRead = NO;
 		return [[AudioChunk alloc] init];
@@ -408,7 +405,6 @@ static uint64_t _Node_serial;
 	}
 	if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
-		endOfStream = YES;
 		[accessLock unlock];
 		inRead = NO;
 		return [[AudioChunk alloc] init];
@@ -461,7 +457,6 @@ static uint64_t _Node_serial;
 	[accessLock lock];
 	if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
-		endOfStream = YES;
 		[accessLock unlock];
 		inMerge = NO;
 		return [[AudioChunk alloc] init];
@@ -519,7 +514,6 @@ static uint64_t _Node_serial;
 	[accessLock lock];
 	if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
-		endOfStream = YES;
 		[accessLock unlock];
 		inMerge = NO;
 		return [[AudioChunk alloc] init];

File 8 of 9:

@@ -112,10 +112,10 @@
 		if(finalNode) {
 			AudioChunk *ret = [super readChunk:amount];
 			/* if (n == 0) {
 				DLog(@"Output Buffer dry!");
 			}
 			*/
+			if((!ret || ![ret frameCount]) && [previousNode endOfStream]) {
+				endOfStream = YES;
+			}
 			return ret;
 		} else {
 			return [[AudioChunk alloc] init];
@@ -127,7 +127,11 @@
 	@autoreleasepool {
 		[self setPreviousNode:[[controller bufferChain] finalNode]];
-		return [super peekFormat:format channelConfig:config];
+		BOOL ret = [super peekFormat:format channelConfig:config];
+		if(!ret && [previousNode endOfStream]) {
+			endOfStream = YES;
+		}
+		return ret;
 	}
 }

File 9 of 9:

@@ -245,7 +245,7 @@ static VisualizationCollection *theCollection = nil;
 			AudioChunk *chunk = nil;
 			chunk = [self readAndMergeChunksAsFloat32:512];
 			if(!chunk || ![chunk frameCount]) {
-				if([self endOfStream] == YES) {
+				if([previousNode endOfStream] == YES) {
 					break;
 				}
 			} else {
@@ -255,6 +255,7 @@ static VisualizationCollection *theCollection = nil;
 			}
 		}
 	}
+	endOfStream = YES;
 }

 - (void)postVisPCM:(const float *)visTemp amount:(size_t)samples {