Core Audio: Implement proper fade on seek

Whew, what a mess! And this may pave the way for crossfading.

Signed-off-by: Christopher Snowhill <kode54@gmail.com>
Christopher Snowhill 2025-03-10 23:08:49 -07:00
parent 9b973a4b53
commit 15eaa877b1
9 changed files with 229 additions and 99 deletions
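
In short: instead of tearing the output node down and rebuilding it on every seek, the player now fades the old audio out in the background while the new position fades back in. A condensed sketch of the new seek path, pieced together from the AudioPlayer and OutputNode hunks below (resume-interval handling and launch bookkeeping omitted):

	[output fadeOutBackground]; // hand the current output buffer to a FadedBuffer, stop feeding it new input
	[output seek:time];         // reposition the output node
	[bufferChain seek:time];    // reposition the decode chain
	[output fadeIn];            // reset fadeLevel to 0 and ramp back to full volume over ~125 ms

The render callback then mixes any pending FadedBuffer instances on top of the live stream, which is the same machinery a future crossfade between tracks could reuse.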


@@ -75,14 +75,12 @@
 	[self waitUntilCallbacksExit];
 	if(output) {
-		[output fadeOut];
-		[output setShouldContinue:NO];
-		[output close];
+		[output fadeOutBackground];
 	}
 	if(!output) {
 		output = [[OutputNode alloc] initWithController:self previous:nil];
+		[output setupWithInterval:resumeInterval];
 	}
-	[output setupWithInterval:resumeInterval];
 	[output setVolume:volume];
 	@synchronized(chainQueue) {
 		for(id anObject in chainQueue) {
@@ -125,14 +123,15 @@
 	}
 	if(time > 0.0) {
-		[output fadeIn];
 		[output seek:time];
 		[bufferChain seek:time];
 	}
 	[self setShouldContinue:YES];
-	outputLaunched = NO;
+	if(!resumeInterval) {
+		outputLaunched = NO;
+	}
 	startedPaused = paused;
 	initialBufferFilled = NO;
 	previousUserInfo = userInfo;
@@ -144,6 +143,8 @@
 		if(time > 0.0) {
 			[self updatePosition:userInfo];
 		}
+	} else if(resumeInterval) {
+		[output fadeIn];
 	}
 }


@@ -17,6 +17,8 @@
 - (void)threadEntry:(id _Nullable)arg;
+- (void)setShouldContinue:(BOOL)s;
 - (double)secondsBuffered;
 @end


@@ -9,7 +9,9 @@
 #import "DSPNode.h"
-@implementation DSPNode
+@implementation DSPNode {
+	BOOL threadTerminated;
+}
 - (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
 	self = [super init];
@@ -53,7 +55,17 @@
 		NSThread *currentThread = [NSThread currentThread];
 		[currentThread setThreadPriority:0.75];
 		[currentThread setQualityOfService:NSQualityOfServiceUserInitiated];
+		threadTerminated = NO;
 		[self process];
+		threadTerminated = YES;
+	}
+}
+
+- (void)setShouldContinue:(BOOL)s {
+	BOOL currentShouldContinue = shouldContinue;
+	shouldContinue = s;
+	if(!currentShouldContinue && s && threadTerminated) {
+		[self launchThread];
 	}
 }


@@ -39,6 +39,8 @@
 - (BOOL)selectNextBuffer;
 - (void)endOfInputPlayed;
+- (BOOL)endOfStream;
 - (BOOL)chainQueueHasTracks;
 - (double)secondsBuffered;
@@ -50,6 +52,7 @@
 - (void)seek:(double)time;
 - (void)fadeOut;
+- (void)fadeOutBackground;
 - (void)fadeIn;
 - (AudioChunk *)readChunk:(size_t)amount;


@@ -103,7 +103,12 @@
 	[output fadeOut];
 }
+
+- (void)fadeOutBackground {
+	[output fadeOutBackground];
+}
 - (void)fadeIn {
+	[self reconnectInputAndReplumb];
 	[output fadeIn];
 }
@@ -137,9 +142,7 @@
 - (BOOL)selectNextBuffer {
 	BOOL ret = [controller selectNextBuffer];
 	if(!ret) {
-		Node *finalNode = [[controller bufferChain] finalNode];
-		[rubberbandNode setPreviousNode:finalNode];
-		[self reconnectInput];
+		[self reconnectInputAndReplumb];
 	}
 	return ret;
 }
@@ -169,11 +172,28 @@
 	}
 }
-- (void)reconnectInput {
+- (BOOL)reconnectInput {
+	Node *finalNode = nil;
+	if(rubberbandNode) {
+		finalNode = [[controller bufferChain] finalNode];
+		[rubberbandNode setPreviousNode:finalNode];
+	}
+	return !!finalNode;
+}
+
+- (void)reconnectInputAndReplumb {
+	Node *finalNode = nil;
+	if(rubberbandNode) {
+		finalNode = [[controller bufferChain] finalNode];
+		[rubberbandNode setPreviousNode:finalNode];
+	}
 	NSArray *DSPs = [self DSPs];
 	for (Node *node in DSPs) {
 		[node setEndOfStream:NO];
+		[node setShouldContinue:YES];
 	}
 }
@@ -187,10 +207,7 @@
 - (AudioChunk *)readChunk:(size_t)amount {
 	@autoreleasepool {
-		Node *finalNode = [[controller bufferChain] finalNode];
-		[rubberbandNode setPreviousNode:finalNode];
-		if(finalNode) {
+		if([self reconnectInput]) {
 			AudioChunk *ret = [super readChunk:amount];
 			if((!ret || ![ret frameCount]) && [previousNode endOfStream]) {
@@ -206,14 +223,15 @@
 - (BOOL)peekFormat:(nonnull AudioStreamBasicDescription *)format channelConfig:(nonnull uint32_t *)config {
 	@autoreleasepool {
-		Node *finalNode = [[controller bufferChain] finalNode];
-		[rubberbandNode setPreviousNode:finalNode];
-		BOOL ret = [super peekFormat:format channelConfig:config];
-		if(!ret && [previousNode endOfStream]) {
-			endOfStream = YES;
+		if([self reconnectInput]) {
+			BOOL ret = [super peekFormat:format channelConfig:config];
+			if(!ret && [previousNode endOfStream]) {
+				endOfStream = YES;
+			}
+			return ret;
+		} else {
+			return NO;
 		}
-		return ret;
 	}
 }


@@ -24,8 +24,7 @@
 - (void)resetBuffer;
-- (void)pop;
-- (void)replayPreroll;
+- (void)setShouldContinue:(BOOL)s;
 - (void)process;


@@ -30,6 +30,7 @@
 	BOOL processEntered;
 	BOOL stopping;
 	BOOL paused;
+	BOOL threadTerminated;
 	AudioStreamBasicDescription inputFormat;
 	AudioStreamBasicDescription visFormat; // Mono format for vis
@@ -95,7 +96,9 @@
 		NSThread *currentThread = [NSThread currentThread];
 		[currentThread setThreadPriority:0.75];
 		[currentThread setQualityOfService:NSQualityOfServiceUserInitiated];
+		threadTerminated = NO;
 		[self process];
+		threadTerminated = YES;
 	}
 }
@@ -113,6 +116,14 @@
 	return [buffer listDuration];
 }
+
+- (void)setShouldContinue:(BOOL)s {
+	BOOL currentShouldContinue = shouldContinue;
+	shouldContinue = s;
+	if(!currentShouldContinue && s && threadTerminated) {
+		[self launchThread];
+	}
+}
 - (BOOL)setup {
 	if(fabs(inputFormat.mSampleRate - 44100.0) > 1e-6) {
 		rs = rsstate_new(1, inputFormat.mSampleRate, 44100.0);


@@ -57,6 +57,7 @@ using std::atomic_long;
 	BOOL commandStop;
 	BOOL resetting;
+	BOOL cutOffInput;
 	BOOL fading, faded;
 	float fadeLevel;
 	float fadeStep;
@@ -116,6 +117,7 @@ using std::atomic_long;
 - (void)stop;
 - (void)fadeOut;
+- (void)fadeOutBackground;
 - (void)fadeIn;
 - (double)latency;


@@ -23,15 +23,89 @@ extern void scale_by_volume(float *buffer, size_t count, float volume);
 static NSString *CogPlaybackDidBeginNotificiation = @"CogPlaybackDidBeginNotificiation";
+
+static BOOL fadeAudio(const float *inSamples, float *outSamples, size_t channels, size_t count, float *fadeLevel, float fadeStep, float fadeTarget) {
+	float _fadeLevel = *fadeLevel;
+	BOOL towardZero = fadeStep < 0.0;
+	BOOL stopping = NO;
+	for(size_t i = 0; i < count; ++i) {
+		for(size_t j = 0; j < channels; ++j) {
+			outSamples[j] += inSamples[j] * _fadeLevel;
+		}
+		inSamples += channels;
+		outSamples += channels;
+		_fadeLevel += fadeStep;
+		if(towardZero && _fadeLevel <= fadeTarget) {
+			_fadeLevel = fadeTarget;
+			fadeStep = 0.0;
+			stopping = YES;
+			break;
+		} else if(!towardZero && _fadeLevel >= fadeTarget) {
+			_fadeLevel = fadeTarget;
+			fadeStep = 0.0;
+			stopping = YES;
+		}
+	}
+	*fadeLevel = _fadeLevel;
+	return stopping;
+}
+
+@interface FadedBuffer : NSObject {
+	float fadeLevel;
+	float fadeStep;
+	float fadeTarget;
+	ChunkList *lastBuffer;
+}
+
+- (id)initWithBuffer:(ChunkList *)buffer fadeTarget:(float)fadeTarget sampleRate:(double)sampleRate;
+- (BOOL)mix:(float *)outputBuffer sampleCount:(size_t)samples channelCount:(size_t)channels;
+
+@end
+
+@implementation FadedBuffer
+
+- (id)initWithBuffer:(ChunkList *)buffer fadeTarget:(float)fadeTarget sampleRate:(double)sampleRate {
+	self = [super init];
+	if(self) {
+		fadeLevel = 1.0;
+		self->fadeTarget = fadeTarget;
+		lastBuffer = buffer;
+		const double maxFadeDurationMS = 1000.0 * [buffer listDuration];
+		const double fadeDuration = MIN(125.0f, maxFadeDurationMS);
+		fadeStep = ((fadeTarget - fadeLevel) / sampleRate) * (1000.0f / fadeDuration);
+	}
+	return self;
+}
+
+- (BOOL)mix:(float *)outputBuffer sampleCount:(size_t)samples channelCount:(size_t)channels {
+	if(lastBuffer) {
+		AudioChunk * chunk = [lastBuffer removeAndMergeSamples:samples callBlock:^BOOL{
+			// Always interrupt if buffer runs empty, because it is not being refilled any more
+			return true;
+		}];
+		if(chunk && [chunk frameCount]) {
+			// Will always be input request size or less
+			size_t samplesToMix = [chunk frameCount];
+			NSData *sampleData = [chunk removeSamples:samplesToMix];
+			return fadeAudio((const float *)[sampleData bytes], outputBuffer, channels, samplesToMix, &fadeLevel, fadeStep, fadeTarget);
+		}
+	}
+	// No buffer or no chunk, stream ended
+	return true;
+}
+
+@end
+
 @implementation OutputCoreAudio {
 	VisualizationController *visController;
+	NSLock *fadedBuffersLock;
+	NSMutableArray<FadedBuffer *> *fadedBuffers;
 }
 static void *kOutputCoreAudioContext = &kOutputCoreAudioContext;
 - (AudioChunk *)renderInput:(int)amountToRead {
-	int amountRead = 0;
 	if(stopping == YES || [outputController shouldContinue] == NO) {
 		// Chain is dead, fill out the serial number pointer forever with silence
 		stopping = YES;
@@ -67,6 +141,9 @@ static void *kOutputCoreAudioContext = &kOutputCoreAudioContext;
 		outputLock = [[NSLock alloc] init];
+		fadedBuffersLock = [[NSLock alloc] init];
+		fadedBuffers = [[NSMutableArray alloc] init];
 #ifdef OUTPUT_LOG
 		NSString *logName = [NSTemporaryDirectory() stringByAppendingPathComponent:@"CogAudioLog.raw"];
 		_logFile = fopen([logName UTF8String], "wb");
@@ -162,22 +239,13 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 			if(stopping)
 				break;
-			if(![outputBuffer isFull]) {
+			if(!cutOffInput && ![outputBuffer isFull]) {
 				[self renderAndConvert];
 				rendered = YES;
 			} else {
 				rendered = NO;
 			}
-#if 0
-			if(faded && !paused) {
-				resetting = YES;
-				[self pause];
-				started = NO;
-				resetting = NO;
-			}
-#endif
 			if(!started && !paused) {
 				// Prevent this call from hanging when used in this thread, when buffer may be empty
 				// and waiting for this very thread to fill it
@@ -543,30 +611,6 @@
 	}
 }
-
-static BOOL fadeAudio(float * samples, size_t channels, size_t count, float * fadeLevel, float fadeStep, float fadeTarget) {
-	float _fadeLevel = *fadeLevel;
-	BOOL towardZero = fadeStep < 0.0;
-	BOOL stopping = NO;
-	for(size_t i = 0; i < count; ++i) {
-		for(size_t j = 0; j < channels; ++j) {
-			samples[j] *= _fadeLevel;
-		}
-		samples += channels;
-		_fadeLevel += fadeStep;
-		if(towardZero && _fadeLevel <= fadeTarget) {
-			_fadeLevel = fadeTarget;
-			fadeStep = 0.0;
-			stopping = YES;
-		} else if(!towardZero && _fadeLevel >= fadeTarget) {
-			_fadeLevel = fadeTarget;
-			fadeStep = 0.0;
-			stopping = YES;
-		}
-	}
-	*fadeLevel = _fadeLevel;
-	return stopping;
-}
 - (void)renderAndConvert {
 	if(resetStreamFormat) {
 		[self updateStreamFormat];
@@ -600,6 +644,8 @@ static BOOL fadeAudio(float * samples, size_t channels, size_t count, float * fa
 	__block AudioStreamBasicDescription *format = &deviceFormat;
 	__block void *refCon = (__bridge void *)self;
 	__block NSLock *refLock = self->outputLock;
+	__block NSLock *fadersLock = self->fadedBuffersLock;
+	__block NSMutableArray *faders = self->fadedBuffers;
 #ifdef OUTPUT_LOG
 	__block FILE *logFile = _logFile;
@@ -616,35 +662,55 @@ static BOOL fadeAudio(float * samples, size_t channels, size_t count, float * fa
 		OutputCoreAudio *_self = (__bridge OutputCoreAudio *)refCon;
 		int renderedSamples = 0;
-		if(_self->resetting || _self->faded) {
-			inputData->mBuffers[0].mDataByteSize = frameCount * format->mBytesPerPacket;
-			bzero(inputData->mBuffers[0].mData, inputData->mBuffers[0].mDataByteSize);
-			inputData->mBuffers[0].mNumberChannels = channels;
+		inputData->mBuffers[0].mDataByteSize = frameCount * format->mBytesPerPacket;
+		bzero(inputData->mBuffers[0].mData, inputData->mBuffers[0].mDataByteSize);
+		inputData->mBuffers[0].mNumberChannels = channels;
+
+		if(_self->resetting) {
 			return 0;
 		}
+
+		float *outSamples = (float*)inputData->mBuffers[0].mData;
+
 		@autoreleasepool {
-			while(renderedSamples < frameCount) {
-				[refLock lock];
-				AudioChunk *chunk = nil;
-				if(![_self->outputBuffer isEmpty]) {
-					chunk = [_self->outputBuffer removeSamples:frameCount - renderedSamples];
-				}
-				[refLock unlock];
-				if(chunk && [chunk frameCount]) {
-					_self->streamTimestamp = [chunk streamTimestamp];
-					size_t _frameCount = [chunk frameCount];
-					NSData *sampleData = [chunk removeSamples:_frameCount];
-					float *samplePtr = (float *)[sampleData bytes];
-					size_t inputTodo = MIN(_frameCount, frameCount - renderedSamples);
-					cblas_scopy((int)(inputTodo * channels), samplePtr, 1, ((float *)inputData->mBuffers[0].mData) + renderedSamples * channels, 1);
-					renderedSamples += inputTodo;
-				}
-				if(_self->stopping || _self->resetting || _self->faded) {
-					break;
+			if(!_self->faded) {
+				while(renderedSamples < frameCount) {
+					[refLock lock];
+					AudioChunk *chunk = nil;
+					if(_self->outputBuffer && ![_self->outputBuffer isEmpty]) {
+						chunk = [_self->outputBuffer removeSamples:frameCount - renderedSamples];
+					}
+					[refLock unlock];
+
+					size_t _frameCount = 0;
+
+					if(chunk && [chunk frameCount]) {
+						_self->streamTimestamp = [chunk streamTimestamp];
+						_frameCount = [chunk frameCount];
+						NSData *sampleData = [chunk removeSamples:_frameCount];
+						float *samplePtr = (float *)[sampleData bytes];
+						size_t inputTodo = MIN(_frameCount, frameCount - renderedSamples);
+						if(!_self->fading) {
+							cblas_scopy((int)(inputTodo * channels), samplePtr, 1, outSamples + renderedSamples * channels, 1);
+						} else {
+							BOOL faded = fadeAudio(samplePtr, outSamples + renderedSamples * channels, channels, inputTodo, &_self->fadeLevel, _self->fadeStep, _self->fadeTarget);
+							if(faded) {
+								if(_self->fadeStep < 0.0) {
+									_self->faded = YES;
+								}
+								_self->fading = NO;
+								_self->fadeStep = 0.0f;
+							}
+						}
+						renderedSamples += inputTodo;
+					}
+
+					if(_self->stopping || _self->resetting || _self->faded || !chunk || !_frameCount) {
+						break;
+					}
 				}
 			}
@@ -661,21 +727,19 @@ static BOOL fadeAudio(float * samples, size_t channels, size_t count, float * fa
 				}
 			}
-			scale_by_volume((float*)inputData->mBuffers[0].mData, renderedSamples * channels, volumeScale * _self->volume);
-			if(_self->fading) {
-				BOOL faded = fadeAudio((float*)inputData->mBuffers[0].mData, channels, renderedSamples, &_self->fadeLevel, _self->fadeStep, _self->fadeTarget);
-				if(faded) {
-					if(_self->fadeStep < 0.0f) {
-						_self->faded = YES;
-					}
-					_self->fading = NO;
-					_self->fadeStep = 0.0f;
+			[fadersLock lock];
+			for(size_t i = 0; i < [faders count];) {
+				FadedBuffer *buffer = faders[i];
+				BOOL stopping = [buffer mix:outSamples sampleCount:frameCount channelCount:channels];
+				if(stopping) {
+					[faders removeObjectAtIndex:i];
+				} else {
+					++i;
 				}
 			}
-			inputData->mBuffers[0].mDataByteSize = renderedSamples * format->mBytesPerPacket;
-			inputData->mBuffers[0].mNumberChannels = channels;
+			[fadersLock unlock];
+			scale_by_volume(outSamples, frameCount * channels, volumeScale * _self->volume);
 			[_self updateLatency:secondsRendered];
 		}
@@ -711,6 +775,7 @@ static BOOL fadeAudio(float * samples, size_t channels, size_t count, float * fa
 	outputDeviceID = -1;
 	restarted = NO;
+	cutOffInput = NO;
 	fadeTarget = 1.0f;
 	fadeLevel = 1.0f;
 	fadeStep = 0.0f;
@@ -840,9 +905,13 @@ static BOOL fadeAudio(float * samples, size_t channels, size_t count, float * fa
 			} while(!commandStop && compareVal > 0 && compareMax-- > 0);
 		} else {
 			[self fadeOut];
-			while(fading && !faded) {
-				usleep(5000);
+			[fadedBuffersLock lock];
+			while([fadedBuffers count]) {
+				[fadedBuffersLock unlock];
+				usleep(10000);
+				[fadedBuffersLock lock];
 			}
+			[fadedBuffersLock unlock];
 		}
 		[_au stopHardware];
 		_au = nil;
@@ -912,11 +981,24 @@ static BOOL fadeAudio(float * samples, size_t channels, size_t count, float * fa
 	fading = YES;
 }
+
+- (void)fadeOutBackground {
+	cutOffInput = YES;
+	[outputLock lock];
+	[fadedBuffersLock lock];
+	FadedBuffer *buffer = [[FadedBuffer alloc] initWithBuffer:outputBuffer fadeTarget:0.0 sampleRate:deviceFormat.mSampleRate];
+	outputBuffer = [[ChunkList alloc] initWithMaximumDuration:0.5];
+	[fadedBuffers addObject:buffer];
+	[fadedBuffersLock unlock];
+	[outputLock unlock];
+}
 - (void)fadeIn {
-	fadeTarget = 1.0;
+	fadeLevel = 0.0f;
+	fadeTarget = 1.0f;
 	fadeStep = ((fadeTarget - fadeLevel) / deviceFormat.mSampleRate) * (1000.0f / 125.0f);
 	fading = YES;
 	faded = NO;
+	cutOffInput = NO;
 }
 @end