Sound Output: Move DSPs, restructure output buffer

Move the DSPs to the output node, so they are no longer closed and
reopened on every file change. Also restructure the output handler to
keep a small buffer of its own, to absorb track switch activity.

Signed-off-by: Christopher Snowhill <kode54@gmail.com>
Christopher Snowhill 2025-03-05 20:05:33 -08:00
parent fcb2639d01
commit 0b1482b3c6
12 changed files with 248 additions and 444 deletions
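
The change has two halves, both visible in the diffs below. First, the six
DSP nodes (rubber band, FreeSurround, equalizer, HRTF, downmix,
visualization) move out of the per-track BufferChain and into the
long-lived OutputNode, so a track change only re-points the head of the
persistent chain at the new chain's final node. A rough sketch of that
hand-off, using the method names from the diff (illustrative glue, not the
literal implementation):

	// OutputNode: on track change, reuse the running DSP chain instead of
	// rebuilding it.
	- (BOOL)selectNextBuffer {
		BOOL ret = [controller selectNextBuffer];
		if(!ret) {
			// Feed the persistent chain from the new BufferChain's tail node...
			Node *finalNode = [[controller bufferChain] finalNode];
			[rubberbandNode setPreviousNode:finalNode];
			// ...and clear end-of-stream so the already-running DSP threads resume.
			for(Node *node in [self DSPs]) {
				[node setEndOfStream:NO];
			}
		}
		return ret;
	}

Second, OutputCoreAudio now keeps a ChunkList of up to half a second of
processed audio: the worker thread tops it up whenever it is not full, and
the render callback drains it under outputLock, so the device keeps playing
through the brief gap while the upstream chain is swapped. Roughly
(simplified; format changes, end-of-stream, and stop handling are omitted,
and framesNeeded stands in for however many frames the callback still owes
the device):

	// Worker thread: keep up to 0.5 seconds of converted audio buffered ahead.
	if(![outputBuffer isFull]) {
		AudioChunk *chunk = [self renderInput:512];
		if(chunk && [chunk frameCount]) {
			[outputLock lock];
			[outputBuffer addChunk:chunk];
			[outputLock unlock];
		}
	}

	// Render callback: pull buffered chunks straight into the device buffer.
	[outputLock lock];
	AudioChunk *chunk = [outputBuffer removeSamples:framesNeeded];
	[outputLock unlock];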

BufferChain.h

@@ -10,23 +10,11 @@
#import <CogAudio/AudioPlayer.h>
#import <CogAudio/ConverterNode.h>
#import <CogAudio/DSPRubberbandNode.h>
#import <CogAudio/DSPFSurroundNode.h>
#import <CogAudio/DSPHRTFNode.h>
#import <CogAudio/DSPEqualizerNode.h>
#import <CogAudio/VisualizationNode.h>
#import <CogAudio/DSPDownmixNode.h>
#import <CogAudio/InputNode.h>
@interface BufferChain : NSObject {
InputNode *inputNode;
ConverterNode *converterNode;
DSPRubberbandNode *rubberbandNode;
DSPFSurroundNode *fsurroundNode;
DSPHRTFNode *hrtfNode;
DSPEqualizerNode *equalizerNode;
DSPDownmixNode *downmixNode;
VisualizationNode *visualizationNode;
NSURL *streamURL;
id userInfo;
@@ -83,18 +71,6 @@
- (AudioStreamBasicDescription)inputFormat;
- (uint32_t)inputConfig;
- (DSPRubberbandNode *)rubberband;
- (DSPFSurroundNode *)fsurround;
- (DSPHRTFNode *)hrtf;
- (DSPEqualizerNode *)equalizer;
- (DSPDownmixNode *)downmix;
- (VisualizationNode *)visualization;
- (double)secondsBuffered;
- (void)sustainHDCD;
@@ -105,6 +81,4 @@
- (void)setError:(BOOL)status;
- (double)getPostVisLatency;
@end

BufferChain.m

@@ -9,6 +9,7 @@
#import "BufferChain.h"
#import "AudioSource.h"
#import "CoreAudioUtils.h"
#import "DSPDownmixNode.h"
#import "OutputNode.h"
#import "AudioPlayer.h"
@@ -27,14 +28,6 @@
inputNode = nil;
converterNode = nil;
rubberbandNode = nil;
fsurroundNode = nil;
equalizerNode = nil;
hrtfNode = nil;
downmixNode = nil;
visualizationNode = nil;
}
return self;
@@ -45,12 +38,6 @@
finalNode = nil;
// Tear them down in reverse
visualizationNode = nil;
downmixNode = nil;
hrtfNode = nil;
equalizerNode = nil;
fsurroundNode = nil;
rubberbandNode = nil;
converterNode = nil;
inputNode = nil;
@@ -58,22 +45,8 @@
if(!inputNode) return NO;
converterNode = [[ConverterNode alloc] initWithController:self previous:inputNode];
if(!converterNode) return NO;
rubberbandNode = [[DSPRubberbandNode alloc] initWithController:self previous:converterNode latency:0.1];
if(!rubberbandNode) return NO;
fsurroundNode = [[DSPFSurroundNode alloc] initWithController:self previous:rubberbandNode latency:0.03];
if(!fsurroundNode) return NO;
equalizerNode = [[DSPEqualizerNode alloc] initWithController:self previous:fsurroundNode latency:0.03];
if(!equalizerNode) return NO;
hrtfNode = [[DSPHRTFNode alloc] initWithController:self previous:equalizerNode latency:0.03];
if(!hrtfNode) return NO;
downmixNode = [[DSPDownmixNode alloc] initWithController:self previous:hrtfNode latency:0.03];
if(!downmixNode) return NO;
// Approximately double the chunk size for Vis at 44100Hz
visualizationNode = [[VisualizationNode alloc] initWithController:self previous:downmixNode latency:8192.0 / 44100.0];
if(!visualizationNode) return NO;
finalNode = visualizationNode;
finalNode = converterNode;
return YES;
}
@@ -178,6 +151,7 @@
- (void)initDownmixer {
AudioPlayer * audioPlayer = controller;
OutputNode *outputNode = [audioPlayer output];
DSPDownmixNode *downmixNode = [outputNode downmix];
[downmixNode setOutputFormat:[outputNode deviceFormat] withChannelConfig:[outputNode deviceChannelConfig]];
}
@@ -186,12 +160,6 @@
[inputNode launchThread];
[converterNode launchThread];
[rubberbandNode launchThread];
[fsurroundNode launchThread];
[equalizerNode launchThread];
[hrtfNode launchThread];
[downmixNode launchThread];
[visualizationNode launchThread];
}
- (void)setUserInfo:(id)i {
@@ -218,9 +186,6 @@
if(![inputNode threadExited])
[[inputNode exitAtTheEndOfTheStream] wait]; // wait for decoder to be closed (see InputNode's -(void)process )
// Must do this here, or else the VisualizationContainer will carry a reference forever
[visualizationNode pop];
DLog(@"Bufferchain dealloc");
}
@@ -262,12 +227,6 @@
- (void)setShouldContinue:(BOOL)s {
[inputNode setShouldContinue:s];
[converterNode setShouldContinue:s];
[rubberbandNode setShouldContinue:s];
[fsurroundNode setShouldContinue:s];
[equalizerNode setShouldContinue:s];
[hrtfNode setShouldContinue:s];
[downmixNode setShouldContinue:s];
[visualizationNode setShouldContinue:s];
}
- (BOOL)isRunning {
@@ -286,30 +245,6 @@
return converterNode;
}
- (DSPRubberbandNode *)rubberband {
return rubberbandNode;
}
- (DSPFSurroundNode *)fsurround {
return fsurroundNode;
}
- (DSPHRTFNode *)hrtf {
return hrtfNode;
}
- (DSPEqualizerNode *)equalizer {
return equalizerNode;
}
- (DSPDownmixNode *)downmix {
return downmixNode;
}
- (VisualizationNode *)visualization {
return visualizationNode;
}
- (AudioStreamBasicDescription)inputFormat {
return [inputNode nodeFormat];
}
@@ -346,17 +281,4 @@
[controller setError:status];
}
- (double)getPostVisLatency {
double latency = 0.0;
Node *node = finalNode;
while(node) {
latency += [node secondsBuffered];
if(node == visualizationNode) {
break;
}
node = [node previousNode];
}
return latency;
}
@end

DSPDownmixNode.m

@@ -37,6 +37,7 @@
- (void)dealloc {
DLog(@"Downmix dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[super cleanUp];
}
@@ -102,7 +103,7 @@
- (void)process {
while([self shouldContinue] == YES) {
if(paused) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@@ -111,7 +112,9 @@
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
break;
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
@@ -123,7 +126,6 @@
}
}
}
endOfStream = YES;
}
- (AudioChunk *)convert {

DSPEqualizerNode.m

@@ -14,7 +14,7 @@
#import "DSPEqualizerNode.h"
#import "BufferChain.h"
#import "OutputNode.h"
#import "Logging.h"
@@ -24,81 +24,6 @@ extern void scale_by_volume(float *buffer, size_t count, float volume);
static void * kDSPEqualizerNodeContext = &kDSPEqualizerNodeContext;
@interface EQObject : NSObject {
AudioUnit eq;
}
@property AudioUnit eq;
@end
@implementation EQObject
@synthesize eq;
@end
@interface EQHookContainer : NSObject {
NSMutableArray *equalizers;
}
+ (EQHookContainer *)sharedContainer;
- (id)init;
- (void)pushEqualizer:(AudioUnit)eq forPlayer:(AudioPlayer *)audioPlayer;
- (void)popEqualizer:(AudioUnit)eq forPlayer:(AudioPlayer *)audioPlayer;
@end
@implementation EQHookContainer
static EQHookContainer *theContainer = nil;
+ (EQHookContainer *)sharedContainer {
@synchronized(theContainer) {
if(!theContainer) {
theContainer = [[EQHookContainer alloc] init];
}
return theContainer;
}
}
- (id)init {
self = [super init];
if(self) {
equalizers = [[NSMutableArray alloc] init];
}
return self;
}
- (void)pushEqualizer:(AudioUnit)eq forPlayer:(AudioPlayer *)audioPlayer {
@synchronized (equalizers) {
EQObject *_eq = [[EQObject alloc] init];
_eq.eq = eq;
[equalizers addObject:_eq];
if([equalizers count] == 1) {
[audioPlayer beginEqualizer:eq];
} else {
[audioPlayer refreshEqualizer:eq];
}
}
}
- (void)popEqualizer:(AudioUnit)eq forPlayer:(AudioPlayer *)audioPlayer {
@synchronized (equalizers) {
for(EQObject *_eq in equalizers) {
if(_eq.eq == eq) {
[equalizers removeObject:_eq];
break;
}
}
[audioPlayer endEqualizer:eq];
if([equalizers count]) {
EQObject *_eq = [equalizers objectAtIndex:0];
[audioPlayer beginEqualizer:_eq.eq];
}
}
}
@end
@implementation DSPEqualizerNode {
BOOL enableEqualizer;
BOOL equalizerInitialized;
@@ -167,8 +92,8 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
float preamp = [defaults floatForKey:@"eqPreamp"];
equalizerPreamp = pow(10.0, preamp / 20.0);
BufferChain *bufferChain = c;
audioPlayer = [bufferChain controller];
OutputNode *outputNode = c;
audioPlayer = [outputNode controller];
[self addObservers];
}
@@ -177,6 +102,7 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
- (void)dealloc {
DLog(@"Equalizer dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
[super cleanUp];
@@ -293,7 +219,7 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
equalizerInitialized = YES;
[[EQHookContainer sharedContainer] pushEqualizer:_eq forPlayer:[self audioPlayer]];
[[self audioPlayer] beginEqualizer:_eq];
}
return YES;
@@ -302,7 +228,7 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
- (void)fullShutdown {
if(_eq) {
if(equalizerInitialized) {
[[EQHookContainer sharedContainer] popEqualizer:_eq forPlayer:[self audioPlayer]];
[[self audioPlayer] endEqualizer:_eq];
AudioUnitUninitialize(_eq);
equalizerInitialized = NO;
}
@@ -342,7 +268,7 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
- (void)process {
while([self shouldContinue] == YES) {
if(paused) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@@ -351,7 +277,9 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
break;
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
@@ -366,7 +294,6 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
}
}
}
endOfStream = YES;
}
- (AudioChunk *)convert {

DSPFSurroundNode.m

@@ -53,6 +53,7 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
- (void)dealloc {
DLog(@"FreeSurround dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
[super cleanUp];
@@ -138,7 +139,7 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
- (void)process {
while([self shouldContinue] == YES) {
if(paused) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@@ -147,7 +148,9 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
break;
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
@@ -162,7 +165,6 @@ static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
}
}
}
endOfStream = YES;
}
- (AudioChunk *)convert {

DSPHRTFNode.m

@@ -136,6 +136,7 @@ static void unregisterMotionListener(void) {
- (void)dealloc {
DLog(@"HRTF dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
[super cleanUp];
@@ -278,7 +279,7 @@ static void unregisterMotionListener(void) {
- (void)process {
while([self shouldContinue] == YES) {
if(paused) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@@ -287,7 +288,9 @@ static void unregisterMotionListener(void) {
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
break;
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
@@ -302,7 +305,6 @@ static void unregisterMotionListener(void) {
}
}
}
endOfStream = YES;
}
- (AudioChunk *)convert {

DSPRubberbandNode.m

@@ -71,6 +71,7 @@ static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
- (void)dealloc {
DLog(@"Rubber Band dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
[super cleanUp];
@@ -351,9 +352,26 @@ static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
return paused;
}
- (void)setPreviousNode:(id)p {
if(previousNode != p) {
paused = YES;
while(processEntered);
previousNode = p;
paused = NO;
}
}
- (void)setEndOfStream:(BOOL)e {
if(endOfStream && !e) {
[self fullShutdown];
}
[super setEndOfStream:e];
flushed = e;
}
- (void)process {
while([self shouldContinue] == YES) {
if(paused) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@@ -362,7 +380,9 @@ static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if(flushed) {
break;
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
@ -381,7 +401,6 @@ static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
}
}
}
endOfStream = YES;
}
- (AudioChunk *)convert {
@@ -390,7 +409,7 @@ static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
processEntered = YES;
if(stopping || flushed || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
if(stopping || flushed || !previousNode || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
processEntered = NO;
return nil;
}

OutputNode.h

@@ -49,8 +49,6 @@
- (void)close;
- (void)seek:(double)time;
- (double)latency;
- (AudioChunk *)readChunk:(size_t)amount;
- (void)setFormat:(AudioStreamBasicDescription *)f channelConfig:(uint32_t)channelConfig;
@@ -76,7 +74,12 @@
- (void)restartPlaybackAtCurrentPosition;
- (double)latency;
- (double)getVisLatency;
- (double)getTotalLatency;
- (double)getPostVisLatency;
- (id)controller;
- (id)downmix;
@end

OutputNode.m

@@ -11,9 +11,27 @@
#import "BufferChain.h"
#import "OutputCoreAudio.h"
#import "DSPRubberbandNode.h"
#import "DSPFSurroundNode.h"
#import "DSPHRTFNode.h"
#import "DSPEqualizerNode.h"
#import "VisualizationNode.h"
#import "DSPDownmixNode.h"
#import "Logging.h"
@implementation OutputNode
@implementation OutputNode {
BOOL DSPsLaunched;
Node *previousInput;
DSPRubberbandNode *rubberbandNode;
DSPFSurroundNode *fsurroundNode;
DSPHRTFNode *hrtfNode;
DSPEqualizerNode *equalizerNode;
DSPDownmixNode *downmixNode;
VisualizationNode *visualizationNode;
}
- (void)setup {
[self setupWithInterval:NO];
@@ -32,6 +50,31 @@
output = [[OutputCoreAudio alloc] initWithController:self];
[output setup];
if(!DSPsLaunched) {
rubberbandNode = [[DSPRubberbandNode alloc] initWithController:self previous:nil latency:0.1];
if(!rubberbandNode) return;
fsurroundNode = [[DSPFSurroundNode alloc] initWithController:self previous:rubberbandNode latency:0.03];
if(!fsurroundNode) return;
equalizerNode = [[DSPEqualizerNode alloc] initWithController:self previous:fsurroundNode latency:0.03];
if(!equalizerNode) return;
hrtfNode = [[DSPHRTFNode alloc] initWithController:self previous:equalizerNode latency:0.03];
if(!hrtfNode) return;
downmixNode = [[DSPDownmixNode alloc] initWithController:self previous:hrtfNode latency:0.03];
if(!downmixNode) return;
// Approximately double the chunk size for Vis at 44100Hz
visualizationNode = [[VisualizationNode alloc] initWithController:self previous:downmixNode latency:8192.0 / 44100.0];
if(!visualizationNode) return;
[self setPreviousNode:visualizationNode];
DSPsLaunched = YES;
[self launchDSPs];
previousInput = nil;
}
}
- (void)seek:(double)time {
@@ -84,7 +127,13 @@
}
- (BOOL)selectNextBuffer {
return [controller selectNextBuffer];
BOOL ret = [controller selectNextBuffer];
if(!ret) {
Node *finalNode = [[controller bufferChain] finalNode];
[rubberbandNode setPreviousNode:finalNode];
[self reconnectInput];
}
return ret;
}
- (void)endOfInputPlayed {
@@ -104,10 +153,34 @@
return [buffer listDuration];
}
- (NSArray *)DSPs {
if(DSPsLaunched) {
return @[rubberbandNode, fsurroundNode, equalizerNode, hrtfNode, downmixNode, visualizationNode];
} else {
return @[];
}
}
- (void)reconnectInput {
NSArray *DSPs = [self DSPs];
for (Node *node in DSPs) {
[node setEndOfStream:NO];
}
}
- (void)launchDSPs {
NSArray *DSPs = [self DSPs];
for (Node *node in DSPs) {
[node launchThread];
}
}
- (AudioChunk *)readChunk:(size_t)amount {
@autoreleasepool {
Node *finalNode = [[controller bufferChain] finalNode];
[self setPreviousNode:finalNode];
[rubberbandNode setPreviousNode:finalNode];
if(finalNode) {
AudioChunk *ret = [super readChunk:amount];
@@ -125,7 +198,8 @@
- (BOOL)peekFormat:(nonnull AudioStreamBasicDescription *)format channelConfig:(nonnull uint32_t *)config {
@autoreleasepool {
[self setPreviousNode:[[controller bufferChain] finalNode]];
Node *finalNode = [[controller bufferChain] finalNode];
[rubberbandNode setPreviousNode:finalNode];
BOOL ret = [super peekFormat:format channelConfig:config];
if(!ret && [previousNode endOfStream]) {
@@ -170,7 +244,6 @@
BufferChain *bufferChain = [audioPlayer bufferChain];
if(bufferChain) {
ConverterNode *converter = [bufferChain converter];
DSPDownmixNode *downmix = [bufferChain downmix];
AudioStreamBasicDescription outputFormat;
uint32_t outputChannelConfig;
BOOL formatChanged = NO;
@@ -180,11 +253,11 @@
formatChanged = YES;
}
}
if(downmix && output && !formatChanged) {
if(downmixNode && output && !formatChanged) {
outputFormat = [output deviceFormat];
outputChannelConfig = [output deviceChannelConfig];
AudioStreamBasicDescription currentOutputFormat = [downmix nodeFormat];
uint32_t currentOutputChannelConfig = [downmix nodeChannelConfig];
AudioStreamBasicDescription currentOutputFormat = [downmixNode nodeFormat];
uint32_t currentOutputChannelConfig = [downmixNode nodeChannelConfig];
if(memcmp(&currentOutputFormat, &outputFormat, sizeof(currentOutputFormat)) != 0 ||
currentOutputChannelConfig != outputChannelConfig) {
formatChanged = YES;
@@ -195,8 +268,8 @@
if(converter) {
[converter setOutputFormat:format];
}
if(downmix && output) {
[downmix setOutputFormat:[output deviceFormat] withChannelConfig:[output deviceChannelConfig]];
if(downmixNode && output) {
[downmixNode setOutputFormat:[output deviceFormat] withChannelConfig:[output deviceChannelConfig]];
}
if(inputNode) {
AudioStreamBasicDescription inputFormat = [inputNode nodeFormat];
@@ -209,6 +282,20 @@
- (void)close {
[output stop];
output = nil;
if(DSPsLaunched) {
NSArray *DSPs = [self DSPs];
for(Node *node in DSPs) {
[node setShouldContinue:NO];
}
previousNode = nil;
visualizationNode = nil;
downmixNode = nil;
hrtfNode = nil;
fsurroundNode = nil;
rubberbandNode = nil;
previousInput = nil;
DSPsLaunched = NO;
}
}
- (double)volume {
@@ -222,6 +309,10 @@
- (void)setShouldContinue:(BOOL)s {
[super setShouldContinue:s];
NSArray *DSPs = [self DSPs];
for(Node *node in DSPs) {
[node setShouldContinue:s];
}
// if (s == NO)
// [output stop];
}
@@ -243,15 +334,28 @@
}
- (double)latency {
return [output latency];
double latency = 0.0;
NSArray *DSPs = [self DSPs];
for(Node *node in DSPs) {
latency += [node secondsBuffered];
}
return [output latency] + latency;
}
- (double)getVisLatency {
return [output latency] + [visualizationNode secondsBuffered];
}
- (double)getTotalLatency {
return [[controller bufferChain] secondsBuffered] + [output latency];
return [[controller bufferChain] secondsBuffered] + [self latency];
}
- (double)getPostVisLatency {
return [[controller bufferChain] getPostVisLatency] + [output latency];
- (id)controller {
return controller;
}
- (id)downmix {
return downmixNode;
}
@end

VisualizationNode.m

@@ -23,59 +23,6 @@
#import "VisualizationNode.h"
@interface VisualizationCollection : NSObject {
NSMutableArray *collection;
}
+ (VisualizationCollection *)sharedCollection;
- (id)init;
- (BOOL)pushVisualizer:(VisualizationNode *)visualization;
- (void)popVisualizer:(VisualizationNode *)visualization;
@end
@implementation VisualizationCollection
static VisualizationCollection *theCollection = nil;
+ (VisualizationCollection *)sharedCollection {
@synchronized (theCollection) {
if(!theCollection) {
theCollection = [[VisualizationCollection alloc] init];
}
return theCollection;
}
}
- (id)init {
self = [super init];
if(self) {
collection = [[NSMutableArray alloc] init];
}
return self;
}
- (BOOL)pushVisualizer:(VisualizationNode *)visualization {
@synchronized (collection) {
[collection addObject:visualization];
return [collection count] > 1;
}
}
- (void)popVisualizer:(VisualizationNode *)visualization {
@synchronized (collection) {
[collection removeObject:visualization];
if([collection count]) {
VisualizationNode *next = [collection objectAtIndex:0];
[next replayPreroll];
}
}
}
@end
@implementation VisualizationNode {
void *rs;
double lastVisRate;
@@ -83,7 +30,6 @@ static VisualizationCollection *theCollection = nil;
BOOL processEntered;
BOOL stopping;
BOOL paused;
BOOL replay;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription visFormat; // Mono format for vis
@@ -100,10 +46,6 @@ static VisualizationCollection *theCollection = nil;
float visAudio[512];
float resamplerInput[8192];
float visTemp[8192];
BOOL registered;
BOOL prerolling;
NSMutableData *prerollBuffer;
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
@@ -129,11 +71,6 @@ static VisualizationCollection *theCollection = nil;
visController = [VisualizationController sharedController];
registered = NO;
prerolling = NO;
replay = NO;
prerollBuffer = [[NSMutableData alloc] init];
inWrite = NO;
inPeek = NO;
inRead = NO;
@@ -147,18 +84,11 @@ static VisualizationCollection *theCollection = nil;
- (void)dealloc {
DLog(@"Visualization node dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self pop];
[super cleanUp];
}
- (void)pop {
if(registered) {
[[VisualizationCollection sharedCollection] popVisualizer:self];
registered = NO;
}
}
// Visualization thread should be fairly high priority, too
- (void)threadEntry:(id _Nullable)arg {
@autoreleasepool {
@@ -228,25 +158,18 @@ static VisualizationCollection *theCollection = nil;
- (void)process {
while([self shouldContinue] == YES) {
if(paused) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
if(replay) {
size_t length = [prerollBuffer length];
if(length) {
[visController postVisPCM:(const float *)[prerollBuffer bytes] amount:(int)(length / sizeof(float))];
[prerollBuffer replaceBytesInRange:NSMakeRange(0, length) withBytes:NULL length:0];
}
replay = NO;
prerolling = NO;
}
AudioChunk *chunk = nil;
chunk = [self readAndMergeChunksAsFloat32:512];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
break;
usleep(500);
endOfStream = YES;
continue;
}
} else {
[self processVis:[chunk copy]];
@@ -259,24 +182,7 @@ static VisualizationCollection *theCollection = nil;
}
- (void)postVisPCM:(const float *)visTemp amount:(size_t)samples {
if(!registered) {
prerolling = [[VisualizationCollection sharedCollection] pushVisualizer:self];
registered = YES;
}
if(prerolling) {
[prerollBuffer appendBytes:visTemp length:(samples * sizeof(float))];
} else {
[visController postVisPCM:visTemp amount:samples];
}
}
- (void)replayPreroll {
paused = YES;
while(processEntered) {
usleep(500);
}
replay = YES;
paused = NO;
[visController postVisPCM:visTemp amount:(int)samples];
}
- (void)processVis:(AudioChunk *)chunk {

OutputCoreAudio.h

@@ -24,6 +24,7 @@ using std::atomic_long;
#import <simd/simd.h>
#import <CogAudio/ChunkList.h>
#import <CogAudio/HeadphoneFilter.h>
//#define OUTPUT_LOG
@@ -87,17 +88,11 @@ using std::atomic_long;
size_t _bufferSize;
int inputRemain;
AudioChunk *chunkRemain;
BOOL resetStreamFormat;
BOOL shouldPlayOutBuffer;
float *samplePtr;
float tempBuffer[512 * 32];
float inputBuffer[4096 * 32]; // 4096 samples times maximum supported channel count
ChunkList *outputBuffer;
#ifdef OUTPUT_LOG
FILE *_logFile;

OutputCoreAudio.m

@@ -29,22 +29,18 @@ static NSString *CogPlaybackDidBeginNotificiation = @"CogPlaybackDidBeginNotific
static void *kOutputCoreAudioContext = &kOutputCoreAudioContext;
- (int)renderInput:(int)amountToRead toBuffer:(float *)buffer {
- (AudioChunk *)renderInput:(int)amountToRead {
int amountRead = 0;
if(stopping == YES || [outputController shouldContinue] == NO) {
// Chain is dead, fill out the serial number pointer forever with silence
stopping = YES;
return 0;
return [[AudioChunk alloc] init];
}
AudioStreamBasicDescription format;
uint32_t config;
if([outputController peekFormat:&format channelConfig:&config]) {
AudioStreamBasicDescription origFormat;
uint32_t origConfig = config;
origFormat = format;
if(!streamFormatStarted || config != realStreamChannelConfig || memcmp(&realStreamFormat, &format, sizeof(format)) != 0) {
realStreamFormat = format;
realStreamChannelConfig = config;
@@ -54,58 +50,10 @@ static void *kOutputCoreAudioContext = &kOutputCoreAudioContext;
}
if(streamFormatChanged) {
return 0;
return [[AudioChunk alloc] init];
}
AudioChunk *chunk;
if(!chunkRemain) {
chunk = [outputController readChunk:amountToRead];
streamTimestamp = [chunk streamTimestamp];
} else {
chunk = chunkRemain;
chunkRemain = nil;
}
int frameCount = (int)[chunk frameCount];
format = [chunk format];
config = [chunk channelConfig];
double chunkDuration = 0;
if(frameCount) {
chunkDuration = [chunk duration];
NSData *samples = [chunk removeSamples:frameCount];
#ifdef _DEBUG
[BadSampleCleaner cleanSamples:(float *)[samples bytes]
amount:frameCount * format.mChannelsPerFrame
location:@"pre downmix"];
#endif
const float *outputPtr = (const float *)[samples bytes];
cblas_scopy((int)(frameCount * realStreamFormat.mChannelsPerFrame), outputPtr, 1, &buffer[0], 1);
amountRead = frameCount;
} else {
return 0;
}
if(stopping) return 0;
float volumeScale = 1.0;
double sustained;
sustained = secondsHdcdSustained;
if(sustained > 0) {
if(sustained < amountRead) {
secondsHdcdSustained = 0;
} else {
secondsHdcdSustained -= chunkDuration;
volumeScale = 0.5;
}
}
scale_by_volume(&buffer[0], amountRead * realStreamFormat.mChannelsPerFrame, volumeScale * volume);
return amountRead;
return [outputController readChunk:amountToRead];
}
- (id)initWithController:(OutputNode *)c {
@@ -193,6 +141,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
running = YES;
started = NO;
shouldPlayOutBuffer = NO;
BOOL rendered = NO;
while(!stopping) {
@autoreleasepool {
@@ -206,13 +155,20 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
[outputLock lock];
started = NO;
restarted = NO;
inputRemain = 0;
[outputBuffer reset];
[outputLock unlock];
}
if(stopping)
break;
if(![outputBuffer isFull]) {
[self renderAndConvert];
rendered = YES;
} else {
rendered = NO;
}
if(!started && !paused) {
[self resume];
}
@@ -222,7 +178,9 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
}
}
usleep(5000);
if(!rendered) {
usleep(5000);
}
}
stopped = YES;
@@ -495,6 +453,10 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
return NO;
[outputController setFormat:&deviceFormat channelConfig:deviceChannelConfig];
[outputLock lock];
[outputBuffer reset];
[outputLock unlock];
}
return YES;
@@ -556,55 +518,39 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
}
}
- (int)renderAndConvert {
int inputRendered = 0;
int bytesRendered = inputRendered * realStreamFormat.mBytesPerPacket;
- (void)renderAndConvert {
if(resetStreamFormat) {
[self updateStreamFormat];
if([self processEndOfStream]) {
return 0;
return;
}
}
while(inputRendered < 4096) {
int maxToRender = MIN(4096 - inputRendered, 512);
int rendered = [self renderInput:maxToRender toBuffer:&tempBuffer[0]];
if(rendered > 0) {
memcpy(((uint8_t*)&inputBuffer[0]) + bytesRendered, &tempBuffer[0], rendered * realStreamFormat.mBytesPerPacket);
}
inputRendered += rendered;
bytesRendered += rendered * realStreamFormat.mBytesPerPacket;
if(streamFormatChanged) {
streamFormatChanged = NO;
if(inputRendered) {
resetStreamFormat = YES;
break;
} else {
[self updateStreamFormat];
}
}
if([self processEndOfStream]) break;
AudioChunk *chunk = [self renderInput:512];
size_t frameCount = 0;
if(chunk) {
frameCount = [chunk frameCount];
[outputLock lock];
[outputBuffer addChunk:chunk];
[outputLock unlock];
}
int samplesRendered = inputRendered;
samplePtr = &inputBuffer[0];
#ifdef OUTPUT_LOG
if(samplesRendered) {
size_t dataByteSize = samplesRendered * sizeof(float) * deviceFormat.mChannelsPerFrame;
fwrite(samplePtr, 1, dataByteSize, _logFile);
if(streamFormatChanged) {
streamFormatChanged = NO;
if(frameCount) {
resetStreamFormat = YES;
} else {
[self updateStreamFormat];
}
}
#endif
return samplesRendered;
[self processEndOfStream];
}
- (void)audioOutputBlock {
__block AudioStreamBasicDescription *format = &deviceFormat;
__block void *refCon = (__bridge void *)self;
__block NSLock *refLock = self->outputLock;
#ifdef OUTPUT_LOG
__block FILE *logFile = _logFile;
@@ -623,24 +569,24 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
@autoreleasepool {
while(renderedSamples < frameCount) {
int inputRemain = _self->inputRemain;
while(!inputRemain || !_self->samplePtr) {
inputRemain = [_self renderAndConvert];
if(_self->stopping || !_self->samplePtr) {
inputData->mBuffers[0].mDataByteSize = frameCount * format->mBytesPerPacket;
inputData->mBuffers[0].mNumberChannels = channels;
bzero(inputData->mBuffers[0].mData, inputData->mBuffers[0].mDataByteSize);
return 0;
}
}
if(inputRemain && _self->samplePtr) {
int inputTodo = MIN(inputRemain, frameCount - renderedSamples);
cblas_scopy(inputTodo * channels, _self->samplePtr, 1, ((float *)inputData->mBuffers[0].mData) + renderedSamples * channels, 1);
_self->samplePtr += inputTodo * channels;
inputRemain -= inputTodo;
[refLock lock];
AudioChunk *chunk = [_self->outputBuffer removeSamples:frameCount - renderedSamples];
[refLock unlock];
_self->streamTimestamp = [chunk streamTimestamp];
size_t _frameCount = [chunk frameCount];
if(_frameCount) {
NSData *sampleData = [chunk removeSamples:_frameCount];
float *samplePtr = (float *)[sampleData bytes];
size_t inputTodo = MIN(_frameCount, frameCount - renderedSamples);
cblas_scopy((int)(inputTodo * channels), samplePtr, 1, ((float *)inputData->mBuffers[0].mData) + renderedSamples * channels, 1);
renderedSamples += inputTodo;
}
_self->inputRemain = inputRemain;
if(_self->stopping) {
break;
}
}
inputData->mBuffers[0].mDataByteSize = renderedSamples * format->mBytesPerPacket;
@@ -681,10 +627,6 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
outputDeviceID = -1;
restarted = NO;
inputRemain = 0;
chunkRemain = nil;
AudioComponentDescription desc;
NSError *err;
@@ -723,16 +665,22 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.outputDevice" options:0 context:kOutputCoreAudioContext];
observersapplied = YES;
outputBuffer = [[ChunkList alloc] initWithMaximumDuration:0.5];
if(!outputBuffer) {
return NO;
}
return (err == nil);
}
}
- (void)updateLatency:(double)secondsPlayed {
double visLatency = [outputController getVisLatency];
if(secondsPlayed > 0) {
[outputController setAmountPlayed:streamTimestamp];
}
[visController postLatency:[outputController getPostVisLatency]];
[visController postLatency:visLatency];
}
- (double)volume {
@@ -744,7 +692,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
}
- (double)latency {
return 0.0;
return [outputBuffer listDuration];
}
- (void)start {
@@ -793,7 +741,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
}
if(_au) {
if(shouldPlayOutBuffer && !commandStop) {
int compareVal = 0;
double compareVal = 0;
double secondsLatency = [outputController getTotalLatency];
int compareMax = (((1000000 / 5000) * secondsLatency) + (10000 / 5000)); // latency plus 10ms, divide by sleep intervals
do {