Visualization: Reworked buffering system

Visualization now buffers in the audio output pipeline, and uses a
container system to keep multiple buffer chains from emitting
visualization data on top of each other. This should stabilize the
display output significantly, while introducing minimal lag before
DSP configuration changes take effect.

Signed-off-by: Christopher Snowhill <kode54@gmail.com>
Christopher Snowhill 2025-02-13 01:12:53 -08:00
parent 9701bd5421
commit 81b7dcfc0c
12 changed files with 444 additions and 147 deletions
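In outline, the container system mentioned in the commit message works like this. The snippet below is a condensed sketch drawn from the VisualizationCollection and VisualizationNode code added in this commit; locking, cleanup, and the resampling path are omitted:

@interface VisualizationCollection : NSObject
+ (VisualizationCollection *)sharedCollection;
// Returns YES if another buffer chain is already registered; the caller should
// buffer (preroll) its visualization output instead of emitting it immediately.
- (BOOL)pushVisualizer:(VisualizationNode *)visualization;
// Removes a chain; the next registered chain replays its preroll and becomes the live emitter.
- (void)popVisualizer:(VisualizationNode *)visualization;
@end

// Inside VisualizationNode, PCM is either forwarded to the shared
// VisualizationController or held until this chain becomes the live one:
- (void)postVisPCM:(const float *)visTemp amount:(size_t)samples {
	if(!registered) {
		prerolling = [[VisualizationCollection sharedCollection] pushVisualizer:self];
		registered = YES;
	}
	if(prerolling) {
		[prerollBuffer appendBytes:visTemp length:(samples * sizeof(float))];
	} else {
		[visController postVisPCM:visTemp amount:samples];
	}
}

The VisualizationNode itself sits at the tail of each BufferChain (after the HRTF node) with roughly five frames of buffering, and OutputNode's new getPostVisLatency adds the buffering that sits after the visualization tap plus the output device latency, so the latency reported to the visualization controller tracks what is actually audible.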

View file

@@ -14,6 +14,7 @@
#import "DSPFSurroundNode.h"
#import "DSPHRTFNode.h"
#import "DSPEqualizerNode.h"
#import "VisualizationNode.h"
#import "InputNode.h"
@interface BufferChain : NSObject {
@@ -23,6 +24,7 @@
DSPFSurroundNode *fsurroundNode;
DSPHRTFNode *hrtfNode;
DSPEqualizerNode *equalizerNode;
VisualizationNode *visualizationNode;
NSURL *streamURL;
id userInfo;
@@ -87,6 +89,8 @@
- (DSPEqualizerNode *)equalizer;
- (VisualizationNode *)visualization;
- (double)secondsBuffered;
- (void)sustainHDCD;
@@ -97,4 +101,6 @@
- (void)setError:(BOOL)status;
- (double)getPostVisLatency;
@end

View file

@@ -30,6 +30,8 @@
fsurroundNode = nil;
equalizerNode = nil;
hrtfNode = nil;
visualizationNode = nil;
}
return self;
@@ -46,7 +48,10 @@
equalizerNode = [[DSPEqualizerNode alloc] initWithController:self previous:fsurroundNode latency:0.03];
hrtfNode = [[DSPHRTFNode alloc] initWithController:self previous:equalizerNode latency:0.03];
finalNode = hrtfNode;
// Approximately five frames
visualizationNode = [[VisualizationNode alloc] initWithController:self previous:hrtfNode latency:5.0 / 60.0];
finalNode = visualizationNode;
}
- (BOOL)open:(NSURL *)url withOutputFormat:(AudioStreamBasicDescription)outputFormat withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi {
@@ -159,6 +164,7 @@
[fsurroundNode launchThread];
[equalizerNode launchThread];
[hrtfNode launchThread];
[visualizationNode launchThread];
}
- (void)setUserInfo:(id)i {
@@ -185,6 +191,9 @@
if(![inputNode threadExited])
[[inputNode exitAtTheEndOfTheStream] wait]; // wait for decoder to be closed (see InputNode's -(void)process )
// Must do this here, or else the VisualizationContainer will carry a reference forever
[visualizationNode pop];
DLog(@"Bufferchain dealloc");
}
@@ -230,6 +239,7 @@
[fsurroundNode setShouldContinue:s];
[equalizerNode setShouldContinue:s];
[hrtfNode setShouldContinue:s];
[visualizationNode setShouldContinue:s];
}
- (BOOL)isRunning {
@@ -264,6 +274,10 @@
return equalizerNode;
}
- (VisualizationNode *)visualization {
return visualizationNode;
}
- (AudioStreamBasicDescription)inputFormat {
return [inputNode nodeFormat];
}
@@ -300,4 +314,17 @@
[controller setError:status];
}
- (double)getPostVisLatency {
double latency = 0.0;
Node *node = finalNode;
while(node) {
latency += [node secondsBuffered];
if(node == visualizationNode) {
break;
}
node = [node previousNode];
}
return latency;
}
@end

View file

@@ -17,6 +17,8 @@
- (void)threadEntry:(id _Nullable)arg;
- (double)secondsBuffered;
@end
#endif /* DSPNode_h */

View file

@@ -47,4 +47,8 @@
}
}
- (double)secondsBuffered {
return [buffer listDuration];
}
@end

View file

@@ -161,6 +161,7 @@ static void *kInputNodeContext = &kInputNodeContext;
if(shouldSeek == YES) {
BufferChain *bufferChain = [[controller controller] bufferChain];
ConverterNode *converter = [bufferChain converter];
VisualizationNode *visualization = [bufferChain visualization];
DSPRubberbandNode *rubberband = [bufferChain rubberband];
DSPFSurroundNode *fsurround = [bufferChain fsurround];
DSPEqualizerNode *equalizer = [bufferChain equalizer];
@@ -171,6 +172,7 @@ static void *kInputNodeContext = &kInputNodeContext;
[self resetBuffer];
[converter resetBuffer];
[converter inputFormatDidChange:[bufferChain inputFormat] inputConfig:[bufferChain inputConfig]];
[visualization resetBuffer];
[rubberband resetBuffer];
[fsurround resetBuffer];
[equalizer resetBuffer];

View file

@@ -71,4 +71,7 @@
- (void)restartPlaybackAtCurrentPosition;
- (double)getTotalLatency;
- (double)getPostVisLatency;
@end

View file

@@ -200,4 +200,12 @@
return [output latency];
}
- (double)getTotalLatency {
return [[controller bufferChain] secondsBuffered] + [output latency];
}
- (double)getPostVisLatency {
return [[controller bufferChain] getPostVisLatency] + [output latency];
}
@end

View file

@@ -0,0 +1,34 @@
//
// VisualizationNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/12/25.
//
#ifndef VisualizationNode_h
#define VisualizationNode_h
#import "Node.h"
@interface VisualizationNode : Node {
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
- (void)threadEntry:(id _Nullable)arg;
- (BOOL)setup;
- (void)cleanUp;
- (void)resetBuffer;
- (void)pop;
- (void)replayPreroll;
- (void)process;
- (double)secondsBuffered;
@end
#endif /* VisualizationNode_h */

View file

@@ -0,0 +1,344 @@
//
// VisualizationNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/12/25.
//
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <Accelerate/Accelerate.h>
#import "Downmix.h"
#import <CogAudio/CogAudio-Swift.h>
#import "BufferChain.h"
#import "Logging.h"
#import "rsstate.h"
#import "VisualizationNode.h"
@interface VisualizationCollection : NSObject {
NSMutableArray *collection;
}
+ (VisualizationCollection *)sharedCollection;
- (id)init;
- (BOOL)pushVisualizer:(VisualizationNode *)visualization;
- (void)popVisualizer:(VisualizationNode *)visualization;
@end
@implementation VisualizationCollection
static VisualizationCollection *theCollection = nil;
+ (VisualizationCollection *)sharedCollection {
@synchronized (theCollection) {
if(!theCollection) {
theCollection = [[VisualizationCollection alloc] init];
}
return theCollection;
}
}
- (id)init {
self = [super init];
if(self) {
collection = [[NSMutableArray alloc] init];
}
return self;
}
- (BOOL)pushVisualizer:(VisualizationNode *)visualization {
@synchronized (collection) {
[collection addObject:visualization];
return [collection count] > 1;
}
}
- (void)popVisualizer:(VisualizationNode *)visualization {
@synchronized (collection) {
[collection removeObject:visualization];
if([collection count]) {
VisualizationNode *next = [collection objectAtIndex:0];
[next replayPreroll];
}
}
}
@end
@implementation VisualizationNode {
void *rs;
double lastVisRate;
BOOL processEntered;
BOOL stopping;
BOOL paused;
BOOL replay;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription visFormat; // Mono format for vis
uint32_t inputChannelConfig;
uint32_t visChannelConfig;
size_t resamplerRemain;
DownmixProcessor *downmixer;
VisualizationController *visController;
float visAudio[512];
float resamplerInput[8192];
float visTemp[8192];
BOOL registered;
BOOL prerolling;
NSMutableData *prerollBuffer;
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super init];
if(self) {
buffer = [[ChunkList alloc] initWithMaximumDuration:latency];
semaphore = [[Semaphore alloc] init];
accessLock = [[NSLock alloc] init];
initialBufferFilled = NO;
controller = c;
endOfStream = NO;
shouldContinue = YES;
nodeChannelConfig = 0;
nodeLossless = NO;
durationPrebuffer = latency * 0.25;
visController = [VisualizationController sharedController];
registered = NO;
prerolling = NO;
replay = NO;
prerollBuffer = [[NSMutableData alloc] init];
[self setPreviousNode:p];
}
return self;
}
- (void)dealloc {
DLog(@"Visualization node dealloc");
[self cleanUp];
[self pop];
}
- (void)pop {
if(registered) {
[[VisualizationCollection sharedCollection] popVisualizer:self];
registered = NO;
}
}
// Visualization thread should be fairly high priority, too
- (void)threadEntry:(id _Nullable)arg {
@autoreleasepool {
NSThread *currentThread = [NSThread currentThread];
[currentThread setThreadPriority:0.75];
[currentThread setQualityOfService:NSQualityOfServiceUserInitiated];
[self process];
}
}
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[super resetBuffer];
[self fullShutdown];
paused = NO;
}
- (double)secondsBuffered {
return [buffer listDuration];
}
- (BOOL)setup {
if(fabs(inputFormat.mSampleRate - 44100.0) > 1e-6) {
rs = rsstate_new(1, inputFormat.mSampleRate, 44100.0);
if(!rs) {
return NO;
}
resamplerRemain = 0;
}
visFormat = inputFormat;
visFormat.mChannelsPerFrame = 1;
visFormat.mBytesPerFrame = sizeof(float);
visFormat.mBytesPerPacket = visFormat.mBytesPerFrame * visFormat.mFramesPerPacket;
visChannelConfig = AudioChannelFrontCenter;
downmixer = [[DownmixProcessor alloc] initWithInputFormat:inputFormat inputConfig:inputChannelConfig andOutputFormat:visFormat outputConfig:visChannelConfig];
if(!downmixer) {
return NO;
}
return YES;
}
- (void)cleanUp {
stopping = YES;
while(processEntered) {
usleep(500);
}
[self fullShutdown];
}
- (void)fullShutdown {
if(rs) {
rsstate_delete(rs);
rs = NULL;
}
downmixer = nil;
}
- (void)process {
while([self shouldContinue] == YES) {
if(paused) {
usleep(500);
continue;
}
@autoreleasepool {
if(replay) {
size_t length = [prerollBuffer length];
if(length) {
[visController postVisPCM:(const float *)[prerollBuffer bytes] amount:(length / sizeof(float))];
[prerollBuffer replaceBytesInRange:NSMakeRange(0, length) withBytes:NULL length:0];
}
replay = NO;
prerolling = NO;
}
AudioChunk *chunk = nil;
chunk = [self readAndMergeChunksAsFloat32:512];
if(!chunk || ![chunk duration]) {
if([self endOfStream] == YES) {
break;
}
} else {
[self processVis:[chunk copy]];
[self writeChunk:chunk];
chunk = nil;
}
}
}
}
- (void)postVisPCM:(const float *)visTemp amount:(size_t)samples {
if(!registered) {
prerolling = [[VisualizationCollection sharedCollection] pushVisualizer:self];
registered = YES;
}
if(prerolling) {
[prerollBuffer appendBytes:visTemp length:(samples * sizeof(float))];
} else {
[visController postVisPCM:visTemp amount:samples];
}
}
- (void)replayPreroll {
paused = YES;
while(processEntered) {
usleep(500);
}
replay = YES;
paused = NO;
}
- (void)processVis:(AudioChunk *)chunk {
processEntered = YES;
if(paused) {
processEntered = NO;
return;
}
AudioStreamBasicDescription format = [chunk format];
uint32_t channelConfig = [chunk channelConfig];
[visController postSampleRate:44100.0];
if(!rs || !downmixer ||
memcmp(&format, &inputFormat, sizeof(format)) != 0 ||
channelConfig != inputChannelConfig) {
if(rs) {
while(!stopping) {
int samplesFlushed;
samplesFlushed = (int)rsstate_flush(rs, &visTemp[0], 8192);
if(samplesFlushed > 1) {
[self postVisPCM:visTemp amount:samplesFlushed];
} else {
break;
}
}
}
[self fullShutdown];
inputFormat = format;
inputChannelConfig = channelConfig;
if(![self setup]) {
processEntered = NO;
return;
}
}
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
[downmixer process:[sampleData bytes] frameCount:frameCount output:&visAudio[0]];
if(rs) {
int samplesProcessed;
size_t totalDone = 0;
size_t inDone = 0;
size_t visFrameCount = frameCount;
do {
if(stopping) {
break;
}
int visTodo = (int)MIN(visFrameCount, resamplerRemain + visFrameCount - 8192);
if(visTodo) {
cblas_scopy(visTodo, &visAudio[0], 1, &resamplerInput[resamplerRemain], 1);
}
visTodo += resamplerRemain;
resamplerRemain = 0;
samplesProcessed = (int)rsstate_resample(rs, &resamplerInput[0], visTodo, &inDone, &visTemp[0], 8192);
resamplerRemain = (int)(visTodo - inDone);
if(resamplerRemain && inDone) {
memmove(&resamplerInput[0], &resamplerInput[inDone], resamplerRemain * sizeof(float));
}
if(samplesProcessed) {
[self postVisPCM:&visTemp[0] amount:samplesProcessed];
}
totalDone += inDone;
visFrameCount -= inDone;
} while(samplesProcessed && visFrameCount);
} else {
[self postVisPCM:&visAudio[0] amount:frameCount];
}
processEntered = NO;
}
@end

View file

@@ -90,6 +90,8 @@
839E56E82879450300DFB5F4 /* IHrtfData.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E42879450300DFB5F4 /* IHrtfData.h */; };
839E56EA28794F6300DFB5F4 /* HrtfTypes.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E928794F6300DFB5F4 /* HrtfTypes.h */; };
839E56F7287974A100DFB5F4 /* SandboxBroker.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56F6287974A100DFB5F4 /* SandboxBroker.h */; };
839E899E2D5DB9D500A13526 /* VisualizationNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E899D2D5DB9D500A13526 /* VisualizationNode.h */; };
839E89A02D5DBA1700A13526 /* VisualizationNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 839E899F2D5DBA1700A13526 /* VisualizationNode.m */; };
83A3496A2D5C3F430096D530 /* DSPRubberbandNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83A349682D5C3F430096D530 /* DSPRubberbandNode.m */; };
83A3496B2D5C3F430096D530 /* DSPRubberbandNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A349672D5C3F430096D530 /* DSPRubberbandNode.h */; };
83A3496D2D5C40490096D530 /* DSPFSurroundNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A3496C2D5C40490096D530 /* DSPFSurroundNode.h */; };
@@ -214,6 +216,8 @@
839E56E42879450300DFB5F4 /* IHrtfData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = IHrtfData.h; sourceTree = "<group>"; };
839E56E928794F6300DFB5F4 /* HrtfTypes.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HrtfTypes.h; sourceTree = "<group>"; };
839E56F6287974A100DFB5F4 /* SandboxBroker.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = SandboxBroker.h; path = ../Utils/SandboxBroker.h; sourceTree = "<group>"; };
839E899D2D5DB9D500A13526 /* VisualizationNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisualizationNode.h; sourceTree = "<group>"; };
839E899F2D5DBA1700A13526 /* VisualizationNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisualizationNode.m; sourceTree = "<group>"; };
83A349672D5C3F430096D530 /* DSPRubberbandNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPRubberbandNode.h; sourceTree = "<group>"; };
83A349682D5C3F430096D530 /* DSPRubberbandNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPRubberbandNode.m; sourceTree = "<group>"; };
83A3496C2D5C40490096D530 /* DSPFSurroundNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPFSurroundNode.h; sourceTree = "<group>"; };
@@ -384,6 +388,8 @@
17D21C7F0B8BE4BA00D1EBDE /* OutputNode.m */,
83FFED502D5B08BC0044CCAF /* DSPNode.h */,
83FFED522D5B09320044CCAF /* DSPNode.m */,
839E899D2D5DB9D500A13526 /* VisualizationNode.h */,
839E899F2D5DBA1700A13526 /* VisualizationNode.m */,
);
path = Chain;
sourceTree = "<group>";
@@ -588,6 +594,7 @@
834FD4EB27AF8F380063BC83 /* AudioChunk.h in Headers */,
83A3496B2D5C3F430096D530 /* DSPRubberbandNode.h in Headers */,
17A2D3C50B8D1D37000778C4 /* AudioDecoder.h in Headers */,
839E899E2D5DB9D500A13526 /* VisualizationNode.h in Headers */,
8347C7412796C58800FA8A7D /* NSFileHandle+CreateFile.h in Headers */,
83FFED512D5B08BC0044CCAF /* DSPNode.h in Headers */,
17C940230B900909008627D6 /* AudioMetadataReader.h in Headers */,
@@ -697,6 +704,7 @@
834FD4ED27AF91220063BC83 /* AudioChunk.m in Sources */,
17D21CF40B8BE5EF00D1EBDE /* CogSemaphore.m in Sources */,
839B83FA286D91ED00F529EE /* VisualizationController.swift in Sources */,
839E89A02D5DBA1700A13526 /* VisualizationNode.m in Sources */,
8347C7422796C58800FA8A7D /* NSFileHandle+CreateFile.m in Sources */,
83A3496F2D5C405E0096D530 /* DSPFSurroundNode.m in Sources */,
17D21DC80B8BE79700D1EBDE /* CoreAudioUtils.m in Sources */,

View file

@@ -47,16 +47,10 @@ using std::atomic_long;
NSLock *outputLock;
double secondsLatency;
double visPushed;
double streamTimestamp;
double lastClippedSampleRate;
void *rsvis;
double lastVisRate;
BOOL stopInvoked;
BOOL stopCompleted;
BOOL running;
@@ -91,8 +85,6 @@ using std::atomic_long;
AudioStreamBasicDescription realStreamFormat; // stream format pre-hrtf
AudioStreamBasicDescription streamFormat; // stream format last seen in render callback
AudioStreamBasicDescription visFormat; // Mono format for vis
uint32_t deviceChannelConfig;
uint32_t realStreamChannelConfig;
uint32_t streamChannelConfig;
@@ -102,7 +94,6 @@ using std::atomic_long;
size_t _bufferSize;
DownmixProcessor *downmixer;
DownmixProcessor *downmixerForVis;
VisualizationController *visController;
@@ -110,8 +101,6 @@ using std::atomic_long;
AudioChunk *chunkRemain;
int visResamplerRemain;
BOOL resetStreamFormat;
BOOL shouldPlayOutBuffer;
@@ -121,10 +110,6 @@ using std::atomic_long;
float inputBuffer[4096 * 32]; // 4096 samples times maximum supported channel count
float downmixBuffer[4096 * 8];
float visAudio[4096];
float visResamplerInput[8192];
float visTemp[8192];
#ifdef OUTPUT_LOG
FILE *_logFile;
#endif

View file

@@ -17,8 +17,6 @@
#import <Accelerate/Accelerate.h>
#import "rsstate.h"
extern void scale_by_volume(float *buffer, size_t count, float volume);
static NSString *CogPlaybackDidBeginNotificiation = @"CogPlaybackDidBeginNotificiation";
@@ -58,13 +56,6 @@ static void *kOutputCoreAudioContext = &kOutputCoreAudioContext;
realStreamChannelConfig = config;
streamFormatStarted = YES;
streamFormatChanged = YES;
visFormat = format;
visFormat.mChannelsPerFrame = 1;
visFormat.mBytesPerFrame = visFormat.mChannelsPerFrame * (visFormat.mBitsPerChannel / 8);
visFormat.mBytesPerPacket = visFormat.mBytesPerFrame * visFormat.mFramesPerPacket;
downmixerForVis = [[DownmixProcessor alloc] initWithInputFormat:origFormat inputConfig:origConfig andOutputFormat:visFormat outputConfig:AudioConfigMono];
}
}
@@ -98,86 +89,6 @@ static void *kOutputCoreAudioContext = &kOutputCoreAudioContext;
location:@"pre downmix"];
#endif
const float *outputPtr = (const float *)[samples bytes];
[downmixerForVis process:outputPtr
frameCount:frameCount
output:&visAudio[0]];
[visController postSampleRate:44100.0];
[outputLock lock];
if(fabs(realStreamFormat.mSampleRate - 44100.0) > 1e-5) {
if(fabs(realStreamFormat.mSampleRate - lastVisRate) > 1e-5) {
if(rsvis) {
for(;;) {
if(stopping) {
break;
}
int samplesFlushed;
samplesFlushed = (int)rsstate_flush(rsvis, &visTemp[0], 8192);
if(samplesFlushed > 1) {
[visController postVisPCM:visTemp amount:samplesFlushed];
visPushed += (double)samplesFlushed / 44100.0;
} else {
break;
}
}
rsstate_delete(rsvis);
rsvis = NULL;
}
lastVisRate = realStreamFormat.mSampleRate;
rsvis = rsstate_new(1, lastVisRate, 44100.0);
}
if(rsvis) {
int samplesProcessed;
size_t totalDone = 0;
size_t inDone = 0;
size_t visFrameCount = frameCount;
do {
if(stopping) {
break;
}
int visTodo = (int)MIN(visFrameCount, visResamplerRemain + visFrameCount - 8192);
if(visTodo) {
cblas_scopy(visTodo, &visAudio[0], 1, &visResamplerInput[visResamplerRemain], 1);
}
visTodo += visResamplerRemain;
visResamplerRemain = 0;
samplesProcessed = (int)rsstate_resample(rsvis, &visResamplerInput[0], visTodo, &inDone, &visTemp[0], 8192);
visResamplerRemain = (int)(visTodo - inDone);
if(visResamplerRemain && inDone) {
memmove(&visResamplerInput[0], &visResamplerInput[inDone], visResamplerRemain * sizeof(float));
}
if(samplesProcessed) {
[visController postVisPCM:&visTemp[0] amount:samplesProcessed];
visPushed += (double)samplesProcessed / 44100.0;
}
totalDone += inDone;
visFrameCount -= inDone;
} while(samplesProcessed && visFrameCount);
}
} else if(rsvis) {
for(;;) {
if(stopping) {
break;
}
int samplesFlushed;
samplesFlushed = (int)rsstate_flush(rsvis, &visTemp[0], 8192);
if(samplesFlushed > 1) {
[visController postVisPCM:visTemp amount:samplesFlushed];
visPushed += (double)samplesFlushed / 44100.0;
} else {
break;
}
}
rsstate_delete(rsvis);
rsvis = NULL;
[visController postVisPCM:&visAudio[0] amount:frameCount];
visPushed += (double)frameCount / 44100.0;
} else if(!stopping) {
[visController postVisPCM:&visAudio[0] amount:frameCount];
visPushed += (double)frameCount / 44100.0;
}
[outputLock unlock];
cblas_scopy((int)(frameCount * realStreamFormat.mChannelsPerFrame), outputPtr, 1, &buffer[0], 1);
amountRead = frameCount;
@@ -272,7 +183,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
}
- (BOOL)processEndOfStream {
if(stopping || ([outputController endOfStream] == YES && [self signalEndOfStream:secondsLatency])) {
if(stopping || ([outputController endOfStream] == YES && [self signalEndOfStream:[outputController getTotalLatency]])) {
stopping = YES;
return YES;
}
@@ -289,7 +200,6 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
running = YES;
started = NO;
shouldPlayOutBuffer = NO;
secondsLatency = 1.0;
while(!stopping) {
@autoreleasepool {
@@ -301,16 +211,9 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
if([outputController shouldReset]) {
[outputController setShouldReset:NO];
[outputLock lock];
secondsLatency = 0.0;
visPushed = 0.0;
started = NO;
restarted = NO;
if(rsvis) {
rsstate_delete(rsvis);
rsvis = NULL;
}
lastClippedSampleRate = 0.0;
lastVisRate = 0.0;
[outputLock unlock];
}
@@ -556,11 +459,6 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
deviceFormat.mBytesPerFrame = deviceFormat.mChannelsPerFrame * (deviceFormat.mBitsPerChannel / 8);
deviceFormat.mBytesPerPacket = deviceFormat.mBytesPerFrame * deviceFormat.mFramesPerPacket;
visFormat = deviceFormat;
visFormat.mChannelsPerFrame = 1;
visFormat.mBytesPerFrame = visFormat.mChannelsPerFrame * (visFormat.mBitsPerChannel / 8);
visFormat.mBytesPerPacket = visFormat.mBytesPerFrame * visFormat.mFramesPerPacket;
/* Set the channel layout for the audio queue */
AudioChannelLayoutTag tag = 0;
switch(deviceFormat.mChannelsPerFrame) {
@@ -791,22 +689,13 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
restarted = NO;
downmixer = nil;
downmixerForVis = nil;
lastClippedSampleRate = 0.0;
rsvis = NULL;
lastVisRate = 44100.0;
inputRemain = 0;
chunkRemain = nil;
visResamplerRemain = 0;
secondsLatency = 0;
visPushed = 0;
AudioComponentDescription desc;
NSError *err;
@@ -854,14 +743,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
if(secondsPlayed > 0) {
[outputController setAmountPlayed:streamTimestamp];
}
double visLatency = visPushed;
visPushed -= secondsPlayed;
if(visLatency < secondsPlayed || visLatency > 30.0) {
visLatency = secondsPlayed;
visPushed = secondsPlayed;
}
secondsLatency = visLatency;
[visController postLatency:visLatency];
[visController postLatency:[outputController getPostVisLatency]];
}
- (void)setVolume:(double)v {
@@ -869,8 +751,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
}
- (double)latency {
if(secondsLatency > 0) return secondsLatency;
else return 0;
return 0.0;
}
- (void)start {
@@ -920,10 +801,10 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
if(_au) {
if(shouldPlayOutBuffer && !commandStop) {
int compareVal = 0;
double secondsLatency = self->secondsLatency >= 1e-5 ? self->secondsLatency : 0;
double secondsLatency = [outputController getTotalLatency];
int compareMax = (((1000000 / 5000) * secondsLatency) + (10000 / 5000)); // latency plus 10ms, divide by sleep intervals
do {
compareVal = self->secondsLatency >= 1e-5 ? self->secondsLatency : 0;
compareVal = [outputController getTotalLatency];
usleep(5000);
} while(!commandStop && compareVal > 0 && compareMax-- > 0);
}
@@ -936,9 +817,6 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
usleep(5000);
}
}
if(downmixerForVis) {
downmixerForVis = nil;
}
#ifdef OUTPUT_LOG
if(_logFile) {
fclose(_logFile);
@@ -950,10 +828,6 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
[visController reset];
visController = nil;
}
if(rsvis) {
rsstate_delete(rsvis);
rsvis = NULL;
}
stopCompleted = YES;
}
}