Compare commits

4 commits: 5aec42d518 ... 0494ef981c

Author | SHA1 | Date
---|---|---
 | 0494ef981c |
 | aeb312121f |
 | e5e4b4791c |
 | bb84d75d73 |
3 changed files with 36 additions and 74 deletions

File 1 of 3:

@@ -13,8 +13,10 @@
 #import "DSPRubberbandNode.h"
 #import "DSPFSurroundNode.h"
+#import "DSPHRTFNode.h"
 #import "DSPEqualizerNode.h"
 #import "VisualizationNode.h"
+#import "DSPDownmixNode.h"

 #import "Logging.h"
@@ -25,7 +27,9 @@
 	DSPRubberbandNode *rubberbandNode;
 	DSPFSurroundNode *fsurroundNode;
+	DSPHRTFNode *hrtfNode;
 	DSPEqualizerNode *equalizerNode;
+	DSPDownmixNode *downmixNode;
 	VisualizationNode *visualizationNode;
 }
@@ -57,9 +61,13 @@
 	if(!fsurroundNode) return NO;
 	equalizerNode = [[DSPEqualizerNode alloc] initWithController:self previous:fsurroundNode latency:0.03];
 	if(!equalizerNode) return NO;
+	hrtfNode = [[DSPHRTFNode alloc] initWithController:self previous:equalizerNode latency:0.03];
+	if(!hrtfNode) return NO;
+	downmixNode = [[DSPDownmixNode alloc] initWithController:self previous:hrtfNode latency:0.03];
+	if(!downmixNode) return NO;

 	// Approximately double the chunk size for Vis at 44100Hz
-	visualizationNode = [[VisualizationNode alloc] initWithController:self previous:equalizerNode latency:8192.0 / 44100.0];
+	visualizationNode = [[VisualizationNode alloc] initWithController:self previous:downmixNode latency:8192.0 / 44100.0];
 	if(!visualizationNode) return NO;

 	[self setPreviousNode:visualizationNode];
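With these additions the chain now runs rubberband → FSurround → equalizer → HRTF → downmix → visualization, and the visualization tap moves from the equalizer to the new downmix node. Its latency argument is plain arithmetic; a small illustrative sketch, not part of the diff:

```objc
// 8192 frames at 44100 Hz is about 0.186 s — roughly double a
// 4096-frame chunk, matching the "double the chunk size" comment.
static const double kVisFrames = 8192.0;
static const double kVisSampleRate = 44100.0;
static const double kVisLatencySeconds = kVisFrames / kVisSampleRate; // ≈ 0.1857
```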
@@ -163,7 +171,7 @@
 - (NSArray *)DSPs {
 	if(DSPsLaunched) {
-		return @[rubberbandNode, fsurroundNode, equalizerNode, visualizationNode];
+		return @[rubberbandNode, fsurroundNode, equalizerNode, hrtfNode, downmixNode, visualizationNode];
 	} else {
 		return @[];
 	}
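The full DSP chain, HRTF and downmix included, is now reported from this one accessor. Elsewhere in this compare (the block removed from the third file) the list is consumed by iterating it; a minimal sketch of that consumer pattern, using only the -launchThread and -setShouldContinue: calls that appear in this diff:

```objc
// Sketch: typical consumers of -DSPs, mirroring the launch/teardown
// code visible elsewhere in this compare.
for(Node *node in [self DSPs]) {
	[node launchThread]; // start each DSP's processing thread
}
// ...and when winding the chain down:
for(Node *node in [self DSPs]) {
	[node setShouldContinue:NO]; // ask each DSP thread to stop
}
```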
@@ -280,11 +288,7 @@
 			formatChanged = YES;
 		}
 	}
-	DSPDownmixNode *downmixNode = nil;
-	if(output) {
-		downmixNode = [output downmix];
-	}
-	if(downmixNode && !formatChanged) {
+	if(downmixNode && output && !formatChanged) {
 		outputFormat = [output deviceFormat];
 		outputChannelConfig = [output deviceChannelConfig];
 		AudioStreamBasicDescription currentOutputFormat = [downmixNode nodeFormat];
@@ -299,7 +303,7 @@
 	if(converter) {
 		[converter setOutputFormat:format];
 	}
-	if(downmixNode) {
+	if(downmixNode && output) {
 		[downmixNode setOutputFormat:[output deviceFormat] withChannelConfig:[output deviceChannelConfig]];
 	}
 	if(inputNode) {
@@ -323,6 +327,8 @@
 	}
 	previousNode = nil;
 	visualizationNode = nil;
+	downmixNode = nil;
+	hrtfNode = nil;
 	fsurroundNode = nil;
 	rubberbandNode = nil;
 	previousInput = nil;
@@ -387,7 +393,7 @@
 }

 - (id)downmix {
-	return [output downmix];
+	return downmixNode;
 }

 @end
File 2 of 3:

@@ -25,11 +25,7 @@ using std::atomic_long;
 #import <simd/simd.h>

 #import <CogAudio/ChunkList.h>
+#import <CogAudio/HeadphoneFilter.h>
-#import <CogAudio/Node.h>
-
-#import <CogAudio/DSPDownmixNode.h>
-#import <CogAudio/DSPHRTFNode.h>

 //#define OUTPUT_LOG
@@ -37,9 +33,12 @@ using std::atomic_long;
 @class AudioChunk;

-@interface OutputCoreAudio : Node {
+@interface OutputCoreAudio : NSObject {
 	OutputNode *outputController;

+	dispatch_semaphore_t writeSemaphore;
+	dispatch_semaphore_t readSemaphore;
+
 	NSLock *outputLock;

 	double streamTimestamp;
@@ -97,9 +96,7 @@ using std::atomic_long;
 	BOOL shouldPlayOutBuffer;

-	BOOL DSPsLaunched;
-	DSPHRTFNode *hrtfNode;
-	DSPDownmixNode *downmixNode;
+	ChunkList *outputBuffer;

 #ifdef OUTPUT_LOG
 	NSFileHandle *_logFile;
@@ -132,6 +129,4 @@ using std::atomic_long;
 - (AudioStreamBasicDescription)deviceFormat;
 - (uint32_t)deviceChannelConfig;

-- (DSPDownmixNode *)downmix;
-
 @end
File 3 of 3:

@@ -133,10 +133,6 @@ static void *kOutputCoreAudioContext = &kOutputCoreAudioContext;
 - (id)initWithController:(OutputNode *)c {
 	self = [super init];
 	if(self) {
-		buffer = [[ChunkList alloc] initWithMaximumDuration:0.5];
-		writeSemaphore = [[Semaphore alloc] init];
-		readSemaphore = [[Semaphore alloc] init];
-
 		outputController = c;
 		volume = 1.0;
 		outputDeviceID = -1;
@@ -212,26 +208,6 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 	return NO;
 }

-- (NSArray *)DSPs {
-	if(DSPsLaunched) {
-		return @[hrtfNode, downmixNode];
-	} else {
-		return @[];
-	}
-}
-
-- (DSPDownmixNode *)downmix {
-	return downmixNode;
-}
-
-- (void)launchDSPs {
-	NSArray *DSPs = [self DSPs];
-
-	for (Node *node in DSPs) {
-		[node launchThread];
-	}
-}
-
 - (void)threadEntry:(id)arg {
 	@autoreleasepool {
 		NSThread *currentThread = [NSThread currentThread];
@@ -260,15 +236,14 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 				[outputLock lock];
 				started = NO;
 				restarted = NO;
-				[buffer reset];
-				[self setShouldReset:YES];
+				[outputBuffer reset];
 				[outputLock unlock];
 			}

 			if(stopping)
 				break;

-			if(!cutOffInput && ![buffer isFull]) {
+			if(!cutOffInput && ![outputBuffer isFull]) {
 				[self renderAndConvert];
 				rendered = YES;
 			} else {
@@ -581,8 +556,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 		[outputController setFormat:&deviceFormat channelConfig:deviceChannelConfig];

 		[outputLock lock];
-		[buffer reset];
-		[self setShouldReset:YES];
+		[outputBuffer reset];
 		[outputLock unlock];

 		if(started) {
@@ -665,9 +639,8 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 	size_t frameCount = 0;
 	if(chunk && (frameCount = [chunk frameCount])) {
 		[outputLock lock];
-		[buffer addChunk:chunk];
+		[outputBuffer addChunk:chunk];
 		[outputLock unlock];
-		[readSemaphore signal];
 	}

 	if(streamFormatChanged) {
@@ -718,8 +691,8 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 	while(renderedSamples < frameCount) {
 		[refLock lock];
 		AudioChunk *chunk = nil;
-		if(![_self->downmixNode.buffer isEmpty]) {
-			chunk = [self->downmixNode.buffer removeSamples:frameCount - renderedSamples];
+		if(_self->outputBuffer && ![_self->outputBuffer isEmpty]) {
+			chunk = [_self->outputBuffer removeSamples:frameCount - renderedSamples];
 		}
 		[refLock unlock];
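The render callback now drains the local outputBuffer instead of reaching into the downmix node's buffer, and it guards against the buffer not existing yet. A minimal sketch of the surrounding pull loop, assuming AudioChunk's -frameCount (used elsewhere in this diff) and a hypothetical copyFrames() helper standing in for the actual sample copy:

```objc
// Sketch only: drain outputBuffer under refLock, copy whatever was
// obtained, and bail out on underrun so the remainder stays silent.
size_t renderedSamples = 0;
while(renderedSamples < frameCount) {
	[refLock lock];
	AudioChunk *chunk = nil;
	if(_self->outputBuffer && ![_self->outputBuffer isEmpty]) {
		chunk = [_self->outputBuffer removeSamples:frameCount - renderedSamples];
	}
	[refLock unlock];

	size_t got = chunk ? [chunk frameCount] : 0;
	if(!got) break;                     // underrun: nothing more to play right now
	copyFrames(chunk, renderedSamples); // hypothetical copy/convert step
	renderedSamples += got;
}
```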
@@ -854,19 +827,15 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 		visController = [VisualizationController sharedController];

-		hrtfNode = [[DSPHRTFNode alloc] initWithController:self previous:self latency:0.03];
-		downmixNode = [[DSPDownmixNode alloc] initWithController:self previous:hrtfNode latency:0.03];
-
-		[self setShouldContinue:YES];
-		[self setEndOfStream:NO];
-
-		DSPsLaunched = YES;
-		[self launchDSPs];
-
 		[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.outputDevice" options:0 context:kOutputCoreAudioContext];

 		observersapplied = YES;

+		outputBuffer = [[ChunkList alloc] initWithMaximumDuration:0.5];
+		if(!outputBuffer) {
+			return NO;
+		}
+
 		return (err == nil);
 	}
 }
@@ -888,7 +857,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 }

 - (double)latency {
-	return [buffer listDuration] + [[hrtfNode buffer] listDuration] + [[downmixNode buffer] listDuration];
+	return [outputBuffer listDuration];
 }

 - (void)start {
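With the HRTF and downmix nodes gone from this class, -latency now reports only the local output buffer. A hedged sketch of how a total could still be aggregated on the side that owns the DSP chain, assuming each DSP node exposes its ChunkList via -buffer, as the removed line here did:

```objc
// Sketch only: total buffered audio across the output plus every DSP
// node, combining -DSPs, -buffer, and -listDuration as they appear
// elsewhere in this compare.
double totalLatency = [output latency]; // OutputCoreAudio: just outputBuffer now
for(Node *node in [self DSPs]) {
	totalLatency += [[node buffer] listDuration];
}
```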
@@ -963,14 +932,6 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 			usleep(5000);
 		}
 	}
-	if(DSPsLaunched) {
-		[self setShouldContinue:NO];
-		[hrtfNode setShouldContinue:NO];
-		[downmixNode setShouldContinue:NO];
-		hrtfNode = nil;
-		downmixNode = nil;
-		DSPsLaunched = NO;
-	}
 #ifdef OUTPUT_LOG
 	if(_logFile) {
 		[_logFile closeFile];
@@ -1034,9 +995,9 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
 	cutOffInput = YES;
 	[outputLock lock];
 	[fadedBuffersLock lock];
-	FadedBuffer *fbuffer = [[FadedBuffer alloc] initWithBuffer:buffer fadeTarget:0.0 sampleRate:deviceFormat.mSampleRate];
-	buffer = [[ChunkList alloc] initWithMaximumDuration:0.5];
-	[fadedBuffers addObject:fbuffer];
+	FadedBuffer *buffer = [[FadedBuffer alloc] initWithBuffer:outputBuffer fadeTarget:0.0 sampleRate:deviceFormat.mSampleRate];
+	outputBuffer = [[ChunkList alloc] initWithMaximumDuration:0.5];
+	[fadedBuffers addObject:buffer];
 	[fadedBuffersLock unlock];
 	[outputLock unlock];
 	cutOffInput = NO;
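This is the fade-out hand-off: the live ChunkList is wrapped in a FadedBuffer that ramps it toward silence while a fresh ChunkList immediately takes its place as outputBuffer, so new audio keeps flowing without waiting for the old buffer to drain. A minimal sketch of that pattern, using only the calls shown above:

```objc
// Sketch of the fade-and-swap: everything happens under outputLock /
// fadedBuffersLock so the render path never sees a half-swapped buffer.
FadedBuffer *fading = [[FadedBuffer alloc] initWithBuffer:outputBuffer
                                               fadeTarget:0.0
                                               sampleRate:deviceFormat.mSampleRate];
outputBuffer = [[ChunkList alloc] initWithMaximumDuration:0.5]; // fresh buffer for new audio
[fadedBuffers addObject:fading];                                // queued; presumably mixed out by the render path
```

Note that the new code names its FadedBuffer local `buffer`, reusing the old ivar's name; the sketch uses `fading` only to keep the two roles visually distinct.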