diff --git a/Audio/Chain/BufferChain.h b/Audio/Chain/BufferChain.h
index a9ea34b42..71032bfa7 100644
--- a/Audio/Chain/BufferChain.h
+++ b/Audio/Chain/BufferChain.h
@@ -12,6 +12,7 @@
 #import "ConverterNode.h"
 #import "DSPRubberbandNode.h"
 #import "DSPFSurroundNode.h"
+#import "DSPHRTFNode.h"
 #import "InputNode.h"
 
 @interface BufferChain : NSObject {
@@ -19,6 +20,7 @@
 	ConverterNode *converterNode;
 	DSPRubberbandNode *rubberbandNode;
 	DSPFSurroundNode *fsurroundNode;
+	DSPHRTFNode *hrtfNode;
 
 	NSURL *streamURL;
 	id userInfo;
@@ -79,6 +81,8 @@
 
 - (DSPFSurroundNode *)fsurround;
 
+- (DSPHRTFNode *)hrtf;
+
 - (double)secondsBuffered;
 
 - (void)sustainHDCD;
diff --git a/Audio/Chain/BufferChain.m b/Audio/Chain/BufferChain.m
index 1ba3ad8eb..886768f02 100644
--- a/Audio/Chain/BufferChain.m
+++ b/Audio/Chain/BufferChain.m
@@ -28,6 +28,7 @@
 
 		rubberbandNode = nil;
 		fsurroundNode = nil;
+		hrtfNode = nil;
 	}
 
 	return self;
@@ -41,8 +42,9 @@
 	converterNode = [[ConverterNode alloc] initWithController:self previous:inputNode];
 	rubberbandNode = [[DSPRubberbandNode alloc] initWithController:self previous:converterNode latency:0.03];
 	fsurroundNode = [[DSPFSurroundNode alloc] initWithController:self previous:rubberbandNode latency:0.03];
+	hrtfNode = [[DSPHRTFNode alloc] initWithController:self previous:fsurroundNode latency:0.03];
 
-	finalNode = fsurroundNode;
+	finalNode = hrtfNode;
 }
 
 - (BOOL)open:(NSURL *)url withOutputFormat:(AudioStreamBasicDescription)outputFormat withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi {
@@ -153,6 +155,7 @@
 	[converterNode launchThread];
 	[rubberbandNode launchThread];
 	[fsurroundNode launchThread];
+	[hrtfNode launchThread];
 }
 
 - (void)setUserInfo:(id)i {
@@ -222,6 +225,7 @@
 	[converterNode setShouldContinue:s];
 	[rubberbandNode setShouldContinue:s];
 	[fsurroundNode setShouldContinue:s];
+	[hrtfNode setShouldContinue:s];
 }
 
 - (BOOL)isRunning {
@@ -248,6 +252,10 @@
 	return fsurroundNode;
 }
 
+- (DSPHRTFNode *)hrtf {
+	return hrtfNode;
+}
+
 - (AudioStreamBasicDescription)inputFormat {
 	return [inputNode nodeFormat];
 }
diff --git a/Audio/Chain/DSP/DSPHRTFNode.h b/Audio/Chain/DSP/DSPHRTFNode.h
new file mode 100644
index 000000000..fd65aac24
--- /dev/null
+++ b/Audio/Chain/DSP/DSPHRTFNode.h
@@ -0,0 +1,33 @@
+//
+//  DSPHRTFNode.h
+//  CogAudio
+//
+//  Created by Christopher Snowhill on 2/11/25.
+//
+
+#ifndef DSPHRTFNode_h
+#define DSPHRTFNode_h
+
+#import <simd/simd.h>
+
+#import "DSPNode.h"
+
+@interface DSPHRTFNode : DSPNode {
+}
+
+- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
+
+- (BOOL)setup;
+- (void)cleanUp;
+
+- (void)resetBuffer;
+
+- (void)process;
+- (AudioChunk * _Nullable)convert;
+
+- (void)reportMotion:(simd_float4x4)matrix;
+- (void)resetReferencePosition:(NSNotification *_Nullable)notification;
+
+@end
+
+#endif /* DSPHRTFNode_h */
diff --git a/Audio/Chain/DSP/DSPHRTFNode.m b/Audio/Chain/DSP/DSPHRTFNode.m
new file mode 100644
index 000000000..d46d7e9dd
--- /dev/null
+++ b/Audio/Chain/DSP/DSPHRTFNode.m
@@ -0,0 +1,368 @@
+//
+//  DSPHRTFNode.m
+//  CogAudio Framework
+//
+//  Created by Christopher Snowhill on 2/11/25.
+//
+
+#import <Cocoa/Cocoa.h>
+
+#import <CoreMotion/CoreMotion.h>
+
+#import "Logging.h"
+
+#import "DSPHRTFNode.h"
+
+#import "HeadphoneFilter.h"
+
+static void * kDSPHRTFNodeContext = &kDSPHRTFNodeContext;
+
+static NSString *CogPlaybackDidResetHeadTracking = @"CogPlaybackDigResetHeadTracking";
+
+static simd_float4x4 convertMatrix(CMRotationMatrix r) {
+	simd_float4x4 matrix = {
+		simd_make_float4(r.m33, -r.m31, r.m32, 0.0f),
+		simd_make_float4(r.m13, -r.m11, r.m12, 0.0f),
+		simd_make_float4(r.m23, -r.m21, r.m22, 0.0f),
+		simd_make_float4(0.0f, 0.0f, 0.0f, 1.0f)
+	};
+	return matrix;
+}
+
+static NSLock *motionManagerLock = nil;
+API_AVAILABLE(macos(14.0)) static CMHeadphoneMotionManager *motionManager = nil;
+static DSPHRTFNode *registeredMotionListener = nil;
+
+static void registerMotionListener(DSPHRTFNode *listener) {
+	if(@available(macOS 14, *)) {
+		[motionManagerLock lock];
+		if([motionManager isDeviceMotionActive]) {
+			[motionManager stopDeviceMotionUpdates];
+		}
+		if([motionManager isDeviceMotionAvailable]) {
+			registeredMotionListener = listener;
+			[motionManager startDeviceMotionUpdatesToQueue:[NSOperationQueue mainQueue] withHandler:^(CMDeviceMotion * _Nullable motion, NSError * _Nullable error) {
+				if(motion) {
+					[motionManagerLock lock];
+					[registeredMotionListener reportMotion:convertMatrix(motion.attitude.rotationMatrix)];
+					[motionManagerLock unlock];
+				}
+			}];
+		}
+		[motionManagerLock unlock];
+	}
+}
+
+static void unregisterMotionListener(void) {
+	if(@available(macOS 14, *)) {
+		[motionManagerLock lock];
+		if([motionManager isDeviceMotionActive]) {
+			[motionManager stopDeviceMotionUpdates];
+		}
+		registeredMotionListener = nil;
+		[motionManagerLock unlock];
+	}
+}
+
+@implementation DSPHRTFNode {
+	BOOL enableHrtf;
+	BOOL enableHeadTracking;
+	BOOL lastEnableHeadTracking;
+
+	HeadphoneFilter *hrtf;
+
+	BOOL stopping, paused;
+	BOOL processEntered;
+	BOOL resetFilter;
+
+	BOOL observersapplied;
+
+	AudioStreamBasicDescription lastInputFormat;
+	AudioStreamBasicDescription inputFormat;
+	AudioStreamBasicDescription outputFormat;
+
+	uint32_t lastInputChannelConfig, inputChannelConfig;
+	uint32_t outputChannelConfig;
+
+	BOOL referenceMatrixSet;
+	BOOL rotationMatrixUpdated;
+	simd_float4x4 rotationMatrix;
+	simd_float4x4 referenceMatrix;
+
+	float outBuffer[4096 * 2];
+}
+
++ (void)initialize {
+	motionManagerLock = [[NSLock alloc] init];
+
+	if(@available(macOS 14, *)) {
+		CMAuthorizationStatus status = [CMHeadphoneMotionManager authorizationStatus];
+		if(status == CMAuthorizationStatusDenied) {
+			ALog(@"Headphone motion not authorized");
+			return;
+		} else if(status == CMAuthorizationStatusAuthorized) {
+			ALog(@"Headphone motion authorized");
+		} else if(status == CMAuthorizationStatusRestricted) {
+			ALog(@"Headphone motion restricted");
+		} else if(status == CMAuthorizationStatusNotDetermined) {
+			ALog(@"Headphone motion status not determined; will prompt for access");
+		}
+
+		motionManager = [[CMHeadphoneMotionManager alloc] init];
+	}
+}
+
+- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
+	self = [super initWithController:c previous:p latency:latency];
+	if(self) {
+		NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
+		enableHrtf = [defaults boolForKey:@"enableHrtf"];
+		enableHeadTracking = [defaults boolForKey:@"enableHeadTracking"];
+
+		rotationMatrix = matrix_identity_float4x4;
+
+		[self addObservers];
+	}
+	return self;
+}
+
+- (void)dealloc {
+	[self cleanUp];
+	[self removeObservers];
+}
+
+- (void)addObservers {
+	[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableHrtf" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPHRTFNodeContext];
+	[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableHeadTracking" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPHRTFNodeContext];
+
+	[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(resetReferencePosition:) name:CogPlaybackDidResetHeadTracking object:nil];
+
+	observersapplied = YES;
+}
+
+- (void)removeObservers {
+	if(observersapplied) {
+		[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableHrtf" context:kDSPHRTFNodeContext];
+		[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableHeadTracking" context:kDSPHRTFNodeContext];
+
+		[[NSNotificationCenter defaultCenter] removeObserver:self name:CogPlaybackDidResetHeadTracking object:nil];
+
+		observersapplied = NO;
+	}
+}
+
+- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
+	if(context != kDSPHRTFNodeContext) {
+		[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
+		return;
+	}
+
+	if([keyPath isEqualToString:@"values.enableHrtf"] ||
+	   [keyPath isEqualToString:@"values.enableHeadTracking"]) {
+		NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
+		enableHrtf = [defaults boolForKey:@"enableHrtf"];
+		enableHeadTracking = [defaults boolForKey:@"enableHeadTracking"];
+		resetFilter = YES;
+	}
+}
+
+- (BOOL)fullInit {
+	if(enableHrtf) {
+		NSURL *presetUrl = [[NSBundle mainBundle] URLForResource:@"SADIE_D02-96000" withExtension:@"mhr"];
+
+		rotationMatrixUpdated = NO;
+
+		simd_float4x4 matrix;
+		if(!referenceMatrixSet || !enableHeadTracking) {
+			referenceMatrixSet = NO;
+			matrix = matrix_identity_float4x4;
+			self->referenceMatrix = matrix;
+			if(enableHeadTracking) {
+				lastEnableHeadTracking = YES;
+				registerMotionListener(self);
+			} else if(lastEnableHeadTracking) {
+				lastEnableHeadTracking = NO;
+				unregisterMotionListener();
+			}
+		} else {
+			simd_float4x4 mirrorTransform = {
+				simd_make_float4(-1.0, 0.0, 0.0, 0.0),
+				simd_make_float4(0.0, 1.0, 0.0, 0.0),
+				simd_make_float4(0.0, 0.0, 1.0, 0.0),
+				simd_make_float4(0.0, 0.0, 0.0, 1.0)
+			};
+
+			matrix = simd_mul(mirrorTransform, rotationMatrix);
+			matrix = simd_mul(matrix, referenceMatrix);
+		}
+
+		hrtf = [[HeadphoneFilter alloc] initWithImpulseFile:presetUrl forSampleRate:inputFormat.mSampleRate withInputChannels:inputFormat.mChannelsPerFrame withConfig:inputChannelConfig withMatrix:matrix];
+		if(!hrtf) {
+			return NO;
+		}
+
+		outputFormat = inputFormat;
+		outputFormat.mChannelsPerFrame = 2;
+		outputFormat.mBytesPerFrame = sizeof(float) * outputFormat.mChannelsPerFrame;
+		outputFormat.mBytesPerPacket = outputFormat.mBytesPerFrame * outputFormat.mFramesPerPacket;
+		outputChannelConfig = AudioChannelSideLeft | AudioChannelSideRight;
+
+		resetFilter = NO;
+	} else {
+		if(lastEnableHeadTracking) {
+			lastEnableHeadTracking = NO;
+			unregisterMotionListener();
+		}
+		referenceMatrixSet = NO;
+
+		hrtf = nil;
+	}
+
+	return YES;
+}
+
+- (void)fullShutdown {
+	hrtf = nil;
+	if(lastEnableHeadTracking) {
+		lastEnableHeadTracking = NO;
+		unregisterMotionListener();
+	}
+	resetFilter = NO;
+}
+
+- (BOOL)setup {
if(stopping) + return NO; + [self fullShutdown]; + return [self fullInit]; +} + +- (void)cleanUp { + stopping = YES; + while(processEntered) { + usleep(1000); + } + [self fullShutdown]; +} + +- (void)resetBuffer { + paused = YES; + while(processEntered) { + usleep(500); + } + [super resetBuffer]; + [self fullShutdown]; + paused = NO; +} + +- (void)process { + while([self shouldContinue] == YES) { + if(paused) { + usleep(500); + continue; + } + @autoreleasepool { + AudioChunk *chunk = nil; + chunk = [self convert]; + if(!chunk) { + if([self endOfStream] == YES) { + break; + } + if(paused) { + continue; + } + } else { + [self writeChunk:chunk]; + chunk = nil; + } + if(resetFilter || (!enableHrtf && hrtf)) { + [self fullShutdown]; + } + } + } +} + +- (AudioChunk *)convert { + if(stopping) + return nil; + + processEntered = YES; + + if(stopping || [self endOfStream] == YES || [self shouldContinue] == NO) { + processEntered = NO; + return nil; + } + + if(![self peekFormat:&inputFormat channelConfig:&inputChannelConfig]) { + processEntered = NO; + return nil; + } + + if((enableHrtf && !hrtf) || + memcmp(&inputFormat, &lastInputFormat, sizeof(inputFormat)) != 0 || + inputChannelConfig != lastInputChannelConfig) { + lastInputFormat = inputFormat; + lastInputChannelConfig = inputChannelConfig; + [self fullShutdown]; + if(![self setup]) { + processEntered = NO; + return nil; + } + } + + if(!hrtf) { + processEntered = NO; + return [self readChunk:4096]; + } + + AudioChunk *chunk = [self readChunkAsFloat32:4096]; + if(!chunk) { + processEntered = NO; + return nil; + } + + if(rotationMatrixUpdated) { + rotationMatrixUpdated = NO; + simd_float4x4 mirrorTransform = { + simd_make_float4(-1.0, 0.0, 0.0, 0.0), + simd_make_float4(0.0, 1.0, 0.0, 0.0), + simd_make_float4(0.0, 0.0, 1.0, 0.0), + simd_make_float4(0.0, 0.0, 0.0, 1.0) + }; + + simd_float4x4 matrix = simd_mul(mirrorTransform, rotationMatrix); + matrix = simd_mul(matrix, referenceMatrix); + + [hrtf reloadWithMatrix:matrix]; + } + + size_t frameCount = [chunk frameCount]; + NSData *sampleData = [chunk removeSamples:frameCount]; + + [hrtf process:(const float *)[sampleData bytes] sampleCount:(int)frameCount toBuffer:&outBuffer[0]]; + + AudioChunk *outputChunk = [[AudioChunk alloc] init]; + [outputChunk setFormat:outputFormat]; + if(outputChannelConfig) { + [outputChunk setChannelConfig:outputChannelConfig]; + } + [outputChunk assignSamples:&outBuffer[0] frameCount:frameCount]; + + processEntered = NO; + return outputChunk; +} + +- (void)reportMotion:(simd_float4x4)matrix { + rotationMatrix = matrix; + if(!referenceMatrixSet) { + referenceMatrix = simd_inverse(matrix); + referenceMatrixSet = YES; + } + rotationMatrixUpdated = YES; +} + +- (void)resetReferencePosition:(NSNotification *)notification { + referenceMatrixSet = NO; +} + +@end diff --git a/Audio/Output/HeadphoneFilter.h b/Audio/Chain/DSP/HeadphoneFilter.h similarity index 100% rename from Audio/Output/HeadphoneFilter.h rename to Audio/Chain/DSP/HeadphoneFilter.h diff --git a/Audio/Output/HeadphoneFilter.mm b/Audio/Chain/DSP/HeadphoneFilter.mm similarity index 100% rename from Audio/Output/HeadphoneFilter.mm rename to Audio/Chain/DSP/HeadphoneFilter.mm diff --git a/Audio/Chain/InputNode.m b/Audio/Chain/InputNode.m index 7e936a5f4..f13341e74 100644 --- a/Audio/Chain/InputNode.m +++ b/Audio/Chain/InputNode.m @@ -163,6 +163,7 @@ static void *kInputNodeContext = &kInputNodeContext; ConverterNode *converter = [bufferChain converter]; DSPRubberbandNode *rubberband = [bufferChain rubberband]; 
DSPFSurroundNode *fsurround = [bufferChain fsurround]; + DSPHRTFNode *hrtf = [bufferChain hrtf]; DLog(@"SEEKING! Resetting Buffer"); // This resets the converter's buffer @@ -171,6 +172,7 @@ static void *kInputNodeContext = &kInputNodeContext; [converter inputFormatDidChange:[bufferChain inputFormat] inputConfig:[bufferChain inputConfig]]; [rubberband resetBuffer]; [fsurround resetBuffer]; + [hrtf resetBuffer]; DLog(@"Reset buffer!"); diff --git a/Audio/CogAudio.xcodeproj/project.pbxproj b/Audio/CogAudio.xcodeproj/project.pbxproj index e8fffdf80..0312ad695 100644 --- a/Audio/CogAudio.xcodeproj/project.pbxproj +++ b/Audio/CogAudio.xcodeproj/project.pbxproj @@ -89,8 +89,6 @@ 839E56E72879450300DFB5F4 /* HrtfData.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 839E56E32879450300DFB5F4 /* HrtfData.cpp */; }; 839E56E82879450300DFB5F4 /* IHrtfData.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E42879450300DFB5F4 /* IHrtfData.h */; }; 839E56EA28794F6300DFB5F4 /* HrtfTypes.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E928794F6300DFB5F4 /* HrtfTypes.h */; }; - 839E56ED2879515D00DFB5F4 /* HeadphoneFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56EB2879515D00DFB5F4 /* HeadphoneFilter.h */; }; - 839E56EE2879515D00DFB5F4 /* HeadphoneFilter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 839E56EC2879515D00DFB5F4 /* HeadphoneFilter.mm */; }; 839E56F7287974A100DFB5F4 /* SandboxBroker.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56F6287974A100DFB5F4 /* SandboxBroker.h */; }; 83A3496A2D5C3F430096D530 /* DSPRubberbandNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83A349682D5C3F430096D530 /* DSPRubberbandNode.m */; }; 83A3496B2D5C3F430096D530 /* DSPRubberbandNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A349672D5C3F430096D530 /* DSPRubberbandNode.h */; }; @@ -98,7 +96,11 @@ 83A3496F2D5C405E0096D530 /* DSPFSurroundNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83A3496E2D5C405E0096D530 /* DSPFSurroundNode.m */; }; 83A349722D5C41810096D530 /* FSurroundFilter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 83A349712D5C41810096D530 /* FSurroundFilter.mm */; }; 83A349732D5C41810096D530 /* FSurroundFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A349702D5C41810096D530 /* FSurroundFilter.h */; }; + 83A349752D5C50A10096D530 /* DSPHRTFNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A349742D5C50A10096D530 /* DSPHRTFNode.h */; }; + 83A349772D5C50B20096D530 /* DSPHRTFNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83A349762D5C50B20096D530 /* DSPHRTFNode.m */; }; 83B74281289E027F005AAC28 /* CogAudio-Bridging-Header.h in Headers */ = {isa = PBXBuildFile; fileRef = 83B74280289E027F005AAC28 /* CogAudio-Bridging-Header.h */; }; + 83F843202D5C6272008C123B /* HeadphoneFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F8431E2D5C6272008C123B /* HeadphoneFilter.h */; }; + 83F843212D5C6272008C123B /* HeadphoneFilter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 83F8431F2D5C6272008C123B /* HeadphoneFilter.mm */; }; 83FFED512D5B08BC0044CCAF /* DSPNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83FFED502D5B08BC0044CCAF /* DSPNode.h */; }; 83FFED532D5B09320044CCAF /* DSPNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83FFED522D5B09320044CCAF /* DSPNode.m */; }; 8DC2EF570486A6940098B216 /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1058C7B1FEA5585E11CA2CBB /* Cocoa.framework */; }; @@ -209,8 +211,6 @@ 839E56E32879450300DFB5F4 /* HrtfData.cpp */ = {isa = PBXFileReference; fileEncoding = 4; 
lastKnownFileType = sourcecode.cpp.cpp; path = HrtfData.cpp; sourceTree = "<group>"; };
 		839E56E42879450300DFB5F4 /* IHrtfData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = IHrtfData.h; sourceTree = "<group>"; };
 		839E56E928794F6300DFB5F4 /* HrtfTypes.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HrtfTypes.h; sourceTree = "<group>"; };
-		839E56EB2879515D00DFB5F4 /* HeadphoneFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HeadphoneFilter.h; sourceTree = "<group>"; };
-		839E56EC2879515D00DFB5F4 /* HeadphoneFilter.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = HeadphoneFilter.mm; sourceTree = "<group>"; };
 		839E56F6287974A100DFB5F4 /* SandboxBroker.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = SandboxBroker.h; path = ../Utils/SandboxBroker.h; sourceTree = "<group>"; };
 		83A349672D5C3F430096D530 /* DSPRubberbandNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPRubberbandNode.h; sourceTree = "<group>"; };
 		83A349682D5C3F430096D530 /* DSPRubberbandNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPRubberbandNode.m; sourceTree = "<group>"; };
@@ -218,7 +218,11 @@
 		83A3496E2D5C405E0096D530 /* DSPFSurroundNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPFSurroundNode.m; sourceTree = "<group>"; };
 		83A349702D5C41810096D530 /* FSurroundFilter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FSurroundFilter.h; sourceTree = "<group>"; };
 		83A349712D5C41810096D530 /* FSurroundFilter.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FSurroundFilter.mm; sourceTree = "<group>"; };
+		83A349742D5C50A10096D530 /* DSPHRTFNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPHRTFNode.h; sourceTree = "<group>"; };
+		83A349762D5C50B20096D530 /* DSPHRTFNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPHRTFNode.m; sourceTree = "<group>"; };
 		83B74280289E027F005AAC28 /* CogAudio-Bridging-Header.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "CogAudio-Bridging-Header.h"; sourceTree = "<group>"; };
+		83F8431E2D5C6272008C123B /* HeadphoneFilter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = HeadphoneFilter.h; sourceTree = "<group>"; };
+		83F8431F2D5C6272008C123B /* HeadphoneFilter.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = HeadphoneFilter.mm; sourceTree = "<group>"; };
 		83FFED502D5B08BC0044CCAF /* DSPNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPNode.h; sourceTree = "<group>"; };
 		83FFED522D5B09320044CCAF /* DSPNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPNode.m; sourceTree = "<group>"; };
 		8DC2EF5A0486A6940098B216 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; };
@@ -385,8 +389,6 @@
 			children = (
 				835DD2662ACAF1D90057E319 /* OutputCoreAudio.h */,
 				835DD2652ACAF1D90057E319 /* OutputCoreAudio.m */,
-				839E56EB2879515D00DFB5F4 /* HeadphoneFilter.h */,
-				839E56EC2879515D00DFB5F4 /* HeadphoneFilter.mm */,
 			);
 			path = Output;
 			sourceTree = "<group>";
@@ -526,12 +528,16 @@
 		83A349692D5C3F430096D530 /* DSP */ = {
 			isa = PBXGroup;
 			children = (
+				83F8431E2D5C6272008C123B /* HeadphoneFilter.h */,
+				83F8431F2D5C6272008C123B /*
HeadphoneFilter.mm */, 83A349702D5C41810096D530 /* FSurroundFilter.h */, 83A349712D5C41810096D530 /* FSurroundFilter.mm */, 83A349672D5C3F430096D530 /* DSPRubberbandNode.h */, 83A349682D5C3F430096D530 /* DSPRubberbandNode.m */, 83A3496C2D5C40490096D530 /* DSPFSurroundNode.h */, 83A3496E2D5C405E0096D530 /* DSPFSurroundNode.m */, + 83A349742D5C50A10096D530 /* DSPHRTFNode.h */, + 83A349762D5C50B20096D530 /* DSPHRTFNode.m */, ); path = DSP; sourceTree = ""; @@ -544,6 +550,7 @@ buildActionMask = 2147483647; files = ( 839E56E82879450300DFB5F4 /* IHrtfData.h in Headers */, + 83F843202D5C6272008C123B /* HeadphoneFilter.h in Headers */, 17D21CA10B8BE4BA00D1EBDE /* BufferChain.h in Headers */, 83A3496D2D5C40490096D530 /* DSPFSurroundNode.h in Headers */, 831A50142865A7FD0049CFE4 /* rsstate.hpp in Headers */, @@ -564,11 +571,11 @@ 839E56E62879450300DFB5F4 /* Endianness.h in Headers */, 17D21DC70B8BE79700D1EBDE /* CoreAudioUtils.h in Headers */, 835DD2722ACAF5AD0057E319 /* lpc.h in Headers */, - 839E56ED2879515D00DFB5F4 /* HeadphoneFilter.h in Headers */, 17D21EBD0B8BF44000D1EBDE /* AudioPlayer.h in Headers */, 831A50182865A8B30049CFE4 /* rsstate.h in Headers */, 834FD4F027AF93680063BC83 /* ChunkList.h in Headers */, 17F94DD50B8D0F7000A34E87 /* PluginController.h in Headers */, + 83A349752D5C50A10096D530 /* DSPHRTFNode.h in Headers */, 17F94DDD0B8D101100A34E87 /* Plugin.h in Headers */, 8328995727CB51B700D7F028 /* SHA256Digest.h in Headers */, 834FD4EB27AF8F380063BC83 /* AudioChunk.h in Headers */, @@ -668,9 +675,9 @@ buildActionMask = 2147483647; files = ( 17D21CA20B8BE4BA00D1EBDE /* BufferChain.m in Sources */, + 83A349772D5C50B20096D530 /* DSPHRTFNode.m in Sources */, 17D21CA60B8BE4BA00D1EBDE /* InputNode.m in Sources */, 83A3496A2D5C3F430096D530 /* DSPRubberbandNode.m in Sources */, - 839E56EE2879515D00DFB5F4 /* HeadphoneFilter.mm in Sources */, 83504166286447DA006B32CC /* Downmix.m in Sources */, 8399CF2D27B5D1D5008751F1 /* NSDictionary+Merge.m in Sources */, 834A41AB287A90AB00EB9D9B /* channelmaps.cpp in Sources */, @@ -695,6 +702,7 @@ 8328995827CB51B700D7F028 /* SHA256Digest.m in Sources */, 17C940240B900909008627D6 /* AudioMetadataReader.m in Sources */, 17B619310B909BC300BC003F /* AudioPropertiesReader.m in Sources */, + 83F843212D5C6272008C123B /* HeadphoneFilter.mm in Sources */, 17ADB13D0B97926D00257CA2 /* AudioSource.m in Sources */, 834FD4F127AF93680063BC83 /* ChunkList.m in Sources */, 83FFED532D5B09320044CCAF /* DSPNode.m in Sources */, diff --git a/Audio/Output/OutputCoreAudio.h b/Audio/Output/OutputCoreAudio.h index 67051628c..8af8f30e1 100644 --- a/Audio/Output/OutputCoreAudio.h +++ b/Audio/Output/OutputCoreAudio.h @@ -70,9 +70,6 @@ using std::atomic_long; BOOL eqEnabled; BOOL eqInitialized; - BOOL enableHeadTracking; - BOOL lastEnableHeadTracking; - BOOL streamFormatStarted; BOOL streamFormatChanged; @@ -82,7 +79,6 @@ using std::atomic_long; BOOL currentdevicelistenerapplied; BOOL devicealivelistenerapplied; BOOL observersapplied; - BOOL htlistenerapplied; BOOL outputdevicechanged; float volume; @@ -114,9 +110,6 @@ using std::atomic_long; VisualizationController *visController; - BOOL enableHrtf; - HeadphoneFilter *hrtf; - int inputBufferLastTime; int inputRemain; @@ -132,7 +125,6 @@ using std::atomic_long; float *samplePtr; float tempBuffer[512 * 32]; float inputBuffer[4096 * 32]; // 4096 samples times maximum supported channel count - float hrtfBuffer[4096 * 2]; float eqBuffer[4096 * 32]; float eqOutBuffer[4096 * 32]; float downmixBuffer[4096 * 8]; @@ -141,10 +133,6 @@ using 
std::atomic_long; float visResamplerInput[8192]; float visTemp[8192]; - BOOL referenceMatrixSet; - BOOL rotationMatrixUpdated; - simd_float4x4 rotationMatrix; - simd_float4x4 referenceMatrix; #ifdef OUTPUT_LOG FILE *_logFile; @@ -171,9 +159,4 @@ using std::atomic_long; - (void)sustainHDCD; -- (void)reportMotion:(simd_float4x4)matrix; -- (void)resetReferencePosition:(NSNotification *)notification; - -- (void)setTempo:(double)t; - @end diff --git a/Audio/Output/OutputCoreAudio.m b/Audio/Output/OutputCoreAudio.m index dd144fe7f..74afbea37 100644 --- a/Audio/Output/OutputCoreAudio.m +++ b/Audio/Output/OutputCoreAudio.m @@ -17,81 +17,13 @@ #import -#import - #import "rsstate.h" extern void scale_by_volume(float *buffer, size_t count, float volume); static NSString *CogPlaybackDidBeginNotificiation = @"CogPlaybackDidBeginNotificiation"; -static NSString *CogPlaybackDidResetHeadTracking = @"CogPlaybackDigResetHeadTracking"; - -simd_float4x4 convertMatrix(CMRotationMatrix r) { - simd_float4x4 matrix = { - simd_make_float4(r.m33, -r.m31, r.m32, 0.0f), - simd_make_float4(r.m13, -r.m11, r.m12, 0.0f), - simd_make_float4(r.m23, -r.m21, r.m22, 0.0f), - simd_make_float4(0.0f, 0.0f, 0.0f, 1.0f) - }; - return matrix; -} - -NSLock *motionManagerLock = nil; -API_AVAILABLE(macos(14.0)) CMHeadphoneMotionManager *motionManager = nil; -OutputCoreAudio *registeredMotionListener = nil; - @implementation OutputCoreAudio -+ (void)initialize { - motionManagerLock = [[NSLock alloc] init]; - - if(@available(macOS 14, *)) { - CMAuthorizationStatus status = [CMHeadphoneMotionManager authorizationStatus]; - if(status == CMAuthorizationStatusDenied) { - ALog(@"Headphone motion not authorized"); - return; - } else if(status == CMAuthorizationStatusAuthorized) { - ALog(@"Headphone motion authorized"); - } else if(status == CMAuthorizationStatusRestricted) { - ALog(@"Headphone motion restricted"); - } else if(status == CMAuthorizationStatusNotDetermined) { - ALog(@"Headphone motion status not determined; will prompt for access"); - } - - motionManager = [[CMHeadphoneMotionManager alloc] init]; - } -} - -void registerMotionListener(OutputCoreAudio *listener) { - if(@available(macOS 14, *)) { - [motionManagerLock lock]; - if([motionManager isDeviceMotionActive]) { - [motionManager stopDeviceMotionUpdates]; - } - if([motionManager isDeviceMotionAvailable]) { - registeredMotionListener = listener; - [motionManager startDeviceMotionUpdatesToQueue:[NSOperationQueue mainQueue] withHandler:^(CMDeviceMotion * _Nullable motion, NSError * _Nullable error) { - if(motion) { - [motionManagerLock lock]; - [registeredMotionListener reportMotion:convertMatrix(motion.attitude.rotationMatrix)]; - [motionManagerLock unlock]; - } - }]; - } - [motionManagerLock unlock]; - } -} - -void unregisterMotionListener(void) { - if(@available(macOS 14, *)) { - [motionManagerLock lock]; - if([motionManager isDeviceMotionActive]) { - [motionManager stopDeviceMotionUpdates]; - } - registeredMotionListener = nil; - [motionManagerLock unlock]; - } -} static void *kOutputCoreAudioContext = &kOutputCoreAudioContext; @@ -143,28 +75,12 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA origFormat = format; UInt32 srcChannels = format.mChannelsPerFrame; - uint32_t dmConfig = config; - uint32_t dmChannels = srcChannels; - AudioStreamBasicDescription dmFormat; - dmFormat = format; - [outputLock lock]; - if(hrtf) { - dmChannels = 2; - dmConfig = AudioChannelFrontLeft | AudioChannelFrontRight; - } - [outputLock unlock]; - if(dmChannels != 
srcChannels) { - dmFormat.mChannelsPerFrame = dmChannels; - dmFormat.mBytesPerFrame = ((dmFormat.mBitsPerChannel + 7) / 8) * dmChannels; - dmFormat.mBytesPerPacket = dmFormat.mBytesPerFrame * dmFormat.mFramesPerPacket; - } UInt32 dstChannels = deviceFormat.mChannelsPerFrame; - if(dmChannels != dstChannels) { + if(srcChannels != dstChannels) { format.mChannelsPerFrame = dstChannels; format.mBytesPerFrame = ((format.mBitsPerChannel + 7) / 8) * dstChannels; format.mBytesPerPacket = format.mBytesPerFrame * format.mFramesPerPacket; - downmixer = [[DownmixProcessor alloc] initWithInputFormat:dmFormat inputConfig:dmConfig andOutputFormat:format outputConfig:deviceChannelConfig]; - format = origFormat; + downmixer = [[DownmixProcessor alloc] initWithInputFormat:origFormat inputConfig:origConfig andOutputFormat:format outputConfig:deviceChannelConfig]; } else { downmixer = nil; } @@ -383,12 +299,6 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons } else if([keyPath isEqualToString:@"values.eqPreamp"]) { float preamp = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] floatForKey:@"eqPreamp"]; eqPreamp = pow(10.0, preamp / 20.0); - } else if([keyPath isEqualToString:@"values.enableHrtf"] || - [keyPath isEqualToString:@"values.enableHeadTracking"]) { - enableHrtf = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"enableHrtf"]; - enableHeadTracking = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"enableHeadTracking"]; - if(streamFormatStarted) - resetStreamFormat = YES; } else if([keyPath isEqualToString:@"values.tempo"]) { tempo = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] doubleForKey:@"tempo"]; } @@ -763,53 +673,6 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons uint32_t channels = realStreamFormat.mChannelsPerFrame; uint32_t channelConfig = realStreamChannelConfig; - if(enableHrtf) { - NSURL *presetUrl = [[NSBundle mainBundle] URLForResource:@"SADIE_D02-96000" withExtension:@"mhr"]; - - rotationMatrixUpdated = NO; - - simd_float4x4 matrix; - if(!referenceMatrixSet || !enableHeadTracking) { - referenceMatrixSet = NO; - matrix = matrix_identity_float4x4; - self->referenceMatrix = matrix; - if(enableHeadTracking) { - lastEnableHeadTracking = YES; - registerMotionListener(self); - } else if(lastEnableHeadTracking) { - lastEnableHeadTracking = NO; - unregisterMotionListener(); - } - } else { - simd_float4x4 mirrorTransform = { - simd_make_float4(-1.0, 0.0, 0.0, 0.0), - simd_make_float4(0.0, 1.0, 0.0, 0.0), - simd_make_float4(0.0, 0.0, 1.0, 0.0), - simd_make_float4(0.0, 0.0, 0.0, 1.0) - }; - - matrix = simd_mul(mirrorTransform, rotationMatrix); - matrix = simd_mul(matrix, referenceMatrix); - } - - [outputLock lock]; - hrtf = [[HeadphoneFilter alloc] initWithImpulseFile:presetUrl forSampleRate:realStreamFormat.mSampleRate withInputChannels:channels withConfig:channelConfig withMatrix:matrix]; - [outputLock unlock]; - - channels = 2; - channelConfig = AudioChannelSideLeft | AudioChannelSideRight; - } else { - if(lastEnableHeadTracking) { - lastEnableHeadTracking = NO; - unregisterMotionListener(); - } - referenceMatrixSet = NO; - - [outputLock lock]; - hrtf = nil; - [outputLock unlock]; - } - streamFormat = realStreamFormat; streamFormat.mChannelsPerFrame = channels; streamFormat.mBytesPerFrame = sizeof(float) * channels; @@ -932,27 +795,6 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons 
samplePtr = &inputBuffer[0]; if(samplesRendered) { - [outputLock lock]; - if(hrtf) { - if(rotationMatrixUpdated) { - rotationMatrixUpdated = NO; - simd_float4x4 mirrorTransform = { - simd_make_float4(-1.0, 0.0, 0.0, 0.0), - simd_make_float4(0.0, 1.0, 0.0, 0.0), - simd_make_float4(0.0, 0.0, 1.0, 0.0), - simd_make_float4(0.0, 0.0, 0.0, 1.0) - }; - - simd_float4x4 matrix = simd_mul(mirrorTransform, rotationMatrix); - matrix = simd_mul(matrix, referenceMatrix); - - [hrtf reloadWithMatrix:matrix]; - } - [hrtf process:samplePtr sampleCount:samplesRendered toBuffer:&hrtfBuffer[0]]; - samplePtr = &hrtfBuffer[0]; - } - [outputLock unlock]; - if(eqEnabled && eqInitialized) { const int channels = streamFormat.mChannelsPerFrame; if(channels > 0) { @@ -1091,9 +933,6 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons secondsLatency = 0; visPushed = 0; - referenceMatrixSet = NO; - rotationMatrix = matrix_identity_float4x4; - AudioComponentDescription desc; NSError *err; @@ -1177,15 +1016,10 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons [[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.outputDevice" options:0 context:kOutputCoreAudioContext]; [[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.GraphicEQenable" options:0 context:kOutputCoreAudioContext]; [[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.eqPreamp" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kOutputCoreAudioContext]; - [[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableHrtf" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kOutputCoreAudioContext]; - [[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableHeadTracking" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kOutputCoreAudioContext]; [[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.tempo" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kOutputCoreAudioContext]; observersapplied = YES; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(resetReferencePosition:) name:CogPlaybackDidResetHeadTracking object:nil]; - htlistenerapplied = YES; - bzero(&timeStamp, sizeof(timeStamp)); timeStamp.mFlags = kAudioTimeStampSampleTimeValid; @@ -1243,20 +1077,10 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons } @synchronized(self) { stopInvoked = YES; - if(hrtf && lastEnableHeadTracking) { - lastEnableHeadTracking = NO; - unregisterMotionListener(); - } - if(htlistenerapplied) { - [[NSNotificationCenter defaultCenter] removeObserver:self name:CogPlaybackDidResetHeadTracking object:nil]; - htlistenerapplied = NO; - } if(observersapplied) { [[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.outputDevice" context:kOutputCoreAudioContext]; [[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.GraphicEQenable" context:kOutputCoreAudioContext]; [[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.eqPreamp" context:kOutputCoreAudioContext]; - [[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableHrtf" 
context:kOutputCoreAudioContext]; - [[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableHeadTracking" context:kOutputCoreAudioContext]; [[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.tempo" context:kOutputCoreAudioContext]; observersapplied = NO; } @@ -1360,17 +1184,4 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons shouldPlayOutBuffer = s; } -- (void)reportMotion:(simd_float4x4)matrix { - rotationMatrix = matrix; - if(!referenceMatrixSet) { - referenceMatrix = simd_inverse(matrix); - referenceMatrixSet = YES; - } - rotationMatrixUpdated = YES; -} - -- (void)resetReferencePosition:(NSNotification *)notification { - referenceMatrixSet = NO; -} - @end