New HRTF function #304
19 changed files with 1438 additions and 25 deletions
|
@@ -508,7 +508,7 @@ static AppController *kAppController = nil;
 }
 
 - (IBAction)privacyPolicy:(id)sender {
-	[[NSWorkspace sharedWorkspace] openURL:[NSURL URLWithString:@"https://www.iubenda.com/privacy-policy/59859310"]];
+	[[NSWorkspace sharedWorkspace] openURL:[NSURL URLWithString:NSLocalizedString(@"PrivacyPolicyURL", @"Privacy policy URL from Iubenda.")]];
 }
 
 - (IBAction)feedback:(id)sender {
@ -99,6 +99,14 @@
|
|||
839366681815923C006DD712 /* CogPluginMulti.m in Sources */ = {isa = PBXBuildFile; fileRef = 839366661815923C006DD712 /* CogPluginMulti.m */; };
|
||||
8399CF2C27B5D1D5008751F1 /* NSDictionary+Merge.h in Headers */ = {isa = PBXBuildFile; fileRef = 8399CF2A27B5D1D4008751F1 /* NSDictionary+Merge.h */; };
|
||||
8399CF2D27B5D1D5008751F1 /* NSDictionary+Merge.m in Sources */ = {isa = PBXBuildFile; fileRef = 8399CF2B27B5D1D4008751F1 /* NSDictionary+Merge.m */; };
|
||||
839E56E52879450300DFB5F4 /* HrtfData.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E12879450300DFB5F4 /* HrtfData.h */; };
|
||||
839E56E62879450300DFB5F4 /* Endianness.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E22879450300DFB5F4 /* Endianness.h */; };
|
||||
839E56E72879450300DFB5F4 /* HrtfData.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 839E56E32879450300DFB5F4 /* HrtfData.cpp */; };
|
||||
839E56E82879450300DFB5F4 /* IHrtfData.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E42879450300DFB5F4 /* IHrtfData.h */; };
|
||||
839E56EA28794F6300DFB5F4 /* HrtfTypes.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E928794F6300DFB5F4 /* HrtfTypes.h */; };
|
||||
839E56ED2879515D00DFB5F4 /* HeadphoneFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56EB2879515D00DFB5F4 /* HeadphoneFilter.h */; };
|
||||
839E56EE2879515D00DFB5F4 /* HeadphoneFilter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 839E56EC2879515D00DFB5F4 /* HeadphoneFilter.mm */; };
|
||||
839E56F7287974A100DFB5F4 /* SandboxBroker.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56F6287974A100DFB5F4 /* SandboxBroker.h */; };
|
||||
8DC2EF570486A6940098B216 /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1058C7B1FEA5585E11CA2CBB /* Cocoa.framework */; };
|
||||
8E8D3D2F0CBAEE6E00135C1B /* AudioContainer.h in Headers */ = {isa = PBXBuildFile; fileRef = 8E8D3D2D0CBAEE6E00135C1B /* AudioContainer.h */; settings = {ATTRIBUTES = (Public, ); }; };
|
||||
8E8D3D300CBAEE6E00135C1B /* AudioContainer.m in Sources */ = {isa = PBXBuildFile; fileRef = 8E8D3D2E0CBAEE6E00135C1B /* AudioContainer.m */; };
|
||||
|
@ -227,6 +235,14 @@
|
|||
839366661815923C006DD712 /* CogPluginMulti.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CogPluginMulti.m; sourceTree = "<group>"; };
|
||||
8399CF2A27B5D1D4008751F1 /* NSDictionary+Merge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSDictionary+Merge.h"; path = "../../Utils/NSDictionary+Merge.h"; sourceTree = "<group>"; };
|
||||
8399CF2B27B5D1D4008751F1 /* NSDictionary+Merge.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSDictionary+Merge.m"; path = "../../Utils/NSDictionary+Merge.m"; sourceTree = "<group>"; };
|
||||
839E56E12879450300DFB5F4 /* HrtfData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HrtfData.h; sourceTree = "<group>"; };
|
||||
839E56E22879450300DFB5F4 /* Endianness.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Endianness.h; sourceTree = "<group>"; };
|
||||
839E56E32879450300DFB5F4 /* HrtfData.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = HrtfData.cpp; sourceTree = "<group>"; };
|
||||
839E56E42879450300DFB5F4 /* IHrtfData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = IHrtfData.h; sourceTree = "<group>"; };
|
||||
839E56E928794F6300DFB5F4 /* HrtfTypes.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HrtfTypes.h; sourceTree = "<group>"; };
|
||||
839E56EB2879515D00DFB5F4 /* HeadphoneFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HeadphoneFilter.h; sourceTree = "<group>"; };
|
||||
839E56EC2879515D00DFB5F4 /* HeadphoneFilter.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = HeadphoneFilter.mm; sourceTree = "<group>"; };
|
||||
839E56F6287974A100DFB5F4 /* SandboxBroker.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = SandboxBroker.h; path = ../Utils/SandboxBroker.h; sourceTree = "<group>"; };
|
||||
8DC2EF5A0486A6940098B216 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; };
|
||||
8DC2EF5B0486A6940098B216 /* CogAudio.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = CogAudio.framework; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
8E8D3D2D0CBAEE6E00135C1B /* AudioContainer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioContainer.h; sourceTree = "<group>"; };
|
||||
|
@ -321,6 +337,7 @@
|
|||
17F94DD40B8D0F7000A34E87 /* PluginController.mm */,
|
||||
17D21C750B8BE4BA00D1EBDE /* Chain */,
|
||||
17D21C9B0B8BE4BA00D1EBDE /* Output */,
|
||||
839E56F6287974A100DFB5F4 /* SandboxBroker.h */,
|
||||
17D21C9E0B8BE4BA00D1EBDE /* Status.h */,
|
||||
B0575F2C0D687A0800411D77 /* Helper.h */,
|
||||
B0575F2F0D687A4000411D77 /* Helper.m */,
|
||||
|
@ -380,6 +397,8 @@
|
|||
17D21C9B0B8BE4BA00D1EBDE /* Output */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
839E56EB2879515D00DFB5F4 /* HeadphoneFilter.h */,
|
||||
839E56EC2879515D00DFB5F4 /* HeadphoneFilter.mm */,
|
||||
17D21C9C0B8BE4BA00D1EBDE /* OutputAVFoundation.h */,
|
||||
17D21C9D0B8BE4BA00D1EBDE /* OutputAVFoundation.m */,
|
||||
);
|
||||
|
@ -389,6 +408,7 @@
|
|||
17D21CD80B8BE5B400D1EBDE /* ThirdParty */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
839E56E02879450300DFB5F4 /* hrtf */,
|
||||
831A50152865A8800049CFE4 /* r8bstate.cpp */,
|
||||
831A50172865A8B30049CFE4 /* r8bstate.h */,
|
||||
831A50132865A7FD0049CFE4 /* r8bstate.hpp */,
|
||||
|
@ -523,6 +543,18 @@
|
|||
path = Visualization;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
839E56E02879450300DFB5F4 /* hrtf */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
839E56E22879450300DFB5F4 /* Endianness.h */,
|
||||
839E56E32879450300DFB5F4 /* HrtfData.cpp */,
|
||||
839E56E12879450300DFB5F4 /* HrtfData.h */,
|
||||
839E56E928794F6300DFB5F4 /* HrtfTypes.h */,
|
||||
839E56E42879450300DFB5F4 /* IHrtfData.h */,
|
||||
);
|
||||
path = hrtf;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
/* End PBXGroup section */
|
||||
|
||||
/* Begin PBXHeadersBuildPhase section */
|
||||
|
@ -530,6 +562,7 @@
|
|||
isa = PBXHeadersBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
839E56E82879450300DFB5F4 /* IHrtfData.h in Headers */,
|
||||
17D21CA10B8BE4BA00D1EBDE /* BufferChain.h in Headers */,
|
||||
831A4FE02865A7DC0049CFE4 /* pf_double.h in Headers */,
|
||||
831A50142865A7FD0049CFE4 /* r8bstate.hpp in Headers */,
|
||||
|
@ -546,10 +579,13 @@
|
|||
17D21CC50B8BE4BA00D1EBDE /* OutputAVFoundation.h in Headers */,
|
||||
83504165286447DA006B32CC /* Downmix.h in Headers */,
|
||||
831A4FDE2865A7DC0049CFE4 /* pffft_double.h in Headers */,
|
||||
839E56E52879450300DFB5F4 /* HrtfData.h in Headers */,
|
||||
831A4FE12865A7DC0049CFE4 /* pf_neon_double.h in Headers */,
|
||||
17D21CC70B8BE4BA00D1EBDE /* Status.h in Headers */,
|
||||
17D21CF30B8BE5EF00D1EBDE /* Semaphore.h in Headers */,
|
||||
839E56E62879450300DFB5F4 /* Endianness.h in Headers */,
|
||||
17D21DC70B8BE79700D1EBDE /* CoreAudioUtils.h in Headers */,
|
||||
839E56ED2879515D00DFB5F4 /* HeadphoneFilter.h in Headers */,
|
||||
17D21EBD0B8BF44000D1EBDE /* AudioPlayer.h in Headers */,
|
||||
831A50182865A8B30049CFE4 /* r8bstate.h in Headers */,
|
||||
831A4FE52865A7DC0049CFE4 /* pffft_priv_impl.h in Headers */,
|
||||
|
@ -572,6 +608,7 @@
|
|||
17C940230B900909008627D6 /* AudioMetadataReader.h in Headers */,
|
||||
831A500F2865A7DC0049CFE4 /* CDSPFracInterpolator.h in Headers */,
|
||||
831A4FE22865A7DC0049CFE4 /* pf_sse2_double.h in Headers */,
|
||||
839E56F7287974A100DFB5F4 /* SandboxBroker.h in Headers */,
|
||||
839065F32853338700636FBB /* dsd2float.h in Headers */,
|
||||
17B619300B909BC300BC003F /* AudioPropertiesReader.h in Headers */,
|
||||
831A4FDF2865A7DC0049CFE4 /* pf_neon_double_from_avx.h in Headers */,
|
||||
|
@ -587,6 +624,7 @@
|
|||
8E8D3D2F0CBAEE6E00135C1B /* AudioContainer.h in Headers */,
|
||||
B0575F2D0D687A0800411D77 /* Helper.h in Headers */,
|
||||
07DB5F3E0ED353A900C2E3EF /* AudioMetadataWriter.h in Headers */,
|
||||
839E56EA28794F6300DFB5F4 /* HrtfTypes.h in Headers */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
|
@ -663,6 +701,7 @@
|
|||
files = (
|
||||
17D21CA20B8BE4BA00D1EBDE /* BufferChain.m in Sources */,
|
||||
17D21CA60B8BE4BA00D1EBDE /* InputNode.m in Sources */,
|
||||
839E56EE2879515D00DFB5F4 /* HeadphoneFilter.mm in Sources */,
|
||||
831A50112865A7DC0049CFE4 /* r8bbase.cpp in Sources */,
|
||||
83504166286447DA006B32CC /* Downmix.m in Sources */,
|
||||
8399CF2D27B5D1D5008751F1 /* NSDictionary+Merge.m in Sources */,
|
||||
|
@ -682,6 +721,7 @@
|
|||
839366681815923C006DD712 /* CogPluginMulti.m in Sources */,
|
||||
17D21EBE0B8BF44000D1EBDE /* AudioPlayer.m in Sources */,
|
||||
17F94DD60B8D0F7000A34E87 /* PluginController.mm in Sources */,
|
||||
839E56E72879450300DFB5F4 /* HrtfData.cpp in Sources */,
|
||||
831A4FE62865A7DC0049CFE4 /* pffft_double.c in Sources */,
|
||||
17A2D3C60B8D1D37000778C4 /* AudioDecoder.m in Sources */,
|
||||
8328995827CB51B700D7F028 /* SHA256Digest.m in Sources */,
|
||||
|
@ -702,6 +742,7 @@
|
|||
1DEB91AE08733DA50010E9CD /* Debug */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
CLANG_CXX_LANGUAGE_STANDARD = "c++17";
|
||||
COMBINE_HIDPI_IMAGES = YES;
|
||||
COPY_PHASE_STRIP = NO;
|
||||
DEAD_CODE_STRIPPING = YES;
|
||||
|
@ -741,6 +782,7 @@
|
|||
1DEB91AF08733DA50010E9CD /* Release */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
CLANG_CXX_LANGUAGE_STANDARD = "c++17";
|
||||
COMBINE_HIDPI_IMAGES = YES;
|
||||
DEAD_CODE_STRIPPING = YES;
|
||||
DYLIB_COMPATIBILITY_VERSION = 1;
|
||||
|
|
47
Audio/Output/HeadphoneFilter.h
Normal file
|
@@ -0,0 +1,47 @@
//
// HeadphoneFilter.h
// CogAudio Framework
//
// Created by Christopher Snowhill on 1/24/22.
//

#ifndef HeadphoneFilter_h
#define HeadphoneFilter_h

#import <Accelerate/Accelerate.h>
#import <Cocoa/Cocoa.h>

@interface HeadphoneFilter : NSObject {
	vDSP_DFT_Setup dftSetupF;
	vDSP_DFT_Setup dftSetupB;

	size_t fftSize;
	size_t fftSizeOver2;
	size_t bufferSize;
	size_t paddedBufferSize;
	int channelCount;

	DSPSplitComplex signal_fft;
	DSPSplitComplex input_filtered_signal_per_channel[2];
	DSPSplitComplex input_filtered_signal_totals[2];
	DSPSplitComplex *impulse_responses;

	float **prevInputs;

	float *left_result;
	float *right_result;

	float *paddedSignal;
}

+ (BOOL)validateImpulseFile:(NSURL *)url;

- (id)initWithImpulseFile:(NSURL *)url forSampleRate:(double)sampleRate withInputChannels:(int)channels withConfig:(uint32_t)config;

- (void)process:(const float *)inBuffer sampleCount:(size_t)count toBuffer:(float *)outBuffer;

- (void)reset;

@end

#endif /* HeadphoneFilter_h */
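For orientation, here is a hedged usage sketch of the interface declared above. It is not part of the PR: the helper name, frame count, and channel parameters are illustrative assumptions, while the SADIE_D02-96000.mhr preset name is the one the output code loads later in this diff.

// Hypothetical usage sketch (not part of the PR). renderWithHrtf and its
// parameters are illustrative; only the preset name is taken from the diff.
#import "HeadphoneFilter.h"

static void renderWithHrtf(const float *interleavedInput, size_t frames,
                           int inputChannels, uint32_t channelConfig,
                           double sampleRate, float *stereoOut) {
	NSURL *preset = [[NSBundle mainBundle] URLForResource:@"SADIE_D02-96000" withExtension:@"mhr"];
	if(![HeadphoneFilter validateImpulseFile:preset]) return;

	HeadphoneFilter *filter = [[HeadphoneFilter alloc] initWithImpulseFile:preset
	                                                         forSampleRate:sampleRate
	                                                     withInputChannels:inputChannels
	                                                            withConfig:channelConfig];
	if(!filter) return;

	// Input is interleaved float with inputChannels channels; output is interleaved stereo.
	[filter process:interleavedInput sampleCount:frames toBuffer:stereoOut];
	[filter reset]; // clear the overlap history before reusing the filter on an unrelated stream
}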
516
Audio/Output/HeadphoneFilter.mm
Normal file
|
@ -0,0 +1,516 @@
|
|||
//
|
||||
// HeadphoneFilter.m
|
||||
// CogAudio Framework
|
||||
//
|
||||
// Created by Christopher Snowhill on 1/24/22.
|
||||
//
|
||||
|
||||
#import "HeadphoneFilter.h"
|
||||
#import "AudioChunk.h"
|
||||
#import "AudioDecoder.h"
|
||||
#import "AudioSource.h"
|
||||
|
||||
#import <stdlib.h>
|
||||
|
||||
#import <fstream>
|
||||
|
||||
#import "r8bstate.h"
|
||||
|
||||
#import "HrtfData.h"
|
||||
|
||||
#import "Logging.h"
|
||||
|
||||
typedef struct speakerPosition {
|
||||
float elevation;
|
||||
float azimuth;
|
||||
float distance;
|
||||
} speakerPosition;
|
||||
|
||||
#define DEGREES(x) ((x)*M_PI / 180.0)
|
||||
|
||||
static const speakerPosition speakerPositions[18] = {
|
||||
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-30.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+30.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-135.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+135.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-15.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+15.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-180.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-90.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+90.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(-90.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(-45.0), .azimuth = DEGREES(-30.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(-45.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(-45.0), .azimuth = DEGREES(+30.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(-45.0), .azimuth = DEGREES(-135.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(-45.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
|
||||
{ .elevation = DEGREES(-45.0), .azimuth = DEGREES(+135.0), .distance = 1.0 }
|
||||
};
|
||||
|
||||
@interface impulseCacheObject : NSObject {
|
||||
}
|
||||
@property NSURL *URL;
|
||||
@property int sampleCount;
|
||||
@property int channelCount;
|
||||
@property uint32_t channelConfig;
|
||||
@property double sampleRate;
|
||||
@property double targetSampleRate;
|
||||
@property NSData *data;
|
||||
@end
|
||||
|
||||
@implementation impulseCacheObject
|
||||
@synthesize URL;
|
||||
@synthesize sampleCount;
|
||||
@synthesize channelCount;
|
||||
@synthesize channelConfig;
|
||||
@synthesize sampleRate;
|
||||
@synthesize targetSampleRate;
|
||||
@synthesize data;
|
||||
@end
|
||||
|
||||
@interface impulseCache : NSObject {
|
||||
}
|
||||
@property NSMutableArray<impulseCacheObject *> *cacheObjects;
|
||||
+ (impulseCache *)sharedController;
|
||||
- (const float *)getImpulse:(NSURL *)url sampleCount:(int *)sampleCount channelCount:(int)channelCount channelConfig:(uint32_t)channelConfig sampleRate:(double)sampleRate;
|
||||
@end
|
||||
|
||||
// Apparently _mm_malloc is Intel-only on newer macOS targets, so use supported posix_memalign
|
||||
static void *_memalign_malloc(size_t size, size_t align) {
|
||||
void *ret = NULL;
|
||||
if(posix_memalign(&ret, align, size) != 0) {
|
||||
return NULL;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
@implementation impulseCache
|
||||
|
||||
static impulseCache *_sharedController = nil;
|
||||
|
||||
+ (impulseCache *)sharedController {
|
||||
@synchronized(self) {
|
||||
if(!_sharedController) {
|
||||
_sharedController = [[impulseCache alloc] init];
|
||||
}
|
||||
}
|
||||
return _sharedController;
|
||||
}
|
||||
|
||||
- (id)init {
|
||||
self = [super init];
|
||||
if(self) {
|
||||
self.cacheObjects = [[NSMutableArray alloc] init];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (impulseCacheObject *)addImpulse:(NSURL *)url sampleCount:(int)sampleCount channelCount:(int)channelCount channelConfig:(uint32_t)channelConfig originalSampleRate:(double)originalSampleRate targetSampleRate:(double)targetSampleRate impulseBuffer:(const float *)impulseBuffer {
|
||||
impulseCacheObject *obj = [[impulseCacheObject alloc] init];
|
||||
|
||||
obj.URL = url;
|
||||
obj.sampleCount = sampleCount;
|
||||
obj.channelCount = channelCount;
|
||||
obj.sampleRate = originalSampleRate;
|
||||
obj.targetSampleRate = targetSampleRate;
|
||||
obj.data = [NSData dataWithBytes:impulseBuffer length:(sampleCount * channelCount * sizeof(float) * 2)];
|
||||
|
||||
@synchronized(self.cacheObjects) {
|
||||
[self.cacheObjects addObject:obj];
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
- (const float *)getImpulse:(NSURL *)url sampleCount:(int *)retSampleCount channelCount:(int)channelCount channelConfig:(uint32_t)channelConfig sampleRate:(double)sampleRate {
|
||||
BOOL impulseFound = NO;
|
||||
const float *impulseData = NULL;
|
||||
double sampleRateOfSource = 0;
|
||||
int sampleCount = 0;
|
||||
impulseCacheObject *cacheObject = nil;
|
||||
|
||||
@synchronized(self.cacheObjects) {
|
||||
for(impulseCacheObject *obj in self.cacheObjects) {
|
||||
if([obj.URL isEqualTo:url] &&
|
||||
obj.targetSampleRate == sampleRate &&
|
||||
obj.channelCount == channelCount &&
|
||||
obj.channelConfig == channelConfig) {
|
||||
*retSampleCount = obj.sampleCount;
|
||||
return (const float *)[obj.data bytes];
|
||||
}
|
||||
}
|
||||
for(impulseCacheObject *obj in self.cacheObjects) {
|
||||
if([obj.URL isEqualTo:url] &&
|
||||
obj.sampleRate == obj.targetSampleRate &&
|
||||
obj.channelCount == channelCount &&
|
||||
obj.channelConfig == channelConfig) {
|
||||
impulseData = (const float *)[obj.data bytes];
|
||||
sampleCount = obj.sampleCount;
|
||||
sampleRateOfSource = obj.sampleRate;
|
||||
impulseFound = YES;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(!impulseFound) {
|
||||
NSString *filePath = [url path];
|
||||
|
||||
try {
|
||||
std::ifstream file([filePath UTF8String], std::fstream::binary);
|
||||
|
||||
if(!file.is_open()) {
|
||||
throw std::logic_error("Cannot open file.");
|
||||
}
|
||||
|
||||
HrtfData data(file);
|
||||
|
||||
file.close();
|
||||
|
||||
sampleRateOfSource = data.get_sample_rate();
|
||||
|
||||
uint32_t sampleCountExact = data.get_response_length();
|
||||
sampleCount = sampleCountExact + ((data.get_longest_delay() + 2) >> 2);
|
||||
|
||||
std::vector<float> hrtfData(sampleCount * channelCount * 2, 0.0);
|
||||
|
||||
for(uint32_t i = 0; i < channelCount; ++i) {
|
||||
uint32_t channelFlag = [AudioChunk extractChannelFlag:i fromConfig:channelConfig];
|
||||
uint32_t channelNumber = [AudioChunk findChannelIndex:channelFlag];
|
||||
|
||||
if(channelNumber < 18) {
|
||||
const speakerPosition &speaker = speakerPositions[channelNumber];
|
||||
DirectionData hrtfLeft;
|
||||
DirectionData hrtfRight;
|
||||
|
||||
data.get_direction_data(speaker.elevation, speaker.azimuth, speaker.distance, hrtfLeft, hrtfRight);
|
||||
|
||||
cblas_scopy(sampleCountExact, &hrtfLeft.impulse_response[0], 1, &hrtfData[((hrtfLeft.delay + 2) >> 2) * channelCount * 2 + i * 2], channelCount * 2);
|
||||
cblas_scopy(sampleCountExact, &hrtfRight.impulse_response[0], 1, &hrtfData[((hrtfLeft.delay + 2) >> 2) * channelCount * 2 + i * 2 + 1], channelCount * 2);
|
||||
}
|
||||
}
|
||||
|
||||
cacheObject = [self addImpulse:url sampleCount:sampleCount channelCount:channelCount channelConfig:channelConfig originalSampleRate:sampleRateOfSource targetSampleRate:sampleRateOfSource impulseBuffer:&hrtfData[0]];
|
||||
|
||||
impulseData = (const float *)[cacheObject.data bytes];
|
||||
} catch(std::exception &e) {
|
||||
ALog(@"Exception caught: %s", e.what());
|
||||
return nil;
|
||||
}
|
||||
}
|
||||
|
||||
if(sampleRateOfSource != sampleRate) {
|
||||
double sampleRatio = sampleRate / sampleRateOfSource;
|
||||
int resampledCount = (int)ceil((double)sampleCount * sampleRatio);
|
||||
|
||||
void *r8bstate = r8bstate_new(channelCount * 2, 1024, sampleRateOfSource, sampleRate);
|
||||
|
||||
float *resampledImpulse = (float *)_memalign_malloc(resampledCount * sizeof(float) * channelCount * 2, 16);
|
||||
if(!resampledImpulse) {
|
||||
r8bstate_delete(r8bstate);
|
||||
return nil;
|
||||
}
|
||||
|
||||
size_t inputDone = 0;
|
||||
size_t outputDone = 0;
|
||||
|
||||
outputDone = r8bstate_resample(r8bstate, impulseData, sampleCount, &inputDone, resampledImpulse, resampledCount);
|
||||
|
||||
while(outputDone < resampledCount) {
|
||||
outputDone += r8bstate_flush(r8bstate, resampledImpulse + outputDone * channelCount * 2, resampledCount - outputDone);
|
||||
}
|
||||
|
||||
r8bstate_delete(r8bstate);
|
||||
|
||||
sampleCount = (int)outputDone;
|
||||
|
||||
// Normalize resampled impulse by sample ratio
|
||||
float fSampleRatio = (float)sampleRatio;
|
||||
vDSP_vsdiv(resampledImpulse, 1, &fSampleRatio, resampledImpulse, 1, sampleCount * channelCount * 2);
|
||||
|
||||
cacheObject = [self addImpulse:url sampleCount:sampleCount channelCount:channelCount channelConfig:channelConfig originalSampleRate:sampleRateOfSource targetSampleRate:sampleRate impulseBuffer:resampledImpulse];
|
||||
|
||||
free(resampledImpulse);
|
||||
|
||||
impulseData = (const float *)[cacheObject.data bytes];
|
||||
}
|
||||
|
||||
*retSampleCount = sampleCount;
|
||||
return impulseData;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@implementation HeadphoneFilter
|
||||
|
||||
+ (BOOL)validateImpulseFile:(NSURL *)url {
|
||||
NSString *filePath = [url path];
|
||||
|
||||
try {
|
||||
std::ifstream file([filePath UTF8String], std::fstream::binary);
|
||||
|
||||
if(!file.is_open()) {
|
||||
throw std::logic_error("Cannot open file.");
|
||||
}
|
||||
|
||||
HrtfData data(file);
|
||||
|
||||
file.close();
|
||||
|
||||
return YES;
|
||||
} catch(std::exception &e) {
|
||||
ALog(@"Exception thrown: %s", e.what());
|
||||
return NO;
|
||||
}
|
||||
}
|
||||
|
||||
- (id)initWithImpulseFile:(NSURL *)url forSampleRate:(double)sampleRate withInputChannels:(int)channels withConfig:(uint32_t)config {
|
||||
self = [super init];
|
||||
|
||||
if(self) {
|
||||
int sampleCount = 0;
|
||||
const float *impulseBuffer = [[impulseCache sharedController] getImpulse:url sampleCount:&sampleCount channelCount:channels channelConfig:config sampleRate:sampleRate];
|
||||
if(!impulseBuffer) {
|
||||
return nil;
|
||||
}
|
||||
|
||||
channelCount = channels;
|
||||
|
||||
bufferSize = 512;
|
||||
fftSize = sampleCount + bufferSize;
|
||||
|
||||
int pow = 1;
|
||||
while(fftSize > 2) {
|
||||
pow++;
|
||||
fftSize /= 2;
|
||||
}
|
||||
fftSize = 2 << pow;
|
||||
|
||||
float *deinterleavedImpulseBuffer = (float *)_memalign_malloc(fftSize * sizeof(float) * channelCount * 2, 16);
|
||||
if(!deinterleavedImpulseBuffer) {
|
||||
return nil;
|
||||
}
|
||||
|
||||
for(int i = 0; i < channelCount; ++i) {
|
||||
cblas_scopy(sampleCount, impulseBuffer + i * 2, (int)channelCount * 2, deinterleavedImpulseBuffer + i * fftSize * 2, 1);
|
||||
vDSP_vclr(deinterleavedImpulseBuffer + i * fftSize * 2 + sampleCount, 1, fftSize - sampleCount);
|
||||
cblas_scopy(sampleCount, impulseBuffer + i * 2 + 1, (int)channelCount * 2, deinterleavedImpulseBuffer + i * fftSize * 2 + fftSize, 1);
|
||||
vDSP_vclr(deinterleavedImpulseBuffer + i * fftSize * 2 + fftSize + sampleCount, 1, fftSize - sampleCount);
|
||||
}
|
||||
|
||||
paddedBufferSize = fftSize;
|
||||
fftSizeOver2 = (fftSize + 1) / 2;
|
||||
const size_t fftSizeOver2Plus1 = fftSizeOver2 + 1; // DFT float overwrites plus one, double doesn't
|
||||
|
||||
dftSetupF = vDSP_DFT_zrop_CreateSetup(nil, fftSize, vDSP_DFT_FORWARD);
|
||||
dftSetupB = vDSP_DFT_zrop_CreateSetup(nil, fftSize, vDSP_DFT_INVERSE);
|
||||
if(!dftSetupF || !dftSetupB) {
|
||||
free(deinterleavedImpulseBuffer);
|
||||
return nil;
|
||||
}
|
||||
|
||||
paddedSignal = (float *)_memalign_malloc(sizeof(float) * paddedBufferSize, 16);
|
||||
if(!paddedSignal) {
|
||||
free(deinterleavedImpulseBuffer);
|
||||
return nil;
|
||||
}
|
||||
|
||||
signal_fft.realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
signal_fft.imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
if(!signal_fft.realp || !signal_fft.imagp) {
|
||||
free(deinterleavedImpulseBuffer);
|
||||
return nil;
|
||||
}
|
||||
|
||||
input_filtered_signal_per_channel[0].realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
input_filtered_signal_per_channel[0].imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
if(!input_filtered_signal_per_channel[0].realp ||
|
||||
!input_filtered_signal_per_channel[0].imagp) {
|
||||
free(deinterleavedImpulseBuffer);
|
||||
return nil;
|
||||
}
|
||||
|
||||
input_filtered_signal_per_channel[1].realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
input_filtered_signal_per_channel[1].imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
if(!input_filtered_signal_per_channel[1].realp ||
|
||||
!input_filtered_signal_per_channel[1].imagp) {
|
||||
free(deinterleavedImpulseBuffer);
|
||||
return nil;
|
||||
}
|
||||
|
||||
input_filtered_signal_totals[0].realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
input_filtered_signal_totals[0].imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
if(!input_filtered_signal_totals[0].realp ||
|
||||
!input_filtered_signal_totals[0].imagp) {
|
||||
free(deinterleavedImpulseBuffer);
|
||||
return nil;
|
||||
}
|
||||
|
||||
input_filtered_signal_totals[1].realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
input_filtered_signal_totals[1].imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
if(!input_filtered_signal_totals[1].realp ||
|
||||
!input_filtered_signal_totals[1].imagp) {
|
||||
free(deinterleavedImpulseBuffer);
|
||||
return nil;
|
||||
}
|
||||
|
||||
impulse_responses = (DSPSplitComplex *)calloc(sizeof(DSPSplitComplex), channels * 2);
|
||||
if(!impulse_responses) {
|
||||
free(deinterleavedImpulseBuffer);
|
||||
return nil;
|
||||
}
|
||||
|
||||
for(int i = 0; i < channels; ++i) {
|
||||
impulse_responses[i * 2 + 0].realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
impulse_responses[i * 2 + 0].imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
impulse_responses[i * 2 + 1].realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
impulse_responses[i * 2 + 1].imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2Plus1, 16);
|
||||
|
||||
if(!impulse_responses[i * 2 + 0].realp || !impulse_responses[i * 2 + 0].imagp ||
|
||||
!impulse_responses[i * 2 + 1].realp || !impulse_responses[i * 2 + 1].imagp) {
|
||||
free(deinterleavedImpulseBuffer);
|
||||
return nil;
|
||||
}
|
||||
|
||||
vDSP_ctoz((DSPComplex *)(deinterleavedImpulseBuffer + i * fftSize * 2), 2, &impulse_responses[i * 2 + 0], 1, fftSizeOver2);
|
||||
vDSP_ctoz((DSPComplex *)(deinterleavedImpulseBuffer + i * fftSize * 2 + fftSize), 2, &impulse_responses[i * 2 + 1], 1, fftSizeOver2);
|
||||
|
||||
vDSP_DFT_Execute(dftSetupF, impulse_responses[i * 2 + 0].realp, impulse_responses[i * 2 + 0].imagp, impulse_responses[i * 2 + 0].realp, impulse_responses[i * 2 + 0].imagp);
|
||||
vDSP_DFT_Execute(dftSetupF, impulse_responses[i * 2 + 1].realp, impulse_responses[i * 2 + 1].imagp, impulse_responses[i * 2 + 1].realp, impulse_responses[i * 2 + 1].imagp);
|
||||
}
|
||||
|
||||
free(deinterleavedImpulseBuffer);
|
||||
|
||||
left_result = (float *)_memalign_malloc(sizeof(float) * fftSize, 16);
|
||||
right_result = (float *)_memalign_malloc(sizeof(float) * fftSize, 16);
|
||||
if(!left_result || !right_result)
|
||||
return nil;
|
||||
|
||||
prevInputs = (float **)calloc(channels, sizeof(float *));
|
||||
if(!prevInputs)
|
||||
return nil;
|
||||
for(int i = 0; i < channels; ++i) {
|
||||
prevInputs[i] = (float *)_memalign_malloc(sizeof(float) * fftSize, 16);
|
||||
if(!prevInputs[i])
|
||||
return nil;
|
||||
vDSP_vclr(prevInputs[i], 1, fftSize);
|
||||
}
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
if(dftSetupF) vDSP_DFT_DestroySetup(dftSetupF);
|
||||
if(dftSetupB) vDSP_DFT_DestroySetup(dftSetupB);
|
||||
|
||||
free(paddedSignal);
|
||||
|
||||
free(signal_fft.realp);
|
||||
free(signal_fft.imagp);
|
||||
|
||||
free(input_filtered_signal_per_channel[0].realp);
|
||||
free(input_filtered_signal_per_channel[0].imagp);
|
||||
free(input_filtered_signal_per_channel[1].realp);
|
||||
free(input_filtered_signal_per_channel[1].imagp);
|
||||
|
||||
free(input_filtered_signal_totals[0].realp);
|
||||
free(input_filtered_signal_totals[0].imagp);
|
||||
free(input_filtered_signal_totals[1].realp);
|
||||
free(input_filtered_signal_totals[1].imagp);
|
||||
|
||||
if(impulse_responses) {
|
||||
for(int i = 0; i < channelCount * 2; ++i) {
|
||||
free(impulse_responses[i].realp);
|
||||
free(impulse_responses[i].imagp);
|
||||
}
|
||||
free(impulse_responses);
|
||||
}
|
||||
|
||||
free(left_result);
|
||||
free(right_result);
|
||||
|
||||
if(prevInputs) {
|
||||
for(int i = 0; i < channelCount; ++i) {
|
||||
free(prevInputs[i]);
|
||||
}
|
||||
free(prevInputs);
|
||||
}
|
||||
}
|
||||
|
||||
- (void)process:(const float *)inBuffer sampleCount:(int)count toBuffer:(float *)outBuffer {
|
||||
const float scale = 1.0 / (4.0 * (float)fftSize);
|
||||
|
||||
while(count > 0) {
|
||||
const int countToDo = (count > bufferSize) ? bufferSize : count;
|
||||
const int prevToDo = fftSize - countToDo;
|
||||
|
||||
vDSP_vclr(input_filtered_signal_totals[0].realp, 1, fftSizeOver2);
|
||||
vDSP_vclr(input_filtered_signal_totals[0].imagp, 1, fftSizeOver2);
|
||||
vDSP_vclr(input_filtered_signal_totals[1].realp, 1, fftSizeOver2);
|
||||
vDSP_vclr(input_filtered_signal_totals[1].imagp, 1, fftSizeOver2);
|
||||
|
||||
for(int i = 0; i < channelCount; ++i) {
|
||||
cblas_scopy((int)prevToDo, prevInputs[i] + countToDo, 1, paddedSignal, 1);
|
||||
cblas_scopy((int)countToDo, inBuffer + i, (int)channelCount, paddedSignal + prevToDo, 1);
|
||||
cblas_scopy((int)fftSize, paddedSignal, 1, prevInputs[i], 1);
|
||||
|
||||
vDSP_ctoz((DSPComplex *)paddedSignal, 2, &signal_fft, 1, fftSizeOver2);
|
||||
|
||||
vDSP_DFT_Execute(dftSetupF, signal_fft.realp, signal_fft.imagp, signal_fft.realp, signal_fft.imagp);
|
||||
|
||||
// One channel forward, then multiply and back twice
|
||||
|
||||
float preserveIRNyq = impulse_responses[i * 2 + 0].imagp[0];
|
||||
float preserveSigNyq = signal_fft.imagp[0];
|
||||
impulse_responses[i * 2 + 0].imagp[0] = 0;
|
||||
signal_fft.imagp[0] = 0;
|
||||
|
||||
vDSP_zvmul(&signal_fft, 1, &impulse_responses[i * 2 + 0], 1, &input_filtered_signal_per_channel[0], 1, fftSizeOver2, 1);
|
||||
|
||||
input_filtered_signal_per_channel[0].imagp[0] = preserveIRNyq * preserveSigNyq;
|
||||
impulse_responses[i * 2 + 0].imagp[0] = preserveIRNyq;
|
||||
|
||||
preserveIRNyq = impulse_responses[i * 2 + 1].imagp[0];
|
||||
impulse_responses[i * 2 + 1].imagp[0] = 0;
|
||||
|
||||
vDSP_zvmul(&signal_fft, 1, &impulse_responses[i * 2 + 1], 1, &input_filtered_signal_per_channel[1], 1, fftSizeOver2, 1);
|
||||
|
||||
input_filtered_signal_per_channel[1].imagp[0] = preserveIRNyq * preserveSigNyq;
|
||||
impulse_responses[i * 2 + 1].imagp[0] = preserveIRNyq;
|
||||
|
||||
vDSP_zvadd(&input_filtered_signal_totals[0], 1, &input_filtered_signal_per_channel[0], 1, &input_filtered_signal_totals[0], 1, fftSizeOver2);
|
||||
vDSP_zvadd(&input_filtered_signal_totals[1], 1, &input_filtered_signal_per_channel[1], 1, &input_filtered_signal_totals[1], 1, fftSizeOver2);
|
||||
}
|
||||
|
||||
vDSP_DFT_Execute(dftSetupB, input_filtered_signal_totals[0].realp, input_filtered_signal_totals[0].imagp, input_filtered_signal_totals[0].realp, input_filtered_signal_totals[0].imagp);
|
||||
vDSP_DFT_Execute(dftSetupB, input_filtered_signal_totals[1].realp, input_filtered_signal_totals[1].imagp, input_filtered_signal_totals[1].realp, input_filtered_signal_totals[1].imagp);
|
||||
|
||||
vDSP_ztoc(&input_filtered_signal_totals[0], 1, (DSPComplex *)left_result, 2, fftSizeOver2);
|
||||
vDSP_ztoc(&input_filtered_signal_totals[1], 1, (DSPComplex *)right_result, 2, fftSizeOver2);
|
||||
|
||||
float *left_ptr = left_result + prevToDo;
|
||||
float *right_ptr = right_result + prevToDo;
|
||||
|
||||
vDSP_vsmul(left_ptr, 1, &scale, left_ptr, 1, countToDo);
|
||||
vDSP_vsmul(right_ptr, 1, &scale, right_ptr, 1, countToDo);
|
||||
|
||||
cblas_scopy((int)countToDo, left_ptr, 1, outBuffer + 0, 2);
|
||||
cblas_scopy((int)countToDo, right_ptr, 1, outBuffer + 1, 2);
|
||||
|
||||
inBuffer += countToDo * channelCount;
|
||||
outBuffer += countToDo * 2;
|
||||
|
||||
count -= countToDo;
|
||||
}
|
||||
}
|
||||
|
||||
- (void)reset {
|
||||
for(int i = 0; i < channelCount; ++i) {
|
||||
vDSP_vclr(prevInputs[i], 1, fftSize);
|
||||
}
|
||||
}
|
||||
|
||||
@end
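The imagp[0] save/zero/restore dance in process: above follows from vDSP's packed real-DFT output, where bin 0's imaginary slot carries the Nyquist component rather than a true imaginary part. A minimal standalone sketch of that convention, using only the vDSP calls the filter itself uses; the 16-point signal and its values are arbitrary and this is illustration only, not code from the PR:

// Standalone sketch of the packed real-DFT layout that process: relies on.
#import <Accelerate/Accelerate.h>
#import <stdio.h>

static void packedLayoutDemo(void) {
	const vDSP_Length n = 16; // zrop setups require a supported power-of-two-based length
	float signal[16] = { 1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1 };
	float re[9], im[9]; // n/2 + 1 elements, matching the "overwrites plus one" note in the filter
	DSPSplitComplex split;
	split.realp = re;
	split.imagp = im;

	vDSP_DFT_Setup setup = vDSP_DFT_zrop_CreateSetup(NULL, n, vDSP_DFT_FORWARD);
	if(!setup) return;

	// Pack the interleaved real signal into split-complex form, as the filter does.
	vDSP_ctoz((DSPComplex *)signal, 2, &split, 1, n / 2);
	vDSP_DFT_Execute(setup, split.realp, split.imagp, split.realp, split.imagp);

	// In this packed layout realp[0] holds the DC term and imagp[0] holds the Nyquist
	// term, which is why process: zeroes and restores imagp[0] around vDSP_zvmul
	// instead of treating it as the imaginary part of bin 0.
	printf("DC slot: %f, Nyquist slot: %f\n", re[0], im[0]);

	vDSP_DFT_DestroySetup(setup);
}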
|
|
@ -26,6 +26,8 @@ using std::atomic_long;
|
|||
|
||||
#import "VisualizationController.h"
|
||||
|
||||
#import "HeadphoneFilter.h"
|
||||
|
||||
//#define OUTPUT_LOG
|
||||
#ifdef OUTPUT_LOG
|
||||
#import <stdio.h>
|
||||
|
@ -55,8 +57,6 @@ using std::atomic_long;
|
|||
BOOL eqEnabled;
|
||||
BOOL eqInitialized;
|
||||
|
||||
BOOL dontRemix;
|
||||
|
||||
BOOL streamFormatStarted;
|
||||
|
||||
double secondsHdcdSustained;
|
||||
|
@ -71,12 +71,16 @@ using std::atomic_long;
|
|||
float eqPreamp;
|
||||
|
||||
AudioDeviceID outputDeviceID;
|
||||
AudioStreamBasicDescription realStreamFormat; // stream format pre-hrtf
|
||||
AudioStreamBasicDescription streamFormat; // stream format last seen in render callback
|
||||
AudioStreamBasicDescription realNewFormat; // in case of resampler flush
|
||||
AudioStreamBasicDescription newFormat; // in case of resampler flush
|
||||
|
||||
AudioStreamBasicDescription visFormat; // Mono format for vis
|
||||
|
||||
uint32_t realStreamChannelConfig;
|
||||
uint32_t streamChannelConfig;
|
||||
uint32_t realNewChannelConfig;
|
||||
uint32_t newChannelConfig;
|
||||
|
||||
AVSampleBufferAudioRenderer *audioRenderer;
|
||||
|
@ -100,7 +104,11 @@ using std::atomic_long;
|
|||
|
||||
VisualizationController *visController;
|
||||
|
||||
BOOL enableHrtf;
|
||||
HeadphoneFilter *hrtf;
|
||||
|
||||
float inputBuffer[2048 * 32]; // 2048 samples times maximum supported channel count
|
||||
float hrtfBuffer[2048 * 2];
|
||||
float eqBuffer[2048 * 32];
|
||||
|
||||
#ifdef OUTPUT_LOG
|
||||
|
|
|
@ -92,7 +92,7 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
|
|||
dstRate = maxSampleRate;
|
||||
formatClipped = YES;
|
||||
}
|
||||
if(!streamFormatStarted || config != streamChannelConfig || memcmp(&newFormat, &format, sizeof(format)) != 0) {
|
||||
if(!streamFormatStarted || config != realStreamChannelConfig || memcmp(&newFormat, &format, sizeof(format)) != 0) {
|
||||
[currentPtsLock lock];
|
||||
if(formatClipped) {
|
||||
ALog(@"Sample rate clipped to no more than %f Hz!", maxSampleRate);
|
||||
|
@ -117,8 +117,8 @@ static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioA
|
|||
|
||||
downmixerForVis = [[DownmixProcessor alloc] initWithInputFormat:format inputConfig:config andOutputFormat:visFormat outputConfig:AudioConfigMono];
|
||||
if(!r8bold) {
|
||||
streamFormat = format;
|
||||
streamChannelConfig = config;
|
||||
realStreamFormat = format;
|
||||
realStreamChannelConfig = config;
|
||||
[self updateStreamFormat];
|
||||
}
|
||||
}
|
||||
|
@ -301,8 +301,10 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
|
|||
} else if([keyPath isEqualToString:@"values.eqPreamp"]) {
|
||||
float preamp = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] floatForKey:@"eqPreamp"];
|
||||
eqPreamp = pow(10.0, preamp / 20.0);
|
||||
} else if([keyPath isEqualToString:@"values.dontRemix"]) {
|
||||
dontRemix = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"dontRemix"];
|
||||
} else if([keyPath isEqualToString:@"values.enableHrtf"]) {
|
||||
enableHrtf = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"enableHrtf"];
|
||||
if(streamFormatStarted)
|
||||
[self updateStreamFormat];
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -598,6 +600,25 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
|
|||
|
||||
- (void)updateStreamFormat {
|
||||
/* Set the channel layout for the audio queue */
|
||||
if(enableHrtf) {
|
||||
NSURL *presetUrl = [[NSBundle mainBundle] URLForResource:@"SADIE_D02-96000" withExtension:@"mhr"];
|
||||
|
||||
hrtf = [[HeadphoneFilter alloc] initWithImpulseFile:presetUrl forSampleRate:realStreamFormat.mSampleRate withInputChannels:realStreamFormat.mChannelsPerFrame withConfig:realStreamChannelConfig];
|
||||
|
||||
streamFormat = realStreamFormat;
|
||||
streamFormat.mChannelsPerFrame = 2;
|
||||
streamFormat.mBytesPerFrame = sizeof(float) * 2;
|
||||
streamFormat.mFramesPerPacket = 1;
|
||||
streamFormat.mBytesPerPacket = streamFormat.mBytesPerFrame;
|
||||
|
||||
streamChannelConfig = AudioChannelSideLeft | AudioChannelSideRight;
|
||||
} else {
|
||||
hrtf = nil;
|
||||
|
||||
streamFormat = realStreamFormat;
|
||||
streamChannelConfig = realStreamChannelConfig;
|
||||
}
|
||||
|
||||
AudioChannelLayoutTag tag = 0;
|
||||
|
||||
AudioChannelLayout layout = { 0 };
|
||||
|
@ -741,13 +762,18 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
|
|||
samplePtr = &inputBuffer[0];
|
||||
if(r8bDone) {
|
||||
r8bDone = NO;
|
||||
streamFormat = newFormat;
|
||||
streamChannelConfig = newChannelConfig;
|
||||
realStreamFormat = newFormat;
|
||||
realStreamChannelConfig = newChannelConfig;
|
||||
[self updateStreamFormat];
|
||||
}
|
||||
}
|
||||
|
||||
if(samplesRendered) {
|
||||
if(enableHrtf && hrtf) {
|
||||
[hrtf process:samplePtr sampleCount:samplesRendered toBuffer:&hrtfBuffer[0]];
|
||||
samplePtr = &hrtfBuffer[0];
|
||||
}
|
||||
|
||||
if(eqEnabled && eqInitialized) {
|
||||
const int channels = streamFormat.mChannelsPerFrame;
|
||||
if(channels > 0) {
|
||||
|
@ -939,7 +965,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
|
|||
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.outputDevice" options:0 context:kOutputAVFoundationContext];
|
||||
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.GraphicEQenable" options:0 context:kOutputAVFoundationContext];
|
||||
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.eqPreamp" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kOutputAVFoundationContext];
|
||||
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.dontRemix" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kOutputAVFoundationContext];
|
||||
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableHrtf" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kOutputAVFoundationContext];
|
||||
observersapplied = YES;
|
||||
|
||||
[renderSynchronizer addRenderer:audioRenderer];
|
||||
|
@ -1060,7 +1086,7 @@ current_device_listener(AudioObjectID inObjectID, UInt32 inNumberAddresses, cons
|
|||
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.outputDevice" context:kOutputAVFoundationContext];
|
||||
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.GraphicEQenable" context:kOutputAVFoundationContext];
|
||||
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.eqPreamp" context:kOutputAVFoundationContext];
|
||||
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.dontRemix" context:kOutputAVFoundationContext];
|
||||
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableHrtf" context:kOutputAVFoundationContext];
|
||||
observersapplied = NO;
|
||||
}
|
||||
stopping = YES;
|
||||
|
|
25
Audio/ThirdParty/hrtf/Endianness.h
vendored
Normal file
|
@@ -0,0 +1,25 @@
#pragma once

// The functions provide little endianness to native endianness conversion and back again
#if(defined(_MSC_VER) && defined(_WIN32)) || defined(__APPLE__)
template <typename T>
inline void from_little_endian_inplace(T& x) {
}

template <typename T>
inline T from_little_endian(T x) {
	return x;
}

template <typename T>
inline void to_little_endian_inplace(T& x) {
}

template <typename T>
inline T to_little_endian(T x) {
	return x;
}

#else
#error "Specify endianness conversion for your platform"
#endif
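The #else branch above only reports an error; on a big-endian target the conversions would need real byte swaps. A hedged sketch of what such a branch could look like, illustrative only and not part of the vendored header:

// Hypothetical big-endian fallback: reverse the byte order of a trivially
// copyable value so little-endian file data reads correctly on a big-endian host.
#include <algorithm>
#include <cstdint>

template <typename T>
inline T byteswapped(T x) {
	auto* bytes = reinterpret_cast<uint8_t*>(&x);
	std::reverse(bytes, bytes + sizeof(T));
	return x;
}

template <typename T>
inline T from_little_endian(T x) {
	return byteswapped(x);
}

template <typename T>
inline void from_little_endian_inplace(T& x) {
	x = byteswapped(x);
}

template <typename T>
inline T to_little_endian(T x) {
	return byteswapped(x);
}

template <typename T>
inline void to_little_endian_inplace(T& x) {
	x = byteswapped(x);
}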
640
Audio/ThirdParty/hrtf/HrtfData.cpp
vendored
Normal file
|
@ -0,0 +1,640 @@
|
|||
|
||||
#include "HrtfData.h"
|
||||
#include "Endianness.h"
|
||||
#include <algorithm>
|
||||
#include <cmath>
|
||||
|
||||
typedef struct {
|
||||
uint8_t bytes[3];
|
||||
} sample_int24_t;
|
||||
|
||||
const double pi = 3.1415926535897932385;
|
||||
|
||||
template <typename T>
|
||||
void read_stream(std::istream& stream, T& value) {
|
||||
stream.read(reinterpret_cast<std::istream::char_type*>(&value), sizeof(value));
|
||||
from_little_endian_inplace(value);
|
||||
}
|
||||
|
||||
HrtfData::HrtfData(std::istream& stream) {
|
||||
const char required_magic00[] = { 'M', 'i', 'n', 'P', 'H', 'R', '0', '0' };
|
||||
const char required_magic01[] = { 'M', 'i', 'n', 'P', 'H', 'R', '0', '1' };
|
||||
const char required_magic02[] = { 'M', 'i', 'n', 'P', 'H', 'R', '0', '2' };
|
||||
const char required_magic03[] = { 'M', 'i', 'n', 'P', 'H', 'R', '0', '3' };
|
||||
char actual_magic[sizeof(required_magic03) / sizeof(required_magic03[0])];
|
||||
|
||||
stream.read(actual_magic, sizeof(actual_magic));
|
||||
if(std::equal(std::begin(required_magic03), std::end(required_magic03), std::begin(actual_magic), std::end(actual_magic))) {
|
||||
LoadHrtf03(stream);
|
||||
} else if(std::equal(std::begin(required_magic02), std::end(required_magic02), std::begin(actual_magic), std::end(actual_magic))) {
|
||||
LoadHrtf02(stream);
|
||||
} else if(std::equal(std::begin(required_magic01), std::end(required_magic01), std::begin(actual_magic), std::end(actual_magic))) {
|
||||
LoadHrtf01(stream);
|
||||
} else if(std::equal(std::begin(required_magic00), std::end(required_magic00), std::begin(actual_magic), std::end(actual_magic))) {
|
||||
LoadHrtf00(stream);
|
||||
} else {
|
||||
throw std::logic_error("Bad file format.");
|
||||
}
|
||||
}
|
||||
|
||||
void HrtfData::LoadHrtf03(std::istream& stream) {
|
||||
// const uint8_t ChanType_LeftOnly{0};
|
||||
const uint8_t ChanType_LeftRight{ 1 };
|
||||
|
||||
uint32_t sample_rate;
|
||||
uint8_t channel_type;
|
||||
uint8_t impulse_response_length;
|
||||
uint8_t distances_count;
|
||||
|
||||
read_stream(stream, sample_rate);
|
||||
read_stream(stream, channel_type);
|
||||
read_stream(stream, impulse_response_length);
|
||||
read_stream(stream, distances_count);
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
if(channel_type > ChanType_LeftRight) {
|
||||
throw std::logic_error("Invalid channel format.");
|
||||
}
|
||||
|
||||
int channel_count = channel_type == ChanType_LeftRight ? 2 : 1;
|
||||
|
||||
std::vector<DistanceData> distances(distances_count);
|
||||
|
||||
for(uint8_t i = 0; i < distances_count; i++) {
|
||||
uint16_t distance;
|
||||
read_stream(stream, distance);
|
||||
distances[i].distance = float(distance) / 1000.0f;
|
||||
|
||||
uint8_t elevations_count;
|
||||
read_stream(stream, elevations_count);
|
||||
distances[i].elevations.resize(elevations_count);
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
for(uint8_t j = 0; j < elevations_count; j++) {
|
||||
uint8_t azimuth_count;
|
||||
read_stream(stream, azimuth_count);
|
||||
distances[i].elevations[j].azimuths.resize(azimuth_count);
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
}
|
||||
|
||||
const float normalization_factor = 1.0f / 8388608.0f;
|
||||
|
||||
for(auto& distance : distances) {
|
||||
for(auto& elevation : distance.elevations) {
|
||||
for(auto& azimuth : elevation.azimuths) {
|
||||
azimuth.impulse_response.resize(impulse_response_length * channel_count);
|
||||
for(auto& sample : azimuth.impulse_response) {
|
||||
union {
|
||||
sample_int24_t sample;
|
||||
int32_t sample_int;
|
||||
} sample_union;
|
||||
sample_union.sample_int = 0;
|
||||
read_stream(stream, sample_union.sample);
|
||||
sample_union.sample_int <<= 8;
|
||||
sample_union.sample_int >>= 8;
|
||||
sample = sample_union.sample_int * normalization_factor;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
uint8_t longest_delay = 0;
|
||||
for(auto& distance : distances) {
|
||||
for(auto& elevation : distance.elevations) {
|
||||
for(auto& azimuth : elevation.azimuths) {
|
||||
uint8_t delay;
|
||||
read_stream(stream, delay);
|
||||
azimuth.delay = delay;
|
||||
longest_delay = std::max(longest_delay, delay);
|
||||
if(channel_type == ChanType_LeftRight) {
|
||||
read_stream(stream, delay);
|
||||
azimuth.delay_right = delay;
|
||||
longest_delay = std::max(longest_delay, delay);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
std::sort(distances.begin(), distances.end(),
|
||||
[](const DistanceData& lhs, const DistanceData& rhs) noexcept { return lhs.distance > rhs.distance; });
|
||||
|
||||
m_distances = std::move(distances);
|
||||
m_channel_count = channel_count;
|
||||
m_response_length = impulse_response_length;
|
||||
m_sample_rate = sample_rate;
|
||||
m_longest_delay = longest_delay;
|
||||
}
|
||||
|
||||
void HrtfData::LoadHrtf02(std::istream& stream) {
|
||||
// const uint8_t SampleType_S16{0};
|
||||
const uint8_t SampleType_S24{ 1 };
|
||||
// const uint8_t ChanType_LeftOnly{0};
|
||||
const uint8_t ChanType_LeftRight{ 1 };
|
||||
|
||||
uint32_t sample_rate;
|
||||
uint8_t sample_type;
|
||||
uint8_t channel_type;
|
||||
uint8_t impulse_response_length;
|
||||
uint8_t distances_count;
|
||||
|
||||
read_stream(stream, sample_rate);
|
||||
read_stream(stream, sample_type);
|
||||
read_stream(stream, channel_type);
|
||||
read_stream(stream, impulse_response_length);
|
||||
read_stream(stream, distances_count);
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
if(sample_type > SampleType_S24) {
|
||||
throw std::logic_error("Invalid sample type.");
|
||||
}
|
||||
|
||||
if(channel_type > ChanType_LeftRight) {
|
||||
throw std::logic_error("Invalid channel format.");
|
||||
}
|
||||
|
||||
int channel_count = channel_type == ChanType_LeftRight ? 2 : 1;
|
||||
|
||||
std::vector<DistanceData> distances(distances_count);
|
||||
|
||||
for(uint8_t i = 0; i < distances_count; i++) {
|
||||
uint16_t distance;
|
||||
read_stream(stream, distance);
|
||||
distances[i].distance = float(distance) / 1000.0f;
|
||||
|
||||
uint8_t elevations_count;
|
||||
read_stream(stream, elevations_count);
|
||||
distances[i].elevations.resize(elevations_count);
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
for(uint8_t j = 0; j < elevations_count; j++) {
|
||||
uint8_t azimuth_count;
|
||||
read_stream(stream, azimuth_count);
|
||||
distances[i].elevations[j].azimuths.resize(azimuth_count);
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
}
|
||||
|
||||
const float normalization_factor = (sample_type == SampleType_S24) ? 1.0f / 8388608.0f : 1.0f / 32768.0f;
|
||||
|
||||
for(auto& distance : distances) {
|
||||
for(auto& elevation : distance.elevations) {
|
||||
for(auto& azimuth : elevation.azimuths) {
|
||||
azimuth.impulse_response.resize(impulse_response_length * channel_count);
|
||||
if(sample_type == SampleType_S24) {
|
||||
for(auto& sample : azimuth.impulse_response) {
|
||||
union {
|
||||
sample_int24_t sample;
|
||||
int32_t sample_int;
|
||||
} sample_union;
|
||||
sample_union.sample_int = 0;
|
||||
read_stream(stream, sample_union.sample);
|
||||
sample_union.sample_int <<= 8;
|
||||
sample_union.sample_int >>= 8;
|
||||
sample = sample_union.sample_int * normalization_factor;
|
||||
}
|
||||
} else {
|
||||
for(auto& sample : azimuth.impulse_response) {
|
||||
int16_t sample_from_file;
|
||||
read_stream(stream, sample_from_file);
|
||||
sample = sample_from_file * normalization_factor;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
uint8_t longest_delay = 0;
|
||||
for(auto& distance : distances) {
|
||||
for(auto& elevation : distance.elevations) {
|
||||
for(auto& azimuth : elevation.azimuths) {
|
||||
uint8_t delay;
|
||||
read_stream(stream, delay);
|
||||
azimuth.delay = delay;
|
||||
longest_delay = std::max(longest_delay, delay);
|
||||
if(channel_type == ChanType_LeftRight) {
|
||||
read_stream(stream, delay);
|
||||
azimuth.delay_right = delay;
|
||||
longest_delay = std::max(longest_delay, delay);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
std::sort(distances.begin(), distances.end(),
|
||||
[](const DistanceData& lhs, const DistanceData& rhs) noexcept { return lhs.distance > rhs.distance; });
|
||||
|
||||
m_distances = std::move(distances);
|
||||
m_channel_count = channel_count;
|
||||
m_response_length = impulse_response_length;
|
||||
m_sample_rate = sample_rate;
|
||||
m_longest_delay = longest_delay;
|
||||
}
|
||||
|
||||
void HrtfData::LoadHrtf01(std::istream& stream) {
|
||||
uint32_t sample_rate;
|
||||
uint8_t impulse_response_length;
|
||||
|
||||
read_stream(stream, sample_rate);
|
||||
read_stream(stream, impulse_response_length);
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
std::vector<DistanceData> distances(1);
|
||||
|
||||
distances[0].distance = 1.0;
|
||||
|
||||
uint8_t elevations_count;
|
||||
read_stream(stream, elevations_count);
|
||||
distances[0].elevations.resize(elevations_count);
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
for(uint8_t i = 0; i < elevations_count; i++) {
|
||||
uint8_t azimuth_count;
|
||||
read_stream(stream, azimuth_count);
|
||||
distances[0].elevations[i].azimuths.resize(azimuth_count);
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
const float normalization_factor = 1.0f / 32768.0f;
|
||||
|
||||
for(auto& elevation : distances[0].elevations) {
|
||||
for(auto& azimuth : elevation.azimuths) {
|
||||
azimuth.impulse_response.resize(impulse_response_length);
|
||||
for(auto& sample : azimuth.impulse_response) {
|
||||
int16_t sample_from_file;
|
||||
read_stream(stream, sample_from_file);
|
||||
sample = sample_from_file * normalization_factor;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
uint8_t longest_delay = 0;
|
||||
for(auto& elevation : distances[0].elevations) {
|
||||
for(auto& azimuth : elevation.azimuths) {
|
||||
uint8_t delay;
|
||||
read_stream(stream, delay);
|
||||
delay <<= 2;
|
||||
azimuth.delay = delay;
|
||||
longest_delay = std::max(longest_delay, delay);
|
||||
}
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
m_distances = std::move(distances);
|
||||
m_channel_count = 1;
|
||||
m_response_length = impulse_response_length;
|
||||
m_sample_rate = sample_rate;
|
||||
m_longest_delay = longest_delay;
|
||||
}
|
||||
|
||||
void HrtfData::LoadHrtf00(std::istream& stream) {
|
||||
uint32_t sample_rate;
|
||||
uint16_t impulse_response_count;
|
||||
uint16_t impulse_response_length;
|
||||
|
||||
read_stream(stream, sample_rate);
|
||||
read_stream(stream, impulse_response_count);
|
||||
read_stream(stream, impulse_response_length);
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
std::vector<DistanceData> distances(1);
|
||||
|
||||
distances[0].distance = 1.0;
|
||||
|
||||
uint8_t elevations_count;
|
||||
read_stream(stream, elevations_count);
|
||||
distances[0].elevations.resize(elevations_count);
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
std::vector<uint16_t> irOffsets(elevations_count);
|
||||
|
||||
for(uint8_t i = 0; i < elevations_count; i++) {
|
||||
read_stream(stream, irOffsets[i]);
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
for(size_t i = 1; i < elevations_count; i++) {
|
||||
if(irOffsets[i] <= irOffsets[i - 1]) {
|
||||
throw std::logic_error("Invalid elevation offset.");
|
||||
}
|
||||
}
|
||||
if(impulse_response_count <= irOffsets[elevations_count - 1]) {
|
||||
throw std::logic_error("Invalid elevation offset.");
|
||||
}
|
||||
|
||||
for(size_t i = 1; i < elevations_count; i++) {
|
||||
distances[0].elevations[i - 1].azimuths.resize(irOffsets[i] - irOffsets[i - 1]);
|
||||
}
|
||||
distances[0].elevations[elevations_count - 1].azimuths.resize(impulse_response_count - irOffsets[elevations_count - 1]);
|
||||
|
||||
const float normalization_factor = 1.0f / 32768.0f;
|
||||
|
||||
for(auto& elevation : distances[0].elevations) {
|
||||
for(auto& azimuth : elevation.azimuths) {
|
||||
azimuth.impulse_response.resize(impulse_response_length);
|
||||
for(auto& sample : azimuth.impulse_response) {
|
||||
int16_t sample_from_file;
|
||||
read_stream(stream, sample_from_file);
|
||||
sample = sample_from_file * normalization_factor;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
uint8_t longest_delay = 0;
|
||||
for(auto& elevation : distances[0].elevations) {
|
||||
for(auto& azimuth : elevation.azimuths) {
|
||||
uint8_t delay;
|
||||
read_stream(stream, delay);
|
||||
delay <<= 2;
|
||||
azimuth.delay = delay;
|
||||
longest_delay = std::max(longest_delay, delay);
|
||||
}
|
||||
}
|
||||
|
||||
if(!stream || stream.eof()) {
|
||||
throw std::logic_error("Failed reading file.");
|
||||
}
|
||||
|
||||
m_distances = std::move(distances);
|
||||
m_channel_count = 1;
|
||||
m_response_length = impulse_response_length;
|
||||
m_sample_rate = sample_rate;
|
||||
m_longest_delay = longest_delay;
|
||||
}
|
||||
|
void HrtfData::get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t channel, DirectionData& ref_data) const {
	assert(elevation >= -angle_t(pi * 0.5));
	assert(elevation <= angle_t(pi * 0.5));
	assert(azimuth >= -angle_t(2.0 * pi));
	assert(azimuth <= angle_t(2.0 * pi));

	const float azimuth_mod = std::fmod(azimuth + angle_t(pi * 2.0), angle_t(pi * 2.0));

	size_t distance_index0 = 0;
	while(distance_index0 < m_distances.size() - 1 &&
	      m_distances[distance_index0].distance > distance) {
		distance_index0++;
	}
	const size_t distance_index1 = std::min(distance_index0 + 1, m_distances.size() - 1);
	const distance_t distance0 = m_distances[distance_index0].distance;
	const distance_t distance1 = m_distances[distance_index1].distance;
	const distance_t distance_delta = distance0 - distance1;
	const float distance_fractional_part = distance_delta ? (distance - distance1) / distance_delta : 0;

	const auto& elevations0 = m_distances[distance_index0].elevations;
	const auto& elevations1 = m_distances[distance_index1].elevations;

	const angle_t elevation_scaled0 = (elevation + angle_t(pi * 0.5)) * (elevations0.size() - 1) / angle_t(pi);
	const angle_t elevation_scaled1 = (elevation + angle_t(pi * 0.5)) * (elevations1.size() - 1) / angle_t(pi);
	const size_t elevation_index00 = static_cast<size_t>(elevation_scaled0);
	const size_t elevation_index10 = static_cast<size_t>(elevation_scaled1);
	const size_t elevation_index01 = std::min(elevation_index00 + 1, elevations0.size() - 1);
	const size_t elevation_index11 = std::min(elevation_index10 + 1, elevations1.size() - 1);

	const float elevation_fractional_part0 = std::fmod(elevation_scaled0, 1.0);
	const float elevation_fractional_part1 = std::fmod(elevation_scaled1, 1.0);

	const angle_t azimuth_scaled00 = azimuth_mod * elevations0[elevation_index00].azimuths.size() / angle_t(2 * pi);
	const size_t azimuth_index000 = static_cast<size_t>(azimuth_scaled00) % elevations0[elevation_index00].azimuths.size();
	const size_t azimuth_index001 = static_cast<size_t>(azimuth_scaled00 + 1) % elevations0[elevation_index00].azimuths.size();
	const float azimuth_fractional_part00 = std::fmod(azimuth_scaled00, 1.0);

	const angle_t azimuth_scaled10 = azimuth_mod * elevations1[elevation_index10].azimuths.size() / angle_t(2 * pi);
	const size_t azimuth_index100 = static_cast<size_t>(azimuth_scaled10) % elevations1[elevation_index10].azimuths.size();
	const size_t azimuth_index101 = static_cast<size_t>(azimuth_scaled10 + 1) % elevations1[elevation_index10].azimuths.size();
	const float azimuth_fractional_part10 = std::fmod(azimuth_scaled10, 1.0);

	const angle_t azimuth_scaled01 = azimuth_mod * elevations0[elevation_index01].azimuths.size() / angle_t(2 * pi);
	const size_t azimuth_index010 = static_cast<size_t>(azimuth_scaled01) % elevations0[elevation_index01].azimuths.size();
	const size_t azimuth_index011 = static_cast<size_t>(azimuth_scaled01 + 1) % elevations0[elevation_index01].azimuths.size();
	const float azimuth_fractional_part01 = std::fmod(azimuth_scaled01, 1.0);

	const angle_t azimuth_scaled11 = azimuth_mod * elevations1[elevation_index11].azimuths.size() / angle_t(2 * pi);
	const size_t azimuth_index110 = static_cast<size_t>(azimuth_scaled11) % elevations1[elevation_index11].azimuths.size();
	const size_t azimuth_index111 = static_cast<size_t>(azimuth_scaled11 + 1) % elevations1[elevation_index11].azimuths.size();
	const float azimuth_fractional_part11 = std::fmod(azimuth_scaled11, 1.0);

	const float blend_factor_000 = (1.0f - elevation_fractional_part0) * (1.0f - azimuth_fractional_part00) * distance_fractional_part;
	const float blend_factor_001 = (1.0f - elevation_fractional_part0) * azimuth_fractional_part00 * distance_fractional_part;
	const float blend_factor_010 = elevation_fractional_part0 * (1.0f - azimuth_fractional_part01) * distance_fractional_part;
	const float blend_factor_011 = elevation_fractional_part0 * azimuth_fractional_part01 * distance_fractional_part;

	const float blend_factor_100 = (1.0f - elevation_fractional_part1) * (1.0f - azimuth_fractional_part10) * (1.0f - distance_fractional_part);
	const float blend_factor_101 = (1.0f - elevation_fractional_part1) * azimuth_fractional_part10 * (1.0f - distance_fractional_part);
	const float blend_factor_110 = elevation_fractional_part1 * (1.0f - azimuth_fractional_part11) * (1.0f - distance_fractional_part);
	const float blend_factor_111 = elevation_fractional_part1 * azimuth_fractional_part11 * (1.0f - distance_fractional_part);

	delay_t delay0;
	delay_t delay1;

	if(channel == 0) {
		delay0 =
			elevations0[elevation_index00].azimuths[azimuth_index000].delay * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].delay * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].delay * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].delay * blend_factor_011;

		delay1 =
			elevations1[elevation_index10].azimuths[azimuth_index100].delay * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].delay * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].delay * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].delay * blend_factor_111;
	} else {
		delay0 =
			elevations0[elevation_index00].azimuths[azimuth_index000].delay_right * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].delay_right * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].delay_right * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].delay_right * blend_factor_011;

		delay1 =
			elevations1[elevation_index10].azimuths[azimuth_index100].delay_right * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].delay_right * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].delay_right * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].delay_right * blend_factor_111;
	}

	ref_data.delay = delay0 + delay1;

	if(ref_data.impulse_response.size() < m_response_length)
		ref_data.impulse_response.resize(m_response_length);

	for(size_t i = 0, j = channel; i < m_response_length; i++, j += m_channel_count) {
		float sample0 =
			elevations0[elevation_index00].azimuths[azimuth_index000].impulse_response[j] * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].impulse_response[j] * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].impulse_response[j] * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].impulse_response[j] * blend_factor_011;
		float sample1 =
			elevations1[elevation_index10].azimuths[azimuth_index100].impulse_response[j] * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].impulse_response[j] * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].impulse_response[j] * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].impulse_response[j] * blend_factor_111;

		ref_data.impulse_response[i] = sample0 + sample1;
	}
}

void HrtfData::get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, DirectionData& ref_data_left, DirectionData& ref_data_right) const {
	assert(elevation >= -angle_t(pi * 0.5));
	assert(elevation <= angle_t(pi * 0.5));
	assert(azimuth >= -angle_t(2.0 * pi));
	assert(azimuth <= angle_t(2.0 * pi));

	get_direction_data(elevation, azimuth, distance, 0, ref_data_left);
	if(m_channel_count == 1) {
		get_direction_data(elevation, -azimuth, distance, 0, ref_data_right);
	} else {
		get_direction_data(elevation, azimuth, distance, 1, ref_data_right);
	}
}

void HrtfData::sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, uint32_t channel, float& value, float& delay) const {
	assert(elevation >= -angle_t(pi * 0.5));
	assert(elevation <= angle_t(pi * 0.5));
	assert(azimuth >= -angle_t(2.0 * pi));
	assert(azimuth <= angle_t(2.0 * pi));

	size_t distance_index0 = 0;
	while(distance_index0 < m_distances.size() - 1 &&
	      m_distances[distance_index0].distance > distance) {
		distance_index0++;
	}
	const size_t distance_index1 = std::min(distance_index0 + 1, m_distances.size() - 1);
	const distance_t distance0 = m_distances[distance_index0].distance;
	const distance_t distance1 = m_distances[distance_index1].distance;
	const distance_t distance_delta = distance0 - distance1;
	const float distance_fractional_part = distance_delta ? (distance - distance1) / distance_delta : 0;

	const auto& elevations0 = m_distances[distance_index0].elevations;
	const auto& elevations1 = m_distances[distance_index1].elevations;

	const float azimuth_mod = std::fmod(azimuth + angle_t(pi * 2.0), angle_t(pi * 2.0));

	const angle_t elevation_scaled0 = (elevation + angle_t(pi * 0.5)) * (elevations0.size() - 1) / angle_t(pi);
	const angle_t elevation_scaled1 = (elevation + angle_t(pi * 0.5)) * (elevations1.size() - 1) / angle_t(pi);
	const size_t elevation_index00 = static_cast<size_t>(elevation_scaled0);
	const size_t elevation_index10 = static_cast<size_t>(elevation_scaled1);
	const size_t elevation_index01 = std::min(elevation_index00 + 1, elevations0.size() - 1);
	const size_t elevation_index11 = std::min(elevation_index10 + 1, elevations1.size() - 1);

	const float elevation_fractional_part0 = std::fmod(elevation_scaled0, 1.0);
	const float elevation_fractional_part1 = std::fmod(elevation_scaled1, 1.0);

	const angle_t azimuth_scaled00 = azimuth_mod * elevations0[elevation_index00].azimuths.size() / angle_t(2 * pi);
	const size_t azimuth_index000 = static_cast<size_t>(azimuth_scaled00) % elevations0[elevation_index00].azimuths.size();
	const size_t azimuth_index001 = static_cast<size_t>(azimuth_scaled00 + 1) % elevations0[elevation_index00].azimuths.size();
	const float azimuth_fractional_part00 = std::fmod(azimuth_scaled00, 1.0);

	const angle_t azimuth_scaled10 = azimuth_mod * elevations1[elevation_index10].azimuths.size() / angle_t(2 * pi);
	const size_t azimuth_index100 = static_cast<size_t>(azimuth_scaled10) % elevations1[elevation_index10].azimuths.size();
	const size_t azimuth_index101 = static_cast<size_t>(azimuth_scaled10 + 1) % elevations1[elevation_index10].azimuths.size();
	const float azimuth_fractional_part10 = std::fmod(azimuth_scaled10, 1.0);

	const angle_t azimuth_scaled01 = azimuth_mod * elevations0[elevation_index01].azimuths.size() / angle_t(2 * pi);
	const size_t azimuth_index010 = static_cast<size_t>(azimuth_scaled01) % elevations0[elevation_index01].azimuths.size();
	const size_t azimuth_index011 = static_cast<size_t>(azimuth_scaled01 + 1) % elevations0[elevation_index01].azimuths.size();
	const float azimuth_fractional_part01 = std::fmod(azimuth_scaled01, 1.0);

	const angle_t azimuth_scaled11 = azimuth_mod * elevations1[elevation_index11].azimuths.size() / angle_t(2 * pi);
	const size_t azimuth_index110 = static_cast<size_t>(azimuth_scaled11) % elevations1[elevation_index11].azimuths.size();
	const size_t azimuth_index111 = static_cast<size_t>(azimuth_scaled11 + 1) % elevations1[elevation_index11].azimuths.size();
	const float azimuth_fractional_part11 = std::fmod(azimuth_scaled11, 1.0);

	const float blend_factor_000 = (1.0f - elevation_fractional_part0) * (1.0f - azimuth_fractional_part00) * distance_fractional_part;
	const float blend_factor_001 = (1.0f - elevation_fractional_part0) * azimuth_fractional_part00 * distance_fractional_part;
	const float blend_factor_010 = elevation_fractional_part0 * (1.0f - azimuth_fractional_part01) * distance_fractional_part;
	const float blend_factor_011 = elevation_fractional_part0 * azimuth_fractional_part01 * distance_fractional_part;

	const float blend_factor_100 = (1.0f - elevation_fractional_part1) * (1.0f - azimuth_fractional_part10) * (1.0f - distance_fractional_part);
	const float blend_factor_101 = (1.0f - elevation_fractional_part1) * azimuth_fractional_part10 * (1.0f - distance_fractional_part);
	const float blend_factor_110 = elevation_fractional_part1 * (1.0f - azimuth_fractional_part11) * (1.0f - distance_fractional_part);
	const float blend_factor_111 = elevation_fractional_part1 * azimuth_fractional_part11 * (1.0f - distance_fractional_part);

	float delay0;
	float delay1;

	if(channel == 0) {
		delay0 =
			elevations0[elevation_index00].azimuths[azimuth_index000].delay * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].delay * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].delay * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].delay * blend_factor_011;

		delay1 =
			elevations1[elevation_index10].azimuths[azimuth_index100].delay * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].delay * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].delay * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].delay * blend_factor_111;
	} else {
		delay0 =
			elevations0[elevation_index00].azimuths[azimuth_index000].delay_right * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].delay_right * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].delay_right * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].delay_right * blend_factor_011;

		delay1 =
			elevations1[elevation_index10].azimuths[azimuth_index100].delay_right * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].delay_right * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].delay_right * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].delay_right * blend_factor_111;
	}

	delay = delay0 + delay1;

	sample = sample * m_channel_count + channel;

	float value0 =
		elevations0[elevation_index00].azimuths[azimuth_index000].impulse_response[sample] * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].impulse_response[sample] * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].impulse_response[sample] * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].impulse_response[sample] * blend_factor_011;

	float value1 =
		elevations1[elevation_index10].azimuths[azimuth_index100].impulse_response[sample] * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].impulse_response[sample] * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].impulse_response[sample] * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].impulse_response[sample] * blend_factor_111;

	value = value0 + value1;
}

void HrtfData::sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, float& value_left, float& delay_left, float& value_right, float& delay_right) const {
	assert(elevation >= -angle_t(pi * 0.5));
	assert(elevation <= angle_t(pi * 0.5));
	assert(azimuth >= -angle_t(2.0 * pi));
	assert(azimuth <= angle_t(2.0 * pi));

	sample_direction(elevation, azimuth, distance, sample, 0, value_left, delay_left);
	if(m_channel_count == 1) {
		sample_direction(elevation, -azimuth, distance, sample, 0, value_right, delay_right);
	} else {
		sample_direction(elevation, azimuth, distance, sample, 1, value_right, delay_right);
	}
}
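
Note (not part of the patch): the eight blend_factor_* values computed above are ordinary trilinear interpolation weights over distance, elevation, and azimuth, so they always sum to 1 regardless of the fractional parts. A minimal standalone sketch with arbitrarily chosen fractional parts, for illustration only:

#include <cstdio>

int main() {
	// Arbitrary example values; the two azimuth fractions may differ because each
	// elevation ring can have a different azimuth count, as in the loader above.
	const float elev_frac = 0.25f;
	const float azim_frac_a = 0.6f, azim_frac_b = 0.3f;
	const float dist_frac = 0.4f;

	const float w[8] = {
		(1.0f - elev_frac) * (1.0f - azim_frac_a) * dist_frac,
		(1.0f - elev_frac) * azim_frac_a * dist_frac,
		elev_frac * (1.0f - azim_frac_b) * dist_frac,
		elev_frac * azim_frac_b * dist_frac,
		(1.0f - elev_frac) * (1.0f - azim_frac_a) * (1.0f - dist_frac),
		(1.0f - elev_frac) * azim_frac_a * (1.0f - dist_frac),
		elev_frac * (1.0f - azim_frac_b) * (1.0f - dist_frac),
		elev_frac * azim_frac_b * (1.0f - dist_frac),
	};

	float sum = 0.0f;
	for(float weight : w) sum += weight;
	std::printf("sum of blend factors = %f\n", sum); // prints 1.000000 (up to rounding)
	return 0;
}
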
Audio/ThirdParty/hrtf/HrtfData.h (new vendored file, 48 lines)

@ -0,0 +1,48 @@
#pragma once

#include "HrtfTypes.h"
#include "IHrtfData.h"
#include <cstdint>
#include <iostream>
#include <vector>

struct ElevationData {
	std::vector<DirectionData> azimuths;
};

struct DistanceData {
	distance_t distance;
	std::vector<ElevationData> elevations;
};

class HrtfData : public IHrtfData {
	void LoadHrtf00(std::istream& stream);
	void LoadHrtf01(std::istream& stream);
	void LoadHrtf02(std::istream& stream);
	void LoadHrtf03(std::istream& stream);

	public:
	HrtfData(std::istream& stream);

	void get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t channel, DirectionData& ref_data) const override;
	void get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, DirectionData& ref_data_left, DirectionData& ref_data_right) const override;
	void sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, uint32_t channel, float& value, float& delay) const override;
	void sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, float& value_left, float& delay_left, float& value_right, float& delay_right) const override;

	uint32_t get_sample_rate() const override {
		return m_sample_rate;
	}
	uint32_t get_response_length() const override {
		return m_response_length;
	}
	uint32_t get_longest_delay() const override {
		return m_longest_delay;
	}

	private:
	uint32_t m_sample_rate;
	uint32_t m_response_length;
	uint32_t m_longest_delay;
	uint32_t m_channel_count;
	std::vector<DistanceData> m_distances;
};
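
For orientation, a minimal caller of the class above might look like the following sketch (not part of the patch; the file name, direction, and missing error handling are illustrative only):

#include <fstream>

#include "HrtfData.h"

int main() {
	// Any MHR data set accepted by the loaders will do; the bundled file is used as an example.
	std::ifstream file("SADIE_D02-96000.mhr", std::ios::binary);
	HrtfData hrtf(file); // the loaders throw std::logic_error on malformed data

	const float pi = 3.14159265358979f;
	DirectionData left, right;
	// Ear-level source, 30 degrees off center, at the data set's unit distance.
	hrtf.get_direction_data(0.0f, pi / 6.0f, 1.0f, left, right);

	// Each DirectionData now holds get_response_length() interpolated IR samples,
	// plus an onset delay in the loader's internal fixed-point units.
	return 0;
}
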
Audio/ThirdParty/hrtf/HrtfTypes.h (new vendored file, 14 lines)

@ -0,0 +1,14 @@
#pragma once

#include <cstdint>
#include <vector>

typedef float distance_t;
typedef float angle_t;
typedef int delay_t;

struct DirectionData {
	std::vector<float> impulse_response;
	delay_t delay;
	delay_t delay_right;
};
Audio/ThirdParty/hrtf/IHrtfData.h (new vendored file, 19 lines)

@ -0,0 +1,19 @@
#pragma once

#include "HrtfTypes.h"

class IHrtfData {
	public:
	virtual ~IHrtfData() = default;

	virtual void get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t channel, DirectionData& ref_data) const = 0;
	virtual void get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, DirectionData& ref_data_left, DirectionData& ref_data_right) const = 0;
	// Get only one IR sample at the given direction. The delay returned is the delay of the IR's beginning, not the sample's!
	virtual void sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, uint32_t channel, float& value, float& delay) const = 0;
	// Get only one IR sample at the given direction, for both channels. The delay returned is the delay of the IR's beginning, not the sample's!
	virtual void sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, float& value_left, float& delay_left, float& value_right, float& delay_right) const = 0;

	virtual uint32_t get_sample_rate() const = 0;
	virtual uint32_t get_response_length() const = 0;
	virtual uint32_t get_longest_delay() const = 0;
};
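
As a further illustration (also not part of the patch), the per-sample entry point can be used to pull a response one tap at a time; the helper below is hypothetical and exists only to show the calling convention:

#include <cstdint>
#include <vector>

#include "IHrtfData.h"

// Hypothetical helper: fills one ear's impulse response by querying single taps.
std::vector<float> pull_response(const IHrtfData& data, angle_t elevation, angle_t azimuth, distance_t distance, uint32_t channel) {
	std::vector<float> ir(data.get_response_length());
	float delay = 0.0f;
	for(uint32_t i = 0; i < data.get_response_length(); i++) {
		// As the comments above note, `delay` receives the IR onset delay,
		// which is the same value for every tap of a given direction.
		data.sample_direction(elevation, azimuth, distance, i, channel, ir[i], delay);
	}
	return ir;
}
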
@ -184,6 +184,7 @@
8399D4E21805A55000B503B1 /* XmlContainer.m in Sources */ = {isa = PBXBuildFile; fileRef = 8399D4E01805A55000B503B1 /* XmlContainer.m */; };
839B837F286D7F8D00F529EE /* NumberHertzToStringTransformer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 839B837E286D7F8D00F529EE /* NumberHertzToStringTransformer.swift */; };
839DA7CF274A2D4C001B18E5 /* NSDictionary+Merge.m in Sources */ = {isa = PBXBuildFile; fileRef = 839DA7CE274A2D4C001B18E5 /* NSDictionary+Merge.m */; };
839E56F52879625100DFB5F4 /* SADIE_D02-96000.mhr in Resources */ = {isa = PBXBuildFile; fileRef = 839E56F12879625100DFB5F4 /* SADIE_D02-96000.mhr */; };
83A360B220E4E81D00192DAB /* Flac.bundle in CopyFiles */ = {isa = PBXBuildFile; fileRef = 8303A30C20E4E3D000951EF8 /* Flac.bundle */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
83A3B734283AE89000CC6593 /* ColorToValueTransformer.m in Sources */ = {isa = PBXBuildFile; fileRef = 83A3B72F283AE6AA00CC6593 /* ColorToValueTransformer.m */; };
83AA7D04279EBCA900087AA4 /* libavcodec.59.dylib in CopyFiles */ = {isa = PBXBuildFile; fileRef = 83AA7D00279EBC8200087AA4 /* libavcodec.59.dylib */; settings = {ATTRIBUTES = (CodeSignOnCopy, ); }; };

@ -1026,6 +1027,7 @@
839DA7CB274A2D4C001B18E5 /* NSDictionary+Merge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "NSDictionary+Merge.h"; sourceTree = "<group>"; };
839DA7CE274A2D4C001B18E5 /* NSDictionary+Merge.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "NSDictionary+Merge.m"; sourceTree = "<group>"; };
839E3B53286595D700880EA2 /* GeneralPane.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GeneralPane.h; path = Preferences/Preferences/GeneralPane.h; sourceTree = "<group>"; };
839E56F12879625100DFB5F4 /* SADIE_D02-96000.mhr */ = {isa = PBXFileReference; lastKnownFileType = file; path = "SADIE_D02-96000.mhr"; sourceTree = "<group>"; };
83A3B72F283AE6AA00CC6593 /* ColorToValueTransformer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; name = ColorToValueTransformer.m; path = Preferences/Preferences/ColorToValueTransformer.m; sourceTree = "<group>"; };
83A3B733283AE6AA00CC6593 /* ColorToValueTransformer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = ColorToValueTransformer.h; path = Preferences/Preferences/ColorToValueTransformer.h; sourceTree = "<group>"; };
83AA7D00279EBC8200087AA4 /* libavcodec.59.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libavcodec.59.dylib; path = ThirdParty/ffmpeg/lib/libavcodec.59.dylib; sourceTree = "<group>"; };

@ -1536,6 +1538,7 @@
29B97317FDCFA39411CA2CEA /* Resources */ = {
isa = PBXGroup;
children = (
839E56F12879625100DFB5F4 /* SADIE_D02-96000.mhr */,
837DC92F285B3F790005C58A /* DataModel.xcdatamodeld */,
8316B3922839FFD5004CC392 /* Scenes.scnassets */,
832C1252180BD1E2005507C1 /* Cog.help */,

@ -2486,6 +2489,7 @@
8384916D18083EAB00E7332D /* volume1Template.pdf in Resources */,
171B57DD0C091F2B00F6AFAF /* flac.icns in Resources */,
171B57DE0C091F2B00F6AFAF /* m4a.icns in Resources */,
839E56F52879625100DFB5F4 /* SADIE_D02-96000.mhr in Resources */,
8384916C18083EAB00E7332D /* stopTemplate.pdf in Resources */,
830C37A127B95E3000E02BB0 /* Equalizer.xib in Resources */,
171B57DF0C091F2B00F6AFAF /* mp3.icns in Resources */,

@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.Cocoa.XIB" version="3.0" toolsVersion="20037" targetRuntime="MacOSX.Cocoa" propertyAccessControl="none" useAutolayout="YES">
<document type="com.apple.InterfaceBuilder3.Cocoa.XIB" version="3.0" toolsVersion="21179.7" targetRuntime="MacOSX.Cocoa" propertyAccessControl="none" useAutolayout="YES">
<dependencies>
<deployment identifier="macosx"/>
<plugIn identifier="com.apple.InterfaceBuilder.CocoaPlugin" version="20037"/>
<plugIn identifier="com.apple.InterfaceBuilder.CocoaPlugin" version="21179.7"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>

@ -207,11 +207,11 @@
</connections>
</customObject>
<customView id="58" userLabel="OutputView">
<rect key="frame" x="0.0" y="0.0" width="530" height="108"/>
<rect key="frame" x="0.0" y="0.0" width="530" height="130"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<popUpButton verticalHuggingPriority="750" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="60">
<rect key="frame" x="144" y="35" width="370" height="26"/>
<rect key="frame" x="144" y="42" width="370" height="26"/>
<autoresizingMask key="autoresizingMask"/>
<popUpButtonCell key="cell" type="push" title="Item1" bezelStyle="rounded" alignment="left" lineBreakMode="truncatingTail" state="on" borderStyle="borderAndBezel" inset="2" arrowPosition="arrowAtCenter" preferredEdge="maxY" selectedItem="62" id="210">
<behavior key="behavior" lightByBackground="YES" lightByGray="YES"/>

@ -232,7 +232,7 @@
</connections>
</popUpButton>
<textField horizontalHuggingPriority="251" verticalHuggingPriority="750" fixedFrame="YES" allowsCharacterPickerTouchBarItem="YES" translatesAutoresizingMaskIntoConstraints="NO" id="zkP-2E-1Kc">
<rect key="frame" x="17" y="12" width="123" height="17"/>
<rect key="frame" x="17" y="20" width="123" height="17"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMinY="YES"/>
<textFieldCell key="cell" scrollable="YES" lineBreakMode="clipping" sendsActionOnEndEditing="YES" alignment="right" title="Volume Level:" id="wK4-EF-8Wa">
<font key="font" metaFont="system"/>

@ -241,7 +241,7 @@
</textFieldCell>
</textField>
<popUpButton verticalHuggingPriority="750" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="2v7-Ef-ekr">
<rect key="frame" x="144" y="6" width="370" height="26"/>
<rect key="frame" x="144" y="14" width="370" height="26"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMinY="YES"/>
<popUpButtonCell key="cell" type="push" bezelStyle="rounded" alignment="left" lineBreakMode="truncatingTail" borderStyle="borderAndBezel" imageScaling="proportionallyDown" inset="2" id="vmS-eb-zen">
<behavior key="behavior" lightByBackground="YES" lightByGray="YES"/>

@ -262,7 +262,7 @@
</connections>
</popUpButton>
<textField verticalHuggingPriority="750" horizontalCompressionResistancePriority="250" fixedFrame="YES" allowsCharacterPickerTouchBarItem="YES" translatesAutoresizingMaskIntoConstraints="NO" id="65">
<rect key="frame" x="6" y="41" width="134" height="17"/>
<rect key="frame" x="6" y="48" width="134" height="17"/>
<autoresizingMask key="autoresizingMask"/>
<textFieldCell key="cell" sendsActionOnEndEditing="YES" alignment="right" title="Output Device: " id="211">
<font key="font" metaFont="system"/>

@ -271,7 +271,7 @@
</textFieldCell>
</textField>
<button verticalHuggingPriority="750" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="DhK-tx-xFv">
<rect key="frame" x="18" y="71" width="492" height="18"/>
<rect key="frame" x="18" y="93" width="492" height="18"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMinY="YES"/>
<buttonCell key="cell" type="check" title="Limit volume control to 100%" bezelStyle="regularSquare" imagePosition="left" state="on" inset="2" id="ds2-aw-ebU">
<behavior key="behavior" changeContents="YES" doesNotDimImage="YES" lightByContents="YES"/>

@ -281,8 +281,19 @@
<binding destination="52" name="value" keyPath="values.volumeLimit" id="7Sl-LJ-ljd"/>
</connections>
</button>
<button verticalHuggingPriority="750" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="rG5-80-FId">
<rect key="frame" x="18" y="71" width="492" height="18"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMinY="YES"/>
<buttonCell key="cell" type="check" title="Enable HRTF filter (Not needed with AirPods or Beats)" bezelStyle="regularSquare" imagePosition="left" state="on" inset="2" id="NGx-0c-WVR">
<behavior key="behavior" changeContents="YES" doesNotDimImage="YES" lightByContents="YES"/>
<font key="font" metaFont="system"/>
</buttonCell>
<connections>
<binding destination="52" name="value" keyPath="values.enableHrtf" id="BD0-cP-SfB"/>
</connections>
</button>
</subviews>
<point key="canvasLocation" x="-151" y="319"/>
<point key="canvasLocation" x="-151" y="330"/>
</customView>
<customObject id="i5B-ga-Atm" userLabel="MIDIPane" customClass="MIDIPane">
<connections>

@ -363,7 +374,7 @@
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMinY="YES"/>
<clipView key="contentView" id="gUE-Yu-LLA">
<rect key="frame" x="1" y="1" width="488" height="113"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<autoresizingMask key="autoresizingMask"/>
<subviews>
<tableView verticalHuggingPriority="750" allowsExpansionToolTips="YES" columnAutoresizingStyle="lastColumnOnly" autosaveColumns="NO" rowHeight="24" rowSizeStyle="automatic" headerView="9rQ-Rq-K6J" viewBased="YES" id="gHG-xw-OyR">
<rect key="frame" x="0.0" y="0.0" width="488" height="85"/>

@ -385,7 +396,7 @@
<tableColumnResizingMask key="resizingMask" resizeWithTable="YES" userResizable="YES"/>
<prototypeCellViews>
<tableCellView id="EEt-uJ-j6o">
<rect key="frame" x="8" y="0.0" width="386" height="24"/>
<rect key="frame" x="18" y="0.0" width="386" height="24"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<textField horizontalHuggingPriority="251" verticalHuggingPriority="750" horizontalCompressionResistancePriority="250" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="7l9-R2-FVF">

@ -420,7 +431,7 @@
<tableColumnResizingMask key="resizingMask" resizeWithTable="YES" userResizable="YES"/>
<prototypeCellViews>
<tableCellView id="EDi-gi-Vg9">
<rect key="frame" x="411" y="0.0" width="38" height="24"/>
<rect key="frame" x="421" y="0.0" width="38" height="24"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<textField horizontalHuggingPriority="251" verticalHuggingPriority="750" horizontalCompressionResistancePriority="250" fixedFrame="YES" translatesAutoresizingMaskIntoConstraints="NO" id="0TJ-dK-Rfk">

@ -230,3 +230,5 @@
/* Class = "NSButtonCell"; title = "Use 3D rendered spectrum"; ObjectID = "NMg-TO-amV"; */
"NMg-TO-amV.title" = "Use 3D rendered spectrum";

/* Class = "NSButtonCell"; title = "Enable HRTF filter (Not needed with AirPods or Beats)"; ObjectID = "NGx-0c-WVR"; */
"NGx-0c-WVR.title" = "Enable HRTF filter (Not needed with AirPods or Beats)";

@ -104,7 +104,7 @@
"FQF-vJ-hBx.title" = "Variedad de MIDI:";

/* Class = "NSButtonCell"; title = "Resume playback on startup"; ObjectID = "fUg-Cg-gXa"; */
"fUg-Cg-gXa.title" = "Resumir reproducción al iniciar";
"fUg-Cg-gXa.title" = "Resumir reproducción al abrir";

/* Class = "NSButtonCell"; title = "Colorful dock icons"; ObjectID = "GdX-5e-NeU"; */
"GdX-5e-NeU.title" = "Colorear icono del dock";

@ -244,3 +244,8 @@
/* Class = "NSButtonCell"; title = "Use 3D rendered spectrum"; ObjectID = "NMg-TO-amV"; */
"NMg-TO-amV.title" = "Usar analizador en tres dimensiones";

/* Class = "NSButtonCell"; title = "Enable HRTF filter (Not needed with AirPods or Beats)"; ObjectID = "NGx-0c-WVR"; */
"NGx-0c-WVR.title" = "Activar filtro HRTF (no es necesario con AirPods o Beats)";

/* Class = "NSButtonCell"; title = "Automatically check for updates on startup"; ObjectID = "207"; */
"207.title" = "Buscar actualizaciones al abrir";

SADIE_D02-96000.mhr (new binary file, not shown)

@ -86,3 +86,5 @@
"dhms" = "%@, %@, %@ and %@";
"hms" = "%@, %@ and %@";
"ms" = "%@ and %@";

"PrivacyPolicyURL" = "https://www.iubenda.com/privacy-policy/59859310";

@ -193,3 +193,5 @@
"dhms" = "%@, %@, %@ y %@";
"hms" = "%@, %@ y %@";
"ms" = "%@ y %@";

"PrivacyPolicyURL" = "https://www.iubenda.com/privacy-policy/57237510";

@ -784,3 +784,5 @@
/* Class = "NSToolbarItem"; paletteLabel = "Show Equalizer"; ObjectID = "ZOn-sB-FR3"; */
"ZOn-sB-FR3.paletteLabel" = "Mostrar ecualizador";

/* Class = "NSMenuItem"; title = "Check for Updates..."; ObjectID = "302"; */
"302.title" = "Buscar actualizaciones...";
Loading…
Reference in a new issue