Compare commits

1 commit

Author: Christopher Snowhill
Commit: 024e36963a
Date:   2022-05-21 21:50:30 -07:00

CQT Experiment: Tossed in a branch for later

This experiment with Constant Q Transform was interesting, and it's nice
to see that it actually functions, but it's just too damn slow to really
be useful for visualization purposes. It uses nearly a full core on an
M1 processor, and I'd hate to see what it does on an Intel machine.
Stashing this in a branch and discarding it from the main tree, in case
somebody finds this code useful some day.

Signed-off-by: Christopher Snowhill <kode54@gmail.com>
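
The experiment's code isn't shown in this compare, but as a rough illustration of why a Constant-Q Transform costs so much more per frame than a single FFT: each bin k gets its own analysis window of N_k = ceil(Q * fs / f_k) samples, so the window length grows as the bin frequency falls. The sketch below is a minimal naive CQT, not the branch's implementation; the Hann window and the f_min / bins_per_octave parameters are illustrative assumptions.

#include <math.h>
#include <stdlib.h>

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

typedef struct { float re, im; } cqt_bin;

/* Direct (naive) constant-Q transform of one frame.
 * x: mono input samples, n_samples of them, sampled at fs Hz.
 * Bin k is centered at f_min * 2^(k / bins_per_octave).
 * f_min and bins_per_octave are illustrative, not values from the branch. */
static void naive_cqt_frame(const float *x, size_t n_samples, double fs,
                            double f_min, int bins_per_octave, int n_bins,
                            cqt_bin *out) {
    const double Q = 1.0 / (pow(2.0, 1.0 / bins_per_octave) - 1.0);
    for(int k = 0; k < n_bins; ++k) {
        const double f_k = f_min * pow(2.0, (double)k / bins_per_octave);
        size_t N_k = (size_t)ceil(Q * fs / f_k); /* window shrinks as f_k rises */
        if(N_k > n_samples) N_k = n_samples;
        double re = 0.0, im = 0.0;
        for(size_t n = 0; n < N_k; ++n) {
            /* Hann window keeps each bin's bandwidth proportional to f_k */
            const double w = 0.5 * (1.0 - cos(2.0 * M_PI * (double)n / (double)N_k));
            const double phase = -2.0 * M_PI * Q * (double)n / (double)N_k;
            re += w * x[n] * cos(phase);
            im += w * x[n] * sin(phase);
        }
        out[k].re = (float)(re / (double)N_k);
        out[k].im = (float)(im / (double)N_k);
    }
}

At fs = 44100 Hz with f_min = 55 Hz and 12 bins per octave (Q ≈ 16.8), the lowest bin alone spans Q * fs / f_min ≈ 13,500 samples, and that inner loop repeats for every bin on every frame, which lines up with the commit message's observation that the approach used nearly a full core.
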
3318 changed files with 396826 additions and 215199 deletions

@ -1,7 +1,7 @@
#!/bin/sh #!/bin/sh
MATCHES=$(git grep -n -E "(DevelopmentTeam|DEVELOPMENT_TEAM) =" .) MATCHES=$(grep -nR --include project.pbxproj -E "(DevelopmentTeam|DEVELOPMENT_TEAM) =" .)
COUNT=$(echo -n "$MATCHES\c" | grep -cvE '(Shared.xcconfig|= "")') COUNT=$(echo -n "$MATCHES\c" | grep -cvE '= ""')
if [ $COUNT -ne 0 ]; then if [ $COUNT -ne 0 ]; then
ERRORS=$(echo -n "$MATCHES\c" | grep -vE '= ""') ERRORS=$(echo -n "$MATCHES\c" | grep -vE '= ""')

@ -3,7 +3,7 @@ name: Feedback
about: Report bugs or suggest new features about: Report bugs or suggest new features
title: '' title: ''
labels: labels:
assignees: kode54 assignees: kode54, nevack
--- ---

@ -1,44 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 24.2.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 2809.9 600" style="enable-background:new 0 0 2809.9 600;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FF6336;}
.st1{fill:#FFC501;}
.st2{fill:#A4A14A;}
</style>
<g>
<polygon class="st0" points="393.5,468.8 524.7,468.8 524.7,376.1 355.1,376.1 262.3,468.8 169.6,376.1 0,376.1 0,468.8
131.2,468.8 262.3,600 "/>
<rect y="190.6" class="st1" width="524.7" height="92.8"/>
<rect y="5.1" class="st2" width="524.7" height="92.8"/>
</g>
<path d="M733.3,5.1h83.4v455.2h-83.4V5.1z"/>
<path d="M946.7,447.3c-26.3-14.5-47.2-34.5-62.6-59.7c-15.4-25.3-23.1-53.1-23.1-83.4s7.7-58.2,23.1-83.4
c15.4-25.3,36.2-45.2,62.6-59.7c26.3-14.5,54.9-21.8,85.7-21.8c30.8,0,59.3,7.3,85.7,21.8c26.3,14.5,47.2,34.5,62.6,59.7
c15.4,25.3,23.1,53.1,23.1,83.4s-7.7,58.2-23.1,83.4c-15.4,25.3-36.3,45.2-62.6,59.7c-26.3,14.5-54.9,21.8-85.7,21.8
C1001.5,469.1,973,461.8,946.7,447.3z M1076.2,380.9c13.3-7.4,23.9-17.8,31.9-31.3s12-28.7,12-45.5s-4-32-12-45.5
s-18.6-23.9-31.9-31.3c-13.3-7.4-27.9-11.1-43.9-11.1s-30.7,3.7-43.9,11.1c-13.3,7.4-23.9,17.8-31.9,31.3s-12,28.7-12,45.5
c0,16.9,4,32,12,45.5s18.6,23.9,31.9,31.3c13.3,7.4,27.9,11.1,43.9,11.1C1048.3,391.9,1063,388.3,1076.2,380.9z"/>
<path d="M1247.9,5.1h83.4v271.2L1440,147.9h99.2l-122.6,144.8l131.5,167.5h-106.8l-110-144.8v144.8h-83.4L1247.9,5.1L1247.9,5.1z"/>
<path d="M1626.9,448.8c-23.4-13.5-42.4-32.8-56.9-57.8c-14.5-25.1-21.8-54-21.8-86.9c0-29.9,7-57.5,20.9-82.8s32.9-45.3,56.9-60.1
c24-14.7,50.3-22.1,79-22.1c20.2,0,38.7,3.4,55.3,10.1c16.6,6.7,29.4,15.8,38.3,27.2V148h83.4v312.3h-83.4v-28.4
c-13.1,12.2-27,21.5-41.7,27.8c-14.8,6.3-33.7,9.5-56.9,9.5C1674.6,469.1,1650.2,462.3,1626.9,448.8z M1778.9,366.7
c15.6-16.9,23.4-37.7,23.4-62.6s-7.8-45.7-23.4-62.6c-15.6-16.8-36.2-25.3-61.9-25.3c-25.7,0-46.4,8.4-62,25.3s-23.4,37.7-23.4,62.6
s7.8,45.7,23.4,62.6s36.2,25.3,62,25.3C1742.6,391.9,1763.3,383.5,1778.9,366.7z"/>
<path d="M1942.6,5.1h83.4v455.2h-83.4V5.1z"/>
<path d="M2091.2,89.8C2081,79.7,2076,67.4,2076,53.1c0-14.7,5.1-27.3,15.2-37.6C2101.3,5.2,2113.5,0,2127.8,0
c14.7,0,27.3,5.2,37.6,15.5s15.5,22.9,15.5,37.6c0,14.3-5.2,26.5-15.5,36.7c-10.3,10.1-22.9,15.2-37.6,15.2
C2113.5,104.9,2101.3,99.9,2091.2,89.8z M2086.7,147.9h83.4v312.3h-83.4V147.9z"/>
<path d="M2227.1,438.7l19-78.4h3.8c27.4,21.1,55.4,31.6,84.1,31.6c11.8,0,21.4-2.2,28.8-6.6c7.4-4.4,11.1-10.8,11.1-19.3
c0-8.8-4.3-16-13-21.5c-8.6-5.5-24.8-12.2-48.4-20.2c-24-8-42.7-19.6-55.9-34.8c-13.3-15.2-19.9-33.1-19.9-53.7
c0-29.1,10.8-52.5,32.6-70.2c21.7-17.7,49.2-26.6,82.5-26.6c16.9,0,31.8,1.6,44.9,4.7c13.1,3.2,25.5,8.3,37.3,15.5l3.2,79.6h-4.4
c-15.2-9.7-28.7-17-40.5-21.8s-25.1-7.3-39.8-7.3c-10.5,0-19.2,2.1-25.9,6.3s-10.1,9.7-10.1,16.4c0,8.9,4.2,16.1,12.7,21.8
c8.4,5.7,24.2,12.5,47.4,20.5c26.5,8.9,46.7,19.6,60.4,32.2s20.5,32.2,20.5,58.8c0,21.9-5.5,40.7-16.4,56.3
c-11,15.6-25.4,27.3-43.3,35.1c-17.9,7.8-37.6,11.7-59.1,11.7C2294.9,469.1,2257.8,459,2227.1,438.7z"/>
<path d="M2574.8,446.9c-26.1-14.7-46.7-34.9-61.6-60.4c-15-25.5-22.4-53.8-22.4-85c0-30.3,7.1-57.8,21.2-82.5
c14.1-24.7,33.7-44.1,58.8-58.5c25.1-14.3,53.4-21.5,85-21.5c32,0,59.7,7.5,83.1,22.4c23.4,15,41.1,34.9,53.1,59.7
c12,24.9,18,51.6,18,80.3v24.7h-239.6c5.1,23.6,15.9,41.5,32.6,53.7c16.6,12.2,38.7,18.3,66.1,18.3c41.7,0,78.2-13.7,109.4-41.1h8.9
l-3.2,79c-19,11-39,19.2-60.1,24.7s-41.3,8.2-60.7,8.2C2630.4,469.1,2600.9,461.7,2574.8,446.9z M2726.5,269.3
c-2.1-19-10-33.8-23.7-44.6c-13.7-10.7-30.5-16.1-50.3-16.1c-19.4,0-36.4,5.2-50.9,15.5s-24.3,25.4-29.4,45.2H2726.5z"/>
</svg>

@ -11,21 +11,14 @@ on:
jobs: jobs:
build: build:
name: Build Universal Cog.app name: Build Universal Cog.app
runs-on: macos-15 runs-on: macos-11
env: env:
XCODE_DERIVEDDATA_PATH: build XCODE_DERIVEDDATA_PATH: build
steps: steps:
- name: Switch to Xcode 16
uses: maxim-lobanov/setup-xcode@v1
with:
xcode-version: 16
- name: Check out repository - name: Check out repository
uses: actions/checkout@v4 uses: actions/checkout@v2
with: with:
submodules: recursive submodules: recursive
- name: Unpack libraries
run: >
cd ThirdParty && tar xvf libraries.tar.xz
- name: Run xcodebuild - name: Run xcodebuild
run: > run: >
xcodebuild xcodebuild
@ -50,7 +43,7 @@ jobs:
$XCODE_DERIVEDDATA_PATH/Build/Products/Debug/Cog.app $XCODE_DERIVEDDATA_PATH/Build/Products/Debug/Cog.app
$XCODE_DERIVEDDATA_PATH/Cog.zip $XCODE_DERIVEDDATA_PATH/Cog.zip
- name: Upload Artifact - name: Upload Artifact
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v2
with: with:
name: Cog name: Cog
path: ${{ env.XCODE_DERIVEDDATA_PATH }}/Cog.zip path: ${{ env.XCODE_DERIVEDDATA_PATH }}/Cog.zip

.gitignore

@ -1,56 +1,9 @@
.DS_Store .DS_Store
xcuserdata xcuserdata
/build ./build
# Special cog exceptions # Special cog exceptions
!Frameworks/OpenMPT/OpenMPT/build !Frameworks/OpenMPT/OpenMPT/build
# User-specific xcconfig files # User-specific xcconfig files
Xcode-config/DEVELOPMENT_TEAM.xcconfig Xcode-config/DEVELOPMENT_TEAM.xcconfig
Xcode-config/SENTRY_SETTINGS.xcconfig
# Plist derived from template at build time
/Info.plist
# This indicates the libraries are up to date
/ThirdParty/libraries.updated
# Temporary file to indicate libraries are being extracted by one process
/ThirdParty/libraries.extracting
# The project will unpack these before building, if necessary
/ThirdParty/BASS/libbass.dylib
/ThirdParty/BASS/libbass_mpc.dylib
/ThirdParty/BASS/libbassflac.dylib
/ThirdParty/BASS/libbassmidi.dylib
/ThirdParty/BASS/libbassopus.dylib
/ThirdParty/BASS/libbasswv.dylib
/ThirdParty/avif/lib/libaom.a
/ThirdParty/avif/lib/libavif.a
/ThirdParty/fdk-aac/lib/libfdk-aac.2.dylib
/ThirdParty/fdk-aac/lib/libfdk-aac.a
/ThirdParty/fdk-aac/lib/libfdk-aac.dylib
/ThirdParty/fdk-aac/lib/libfdk-aac.la
/ThirdParty/fdk-aac/lib/pkgconfig/fdk-aac.pc
/ThirdParty/ffmpeg/lib/libavcodec.61.dylib
/ThirdParty/ffmpeg/lib/libavformat.61.dylib
/ThirdParty/ffmpeg/lib/libavutil.59.dylib
/ThirdParty/ffmpeg/lib/libswresample.5.dylib
/ThirdParty/flac/lib/libFLAC.12.dylib
/ThirdParty/libid3tag/lib/libid3tag.a
/ThirdParty/libmad/lib/libmad.a
/ThirdParty/libopenmpt/lib/libopenmpt.a
/ThirdParty/libopenmpt_old/lib/libopenmpt.old.a
/ThirdParty/libvgm/lib/libvgm-emu.a
/ThirdParty/libvgm/lib/libvgm-player.a
/ThirdParty/libvgm/lib/libvgm-utils.a
/ThirdParty/mpg123/lib/libmpg123.0.dylib
/ThirdParty/ogg/lib/libogg.0.dylib
/ThirdParty/opus/lib/libopus.0.dylib
/ThirdParty/opusfile/lib/libopusfile.0.dylib
/ThirdParty/rubberband/lib/librubberband.3.dylib
/ThirdParty/speex/libspeex.a
/ThirdParty/vorbis/lib/libvorbisfile.3.dylib
/ThirdParty/vorbis/lib/libvorbis.0.dylib
/ThirdParty/soxr/lib/libsoxr.0.dylib
/ThirdParty/WavPack/lib/libwavpack.a

.gitmodules

@ -3,7 +3,7 @@
url = https://github.com/kode54/mgba.git url = https://github.com/kode54/mgba.git
[submodule "Frameworks/AdPlug/AdPlug/adplug"] [submodule "Frameworks/AdPlug/AdPlug/adplug"]
path = Frameworks/AdPlug/AdPlug/adplug path = Frameworks/AdPlug/AdPlug/adplug
url = https://github.com/kode54/adplug.git url = https://github.com/adplug/adplug.git
[submodule "Frameworks/libbinio/libbinio/libbinio"] [submodule "Frameworks/libbinio/libbinio/libbinio"]
path = Frameworks/libbinio/libbinio/libbinio path = Frameworks/libbinio/libbinio/libbinio
url = https://github.com/adplug/libbinio.git url = https://github.com/adplug/libbinio.git
@ -15,7 +15,10 @@
url = https://github.com/Thealexbarney/LibAtrac9.git url = https://github.com/Thealexbarney/LibAtrac9.git
[submodule "Frameworks/shpakovski/MASShortcut"] [submodule "Frameworks/shpakovski/MASShortcut"]
path = Frameworks/shpakovski/MASShortcut path = Frameworks/shpakovski/MASShortcut
url = https://github.com/kode54/MASShortcut.git url = https://github.com/shpakovski/MASShortcut.git
[submodule "Frameworks/libsidplayfp/sidplayfp"] [submodule "Frameworks/libsidplayfp/sidplayfp"]
path = Frameworks/libsidplayfp/sidplayfp path = Frameworks/libsidplayfp/sidplayfp
url = https://github.com/kode54/libsidplayfp.git url = https://github.com/kode54/libsidplayfp.git
[submodule "Audio/ThirdParty/r8brain-free-src"]
path = Audio/ThirdParty/r8brain-free-src
url = https://github.com/kode54/r8brain-free-src

@ -7,7 +7,7 @@
@class PlaylistController; @class PlaylistController;
@class PlaylistView; @class PlaylistView;
@class PlaylistLoader; @class PlaylistLoader;
@class PreferencesController; @class SUUpdater;
@interface AppController : NSObject { @interface AppController : NSObject {
IBOutlet NSObjectController *currentEntryController; IBOutlet NSObjectController *currentEntryController;
@ -37,7 +37,6 @@
IBOutlet NSMenuItem *showArtistColumn; IBOutlet NSMenuItem *showArtistColumn;
IBOutlet NSMenuItem *showAlbumColumn; IBOutlet NSMenuItem *showAlbumColumn;
IBOutlet NSMenuItem *showGenreColumn; IBOutlet NSMenuItem *showGenreColumn;
IBOutlet NSMenuItem *showPlayCountColumn;
IBOutlet NSMenuItem *showLengthColumn; IBOutlet NSMenuItem *showLengthColumn;
IBOutlet NSMenuItem *showTrackColumn; IBOutlet NSMenuItem *showTrackColumn;
IBOutlet NSMenuItem *showYearColumn; IBOutlet NSMenuItem *showYearColumn;
@ -49,7 +48,7 @@
IBOutlet FileTreeViewController *fileTreeViewController; IBOutlet FileTreeViewController *fileTreeViewController;
IBOutlet PreferencesController *preferencesController; IBOutlet SUUpdater *updater;
NSOperationQueue *queue; // Since we are the app delegate, we take care of the op queue NSOperationQueue *queue; // Since we are the app delegate, we take care of the op queue
@ -67,7 +66,10 @@
- (IBAction)delEntries:(id)sender; - (IBAction)delEntries:(id)sender;
- (IBAction)savePlaylist:(id)sender; - (IBAction)savePlaylist:(id)sender;
- (IBAction)privacyPolicy:(id)sender; - (IBAction)openLiberapayPage:(id)sender;
- (IBAction)openPaypalPage:(id)sender;
- (IBAction)openKofiPage:(id)sender;
- (IBAction)openPatreonPage:(id)sender;
- (IBAction)feedback:(id)sender; - (IBAction)feedback:(id)sender;
@ -98,16 +100,6 @@
- (IBAction)toggleMiniMode:(id)sender; - (IBAction)toggleMiniMode:(id)sender;
- (IBAction)toggleToolbarStyle:(id)sender; - (IBAction)toggleToolbarStyle:(id)sender;
- (BOOL)pathSuggesterEmpty;
+ (BOOL)globalPathSuggesterEmpty;
- (void)showPathSuggester;
+ (void)globalShowPathSuggester;
- (void)selectTrack:(id)sender;
- (IBAction)showRubberbandSettings:(id)sender;
+ (void)globalShowRubberbandSettings;
@property NSWindow *mainWindow; @property NSWindow *mainWindow;
@property NSWindow *miniWindow; @property NSWindow *miniWindow;

@ -11,9 +11,6 @@
#import "PlaylistEntry.h" #import "PlaylistEntry.h"
#import "PlaylistLoader.h" #import "PlaylistLoader.h"
#import "PlaylistView.h" #import "PlaylistView.h"
#import "RubberbandEngineTransformer.h"
#import "SQLiteStore.h"
#import "SandboxBroker.h"
#import "SpotlightWindowController.h" #import "SpotlightWindowController.h"
#import "StringToURLTransformer.h" #import "StringToURLTransformer.h"
#import <CogAudio/Status.h> #import <CogAudio/Status.h>
@ -22,26 +19,13 @@
#import "Logging.h" #import "Logging.h"
#import "MiniModeMenuTitleTransformer.h" #import "MiniModeMenuTitleTransformer.h"
#import "ColorToValueTransformer.h"
#import "TotalTimeTransformer.h"
#import "Shortcuts.h" #import "Shortcuts.h"
#import <MASShortcut/Shortcut.h> #import <MASShortcut/Shortcut.h>
#import <MASShortcut/MASDictionaryTransformer.h>
#import "PreferencesController.h" #import <Sparkle/Sparkle.h>
#import "FeedbackController.h"
@import Sentry;
void *kAppControllerContext = &kAppControllerContext; void *kAppControllerContext = &kAppControllerContext;
BOOL kAppControllerShuttingDown = NO;
static AppController *kAppController = nil;
@implementation AppController { @implementation AppController {
BOOL _isFullToolbarStyle; BOOL _isFullToolbarStyle;
} }
@ -63,35 +47,14 @@ static AppController *kAppController = nil;
NSValueTransformer *miniModeMenuTitleTransformer = [[MiniModeMenuTitleTransformer alloc] init]; NSValueTransformer *miniModeMenuTitleTransformer = [[MiniModeMenuTitleTransformer alloc] init];
[NSValueTransformer setValueTransformer:miniModeMenuTitleTransformer [NSValueTransformer setValueTransformer:miniModeMenuTitleTransformer
forName:@"MiniModeMenuTitleTransformer"]; forName:@"MiniModeMenuTitleTransformer"];
NSValueTransformer *colorToValueTransformer = [[ColorToValueTransformer alloc] init];
[NSValueTransformer setValueTransformer:colorToValueTransformer
forName:@"ColorToValueTransformer"];
NSValueTransformer *totalTimeTransformer = [[TotalTimeTransformer alloc] init];
[NSValueTransformer setValueTransformer:totalTimeTransformer
forName:@"TotalTimeTransformer"];
NSValueTransformer *numberHertzToStringTransformer = [[NumberHertzToStringTransformer alloc] init];
[NSValueTransformer setValueTransformer:numberHertzToStringTransformer
forName:@"NumberHertzToStringTransformer"];
NSValueTransformer *rubberbandEngineEnabledTransformer = [[RubberbandEngineEnabledTransformer alloc] init];
[NSValueTransformer setValueTransformer:rubberbandEngineEnabledTransformer
forName:@"RubberbandEngineEnabledTransformer"];
NSValueTransformer *rubberbandEngineHiddenTransformer = [[RubberbandEngineHiddenTransformer alloc] init];
[NSValueTransformer setValueTransformer:rubberbandEngineHiddenTransformer
forName:@"RubberbandEngineHiddenTransformer"];
} }
- (id)init { - (id)init {
self = [super init]; self = [super init];
if(self) { if(self) {
[self initDefaults]; [self initDefaults];
queue = [[NSOperationQueue alloc] init]; queue = [[NSOperationQueue alloc] init];
kAppController = self;
} }
return self; return self;
@ -110,10 +73,8 @@ static AppController *kAppController = nil;
[p beginSheetModalForWindow:mainWindow [p beginSheetModalForWindow:mainWindow
completionHandler:^(NSInteger result) { completionHandler:^(NSInteger result) {
if(result == NSModalResponseOK) { if(result == NSModalResponseOK) {
NSDictionary *loadEntryData = @{@"entries": [p URLs], [self->playlistLoader willInsertURLs:[p URLs] origin:URLOriginExternal];
@"sort": @(YES), [self->playlistLoader didInsertURLs:[self->playlistLoader addURLs:[p URLs] sort:YES] origin:URLOriginExternal];
@"origin": @(URLOriginExternal)};
[self->playlistController performSelectorInBackground:@selector(addURLsInBackground:) withObject:loadEntryData];
} else { } else {
[p close]; [p close];
} }
@ -150,10 +111,8 @@ static AppController *kAppController = nil;
- (void)openURLPanelDidEnd:(OpenURLPanel *)panel returnCode:(int)returnCode contextInfo:(void *)contextInfo { - (void)openURLPanelDidEnd:(OpenURLPanel *)panel returnCode:(int)returnCode contextInfo:(void *)contextInfo {
if(returnCode == NSModalResponseOK) { if(returnCode == NSModalResponseOK) {
NSDictionary *loadEntriesData = @{ @"entries": @[[panel url]], [playlistLoader willInsertURLs:@[[panel url]] origin:URLOriginExternal];
@"sort": @(NO), [playlistLoader didInsertURLs:[playlistLoader addURLs:@[[panel url]] sort:NO] origin:URLOriginExternal];
@"origin": @(URLOriginExternal) };
[playlistController performSelectorInBackground:@selector(addURLsInBackground:) withObject:loadEntriesData];
} }
} }
@ -169,13 +128,11 @@ static AppController *kAppController = nil;
return [key isEqualToString:@"currentEntry"]; return [key isEqualToString:@"currentEntry"];
} }
static BOOL consentLastEnabled = NO;
- (void)awakeFromNib { - (void)awakeFromNib {
[[NSUserDefaults standardUserDefaults] registerDefaults:@{ @"sentryConsented": @(NO), #ifdef DEBUG
@"sentryAskedConsent": @(NO) }]; // Prevent updates automatically in debug builds
[updater setAutomaticallyChecksForUpdates:NO];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.sentryConsented" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kAppControllerContext]; #endif
[[totalTimeField cell] setBackgroundStyle:NSBackgroundStyleRaised]; [[totalTimeField cell] setBackgroundStyle:NSBackgroundStyleRaised];
@ -186,10 +143,6 @@ static BOOL consentLastEnabled = NO;
[randomizeButton setToolTip:NSLocalizedString(@"RandomizeButtonTooltip", @"")]; [randomizeButton setToolTip:NSLocalizedString(@"RandomizeButtonTooltip", @"")];
[fileButton setToolTip:NSLocalizedString(@"FileButtonTooltip", @"")]; [fileButton setToolTip:NSLocalizedString(@"FileButtonTooltip", @"")];
[self registerDefaultHotKeys];
[self migrateHotKeys];
[self registerHotKeys]; [self registerHotKeys];
(void)[spotlightWindowController init]; (void)[spotlightWindowController init];
@ -203,66 +156,22 @@ static BOOL consentLastEnabled = NO;
NSString *oldFilename = @"Default.m3u"; NSString *oldFilename = @"Default.m3u";
NSString *newFilename = @"Default.xml"; NSString *newFilename = @"Default.xml";
BOOL dataStorePresent = [playlistLoader addDataStore];
if(!dataStorePresent) {
if([[NSFileManager defaultManager] fileExistsAtPath:[basePath stringByAppendingPathComponent:dbFilename]]) { if([[NSFileManager defaultManager] fileExistsAtPath:[basePath stringByAppendingPathComponent:dbFilename]]) {
[playlistLoader addDatabase]; [playlistLoader addDatabase];
} else if([[NSFileManager defaultManager] fileExistsAtPath:[basePath stringByAppendingPathComponent:newFilename]]) { } else if([[NSFileManager defaultManager] fileExistsAtPath:[basePath stringByAppendingPathComponent:newFilename]]) {
[playlistLoader addURL:[NSURL fileURLWithPath:[basePath stringByAppendingPathComponent:newFilename]]]; [playlistLoader addURL:[NSURL fileURLWithPath:[basePath stringByAppendingPathComponent:newFilename]]];
} else if([[NSFileManager defaultManager] fileExistsAtPath:[basePath stringByAppendingPathComponent:oldFilename]]){
/* Without the above check, it appears the code was retrieving a nil NSURL from the nonexistent path
* Then adding it to the playlist and crashing further down the line
* Nobody on a new setup should be seeing this open anything, so it should fall through to the
* notice below.
*/
[playlistLoader addURL:[NSURL fileURLWithPath:[basePath stringByAppendingPathComponent:oldFilename]]];
} else { } else {
ALog(@"No playlist found, leaving it empty."); [playlistLoader addURL:[NSURL fileURLWithPath:[basePath stringByAppendingPathComponent:oldFilename]]];
} }
}
SandboxBroker *sandboxBroker = [SandboxBroker sharedSandboxBroker];
if(!sandboxBroker) {
ALog(@"Sandbox broker init failed.");
}
[SandboxBroker cleanupFolderAccess];
[[playlistController undoManager] enableUndoRegistration]; [[playlistController undoManager] enableUndoRegistration];
int lastStatus = (int)[[NSUserDefaults standardUserDefaults] integerForKey:@"lastPlaybackStatus"];
if(lastStatus != CogStatusStopped) {
NSPredicate *hasUrlPredicate = [NSPredicate predicateWithFormat:@"urlString != nil && urlString != %@", @""];
NSPredicate *deletedPredicate = [NSPredicate predicateWithFormat:@"deLeted == NO || deLeted == nil"];
NSPredicate *currentPredicate = [NSPredicate predicateWithFormat:@"current == YES"];
NSCompoundPredicate *predicate = [NSCompoundPredicate andPredicateWithSubpredicates:@[deletedPredicate, hasUrlPredicate, currentPredicate]];
NSFetchRequest *request = [NSFetchRequest fetchRequestWithEntityName:@"PlaylistEntry"];
request.predicate = predicate;
NSError *error = nil;
NSArray *results = [playlistController.persistentContainer.viewContext executeFetchRequest:request error:&error];
if(results && [results count] > 0) {
PlaylistEntry *pe = results[0];
if([[NSUserDefaults standardUserDefaults] boolForKey:@"resumePlaybackOnStartup"]) { if([[NSUserDefaults standardUserDefaults] boolForKey:@"resumePlaybackOnStartup"]) {
[playbackController playEntryAtIndex:pe.index startPaused:(lastStatus == CogStatusPaused) andSeekTo:@(pe.currentPosition)]; int lastStatus = (int)[[NSUserDefaults standardUserDefaults] integerForKey:@"lastPlaybackStatus"];
} else { int lastIndex = (int)[[NSUserDefaults standardUserDefaults] integerForKey:@"lastTrackPlaying"];
pe.current = NO;
pe.stopAfter = NO; if(lastStatus != CogStatusStopped && lastIndex >= 0) {
pe.currentPosition = 0.0; [playbackController playEntryAtIndex:lastIndex startPaused:(lastStatus == CogStatusPaused) andSeekTo:[NSNumber numberWithDouble:[[NSUserDefaults standardUserDefaults] floatForKey:@"lastTrackPosition"]]];
pe.countAdded = NO;
[playlistController commitPersistentStore];
}
// Bug fix
if([results count] > 1) {
for(size_t i = 1; i < [results count]; ++i) {
PlaylistEntry *pe = results[i];
[pe setCurrent:NO];
}
}
} }
} }
@ -286,8 +195,8 @@ static BOOL consentLastEnabled = NO;
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(nodeExpanded:) name:NSOutlineViewItemDidExpandNotification object:outlineView]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(nodeExpanded:) name:NSOutlineViewItemDidExpandNotification object:outlineView];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(nodeCollapsed:) name:NSOutlineViewItemDidCollapseNotification object:outlineView]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(nodeCollapsed:) name:NSOutlineViewItemDidCollapseNotification object:outlineView];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(updateDockMenu:) name:CogPlaybackDidBeginNotificiation object:nil]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(updateDockMenu:) name:CogPlaybackDidBeginNotficiation object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(updateDockMenu:) name:CogPlaybackDidStopNotificiation object:nil]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(updateDockMenu:) name:CogPlaybackDidStopNotficiation object:nil];
[self updateDockMenu:nil]; [self updateDockMenu:nil];
@ -332,51 +241,11 @@ static BOOL consentLastEnabled = NO;
return; return;
} }
if([keyPath isEqualToString:@"values.sentryConsented"]) { if([keyPath isEqualToString:@"playlistController.currentEntry"]) {
BOOL enabled = [[NSUserDefaults standardUserDefaults] boolForKey:@"sentryConsented"];
if(enabled != consentLastEnabled) {
if(enabled) {
[SentrySDK startWithConfigureOptions:^(SentryOptions *options) {
options.dsn = @"https://b5eda1c2390eb965a74dd735413b6392@cog-analytics.losno.co/3";
options.debug = YES; // Enabled debug when first installing is always helpful
// Temporary until there's a better solution
options.enableAppHangTracking = NO;
// Set tracesSampleRate to 1.0 to capture 100% of transactions for performance monitoring.
// We recommend adjusting this value in production.
options.tracesSampleRate = @1.0;
options.profilesSampleRate = @1.0;
// Adds IP for users.
// For more information, visit: https://docs.sentry.io/platforms/apple/data-management/data-collected/
options.sendDefaultPii = YES;
// And now to set up user feedback prompting
options.onCrashedLastRun = ^void(SentryEvent * _Nonnull event) {
// capture user feedback
FeedbackController *fbcon = [[FeedbackController alloc] init];
[fbcon performSelectorOnMainThread:@selector(showWindow:) withObject:nil waitUntilDone:YES];
if([fbcon waitForCompletion]) {
SentryFeedback *feedback = [[SentryFeedback alloc] initWithMessage:[fbcon comments] name:[fbcon name] email:[fbcon email] source:SentryFeedbackSourceCustom associatedEventId:event.eventId attachments:nil];
[SentrySDK captureFeedback:feedback];
}
};
}];
} else {
if([SentrySDK isEnabled]) {
[SentrySDK close];
}
}
consentLastEnabled = enabled;
}
} else if([keyPath isEqualToString:@"playlistController.currentEntry"]) {
PlaylistEntry *entry = playlistController.currentEntry; PlaylistEntry *entry = playlistController.currentEntry;
NSString *appTitle = NSLocalizedString(@"CogTitle", @"");
if(!entry) { if(!entry) {
miniWindow.title = appTitle; miniWindow.title = @"Cog";
mainWindow.title = appTitle; mainWindow.title = @"Cog";
if(@available(macOS 11.0, *)) { if(@available(macOS 11.0, *)) {
miniWindow.subtitle = @""; miniWindow.subtitle = @"";
mainWindow.subtitle = @""; mainWindow.subtitle = @"";
@ -401,7 +270,7 @@ static BOOL consentLastEnabled = NO;
} }
if(@available(macOS 11.0, *)) { if(@available(macOS 11.0, *)) {
NSString *title = appTitle; NSString *title = @"Cog";
if(entry.title) { if(entry.title) {
title = entry.title; title = entry.title;
} }
@ -424,19 +293,13 @@ static BOOL consentLastEnabled = NO;
miniWindow.subtitle = subtitle; miniWindow.subtitle = subtitle;
mainWindow.subtitle = subtitle; mainWindow.subtitle = subtitle;
} else { } else {
NSString *title = appTitle; NSString *title = @"Cog";
if(entry.display) { if(entry.display) {
title = entry.display; title = entry.display;
} }
miniWindow.title = title; miniWindow.title = title;
mainWindow.title = title; mainWindow.title = title;
} }
} else if([keyPath isEqualToString:@"finished"]) {
NSProgress *progress = (NSProgress *)object;
if([progress isFinished]) {
playbackController.progressOverall = nil;
[NSApp terminate:nil];
}
} }
} }
@ -454,23 +317,23 @@ static BOOL consentLastEnabled = NO;
[expandedNodes removeObject:url]; [expandedNodes removeObject:url];
} }
- (NSApplicationTerminateReply)applicationShouldTerminate:(NSApplication *)sender {
if(playbackController.progressOverall) {
[playbackController.progressOverall addObserver:self forKeyPath:@"finished" options:0 context:kAppControllerContext];
return NSTerminateLater;
} else {
return NSTerminateNow;
}
}
- (void)applicationWillTerminate:(NSNotification *)aNotification { - (void)applicationWillTerminate:(NSNotification *)aNotification {
kAppControllerShuttingDown = YES;
CogStatus currentStatus = [playbackController playbackStatus]; CogStatus currentStatus = [playbackController playbackStatus];
NSInteger lastTrackPlaying = -1;
double lastTrackPosition = 0;
if(currentStatus == CogStatusStopping) if(currentStatus == CogStatusStopping)
currentStatus = CogStatusStopped; currentStatus = CogStatusStopped;
if(currentStatus != CogStatusStopped) {
PlaylistEntry *pe = [playlistController currentEntry];
lastTrackPlaying = [pe index];
lastTrackPosition = [pe currentPosition];
}
[[NSUserDefaults standardUserDefaults] setInteger:lastTrackPlaying forKey:@"lastTrackPlaying"];
[[NSUserDefaults standardUserDefaults] setDouble:lastTrackPosition forKey:@"lastTrackPosition"];
[playbackController stop:self]; [playbackController stop:self];
[[NSUserDefaults standardUserDefaults] setInteger:currentStatus forKey:@"lastPlaybackStatus"]; [[NSUserDefaults standardUserDefaults] setInteger:currentStatus forKey:@"lastPlaybackStatus"];
@ -485,46 +348,15 @@ static BOOL consentLastEnabled = NO;
[playlistController clearFilterPredicate:self]; [playlistController clearFilterPredicate:self];
NSMutableDictionary<NSString *, AlbumArtwork *> *artLeftovers = [playlistController.persistentArtStorage mutableCopy]; NSString *fileName = @"Default.xml";
NSManagedObjectContext *moc = playlistController.persistentContainer.viewContext;
for(PlaylistEntry *pe in playlistController.arrangedObjects) {
if(pe.deLeted) {
[moc deleteObject:pe];
continue;
}
if([artLeftovers objectForKey:pe.artHash]) {
[artLeftovers removeObjectForKey:pe.artHash];
}
}
for(NSString *key in artLeftovers) {
[moc deleteObject:[artLeftovers objectForKey:key]];
}
[playlistController commitPersistentStore];
if([SQLiteStore databaseStarted]) {
[[SQLiteStore sharedStore] shutdown];
}
NSError *error; NSError *error;
NSString *fileName = @"Default.sqlite";
[[NSFileManager defaultManager] removeItemAtPath:[folder stringByAppendingPathComponent:fileName] error:&error];
fileName = @"Default.xml";
[[NSFileManager defaultManager] removeItemAtPath:[folder stringByAppendingPathComponent:fileName] error:&error]; [[NSFileManager defaultManager] removeItemAtPath:[folder stringByAppendingPathComponent:fileName] error:&error];
fileName = @"Default.m3u"; fileName = @"Default.m3u";
[[NSFileManager defaultManager] removeItemAtPath:[folder stringByAppendingPathComponent:fileName] error:&error]; [[NSFileManager defaultManager] removeItemAtPath:[folder stringByAppendingPathComponent:fileName] error:&error];
DLog(@"Shutting down sandbox broker");
[[SandboxBroker sharedSandboxBroker] shutdown];
DLog(@"Saving expanded nodes: %@", [expandedNodes description]); DLog(@"Saving expanded nodes: %@", [expandedNodes description]);
[[NSUserDefaults standardUserDefaults] setValue:[expandedNodes allObjects] forKey:@"fileTreeViewExpandedNodes"]; [[NSUserDefaults standardUserDefaults] setValue:[expandedNodes allObjects] forKey:@"fileTreeViewExpandedNodes"];
@ -546,10 +378,8 @@ static BOOL consentLastEnabled = NO;
- (BOOL)application:(NSApplication *)theApplication openFile:(NSString *)filename { - (BOOL)application:(NSApplication *)theApplication openFile:(NSString *)filename {
NSArray *urls = @[[NSURL fileURLWithPath:filename]]; NSArray *urls = @[[NSURL fileURLWithPath:filename]];
NSDictionary *loadEntriesData = @{ @"entries": urls, [playlistLoader willInsertURLs:urls origin:URLOriginExternal];
@"sort": @(NO), [playlistLoader didInsertURLs:[playlistLoader addURLs:urls sort:NO] origin:URLOriginExternal];
@"origin": @(URLOriginExternal) };
[playlistController performSelectorInBackground:@selector(addURLsInBackground:) withObject:loadEntriesData];
return YES; return YES;
} }
@ -558,50 +388,27 @@ static BOOL consentLastEnabled = NO;
NSMutableArray *urls = [NSMutableArray array]; NSMutableArray *urls = [NSMutableArray array];
for(NSString *filename in filenames) { for(NSString *filename in filenames) {
NSURL *url = nil; [urls addObject:[NSURL fileURLWithPath:filename]];
if([[NSFileManager defaultManager] fileExistsAtPath:filename]) {
url = [NSURL fileURLWithPath:filename];
} else {
if([filename hasPrefix:@"/http/::"] ||
[filename hasPrefix:@"/https/::"]) {
// Stupid Carbon bodge for AppleScript
NSString *method = nil;
NSString *server = nil;
NSString *path = nil;
NSScanner *objScanner = [NSScanner scannerWithString:filename];
if(![objScanner scanString:@"/" intoString:nil] ||
![objScanner scanUpToString:@"/" intoString:&method] ||
![objScanner scanString:@"/::" intoString:nil] ||
![objScanner scanUpToString:@":" intoString:&server] ||
![objScanner scanString:@":" intoString:nil]) {
continue;
} }
[objScanner scanUpToCharactersFromSet:[NSCharacterSet illegalCharacterSet] intoString:&path]; [playlistLoader willInsertURLs:urls origin:URLOriginExternal];
// Colons in server were converted to shashes, convert back [playlistLoader didInsertURLs:[playlistLoader addURLs:urls sort:YES] origin:URLOriginExternal];
NSString *convertedServer = [server stringByReplacingOccurrencesOfString:@"/" withString:@":"];
// Slashes in path were converted to colons, convert back
NSString *convertedPath = [path stringByReplacingOccurrencesOfString:@":" withString:@"/"];
url = [NSURL URLWithString:[NSString stringWithFormat:@"%@://%@/%@", method, convertedServer, convertedPath]];
}
}
if(url) {
[urls addObject:url];
}
}
NSDictionary *loadEntriesData = @{ @"entries": urls,
@"sort": @(YES),
@"origin": @(URLOriginExternal) };
[playlistController performSelectorInBackground:@selector(addURLsInBackground:) withObject:loadEntriesData];
[theApplication replyToOpenOrPrint:NSApplicationDelegateReplySuccess]; [theApplication replyToOpenOrPrint:NSApplicationDelegateReplySuccess];
} }
- (IBAction)privacyPolicy:(id)sender { - (IBAction)openLiberapayPage:(id)sender {
[[NSWorkspace sharedWorkspace] openURL:[NSURL URLWithString:NSLocalizedString(@"PrivacyPolicyURL", @"Privacy policy URL from Iubenda.")]]; [[NSWorkspace sharedWorkspace] openURL:[NSURL URLWithString:@"https://liberapay.com/kode54"]];
}
- (IBAction)openPaypalPage:(id)sender {
[[NSWorkspace sharedWorkspace] openURL:[NSURL URLWithString:@"https://www.paypal.com/paypalme/kode54"]];
}
- (IBAction)openKofiPage:(id)sender {
[[NSWorkspace sharedWorkspace] openURL:[NSURL URLWithString:@"https://ko-fi.com/kode54"]];
}
- (IBAction)openPatreonPage:(id)sender {
[[NSWorkspace sharedWorkspace] openURL:[NSURL URLWithString:@"https://www.patreon.com/kode54"]];
} }
- (IBAction)feedback:(id)sender { - (IBAction)feedback:(id)sender {
@ -627,49 +434,31 @@ static BOOL consentLastEnabled = NO;
NSMutableDictionary *userDefaultsValuesDict = [NSMutableDictionary dictionary]; NSMutableDictionary *userDefaultsValuesDict = [NSMutableDictionary dictionary];
// Font defaults // Font defaults
float fFontSize = [NSFont systemFontSizeForControlSize:NSControlSizeRegular]; float fFontSize = [NSFont systemFontSizeForControlSize:NSControlSizeSmall];
NSNumber *fontSize = @(fFontSize); NSNumber *fontSize = [NSNumber numberWithFloat:fFontSize];
[userDefaultsValuesDict setObject:fontSize forKey:@"fontSize"]; [userDefaultsValuesDict setObject:fontSize forKey:@"fontSize"];
NSString *feedURLdefault = @"https://cogcdn.cog.losno.co/mercury.xml"; NSString *feedURLdefault = @"https://cogcdn.cog.losno.co/mercury.xml";
[userDefaultsValuesDict setObject:feedURLdefault forKey:@"SUFeedURL"]; [userDefaultsValuesDict setObject:feedURLdefault forKey:@"SUFeedURL"];
[userDefaultsValuesDict setObject:@"enqueueAndPlay" forKey:@"openingFilesBehavior"]; [userDefaultsValuesDict setObject:@"clearAndPlay" forKey:@"openingFilesBehavior"];
[userDefaultsValuesDict setObject:@"enqueue" forKey:@"openingFilesAlteredBehavior"]; [userDefaultsValuesDict setObject:@"enqueue" forKey:@"openingFilesAlteredBehavior"];
[userDefaultsValuesDict setObject:@"albumGainWithPeak" forKey:@"volumeScaling"]; [userDefaultsValuesDict setObject:@"albumGainWithPeak" forKey:@"volumeScaling"];
[userDefaultsValuesDict setObject:@"cubic" forKey:@"resampling"]; [userDefaultsValuesDict setObject:@"cubic" forKey:@"resampling"];
[userDefaultsValuesDict setObject:@(CogStatusStopped) forKey:@"lastPlaybackStatus"]; [userDefaultsValuesDict setObject:[NSNumber numberWithInteger:CogStatusStopped] forKey:@"lastPlaybackStatus"];
[userDefaultsValuesDict setObject:[NSNumber numberWithInteger:-1] forKey:@"lastTrackPlaying"];
[userDefaultsValuesDict setObject:[NSNumber numberWithDouble:0] forKey:@"lastTrackPosition"];
[userDefaultsValuesDict setObject:@"BASSMIDI" forKey:@"midiPlugin"]; [userDefaultsValuesDict setObject:@"dls appl" forKey:@"midiPlugin"];
[userDefaultsValuesDict setObject:@"default" forKey:@"midi.flavor"]; [userDefaultsValuesDict setObject:@"default" forKey:@"midi.flavor"];
[userDefaultsValuesDict setObject:@(NO) forKey:@"resumePlaybackOnStartup"]; [userDefaultsValuesDict setObject:[NSNumber numberWithBool:NO] forKey:@"resumePlaybackOnStartup"];
[userDefaultsValuesDict setObject:@(NO) forKey:@"quitOnNaturalStop"]; [userDefaultsValuesDict setObject:[NSNumber numberWithBool:NO] forKey:@"quitOnNaturalStop"];
[userDefaultsValuesDict setObject:@(NO) forKey:@"spectrumFreqMode"];
[userDefaultsValuesDict setObject:@(YES) forKey:@"spectrumProjectionMode"];
NSValueTransformer *colorToValueTransformer = [NSValueTransformer valueTransformerForName:@"ColorToValueTransformer"];
NSData *barColor = [colorToValueTransformer reverseTransformedValue:[NSColor colorWithSRGBRed:1.0 green:0.5 blue:0 alpha:1.0]];
NSData *dotColor = [colorToValueTransformer reverseTransformedValue:[NSColor systemRedColor]];
[userDefaultsValuesDict setObject:@(YES) forKey:@"spectrumSceneKit"];
[userDefaultsValuesDict setObject:barColor forKey:@"spectrumBarColor"];
[userDefaultsValuesDict setObject:dotColor forKey:@"spectrumDotColor"];
[userDefaultsValuesDict setObject:@(150.0) forKey:@"synthDefaultSeconds"];
[userDefaultsValuesDict setObject:@(8.0) forKey:@"synthDefaultFadeSeconds"];
[userDefaultsValuesDict setObject:@(2) forKey:@"synthDefaultLoopCount"];
[userDefaultsValuesDict setObject:@(44100) forKey:@"synthSampleRate"];
[userDefaultsValuesDict setObject:@NO forKey:@"alwaysStopAfterCurrent"];
[userDefaultsValuesDict setObject:@NO forKey:@"selectionFollowsPlayback"];
// Register and sync defaults // Register and sync defaults
[[NSUserDefaults standardUserDefaults] registerDefaults:userDefaultsValuesDict]; [[NSUserDefaults standardUserDefaults] registerDefaults:userDefaultsValuesDict];
@ -699,100 +488,9 @@ static BOOL consentLastEnabled = NO;
if([[[NSUserDefaults standardUserDefaults] stringForKey:@"midiPlugin"] isEqualToString:@"FluidSynth"]) { if([[[NSUserDefaults standardUserDefaults] stringForKey:@"midiPlugin"] isEqualToString:@"FluidSynth"]) {
[[NSUserDefaults standardUserDefaults] setValue:@"BASSMIDI" forKey:@"midiPlugin"]; [[NSUserDefaults standardUserDefaults] setValue:@"BASSMIDI" forKey:@"midiPlugin"];
} }
NSString *midiPlugin = [[NSUserDefaults standardUserDefaults] stringForKey:@"midiPlugin"];
if([midiPlugin length] == 8 && [[midiPlugin substringFromIndex:4] isEqualToString:@"appl"]) {
[[NSUserDefaults standardUserDefaults] setObject:@"BASSMIDI" forKey:@"midiPlugin"];
}
}
MASShortcut *shortcutWithMigration(NSString *oldKeyCodePrefName,
NSString *oldKeyModifierPrefName,
NSString *newShortcutPrefName,
NSInteger newDefaultKeyCode) {
NSEventModifierFlags defaultModifiers = NSEventModifierFlagControl | NSEventModifierFlagCommand;
NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
if([defaults objectForKey:oldKeyCodePrefName]) {
NSInteger oldKeyCode = [defaults integerForKey:oldKeyCodePrefName];
NSEventModifierFlags oldKeyModifiers = [defaults integerForKey:oldKeyModifierPrefName];
// Should we consider temporarily save these values for further migration?
[defaults removeObjectForKey:oldKeyCodePrefName];
[defaults removeObjectForKey:oldKeyModifierPrefName];
return [MASShortcut shortcutWithKeyCode:oldKeyCode modifierFlags:oldKeyModifiers];
} else {
return [MASShortcut shortcutWithKeyCode:newDefaultKeyCode modifierFlags:defaultModifiers];
}
}
static NSDictionary *shortcutDefaults = nil;
- (void)registerDefaultHotKeys {
MASShortcut *playShortcut = shortcutWithMigration(@"hotKeyPlayKeyCode",
@"hotKeyPlayModifiers",
CogPlayShortcutKey,
kVK_ANSI_P);
MASShortcut *nextShortcut = shortcutWithMigration(@"hotKeyNextKeyCode",
@"hotKeyNextModifiers",
CogNextShortcutKey,
kVK_ANSI_N);
MASShortcut *prevShortcut = shortcutWithMigration(@"hotKeyPreviousKeyCode",
@"hotKeyPreviousModifiers",
CogPrevShortcutKey,
kVK_ANSI_R);
MASShortcut *spamShortcut = [MASShortcut shortcutWithKeyCode:kVK_ANSI_C
modifierFlags:NSEventModifierFlagControl | NSEventModifierFlagCommand];
MASShortcut *fadeShortcut = [MASShortcut shortcutWithKeyCode:kVK_ANSI_O
modifierFlags:NSEventModifierFlagControl | NSEventModifierFlagCommand];
MASShortcut *seekBkwdShortcut = [MASShortcut shortcutWithKeyCode:kVK_LeftArrow
modifierFlags:NSEventModifierFlagControl | NSEventModifierFlagCommand];
MASShortcut *seekFwdShortcut = [MASShortcut shortcutWithKeyCode:kVK_RightArrow
modifierFlags:NSEventModifierFlagControl | NSEventModifierFlagCommand];
MASDictionaryTransformer *transformer = [MASDictionaryTransformer new];
NSDictionary *playShortcutDict = [transformer reverseTransformedValue:playShortcut];
NSDictionary *nextShortcutDict = [transformer reverseTransformedValue:nextShortcut];
NSDictionary *prevShortcutDict = [transformer reverseTransformedValue:prevShortcut];
NSDictionary *spamShortcutDict = [transformer reverseTransformedValue:spamShortcut];
NSDictionary *fadeShortcutDict = [transformer reverseTransformedValue:fadeShortcut];
NSDictionary *seekBkwdShortcutDict = [transformer reverseTransformedValue:seekBkwdShortcut];
NSDictionary *seekFwdShortcutDict = [transformer reverseTransformedValue:seekFwdShortcut];
// Register default values to be used for the first app start
NSDictionary<NSString *, NSDictionary *> *defaultShortcuts = @{
CogPlayShortcutKey: playShortcutDict,
CogNextShortcutKey: nextShortcutDict,
CogPrevShortcutKey: prevShortcutDict,
CogSpamShortcutKey: spamShortcutDict,
CogFadeShortcutKey: fadeShortcutDict,
CogSeekBackwardShortcutKey: seekBkwdShortcutDict,
CogSeekForwardShortcutKey: seekFwdShortcutDict
};
shortcutDefaults = defaultShortcuts;
[[NSUserDefaults standardUserDefaults] registerDefaults:defaultShortcuts];
}
- (IBAction)resetHotkeys:(id)sender {
[shortcutDefaults enumerateKeysAndObjectsUsingBlock:^(id _Nonnull key, id _Nonnull obj, BOOL * _Nonnull stop) {
[[NSUserDefaults standardUserDefaults] setObject:obj forKey:key];
}];
}
- (void)migrateHotKeys {
NSArray *inKeys = @[CogPlayShortcutKeyV1, CogNextShortcutKeyV1, CogPrevShortcutKeyV1, CogSpamShortcutKeyV1, CogFadeShortcutKeyV1, CogSeekBackwardShortcutKeyV1, CogSeekForwardShortcutKeyV1];
NSArray *outKeys = @[CogPlayShortcutKey, CogNextShortcutKey, CogPrevShortcutKey, CogSpamShortcutKey, CogFadeShortcutKey, CogSeekBackwardShortcutKey, CogSeekForwardShortcutKey];
for(size_t i = 0, j = [inKeys count]; i < j; ++i) {
NSString *inKey = inKeys[i];
NSString *outKey = outKeys[i];
id value = [[NSUserDefaults standardUserDefaults] objectForKey:inKey];
if(value && value != [NSNull null]) {
[[NSUserDefaults standardUserDefaults] setObject:value forKey:outKey];
[[NSUserDefaults standardUserDefaults] removeObjectForKey:inKey];
}
}
} }
/* Unassign previous handler first, so dealloc can unregister it from the global map before the new instances are assigned */
- (void)registerHotKeys { - (void)registerHotKeys {
MASShortcutBinder *binder = [MASShortcutBinder sharedBinder]; MASShortcutBinder *binder = [MASShortcutBinder sharedBinder];
[binder bindShortcutWithDefaultsKey:CogPlayShortcutKey [binder bindShortcutWithDefaultsKey:CogPlayShortcutKey
@ -814,21 +512,6 @@ static NSDictionary *shortcutDefaults = nil;
toAction:^{ toAction:^{
[self clickSpam]; [self clickSpam];
}]; }];
[binder bindShortcutWithDefaultsKey:CogFadeShortcutKey
toAction:^{
[self clickFade];
}];
[binder bindShortcutWithDefaultsKey:CogSeekBackwardShortcutKey
toAction:^{
[self clickSeekBack];
}];
[binder bindShortcutWithDefaultsKey:CogSeekForwardShortcutKey
toAction:^{
[self clickSeekForward];
}];
} }
- (void)clickPlay { - (void)clickPlay {
@ -855,26 +538,14 @@ static NSDictionary *shortcutDefaults = nil;
[playbackController spam:nil]; [playbackController spam:nil];
} }
- (void)clickFade {
[playbackController fade:nil];
}
- (void)clickSeek:(NSTimeInterval)position { - (void)clickSeek:(NSTimeInterval)position {
[playbackController seek:self toTime:position]; [playbackController seek:self toTime:position];
} }
- (void)clickSeekBack {
[playbackController seekBackward:10.0];
}
- (void)clickSeekForward {
[playbackController seekForward:10.0];
}
- (void)changeFontSize:(float)size { - (void)changeFontSize:(float)size {
NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults]; NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
float fCurrentSize = [defaults floatForKey:@"fontSize"]; float fCurrentSize = [defaults floatForKey:@"fontSize"];
NSNumber *newSize = @(fCurrentSize + size); NSNumber *newSize = [NSNumber numberWithFloat:(fCurrentSize + size)];
[defaults setObject:newSize forKey:@"fontSize"]; [defaults setObject:newSize forKey:@"fontSize"];
} }
@ -940,7 +611,7 @@ static NSDictionary *shortcutDefaults = nil;
BOOL hideItem = NO; BOOL hideItem = NO;
if([[notification name] isEqualToString:CogPlaybackDidStopNotificiation] || !pe || ![pe artist] || [[pe artist] isEqualToString:@""]) if([[notification name] isEqualToString:CogPlaybackDidStopNotficiation] || !pe || ![pe artist] || [[pe artist] isEqualToString:@""])
hideItem = YES; hideItem = YES;
if(hideItem && [dockMenu indexOfItem:currentArtistItem] == 0) { if(hideItem && [dockMenu indexOfItem:currentArtistItem] == 0) {
@ -950,37 +621,4 @@ static NSDictionary *shortcutDefaults = nil;
} }
} }
- (BOOL)pathSuggesterEmpty {
return [playlistController pathSuggesterEmpty];
}
+ (BOOL)globalPathSuggesterEmpty {
return [kAppController pathSuggesterEmpty];
}
- (void)showPathSuggester {
[preferencesController showPathSuggester:self];
}
+ (void)globalShowPathSuggester {
[kAppController showPathSuggester];
}
- (void)showRubberbandSettings:(id)sender {
[preferencesController showRubberbandSettings:sender];
}
+ (void)globalShowRubberbandSettings {
[kAppController showRubberbandSettings:kAppController];
}
- (void)selectTrack:(id)sender {
PlaylistEntry *pe = (PlaylistEntry *)sender;
@try {
[playlistView selectRowIndexes:[NSIndexSet indexSetWithIndex:pe.index] byExtendingSelection:NO];
}
@catch(NSException *e) {
}
}
@end @end

@ -13,13 +13,6 @@
@interface DockIconController : NSObject { @interface DockIconController : NSObject {
NSImage *dockImage; NSImage *dockImage;
NSInteger lastDockCustom;
NSInteger lastDockCustomPlaque;
NSInteger dockCustomLoaded;
NSImage *dockCustomStop;
NSImage *dockCustomPlay;
NSImage *dockCustomPause;
IBOutlet PlaybackController *playbackController; IBOutlet PlaybackController *playbackController;
NSInteger lastPlaybackStatus; NSInteger lastPlaybackStatus;

@ -14,32 +14,16 @@
static NSString *DockIconPlaybackStatusObservationContext = @"DockIconPlaybackStatusObservationContext"; static NSString *DockIconPlaybackStatusObservationContext = @"DockIconPlaybackStatusObservationContext";
static NSString *CogCustomDockIconsReloadNotification = @"CogCustomDockIconsReloadNotification";
- (void)startObserving { - (void)startObserving {
[playbackController addObserver:self forKeyPath:@"playbackStatus" options:(NSKeyValueObservingOptionNew | NSKeyValueObservingOptionInitial) context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)]; [playbackController addObserver:self forKeyPath:@"playbackStatus" options:(NSKeyValueObservingOptionNew | NSKeyValueObservingOptionInitial) context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[playbackController addObserver:self forKeyPath:@"progressOverall" options:(NSKeyValueObservingOptionNew | NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionOld) context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)]; [playbackController addObserver:self forKeyPath:@"progressBarStatus" options:(NSKeyValueObservingOptionNew | NSKeyValueObservingOptionInitial) context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.colorfulDockIcons" options:0 context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)]; [[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.colorfulDockIcons" options:0 context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.customDockIcons" options:0 context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.customDockIconsPlaque" options:0 context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(refreshDockIcons:) name:CogCustomDockIconsReloadNotification object:nil];
} }
- (void)stopObserving { - (void)stopObserving {
[playbackController removeObserver:self forKeyPath:@"playbackStatus" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)]; [playbackController removeObserver:self forKeyPath:@"playbackStatus"];
[playbackController removeObserver:self forKeyPath:@"progressOverall" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)]; [playbackController removeObserver:self forKeyPath:@"progressBarStatus"];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.colorfulDockIcons" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)]; [[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.colorfulDockIcons"];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.customDockIcons" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.customDockIconsPlaque" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSNotificationCenter defaultCenter] removeObserver:self name:CogCustomDockIconsReloadNotification object:nil];
}
- (void)startObservingProgress:(NSProgress *)progress {
[progress addObserver:self forKeyPath:@"fractionCompleted" options:0 context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
}
- (void)stopObservingProgress:(NSProgress *)progress {
[progress removeObserver:self forKeyPath:@"fractionCompleted" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
} }
static NSString *getBadgeName(NSString *baseName, BOOL colorfulIcons) { static NSString *getBadgeName(NSString *baseName, BOOL colorfulIcons) {
@ -50,66 +34,11 @@ static NSString *getBadgeName(NSString *baseName, BOOL colorfulIcons) {
} }
} }
static NSString *getCustomIconName(NSString *baseName) {
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSApplicationSupportDirectory, NSUserDomainMask, YES);
NSString *basePath = [[paths firstObject] stringByAppendingPathComponent:@"Cog"];
basePath = [basePath stringByAppendingPathComponent:@"Icons"];
basePath = [basePath stringByAppendingPathComponent:baseName];
return [basePath stringByAppendingPathExtension:@"png"];
}
- (BOOL)loadCustomDockIcons {
NSError *error = nil;
NSData *dataStopIcon = [NSData dataWithContentsOfFile:getCustomIconName(@"Stop") options:(NSDataReadingMappedIfSafe) error:&error];
if(!dataStopIcon || error) {
return NO;
}
NSData *dataPlayIcon = [NSData dataWithContentsOfFile:getCustomIconName(@"Play") options:(NSDataReadingMappedIfSafe) error:&error];
if(!dataPlayIcon || error) {
return NO;
}
NSData *dataPauseIcon = [NSData dataWithContentsOfFile:getCustomIconName(@"Pause") options:(NSDataReadingMappedIfSafe) error:&error];
if(!dataPauseIcon || error) {
return NO;
}
dockCustomStop = [[NSImage alloc] initWithData:dataStopIcon];
dockCustomPlay = [[NSImage alloc] initWithData:dataPlayIcon];
dockCustomPause = [[NSImage alloc] initWithData:dataPauseIcon];
return (dockCustomStop && dockCustomPlay && dockCustomPause);
}
- (void)refreshDockIcon:(NSInteger)playbackStatus withProgress:(double)progressStatus { - (void)refreshDockIcon:(NSInteger)playbackStatus withProgress:(double)progressStatus {
// Really weird crash user experienced because the plaque image didn't load?
if(!dockImage || dockImage.size.width == 0 || dockImage.size.height == 0) return;
BOOL displayChanged = NO; BOOL displayChanged = NO;
BOOL drawIcon = NO; BOOL drawIcon = NO;
BOOL removeProgress = NO; BOOL removeProgress = NO;
BOOL useCustomDockIcons = [[NSUserDefaults standardUserDefaults] boolForKey:@"customDockIcons"];
BOOL useCustomDockIconsPlaque = [[NSUserDefaults standardUserDefaults] boolForKey:@"customDockIconsPlaque"];
if(useCustomDockIcons && !dockCustomLoaded) {
dockCustomLoaded = [self loadCustomDockIcons];
if(!dockCustomLoaded) {
useCustomDockIcons = NO;
}
}
if(useCustomDockIcons != lastDockCustom ||
useCustomDockIconsPlaque != lastDockCustomPlaque) {
lastDockCustom = useCustomDockIcons;
lastDockCustomPlaque = useCustomDockIconsPlaque;
drawIcon = YES;
if(!useCustomDockIcons) {
dockCustomLoaded = NO;
dockCustomStop = nil;
dockCustomPlay = nil;
dockCustomPause = nil;
}
}
if(playbackStatus < 0) if(playbackStatus < 0)
playbackStatus = lastPlaybackStatus; playbackStatus = lastPlaybackStatus;
else { else {
@ -122,7 +51,7 @@ static NSString *getCustomIconName(NSString *baseName) {
else { else {
if(progressStatus < 0 && [lastProgressStatus doubleValue] >= 0) if(progressStatus < 0 && [lastProgressStatus doubleValue] >= 0)
removeProgress = YES; removeProgress = YES;
lastProgressStatus = @(progressStatus); lastProgressStatus = [NSNumber numberWithDouble:progressStatus];
} }
BOOL displayProgress = (progressStatus >= 0.0); BOOL displayProgress = (progressStatus >= 0.0);
@ -142,20 +71,20 @@ static NSString *getCustomIconName(NSString *baseName) {
if(drawIcon) { if(drawIcon) {
switch(playbackStatus) { switch(playbackStatus) {
case CogStatusPlaying: case CogStatusPlaying:
badgeImage = useCustomDockIcons ? dockCustomPlay : [NSImage imageNamed:getBadgeName(@"Play", colorfulIcons)]; badgeImage = [NSImage imageNamed:getBadgeName(@"Play", colorfulIcons)];
break; break;
case CogStatusPaused: case CogStatusPaused:
badgeImage = useCustomDockIcons ? dockCustomPause : [NSImage imageNamed:getBadgeName(@"Pause", colorfulIcons)]; badgeImage = [NSImage imageNamed:getBadgeName(@"Pause", colorfulIcons)];
break; break;
default: default:
badgeImage = useCustomDockIcons ? dockCustomStop : [NSImage imageNamed:getBadgeName(@"Stop", colorfulIcons)]; badgeImage = [NSImage imageNamed:getBadgeName(@"Stop", colorfulIcons)];
break; break;
} }
NSSize badgeSize = [badgeImage size]; NSSize badgeSize = [badgeImage size];
NSImage *newDockImage = (useCustomDockIcons && !useCustomDockIconsPlaque) ? [[NSImage alloc] initWithSize:NSMakeSize(1024, 1024)] : [dockImage copy]; NSImage *newDockImage = [dockImage copy];
[newDockImage lockFocus]; [newDockImage lockFocus];
[badgeImage drawInRect:NSMakeRect(0, 0, 1024, 1024) [badgeImage drawInRect:NSMakeRect(0, 0, 1024, 1024)
@ -170,7 +99,7 @@ static NSString *getCustomIconName(NSString *baseName) {
[dockTile setContentView:imageView]; [dockTile setContentView:imageView];
progressIndicator = [[NSProgressIndicator alloc] initWithFrame:NSMakeRect(0.0, 0.0, dockTile.size.width, 10.0)]; progressIndicator = [[NSProgressIndicator alloc] initWithFrame:NSMakeRect(0.0, 0.0, dockTile.size.width, 10.0)];
[progressIndicator setStyle:NSProgressIndicatorStyleBar]; [progressIndicator setStyle:NSProgressIndicatorBarStyle];
[progressIndicator setIndeterminate:NO]; [progressIndicator setIndeterminate:NO];
[progressIndicator setBezeled:YES]; [progressIndicator setBezeled:YES];
[progressIndicator setMinValue:0]; [progressIndicator setMinValue:0];
@ -222,53 +151,22 @@ static NSString *getCustomIconName(NSString *baseName) {
NSInteger playbackStatus = [[change objectForKey:NSKeyValueChangeNewKey] integerValue]; NSInteger playbackStatus = [[change objectForKey:NSKeyValueChangeNewKey] integerValue];
[self refreshDockIcon:playbackStatus withProgress:-10]; [self refreshDockIcon:playbackStatus withProgress:-10];
} else if([keyPath isEqualToString:@"progressOverall"]) { } else if([keyPath isEqualToString:@"progressBarStatus"]) {
double progressStatus = [lastProgressStatus doubleValue]; double progressStatus = [[change objectForKey:NSKeyValueChangeNewKey] doubleValue];
id objNew = [change objectForKey:NSKeyValueChangeNewKey];
id objOld = [change objectForKey:NSKeyValueChangeOldKey];
NSProgress *progressNew = nil, *progressOld = nil;
if(objNew && [objNew isKindOfClass:[NSProgress class]])
progressNew = (NSProgress *)objNew;
if(objOld && [objOld isKindOfClass:[NSProgress class]])
progressOld = (NSProgress *)objOld;
if(progressOld) {
[self stopObservingProgress:progressOld];
progressStatus = -1;
}
if(progressNew) {
[self startObservingProgress:progressNew];
progressStatus = progressNew.fractionCompleted * 100.0;
}
[self refreshDockIcon:-1 withProgress:progressStatus]; [self refreshDockIcon:-1 withProgress:progressStatus];
} else if([keyPath isEqualToString:@"values.colorfulDockIcons"] || } else if([keyPath isEqualToString:@"values.colorfulDockIcons"]) {
[keyPath isEqualToString:@"values.customDockIcons"] ||
[keyPath isEqualToString:@"values.customDockIconsPlaque"]) {
[self refreshDockIcon:-1 withProgress:-10]; [self refreshDockIcon:-1 withProgress:-10];
} else if([keyPath isEqualToString:@"fractionCompleted"]) {
double progressStatus = [(NSProgress *)object fractionCompleted];
[self refreshDockIcon:-1 withProgress:progressStatus * 100.0];
} }
} else { } else {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context]; [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
} }
} }
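The progressOverall branch above swaps key-value observation from the old NSProgress to the new one so that fractionCompleted changes keep driving the dock badge. The helper pair it calls is not shown in this hunk; a minimal sketch of what it presumably looks like (method names as used above, context pointer illustrative):

- (void)startObservingProgress:(NSProgress *)progress {
	// Fires the fractionCompleted branch of observeValueForKeyPath: as the job advances
	[progress addObserver:self forKeyPath:@"fractionCompleted" options:NSKeyValueObservingOptionNew context:NULL];
}

- (void)stopObservingProgress:(NSProgress *)progress {
	[progress removeObserver:self forKeyPath:@"fractionCompleted" context:NULL];
}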
- (void)refreshDockIcons:(NSNotification *)notification {
lastDockCustom = NO;
dockCustomLoaded = NO;
[self refreshDockIcon:-1 withProgress:-10];
}
- (void)awakeFromNib { - (void)awakeFromNib {
dockImage = [[NSImage imageNamed:@"Plaque"] copy]; dockImage = [[NSImage imageNamed:@"Plaque"] copy];
lastColorfulStatus = -1; lastColorfulStatus = -1;
lastProgressStatus = @(-1.0); lastProgressStatus = [NSNumber numberWithDouble:-1];
imageView = nil; imageView = nil;
progressIndicator = nil; progressIndicator = nil;
[self startObserving]; [self startObserving];

View file

@@ -3,6 +3,7 @@
#import <Cocoa/Cocoa.h> #import <Cocoa/Cocoa.h>
#import "AppController.h" #import "AppController.h"
#import "AudioScrobbler.h"
#import "CogAudio/AudioPlayer.h" #import "CogAudio/AudioPlayer.h"
#import "CogAudio/Status.h" #import "CogAudio/Status.h"
#import "TrackingSlider.h" #import "TrackingSlider.h"
@@ -14,21 +15,13 @@
#import "EqualizerWindowController.h" #import "EqualizerWindowController.h"
#import "PlaylistEntry.h"
#define DEFAULT_VOLUME_DOWN 5 #define DEFAULT_VOLUME_DOWN 5
#define DEFAULT_VOLUME_UP DEFAULT_VOLUME_DOWN #define DEFAULT_VOLUME_UP DEFAULT_VOLUME_DOWN
#define DEFAULT_PITCH_DOWN 0.2 extern NSString *CogPlaybackDidBeginNotficiation;
#define DEFAULT_PITCH_UP DEFAULT_PITCH_DOWN extern NSString *CogPlaybackDidPauseNotficiation;
extern NSString *CogPlaybackDidResumeNotficiation;
#define DEFAULT_TEMPO_DOWN 0.2 extern NSString *CogPlaybackDidStopNotficiation;
#define DEFAULT_TEMPO_UP DEFAULT_TEMPO_DOWN
extern NSString *CogPlaybackDidBeginNotificiation;
extern NSString *CogPlaybackDidPauseNotificiation;
extern NSString *CogPlaybackDidResumeNotificiation;
extern NSString *CogPlaybackDidStopNotificiation;
extern NSDictionary *makeRGInfo(PlaylistEntry *pe); extern NSDictionary *makeRGInfo(PlaylistEntry *pe);
@@ -46,9 +39,6 @@ extern NSDictionary *makeRGInfo(PlaylistEntry *pe);
IBOutlet EqualizerWindowController *equalizerWindowController; IBOutlet EqualizerWindowController *equalizerWindowController;
IBOutlet NSSlider *volumeSlider; IBOutlet NSSlider *volumeSlider;
IBOutlet NSSlider *pitchSlider;
IBOutlet NSSlider *tempoSlider;
IBOutlet NSButton *lockButton;
IBOutlet NSArrayController *outputDevices; IBOutlet NSArrayController *outputDevices;
@@ -63,29 +53,19 @@ extern NSDictionary *makeRGInfo(PlaylistEntry *pe);
BOOL fading; BOOL fading;
// progress bar display // progress bar display
NSProgress *progressOverall; double progressBarStatus;
NSProgress *progressJob;
AudioUnit _eq; AudioUnit _eq;
} }
@property CogStatus playbackStatus; @property CogStatus playbackStatus;
@property NSProgress *progressOverall; @property double progressBarStatus;
@property NSProgress *progressJob;
- (IBAction)changeVolume:(id)sender; - (IBAction)changeVolume:(id)sender;
- (IBAction)volumeDown:(id)sender; - (IBAction)volumeDown:(id)sender;
- (IBAction)volumeUp:(id)sender; - (IBAction)volumeUp:(id)sender;
- (IBAction)changePitch:(id)sender;
- (IBAction)pitchDown:(id)sender;
- (IBAction)pitchUp:(id)sender;
- (IBAction)changeTempo:(id)sender;
- (IBAction)tempoDown:(id)sender;
- (IBAction)tempoUp:(id)sender;
- (IBAction)playPauseResume:(id)sender; - (IBAction)playPauseResume:(id)sender;
- (IBAction)pauseResume:(id)sender; - (IBAction)pauseResume:(id)sender;
- (IBAction)skipToNextAlbum:(id)sender; - (IBAction)skipToNextAlbum:(id)sender;

View file

@@ -19,81 +19,18 @@
#import "Logging.h" #import "Logging.h"
@import Sentry;
// Sentry captureMessage is too spammy to use for anything but actual errors
extern BOOL kAppControllerShuttingDown;
@implementation NSObject (NxAdditions)
#if 0
-(void)performSelectorInBackground:(SEL)selector withObjects:(id)object, ...
{
NSMethodSignature *signature = [self methodSignatureForSelector:selector];
// Setup the invocation
NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:signature];
invocation.target = self;
invocation.selector = selector;
// Associate the arguments
va_list objects;
va_start(objects, object);
unsigned int objectCounter = 2;
for (id obj = object; obj != nil; obj = va_arg(objects, id))
{
[invocation setArgument:&obj atIndex:objectCounter++];
}
va_end(objects);
// Make sure to invoke on a background queue
NSInvocationOperation *operation = [[NSInvocationOperation alloc] initWithInvocation:invocation];
NSOperationQueue *backgroundQueue = [[NSOperationQueue alloc] init];
[backgroundQueue addOperation:operation];
}
#endif
-(void)performSelectorOnMainThread:(SEL)selector withObjects:(id)object, ...
{
NSMethodSignature *signature = [self methodSignatureForSelector:selector];
// Setup the invocation
NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:signature];
invocation.target = self;
invocation.selector = selector;
// Associate the arguments
va_list objects;
va_start(objects, object);
unsigned int objectCounter = 2;
for (id obj = object; obj != nil; obj = va_arg(objects, id))
{
[invocation setArgument:&obj atIndex:objectCounter++];
}
va_end(objects);
// Invoke on the main operation queue
NSInvocationOperation *operation = [[NSInvocationOperation alloc] initWithInvocation:invocation];
NSOperationQueue *mainQueue = [NSOperationQueue mainQueue];
[mainQueue addOperation:operation];
}
@end
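The category above packs a variable-length argument list into an NSInvocation and enqueues it on the main NSOperationQueue, which is what lets callers hand several object arguments across threads where plain performSelectorOnMainThread: only accepts one. Usage mirrors the playback calls later in this diff; for example:

[audioPlayer performSelectorOnMainThread:@selector(playBG:withUserInfo:withRGInfo:startPaused:andSeekTo:)
                             withObjects:pe.url, pe, makeRGInfo(pe), @(NO), @(0.0), nil];

The trailing nil terminates the argument list, and every argument must be an Objective-C object, hence the boxed @(NO) and @(0.0).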
@implementation PlaybackController @implementation PlaybackController
#define DEFAULT_SEEK 5 #define DEFAULT_SEEK 5
NSString *CogPlaybackDidBeginNotificiation = @"CogPlaybackDidBeginNotificiation"; NSString *CogPlaybackDidBeginNotficiation = @"CogPlaybackDidBeginNotficiation";
NSString *CogPlaybackDidPauseNotificiation = @"CogPlaybackDidPauseNotificiation"; NSString *CogPlaybackDidPauseNotficiation = @"CogPlaybackDidPauseNotficiation";
NSString *CogPlaybackDidResumeNotificiation = @"CogPlaybackDidResumeNotificiation"; NSString *CogPlaybackDidResumeNotficiation = @"CogPlaybackDidResumeNotficiation";
NSString *CogPlaybackDidStopNotificiation = @"CogPlaybackDidStopNotificiation"; NSString *CogPlaybackDidStopNotficiation = @"CogPlaybackDidStopNotficiation";
@synthesize playbackStatus; @synthesize playbackStatus;
@synthesize progressOverall; @synthesize progressBarStatus;
@synthesize progressJob;
+ (NSSet *)keyPathsForValuesAffectingSeekable { + (NSSet *)keyPathsForValuesAffectingSeekable {
return [NSSet setWithObjects:@"playlistController.currentEntry", @"playlistController.currentEntry.seekable", nil]; return [NSSet setWithObjects:@"playlistController.currentEntry", @"playlistController.currentEntry.seekable", nil];
@@ -107,8 +44,7 @@ NSString *CogPlaybackDidStopNotificiation = @"CogPlaybackDidStopNotificiation";
seekable = NO; seekable = NO;
fading = NO; fading = NO;
progressOverall = nil; progressBarStatus = -1;
progressJob = nil;
audioPlayer = [[AudioPlayer alloc] init]; audioPlayer = [[AudioPlayer alloc] init];
[audioPlayer setDelegate:self]; [audioPlayer setDelegate:self];
@@ -119,52 +55,16 @@ NSString *CogPlaybackDidStopNotificiation = @"CogPlaybackDidStopNotificiation";
} }
- (void)initDefaults { - (void)initDefaults {
NSDictionary *defaultsDictionary = @{ @"volume": @(75.0), NSDictionary *defaultsDictionary = @{ @"volume": [NSNumber numberWithDouble:75.0],
@"pitch": @(1.0), @"GraphicEQenable": [NSNumber numberWithBool:NO],
@"tempo": @(1.0), @"GraphicEQpreset": [NSNumber numberWithInt:-1],
@"speedLock": @(YES), @"GraphicEQtrackgenre": [NSNumber numberWithBool:NO],
@"GraphicEQenable": @(NO), @"volumeLimit": [NSNumber numberWithBool:YES],
@"GraphicEQpreset": @(-1), @"headphoneVirtualization": [NSNumber numberWithBool:NO] };
@"GraphicEQtrackgenre": @(NO),
@"volumeLimit": @(YES),
@"enableHrtf": @(NO),
@"enableHeadTracking": @(NO),
@"enableHDCD": @(NO),
@"rubberbandEngine": @"disabled",
@"rubberbandTransients": @"crisp",
@"rubberbandDetector": @"compound",
@"rubberbandPhase": @"laminar",
@"rubberbandWindow": @"standard",
@"rubberbandSmoothing": @"off",
@"rubberbandFormant": @"shifted",
@"rubberbandPitch": @"highspeed",
@"rubberbandChannels": @"apart"
};
[[NSUserDefaults standardUserDefaults] registerDefaults:defaultsDictionary]; [[NSUserDefaults standardUserDefaults] registerDefaults:defaultsDictionary];
} }
static double speedScale(double input, double min, double max) {
input = (input - min) * 100.0 / (max - min);
return ((input * input) * (5.0 - 0.2) / 10000.0) + 0.2;
}
static double reverseSpeedScale(double input, double min, double max) {
input = sqrtf((input - 0.2) * 10000.0 / (5.0 - 0.2));
return (input * (max - min) / 100.0) + min;
}
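speedScale() maps a linear slider position onto a quadratic 0.2x–5.0x range so the control has finer resolution near normal speed, and reverseSpeedScale() is its inverse for initializing the sliders from stored defaults. A quick worked check of the round trip, assuming (for illustration only) a slider running from 0 to 100:

double pos = reverseSpeedScale(1.0, 0.0, 100.0); // ≈ 40.8: normal speed sits at ~41% of the slider's travel
double spd = speedScale(pos, 0.0, 100.0);        // ≈ 1.0, the round trip recovers the stored value
double top = speedScale(100.0, 0.0, 100.0);      // == 5.0, matching the clamps used further down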
- (void)snapSpeeds {
double pitch = [[NSUserDefaults standardUserDefaults] doubleForKey:@"pitch"];
double tempo = [[NSUserDefaults standardUserDefaults] doubleForKey:@"tempo"];
if(fabs(pitch - 1.0) < 1e-6) {
[[NSUserDefaults standardUserDefaults] setDouble:1.0 forKey:@"pitch"];
}
if(fabs(tempo - 1.0) < 1e-6) {
[[NSUserDefaults standardUserDefaults] setDouble:1.0 forKey:@"tempo"];
}
}
- (void)awakeFromNib { - (void)awakeFromNib {
BOOL volumeLimit = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"volumeLimit"]; BOOL volumeLimit = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"volumeLimit"];
const double MAX_VOLUME = (volumeLimit) ? 100.0 : 800.0; const double MAX_VOLUME = (volumeLimit) ? 100.0 : 800.0;
@@ -174,16 +74,6 @@ static double reverseSpeedScale(double input, double min, double max) {
[volumeSlider setDoubleValue:logarithmicToLinear(volume, MAX_VOLUME)]; [volumeSlider setDoubleValue:logarithmicToLinear(volume, MAX_VOLUME)];
[audioPlayer setVolume:volume]; [audioPlayer setVolume:volume];
double pitch = [[NSUserDefaults standardUserDefaults] doubleForKey:@"pitch"];
[pitchSlider setDoubleValue:reverseSpeedScale(pitch, [pitchSlider minValue], [pitchSlider maxValue])];
double tempo = [[NSUserDefaults standardUserDefaults] doubleForKey:@"tempo"];
[tempoSlider setDoubleValue:reverseSpeedScale(tempo, [tempoSlider minValue], [tempoSlider maxValue])];
[self snapSpeeds];
BOOL speedLock = [[NSUserDefaults standardUserDefaults] boolForKey:@"speedLock"];
[lockButton setTitle:speedLock ? @"🔒" : @"🔓"];
[self setSeekable:NO]; [self setSeekable:NO];
} }
@@ -205,11 +95,6 @@ static double reverseSpeedScale(double input, double min, double max) {
} }
- (IBAction)pause:(id)sender { - (IBAction)pause:(id)sender {
if(![self seekable]) {
[self stop:sender];
return;
}
[[NSUserDefaults standardUserDefaults] setInteger:CogStatusPaused forKey:@"lastPlaybackStatus"]; [[NSUserDefaults standardUserDefaults] setInteger:CogStatusPaused forKey:@"lastPlaybackStatus"];
[audioPlayer pause]; [audioPlayer pause];
@@ -238,13 +123,13 @@ static double reverseSpeedScale(double input, double min, double max) {
// called by double-clicking on table // called by double-clicking on table
- (void)playEntryAtIndex:(NSInteger)i { - (void)playEntryAtIndex:(NSInteger)i {
[self playEntryAtIndex:i startPaused:NO andSeekTo:@(0.0)]; [self playEntryAtIndex:i startPaused:NO andSeekTo:[NSNumber numberWithDouble:0.0]];
} }
- (void)playEntryAtIndex:(NSInteger)i startPaused:(BOOL)paused { - (void)playEntryAtIndex:(NSInteger)i startPaused:(BOOL)paused {
PlaylistEntry *pe = [playlistController entryAtIndex:i]; PlaylistEntry *pe = [playlistController entryAtIndex:i];
[self playEntry:pe startPaused:paused andSeekTo:@(0.0)]; [self playEntry:pe startPaused:paused andSeekTo:[NSNumber numberWithDouble:0.0]];
} }
- (void)playEntryAtIndex:(NSInteger)i startPaused:(BOOL)paused andSeekTo:(id)offset { - (void)playEntryAtIndex:(NSInteger)i startPaused:(BOOL)paused andSeekTo:(id)offset {
@@ -273,40 +158,31 @@ static double reverseSpeedScale(double input, double min, double max) {
NSDictionary *makeRGInfo(PlaylistEntry *pe) { NSDictionary *makeRGInfo(PlaylistEntry *pe) {
NSMutableDictionary *dictionary = [NSMutableDictionary dictionary]; NSMutableDictionary *dictionary = [NSMutableDictionary dictionary];
if(pe.replayGainAlbumGain != 0) if([pe replayGainAlbumGain] != 0)
[dictionary setObject:@(pe.replayGainAlbumGain) forKey:@"replayGainAlbumGain"]; [dictionary setObject:[NSNumber numberWithFloat:[pe replayGainAlbumGain]] forKey:@"replayGainAlbumGain"];
if(pe.replayGainAlbumPeak != 0) if([pe replayGainAlbumPeak] != 0)
[dictionary setObject:@(pe.replayGainAlbumPeak) forKey:@"replayGainAlbumPeak"]; [dictionary setObject:[NSNumber numberWithFloat:[pe replayGainAlbumPeak]] forKey:@"replayGainAlbumPeak"];
if(pe.replayGainTrackGain != 0) if([pe replayGainTrackGain] != 0)
[dictionary setObject:@(pe.replayGainTrackGain) forKey:@"replayGainTrackGain"]; [dictionary setObject:[NSNumber numberWithFloat:[pe replayGainTrackGain]] forKey:@"replayGainTrackGain"];
if(pe.replayGainTrackPeak != 0) if([pe replayGainTrackPeak] != 0)
[dictionary setObject:@(pe.replayGainTrackPeak) forKey:@"replayGainTrackPeak"]; [dictionary setObject:[NSNumber numberWithFloat:[pe replayGainTrackPeak]] forKey:@"replayGainTrackPeak"];
if(pe.volume != 1) if([pe volume] != 1)
[dictionary setObject:@(pe.volume) forKey:@"volume"]; [dictionary setObject:[NSNumber numberWithFloat:[pe volume]] forKey:@"volume"];
return dictionary; return dictionary;
} }
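makeRGInfo() forwards only the ReplayGain fields that are actually set; how the audio engine weighs them is outside this hunk, but the standard ReplayGain relationship between a stored gain in decibels and the linear factor applied to samples is the usual dB-to-amplitude conversion. A sketch of that conversion (not taken from this codebase):

static double replayGainScale(double gainDB) {
	// 20 * log10(scale) = gainDB  =>  scale = 10^(gainDB / 20)
	return pow(10.0, gainDB / 20.0); // -6 dB ≈ 0.501, +3 dB ≈ 1.413
}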
- (void)playEntry:(PlaylistEntry *)pe { - (void)playEntry:(PlaylistEntry *)pe {
[self playEntry:pe startPaused:NO andSeekTo:@(0.0)]; [self playEntry:pe startPaused:NO andSeekTo:[NSNumber numberWithDouble:0.0]];
} }
- (void)playEntry:(PlaylistEntry *)pe startPaused:(BOOL)paused { - (void)playEntry:(PlaylistEntry *)pe startPaused:(BOOL)paused {
[self playEntry:pe startPaused:paused andSeekTo:@(0.0)]; [self playEntry:pe startPaused:paused andSeekTo:[NSNumber numberWithDouble:0.0]];
} }
- (void)playEntry:(PlaylistEntry *)pe startPaused:(BOOL)paused andSeekTo:(id)offset { - (void)playEntry:(PlaylistEntry *)pe startPaused:(BOOL)paused andSeekTo:(id)offset {
if(playbackStatus != CogStatusStopped && playbackStatus != CogStatusStopping) if(playbackStatus != CogStatusStopped && playbackStatus != CogStatusStopping)
[self stop:self]; [self stop:self];
if(!pe.url) {
pe.error = YES;
pe.errorMessage = NSLocalizedStringFromTableInBundle(@"ErrorMessageBadFile", nil, [NSBundle bundleForClass:[self class]], @"");
[SentrySDK captureMessage:@"Attempted to play a bad file with no URL"];
return;
}
//[SentrySDK captureMessage:[NSString stringWithFormat:@"Playing track: %@", pe.url]];
DLog(@"PLAYLIST CONTROLLER: %@", [playlistController class]); DLog(@"PLAYLIST CONTROLLER: %@", [playlistController class]);
[playlistController setCurrentEntry:pe]; [playlistController setCurrentEntry:pe];
@@ -318,14 +194,14 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
return; return;
BOOL loadData = YES; BOOL loadData = YES;
NSString *urlScheme = [pe.url scheme]; NSString *urlScheme = [[pe URL] scheme];
if([urlScheme isEqualToString:@"http"] || if([urlScheme isEqualToString:@"http"] ||
[urlScheme isEqualToString:@"https"]) [urlScheme isEqualToString:@"https"])
loadData = NO; loadData = NO;
#if 0 #if 0
// Race here, but the worst that could happen is we re-read the data // Race here, but the worst that could happen is we re-read the data
if([pe metadataLoaded] != YES) { if ([pe metadataLoaded] != YES) {
[pe performSelectorOnMainThread:@selector(setMetadata:) withObject:[playlistLoader readEntryInfo:pe] waitUntilDone:YES]; [pe performSelectorOnMainThread:@selector(setMetadata:) withObject:[playlistLoader readEntryInfo:pe] waitUntilDone:YES];
} }
#elif 0 #elif 0
@@ -344,9 +220,9 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
[self sendMetaData]; [self sendMetaData];
double seekTime = pe.seekable ? [offset doubleValue] : 0.0; double seekTime = [pe seekable] ? [offset doubleValue] : 0.0;
[audioPlayer performSelectorOnMainThread:@selector(playBG:withUserInfo:withRGInfo:startPaused:andSeekTo:) withObjects:pe.url, pe, makeRGInfo(pe), @(paused), @(seekTime), nil]; [audioPlayer play:[pe URL] withUserInfo:pe withRGInfo:makeRGInfo(pe) startPaused:paused andSeekTo:seekTime];
} }
- (IBAction)next:(id)sender { - (IBAction)next:(id)sender {
@@ -357,12 +233,8 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
} }
- (IBAction)prev:(id)sender { - (IBAction)prev:(id)sender {
double pos = [audioPlayer amountPlayed];
if(pos < 5.0) {
if([playlistController prev] == NO) if([playlistController prev] == NO)
return; return;
}
[self playEntry:[playlistController currentEntry]]; [self playEntry:[playlistController currentEntry]];
} }
@@ -372,30 +244,19 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
[self setPosition:pos]; [self setPosition:pos];
if(!kAppControllerShuttingDown) { [[playlistController currentEntry] setCurrentPosition:pos];
PlaylistEntry *pe = [playlistController currentEntry];
if(pe) pe.currentPosition = pos;
}
} }
- (IBAction)seek:(id)sender { - (IBAction)seek:(id)sender {
if(![sender respondsToSelector:@selector(doubleValue)]) {
ALog(@"Someone sent [PlaybackController seek:] a non-seekbar object: %@", sender);
return;
}
double time = [sender doubleValue]; double time = [sender doubleValue];
[audioPlayer performSelectorOnMainThread:@selector(seekToTimeBG:) withObjects:@(time), nil]; [audioPlayer seekToTime:time];
lastPosition = -10; lastPosition = -10;
[self setPosition:time]; [self setPosition:time];
if(!kAppControllerShuttingDown) { [[playlistController currentEntry] setCurrentPosition:time];
PlaylistEntry *pe = [playlistController currentEntry];
if(pe) pe.currentPosition = time;
}
} }
- (IBAction)seek:(id)sender toTime:(NSTimeInterval)position { - (IBAction)seek:(id)sender toTime:(NSTimeInterval)position {
@@ -403,25 +264,19 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
lastPosition = -10; lastPosition = -10;
[audioPlayer performSelectorOnMainThread:@selector(seekToTimeBG:) withObjects:@(time), nil]; [audioPlayer seekToTime:time];
[self setPosition:time]; [self setPosition:time];
if(!kAppControllerShuttingDown) { [[playlistController currentEntry] setCurrentPosition:time];
PlaylistEntry *pe = [playlistController currentEntry];
if(pe) pe.currentPosition = time;
}
} }
- (IBAction)spam:(id)sender { - (IBAction)spam:(id)sender {
PlaylistEntry *pe = [playlistController currentEntry];
if(pe) {
NSPasteboard *pboard = [NSPasteboard generalPasteboard]; NSPasteboard *pboard = [NSPasteboard generalPasteboard];
[pboard clearContents]; [pboard clearContents];
[pboard writeObjects:@[[pe spam]]]; [pboard writeObjects:@[[[playlistController currentEntry] spam]]];
}
} }
- (IBAction)eventSeekForward:(id)sender { - (IBAction)eventSeekForward:(id)sender {
@@ -429,17 +284,15 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
} }
- (void)seekForward:(double)amount { - (void)seekForward:(double)amount {
if([self seekable]) {
double seekTo = [audioPlayer amountPlayed] + amount; double seekTo = [audioPlayer amountPlayed] + amount;
if(seekTo > [[[playlistController currentEntry] length] doubleValue]) { if(seekTo > [[[playlistController currentEntry] length] doubleValue]) {
[self next:self]; [self next:self];
} else { } else {
lastPosition = -10; lastPosition = -10;
[audioPlayer performSelectorOnMainThread:@selector(seekToTimeBG:) withObjects:@(seekTo), nil]; [audioPlayer seekToTime:seekTo];
[self setPosition:seekTo]; [self setPosition:seekTo];
} }
}
} }
- (IBAction)eventSeekBackward:(id)sender { - (IBAction)eventSeekBackward:(id)sender {
@@ -447,7 +300,6 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
} }
- (void)seekBackward:(double)amount { - (void)seekBackward:(double)amount {
if([self seekable]) {
double seekTo = [audioPlayer amountPlayed] - amount; double seekTo = [audioPlayer amountPlayed] - amount;
if(seekTo < 0) if(seekTo < 0)
@@ -455,9 +307,8 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
lastPosition = -10; lastPosition = -10;
[audioPlayer performSelectorOnMainThread:@selector(seekToTimeBG:) withObjects:@(seekTo), nil]; [audioPlayer seekToTime:seekTo];
[self setPosition:seekTo]; [self setPosition:seekTo];
}
} }
/* /*
@@ -466,7 +317,7 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
NSImage *img = [NSImage imageNamed:name]; NSImage *img = [NSImage imageNamed:name];
// [img retain]; // [img retain];
if(img == nil) if (img == nil)
{ {
DLog(@"Error loading image!"); DLog(@"Error loading image!");
} }
@@ -545,7 +396,7 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
return; return;
fading = YES; fading = YES;
NSNumber *originalVolume = @([audioPlayer volume]); NSNumber *originalVolume = [NSNumber numberWithDouble:[audioPlayer volume]];
NSTimer *fadeTimer; NSTimer *fadeTimer;
if(playbackStatus == CogStatusPlaying) { if(playbackStatus == CogStatusPlaying) {
@@ -567,32 +418,6 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
} }
} }
- (IBAction)changePitch:(id)sender {
const double pitch = speedScale([sender doubleValue], [pitchSlider minValue], [pitchSlider maxValue]);
DLog(@"PITCH: %lf", pitch);
[[NSUserDefaults standardUserDefaults] setDouble:pitch forKey:@"pitch"];
if([[NSUserDefaults standardUserDefaults] boolForKey:@"speedLock"]) {
[[NSUserDefaults standardUserDefaults] setDouble:pitch forKey:@"tempo"];
}
[self snapSpeeds];
}
- (IBAction)changeTempo:(id)sender {
const double tempo = speedScale([sender doubleValue], [tempoSlider minValue], [tempoSlider maxValue]);
DLog(@"TEMPO: %lf", tempo);
[[NSUserDefaults standardUserDefaults] setDouble:tempo forKey:@"tempo"];
if([[NSUserDefaults standardUserDefaults] boolForKey:@"speedLock"]) {
[[NSUserDefaults standardUserDefaults] setDouble:tempo forKey:@"pitch"];
}
[self snapSpeeds];
}
- (IBAction)skipToNextAlbum:(id)sender { - (IBAction)skipToNextAlbum:(id)sender {
BOOL found = NO; BOOL found = NO;
@@ -694,75 +519,8 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
[[NSUserDefaults standardUserDefaults] setDouble:[audioPlayer volume] forKey:@"volume"]; [[NSUserDefaults standardUserDefaults] setDouble:[audioPlayer volume] forKey:@"volume"];
} }
- (IBAction)pitchDown:(id)sender {
double pitch = speedScale([pitchSlider doubleValue], [pitchSlider minValue], [pitchSlider maxValue]);
double newPitch = pitch - DEFAULT_PITCH_DOWN;
if(newPitch < 0.2) {
newPitch = 0.2;
}
[pitchSlider setDoubleValue:reverseSpeedScale(newPitch, [pitchSlider minValue], [pitchSlider maxValue])];
[[NSUserDefaults standardUserDefaults] setDouble:newPitch forKey:@"pitch"];
if([[NSUserDefaults standardUserDefaults] boolForKey:@"speedLock"]) {
[tempoSlider setDoubleValue:reverseSpeedScale(newPitch, [tempoSlider minValue], [tempoSlider maxValue])];
[[NSUserDefaults standardUserDefaults] setDouble:newPitch forKey:@"tempo"];
}
}
- (IBAction)pitchUp:(id)sender {
double pitch = speedScale([pitchSlider doubleValue], [pitchSlider minValue], [pitchSlider maxValue]);
double newPitch = pitch + DEFAULT_PITCH_UP;
if(newPitch > 5.0) {
newPitch = 5.0;
}
[pitchSlider setDoubleValue:reverseSpeedScale(newPitch, [pitchSlider minValue], [pitchSlider maxValue])];
[[NSUserDefaults standardUserDefaults] setDouble:newPitch forKey:@"pitch"];
if([[NSUserDefaults standardUserDefaults] boolForKey:@"speedLock"]) {
[tempoSlider setDoubleValue:reverseSpeedScale(newPitch, [tempoSlider minValue], [tempoSlider maxValue])];
[[NSUserDefaults standardUserDefaults] setDouble:newPitch forKey:@"tempo"];
}
}
- (IBAction)tempoDown:(id)sender {
double tempo = speedScale([tempoSlider doubleValue], [tempoSlider minValue], [tempoSlider maxValue]);
double newTempo = tempo - DEFAULT_TEMPO_DOWN;
if(newTempo < 0.2) {
newTempo = 0.2;
}
[tempoSlider setDoubleValue:reverseSpeedScale(newTempo, [tempoSlider minValue], [tempoSlider maxValue])];
[[NSUserDefaults standardUserDefaults] setDouble:newTempo forKey:@"tempo"];
if([[NSUserDefaults standardUserDefaults] boolForKey:@"speedLock"]) {
[pitchSlider setDoubleValue:reverseSpeedScale(newTempo, [pitchSlider minValue], [pitchSlider maxValue])];
[[NSUserDefaults standardUserDefaults] setDouble:newTempo forKey:@"pitch"];
}
}
- (IBAction)tempoUp:(id)sender {
double tempo = speedScale([tempoSlider doubleValue], [tempoSlider minValue], [tempoSlider maxValue]);
double newTempo = tempo + DEFAULT_TEMPO_UP;
if(newTempo > 5.0) {
newTempo = 5.0;
}
[tempoSlider setDoubleValue:reverseSpeedScale(newTempo, [tempoSlider minValue], [tempoSlider maxValue])];
[[NSUserDefaults standardUserDefaults] setDouble:newTempo forKey:@"tempo"];
if([[NSUserDefaults standardUserDefaults] boolForKey:@"speedLock"]) {
[pitchSlider setDoubleValue:reverseSpeedScale(newTempo, [pitchSlider minValue], [pitchSlider maxValue])];
[[NSUserDefaults standardUserDefaults] setDouble:newTempo forKey:@"pitch"];
}
}
- (void)audioPlayer:(AudioPlayer *)player displayEqualizer:(AudioUnit)eq { - (void)audioPlayer:(AudioPlayer *)player displayEqualizer:(AudioUnit)eq {
if(_eq && _eq != eq) { if(_eq && _eq != eq) {
[equalizerWindowController setEQ:nil]; [equalizerWindowController setEQ:nil];
} }
@@ -780,6 +538,19 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
- (void)audioPlayer:(AudioPlayer *)player removeEqualizer:(AudioUnit)eq { - (void)audioPlayer:(AudioPlayer *)player removeEqualizer:(AudioUnit)eq {
if(eq == _eq) { if(eq == _eq) {
OSStatus err;
CFPropertyListRef classData;
UInt32 size;
size = sizeof(classData);
err = AudioUnitGetProperty(eq, kAudioUnitProperty_ClassInfo, kAudioUnitScope_Global, 0, &classData, &size);
if(err == noErr) {
CFPreferencesSetAppValue(CFSTR("GraphEQ_Preset"), classData, kCFPreferencesCurrentApplication);
CFRelease(classData);
}
CFPreferencesAppSynchronize(kCFPreferencesCurrentApplication);
[equalizerWindowController setEQ:nil]; [equalizerWindowController setEQ:nil];
_eq = nil; _eq = nil;
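The added block above snapshots the graphic EQ's full state via kAudioUnitProperty_ClassInfo and stores it in CFPreferences when the unit is torn down. The matching restore path is not part of this hunk; a hedged sketch of what it would look like when a new unit is created, using the same preference key:

CFPropertyListRef classData = CFPreferencesCopyAppValue(CFSTR("GraphEQ_Preset"), kCFPreferencesCurrentApplication);
if(classData) {
	// Writing ClassInfo back restores band gains and the selected preset in one shot
	AudioUnitSetProperty(eq, kAudioUnitProperty_ClassInfo, kAudioUnitScope_Global, 0, &classData, sizeof(classData));
	CFRelease(classData);
}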
@@ -800,45 +571,27 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
} }
} }
if(pe && pe.url) { if(pe)
//[SentrySDK captureMessage:[NSString stringWithFormat:@"Beginning decoding track: %@", pe.url]]; [player setNextStream:[pe URL] withUserInfo:pe withRGInfo:makeRGInfo(pe)];
[player setNextStream:pe.url withUserInfo:pe withRGInfo:makeRGInfo(pe)]; else
} else if(pe) {
[SentrySDK captureMessage:@"Invalid playlist entry reached"];
[player setNextStream:nil]; [player setNextStream:nil];
pe.error = YES;
pe.errorMessage = NSLocalizedStringFromTableInBundle(@"ErrorMessageBadFile", nil, [NSBundle bundleForClass:[self class]], @"");
} else {
//[SentrySDK captureMessage:@"End of playlist reached"];
[player setNextStream:nil];
}
} }
- (void)audioPlayer:(AudioPlayer *)player didBeginStream:(id)userInfo { - (void)audioPlayer:(AudioPlayer *)player didBeginStream:(id)userInfo {
PlaylistEntry *pe = (PlaylistEntry *)userInfo; PlaylistEntry *pe = (PlaylistEntry *)userInfo;
// Delay the action until this function has returned to the audio thread [playlistController setCurrentEntry:pe];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 5 * NSEC_PER_MSEC), dispatch_get_main_queue(), ^{
if(pe) {
//[SentrySDK captureMessage:[NSString stringWithFormat:@"Updating UI with track: %@", pe.url]];
}
[self->playlistController setCurrentEntry:pe]; if(_eq)
equalizerApplyGenre(_eq, [pe genre]);
if(pe && self->_eq) { lastPosition = -10;
equalizerApplyGenre(self->_eq, [pe genre]);
}
self->lastPosition = -10;
[self setPosition:0]; [self setPosition:0];
[self removeHDCD:nil]; [self removeHDCD:nil];
});
if(pe) { [[NSNotificationCenter defaultCenter] postNotificationName:CogPlaybackDidBeginNotficiation object:pe];
[[NSNotificationCenter defaultCenter] postNotificationName:CogPlaybackDidBeginNotificiation object:pe];
}
} }
- (void)audioPlayer:(AudioPlayer *)player didChangeStatus:(NSNumber *)s userInfo:(id)userInfo { - (void)audioPlayer:(AudioPlayer *)player didChangeStatus:(NSNumber *)s userInfo:(id)userInfo {
@@ -851,26 +604,21 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
} }
if(status == CogStatusStopped) { if(status == CogStatusStopped) {
//[SentrySDK captureMessage:@"Playback stopped"];
[self setPosition:0]; [self setPosition:0];
[self setSeekable:NO]; // the player stopped, disable the slider [self setSeekable:NO]; // the player stopped, disable the slider
[[NSNotificationCenter defaultCenter] postNotificationName:CogPlaybackDidStopNotificiation object:nil]; [[NSNotificationCenter defaultCenter] postNotificationName:CogPlaybackDidStopNotficiation object:nil];
} else // paused } else // paused
{ {
//[SentrySDK captureMessage:@"Playback paused"]; [[NSNotificationCenter defaultCenter] postNotificationName:CogPlaybackDidPauseNotficiation object:nil];
[[NSNotificationCenter defaultCenter] postNotificationName:CogPlaybackDidPauseNotificiation object:nil];
} }
} else if(status == CogStatusPlaying) { } else if(status == CogStatusPlaying) {
//[SentrySDK captureMessage:@"Playback started"];
if(!positionTimer) { if(!positionTimer) {
positionTimer = [NSTimer timerWithTimeInterval:0.2 target:self selector:@selector(updatePosition:) userInfo:nil repeats:YES]; positionTimer = [NSTimer timerWithTimeInterval:0.2 target:self selector:@selector(updatePosition:) userInfo:nil repeats:YES];
[[NSRunLoop currentRunLoop] addTimer:positionTimer forMode:NSRunLoopCommonModes]; [[NSRunLoop currentRunLoop] addTimer:positionTimer forMode:NSRunLoopCommonModes];
} }
[[NSNotificationCenter defaultCenter] postNotificationName:CogPlaybackDidResumeNotificiation object:nil]; [[NSNotificationCenter defaultCenter] postNotificationName:CogPlaybackDidResumeNotficiation object:nil];
} }
if(status == CogStatusStopped) { if(status == CogStatusStopped) {
@@ -892,6 +640,14 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
break; break;
} }
if(status == CogStatusStopped) {
status = CogStatusStopping;
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 3 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
if([self playbackStatus] == CogStatusStopping)
[self setPlaybackStatus:CogStatusStopped];
});
}
[self setPlaybackStatus:status]; [self setPlaybackStatus:status];
// If we don't send it here, if we've stopped, then the NPIC will be stuck at the last file we played. // If we don't send it here, if we've stopped, then the NPIC will be stuck at the last file we played.
[self sendMetaData]; [self sendMetaData];
@@ -899,7 +655,6 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
- (void)audioPlayer:(AudioPlayer *)player didStopNaturally:(id)userInfo { - (void)audioPlayer:(AudioPlayer *)player didStopNaturally:(id)userInfo {
if([[NSUserDefaults standardUserDefaults] boolForKey:@"quitOnNaturalStop"]) { if([[NSUserDefaults standardUserDefaults] boolForKey:@"quitOnNaturalStop"]) {
//[SentrySDK captureMessage:@"Playback stopped naturally, terminating app"];
[NSApp terminate:nil]; [NSApp terminate:nil];
} }
} }
@@ -914,37 +669,16 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
- (void)audioPlayer:(AudioPlayer *)player restartPlaybackAtCurrentPosition:(id)userInfo { - (void)audioPlayer:(AudioPlayer *)player restartPlaybackAtCurrentPosition:(id)userInfo {
PlaylistEntry *pe = [playlistController currentEntry]; PlaylistEntry *pe = [playlistController currentEntry];
BOOL paused = playbackStatus == CogStatusPaused; BOOL paused = playbackStatus == CogStatusPaused;
//[SentrySDK captureMessage:[NSString stringWithFormat:@"Playback restarting for track: %@", pe.url]]; [player play:[pe URL] withUserInfo:pe withRGInfo:makeRGInfo(pe) startPaused:paused andSeekTo:[pe seekable] ? [pe currentPosition] : 0.0];
[player performSelectorOnMainThread:@selector(playBG:withUserInfo:withRGInfo:startPaused:andSeekTo:) withObjects:pe.url, pe, makeRGInfo(pe), @(paused), @(pe.seekable ? pe.currentPosition : 0.0), nil];
} }
- (void)audioPlayer:(AudioPlayer *)player pushInfo:(NSDictionary *)info toTrack:(id)userInfo { - (void)audioPlayer:(AudioPlayer *)player pushInfo:(NSDictionary *)info toTrack:(id)userInfo {
PlaylistEntry *pe = (PlaylistEntry *)userInfo; PlaylistEntry *pe = (PlaylistEntry *)userInfo;
if(!pe) pe = [playlistController currentEntry]; if (!pe) pe = [playlistController currentEntry];
[pe setMetadata:info]; [pe setMetadata:info];
[playlistView refreshTrack:pe]; [playlistView refreshCurrentTrack:self];
// Delay the action until this function has returned to the audio thread
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 50 * NSEC_PER_MSEC), dispatch_get_main_queue(), ^{
self->playlistController.currentEntry = pe;
[self sendMetaData]; [self sendMetaData];
[[NSNotificationCenter defaultCenter] postNotificationName:CogPlaybackDidBeginNotificiation object:pe]; [[NSNotificationCenter defaultCenter] postNotificationName:CogPlaybackDidBeginNotficiation object:pe];
});
}
- (void)audioPlayer:(AudioPlayer *)player reportPlayCountForTrack:(id)userInfo {
if(userInfo) {
PlaylistEntry *pe = (PlaylistEntry *)userInfo;
[playlistController updatePlayCountForTrack:pe];
}
}
- (void)audioPlayer:(AudioPlayer *)player updatePosition:(id)userInfo {
if(userInfo) {
PlaylistEntry *pe = (PlaylistEntry *)userInfo;
if([pe current]) {
[self updatePosition:userInfo];
}
}
} }
- (void)audioPlayer:(AudioPlayer *)player setError:(NSNumber *)status toTrack:(id)userInfo { - (void)audioPlayer:(AudioPlayer *)player setError:(NSNumber *)status toTrack:(id)userInfo {
@@ -964,23 +698,23 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
} }
- (void)setPosition:(double)p { - (void)setPosition:(double)p {
position = p;
if(kAppControllerShuttingDown) return;
PlaylistEntry *pe = [playlistController currentEntry];
if(pe) pe.currentPosition = p;
if(p > lastPosition && (p - lastPosition) >= 10.0) { if(p > lastPosition && (p - lastPosition) >= 10.0) {
PlaylistEntry *pe = [playlistController currentEntry];
NSInteger lastTrackPlaying = [pe index];
[[NSUserDefaults standardUserDefaults] setInteger:CogStatusPlaying forKey:@"lastPlaybackStatus"]; [[NSUserDefaults standardUserDefaults] setInteger:CogStatusPlaying forKey:@"lastPlaybackStatus"];
[[NSUserDefaults standardUserDefaults] setInteger:lastTrackPlaying forKey:@"lastTrackPlaying"];
[[NSUserDefaults standardUserDefaults] setDouble:p forKey:@"lastTrackPosition"];
// If we handle this here, then it will send on all seek operations, which also reset lastPosition // If we handle this here, then it will send on all seek operations, which also reset lastPosition
[self sendMetaData]; [self sendMetaData];
lastPosition = p; lastPosition = p;
[playlistController commitPersistentStore];
} }
position = p;
[[playlistController currentEntry] setCurrentPosition:p];
} }
- (double)position { - (double)position {
@@ -1002,38 +736,36 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
NSMutableDictionary *songInfo = [[NSMutableDictionary alloc] init]; NSMutableDictionary *songInfo = [[NSMutableDictionary alloc] init];
if(entry) { if(entry) {
if(entry.title && [entry.title length]) if([entry title])
[songInfo setObject:entry.title forKey:MPMediaItemPropertyTitle]; [songInfo setObject:[entry title] forKey:MPMediaItemPropertyTitle];
if(entry.artist && [entry.artist length]) if([entry artist])
[songInfo setObject:entry.artist forKey:MPMediaItemPropertyArtist]; [songInfo setObject:[entry artist] forKey:MPMediaItemPropertyArtist];
if(entry.album && [entry.album length]) if([entry album])
[songInfo setObject:entry.album forKey:MPMediaItemPropertyAlbumTitle]; [songInfo setObject:[entry album] forKey:MPMediaItemPropertyAlbumTitle];
if(entry.albumArt) { if([entry albumArt]) {
// can't do && with @available // can't do && with @available
if(@available(macOS 10.13.2, *)) { if(@available(macOS 10.13.2, *)) {
CGSize artworkSize = CGSizeMake(500, 500); CGSize artworkSize = CGSizeMake(500, 500);
MPMediaItemArtwork *mpArtwork = [[MPMediaItemArtwork alloc] initWithBoundsSize:artworkSize MPMediaItemArtwork *mpArtwork = [[MPMediaItemArtwork alloc] initWithBoundsSize:artworkSize
requestHandler:^NSImage *_Nonnull(CGSize size) { requestHandler:^NSImage *_Nonnull(CGSize size) {
return entry.albumArt; return [entry albumArt];
}]; }];
[songInfo setObject:mpArtwork forKey:MPMediaItemPropertyArtwork]; [songInfo setObject:mpArtwork forKey:MPMediaItemPropertyArtwork];
} }
} }
// I don't know what NPIC does with these since they aren't exposed in UI, but if we have them, use it. // I don't know what NPIC does with these since they aren't exposed in UI, but if we have them, use it.
// There's a bunch of other metadata, but PlaylistEntry can't represent a lot of it. // There's a bunch of other metadata, but PlaylistEntry can't represent a lot of it.
if(entry.genre && [entry.genre length]) if([entry genre])
[songInfo setObject:entry.genre forKey:MPMediaItemPropertyGenre]; [songInfo setObject:[entry genre] forKey:MPMediaItemPropertyGenre];
if(entry.year) { if([entry year]) {
// If PlaylistEntry can represent a full date like some tag formats can do, change it // If PlaylistEntry can represent a full date like some tag formats can do, change it
NSCalendar *calendar = [NSCalendar currentCalendar]; NSCalendar *calendar = [NSCalendar currentCalendar];
NSDate *releaseYear = [calendar dateWithEra:1 year:entry.year month:1 day:1 hour:0 minute:0 second:0 nanosecond:0]; NSDate *releaseYear = [calendar dateWithEra:1 year:[[entry year] intValue] month:0 day:0 hour:0 minute:0 second:0 nanosecond:0];
if(releaseYear) {
[songInfo setObject:releaseYear forKey:MPMediaItemPropertyReleaseDate]; [songInfo setObject:releaseYear forKey:MPMediaItemPropertyReleaseDate];
} }
} [songInfo setObject:[NSNumber numberWithFloat:[entry currentPosition]] forKey:MPNowPlayingInfoPropertyElapsedPlaybackTime];
[songInfo setObject:@(entry.currentPosition) forKey:MPNowPlayingInfoPropertyElapsedPlaybackTime]; [songInfo setObject:[entry length] forKey:MPMediaItemPropertyPlaybackDuration];
[songInfo setObject:entry.length forKey:MPMediaItemPropertyPlaybackDuration]; [songInfo setObject:[NSNumber numberWithFloat:[entry index]] forKey:MPMediaItemPropertyPersistentID];
[songInfo setObject:@(entry.index) forKey:MPMediaItemPropertyPersistentID];
} }
switch(playbackStatus) { switch(playbackStatus) {
@@ -1043,6 +775,7 @@ NSDictionary *makeRGInfo(PlaylistEntry *pe) {
case CogStatusPaused: case CogStatusPaused:
defaultCenter.playbackState = MPNowPlayingPlaybackStatePaused; defaultCenter.playbackState = MPNowPlayingPlaybackStatePaused;
break; break;
default: default:
defaultCenter.playbackState = MPNowPlayingPlaybackStateStopped; defaultCenter.playbackState = MPNowPlayingPlaybackStateStopped;
break; break;
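The songInfo dictionary assembled above still has to reach the system's Now Playing widget; the method presumably ends by assigning it to the same defaultCenter used for the playback state, along the lines of:

defaultCenter.nowPlayingInfo = songInfo; // assigning nil instead clears the Now Playing entry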

View file

@@ -10,6 +10,9 @@
#import <UserNotifications/UserNotifications.h> #import <UserNotifications/UserNotifications.h>
#import "PlaybackController.h" #import "PlaybackController.h"
#import "PlaylistEntry.h"
@class AudioScrobbler;
@interface PlaybackEventController @interface PlaybackEventController
: NSObject <NSUserNotificationCenterDelegate, UNUserNotificationCenterDelegate> { : NSObject <NSUserNotificationCenterDelegate, UNUserNotificationCenterDelegate> {

View file

@@ -7,9 +7,8 @@
#import "PlaybackEventController.h" #import "PlaybackEventController.h"
#import "PlaylistEntry.h" #import "AudioScrobbler.h"
#if 0
NSString *TrackNotification = @"com.apple.iTunes.playerInfo"; NSString *TrackNotification = @"com.apple.iTunes.playerInfo";
NSString *TrackArtist = @"Artist"; NSString *TrackArtist = @"Artist";
@@ -24,9 +23,10 @@ NSString *TrackState = @"Player State";
typedef NS_ENUM(NSInteger, TrackStatus) { TrackPlaying, typedef NS_ENUM(NSInteger, TrackStatus) { TrackPlaying,
TrackPaused, TrackPaused,
TrackStopped }; TrackStopped };
#endif
@implementation PlaybackEventController { @implementation PlaybackEventController {
AudioScrobbler *scrobbler;
NSOperationQueue *queue; NSOperationQueue *queue;
PlaylistEntry *entry; PlaylistEntry *entry;
@@ -36,6 +36,8 @@ typedef NS_ENUM(NSInteger, TrackStatus) { TrackPlaying,
- (void)initDefaults { - (void)initDefaults {
NSDictionary *defaultsDictionary = @{ NSDictionary *defaultsDictionary = @{
@"enableAudioScrobbler": @YES,
@"automaticallyLaunchLastFM": @NO,
@"notifications.enable": @YES, @"notifications.enable": @YES,
@"notifications.itunes-style": @YES, @"notifications.itunes-style": @YES,
@"notifications.show-album-art": @YES @"notifications.show-album-art": @YES
@@ -81,6 +83,7 @@ typedef NS_ENUM(NSInteger, TrackStatus) { TrackPlaying,
queue = [[NSOperationQueue alloc] init]; queue = [[NSOperationQueue alloc] init];
[queue setMaxConcurrentOperationCount:1]; [queue setMaxConcurrentOperationCount:1];
scrobbler = [[AudioScrobbler alloc] init];
[[NSUserNotificationCenter defaultUserNotificationCenter] setDelegate:self]; [[NSUserNotificationCenter defaultUserNotificationCenter] setDelegate:self];
entry = nil; entry = nil;
@@ -107,20 +110,19 @@ didReceiveNotificationResponse:(UNNotificationResponse *)response
} }
} }
#if 0
- (NSDictionary *)fillNotificationDictionary:(PlaylistEntry *)pe status:(TrackStatus)status { - (NSDictionary *)fillNotificationDictionary:(PlaylistEntry *)pe status:(TrackStatus)status {
NSMutableDictionary *dict = [NSMutableDictionary dictionary]; NSMutableDictionary *dict = [NSMutableDictionary dictionary];
if(pe == nil || pe.deLeted || pe.url == nil) return dict; if(pe == nil) return dict;
[dict setObject:[pe.url absoluteString] forKey:TrackPath]; [dict setObject:[[pe URL] absoluteString] forKey:TrackPath];
if(pe.title) [dict setObject:pe.title forKey:TrackTitle]; if([pe title]) [dict setObject:[pe title] forKey:TrackTitle];
if(pe.artist) [dict setObject:pe.artist forKey:TrackArtist]; if([pe artist]) [dict setObject:[pe artist] forKey:TrackArtist];
if(pe.album) [dict setObject:pe.album forKey:TrackAlbum]; if([pe album]) [dict setObject:[pe album] forKey:TrackAlbum];
if(pe.genre) [dict setObject:pe.genre forKey:TrackGenre]; if([pe genre]) [dict setObject:[pe genre] forKey:TrackGenre];
if(pe.track) if([pe track])
[dict setObject:pe.trackText forKey:TrackNumber]; [dict setObject:[pe trackText] forKey:TrackNumber];
if(pe.length) if([pe length])
[dict setObject:@((NSInteger)([pe.length doubleValue] * 1000.0)) [dict setObject:[NSNumber numberWithInteger:(NSInteger)([[pe length] doubleValue] * 1000.0)]
forKey:TrackLength]; forKey:TrackLength];
NSString *state = nil; NSString *state = nil;
@@ -143,23 +145,25 @@ didReceiveNotificationResponse:(UNNotificationResponse *)response
return dict; return dict;
} }
#endif
- (void)performPlaybackDidBeginActions:(PlaylistEntry *)pe { - (void)performPlaybackDidBeginActions:(PlaylistEntry *)pe {
if(NO == [pe error]) { if(NO == [pe error]) {
entry = pe; entry = pe;
#if 0
[[NSDistributedNotificationCenter defaultCenter] [[NSDistributedNotificationCenter defaultCenter]
postNotificationName:TrackNotification postNotificationName:TrackNotification
object:nil object:nil
userInfo:[self fillNotificationDictionary:pe status:TrackPlaying] userInfo:[self fillNotificationDictionary:pe status:TrackPlaying]
deliverImmediately:YES]; deliverImmediately:YES];
#endif
NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults]; NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
if([defaults boolForKey:@"notifications.enable"]) { if([defaults boolForKey:@"notifications.enable"]) {
if([defaults boolForKey:@"enableAudioScrobbler"]) {
[scrobbler start:pe];
if([AudioScrobbler isRunning]) return;
}
if(@available(macOS 10.14, *)) { if(@available(macOS 10.14, *)) {
if(didGainUN) { if(didGainUN) {
UNUserNotificationCenter *center = UNUserNotificationCenter *center =
@@ -171,14 +175,12 @@ didReceiveNotificationResponse:(UNNotificationResponse *)response
content.title = @"Now Playing"; content.title = @"Now Playing";
NSString *subtitle; NSString *subtitle;
NSString *artist = (pe.artist && [pe.artist length]) ? pe.artist : nil; if([pe artist] && [pe album]) {
NSString *album = (pe.album && [pe.album length]) ? pe.album : nil; subtitle = [NSString stringWithFormat:@"%@ - %@", [pe artist], [pe album]];
if(artist && album) { } else if([pe artist]) {
subtitle = [NSString stringWithFormat:@"%@ - %@", artist, album]; subtitle = [pe artist];
} else if(artist) { } else if([pe album]) {
subtitle = artist; subtitle = [pe album];
} else if(album) {
subtitle = album;
} else { } else {
subtitle = @""; subtitle = @"";
} }
@@ -247,14 +249,12 @@ didReceiveNotificationResponse:(UNNotificationResponse *)response
notif.title = [pe title]; notif.title = [pe title];
NSString *subtitle; NSString *subtitle;
NSString *artist = (pe.artist && [pe.artist length]) ? pe.artist : nil; if([pe artist] && [pe album]) {
NSString *album = (pe.album && [pe.album length]) ? pe.album : nil; subtitle = [NSString stringWithFormat:@"%@ - %@", [pe artist], [pe album]];
if(artist && album) { } else if([pe artist]) {
subtitle = [NSString stringWithFormat:@"%@ - %@", artist, album]; subtitle = [pe artist];
} else if(artist) { } else if([pe album]) {
subtitle = artist; subtitle = [pe album];
} else if(album) {
subtitle = album;
} else { } else {
subtitle = @""; subtitle = @"";
} }
@@ -279,7 +279,7 @@ didReceiveNotificationResponse:(UNNotificationResponse *)response
} }
} }
notif.actionButtonTitle = NSLocalizedString(@"SkipAction", @""); notif.actionButtonTitle = @"Skip";
[[NSUserNotificationCenter defaultUserNotificationCenter] [[NSUserNotificationCenter defaultUserNotificationCenter]
scheduleNotification:notif]; scheduleNotification:notif];
@@ -289,52 +289,55 @@ didReceiveNotificationResponse:(UNNotificationResponse *)response
} }
- (void)performPlaybackDidPauseActions { - (void)performPlaybackDidPauseActions {
#if 0
[[NSDistributedNotificationCenter defaultCenter] [[NSDistributedNotificationCenter defaultCenter]
postNotificationName:TrackNotification postNotificationName:TrackNotification
object:nil object:nil
userInfo:[self fillNotificationDictionary:entry status:TrackPaused] userInfo:[self fillNotificationDictionary:entry status:TrackPaused]
deliverImmediately:YES]; deliverImmediately:YES];
#endif if([[NSUserDefaults standardUserDefaults] boolForKey:@"enableAudioScrobbler"]) {
[scrobbler pause];
}
} }
- (void)performPlaybackDidResumeActions { - (void)performPlaybackDidResumeActions {
#if 0
[[NSDistributedNotificationCenter defaultCenter] [[NSDistributedNotificationCenter defaultCenter]
postNotificationName:TrackNotification postNotificationName:TrackNotification
object:nil object:nil
userInfo:[self fillNotificationDictionary:entry status:TrackPlaying] userInfo:[self fillNotificationDictionary:entry status:TrackPlaying]
deliverImmediately:YES]; deliverImmediately:YES];
#endif if([[NSUserDefaults standardUserDefaults] boolForKey:@"enableAudioScrobbler"]) {
[scrobbler resume];
}
} }
- (void)performPlaybackDidStopActions { - (void)performPlaybackDidStopActions {
#if 0
[[NSDistributedNotificationCenter defaultCenter] [[NSDistributedNotificationCenter defaultCenter]
postNotificationName:TrackNotification postNotificationName:TrackNotification
object:nil object:nil
userInfo:[self fillNotificationDictionary:entry status:TrackStopped] userInfo:[self fillNotificationDictionary:entry status:TrackStopped]
deliverImmediately:YES]; deliverImmediately:YES];
#endif
entry = nil; entry = nil;
if([[NSUserDefaults standardUserDefaults] boolForKey:@"enableAudioScrobbler"]) {
[scrobbler stop];
}
} }
- (void)awakeFromNib { - (void)awakeFromNib {
[[NSNotificationCenter defaultCenter] addObserver:self [[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playbackDidBegin:) selector:@selector(playbackDidBegin:)
name:CogPlaybackDidBeginNotificiation name:CogPlaybackDidBeginNotficiation
object:nil]; object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self [[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playbackDidPause:) selector:@selector(playbackDidPause:)
name:CogPlaybackDidPauseNotificiation name:CogPlaybackDidPauseNotficiation
object:nil]; object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self [[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playbackDidResume:) selector:@selector(playbackDidResume:)
name:CogPlaybackDidResumeNotificiation name:CogPlaybackDidResumeNotficiation
object:nil]; object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self [[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playbackDidStop:) selector:@selector(playbackDidStop:)
name:CogPlaybackDidStopNotificiation name:CogPlaybackDidStopNotficiation
object:nil]; object:nil];
} }

View file

@@ -12,22 +12,22 @@
@implementation NSApplication (APLApplicationExtensions) @implementation NSApplication (APLApplicationExtensions)
- (id)playbackStart:(NSScriptCommand *)command { - (id)playbackStart:(NSScriptCommand *)command {
[(AppController *)[NSApp delegate] clickPlay]; [(AppController *)[NSApp delegate] clickPlay];
return @(YES); return [NSNumber numberWithBool:YES];
} }
- (id)playbackPause:(NSScriptCommand *)command { - (id)playbackPause:(NSScriptCommand *)command {
[(AppController *)[NSApp delegate] clickPause]; [(AppController *)[NSApp delegate] clickPause];
return @(YES); return [NSNumber numberWithBool:YES];
} }
- (id)playbackStop:(NSScriptCommand *)command { - (id)playbackStop:(NSScriptCommand *)command {
[(AppController *)[NSApp delegate] clickStop]; [(AppController *)[NSApp delegate] clickStop];
return @(YES); return [NSNumber numberWithBool:YES];
} }
- (id)playbackPrevious:(NSScriptCommand *)command { - (id)playbackPrevious:(NSScriptCommand *)command {
[(AppController *)[NSApp delegate] clickPrev]; [(AppController *)[NSApp delegate] clickPrev];
return @(YES); return [NSNumber numberWithBool:YES];
} }
- (id)playbackNext:(NSScriptCommand *)command { - (id)playbackNext:(NSScriptCommand *)command {
[(AppController *)[NSApp delegate] clickNext]; [(AppController *)[NSApp delegate] clickNext];
return @(YES); return [NSNumber numberWithBool:YES];
} }
@end @end

View file

@@ -13,6 +13,5 @@
} }
+ (NSArray *)urlsForContainerURL:(NSURL *)url; + (NSArray *)urlsForContainerURL:(NSURL *)url;
+ (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url;
@end @end

View file

@@ -18,10 +18,4 @@
} }
} }
+ (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url {
@autoreleasepool {
return [[PluginController sharedPluginController] dependencyUrlsForContainerURL:url];
}
}
@end @end

View file

@@ -8,7 +8,7 @@
#import <Cocoa/Cocoa.h> #import <Cocoa/Cocoa.h>
#import <CogAudio/Plugin.h> #import "Plugin.h"
@interface AudioDecoder : NSObject { @interface AudioDecoder : NSObject {
} }

View file

@@ -8,7 +8,7 @@
#import <Cocoa/Cocoa.h> #import <Cocoa/Cocoa.h>
#import <CogAudio/CogSemaphore.h> #import <CogAudio/Semaphore.h>
#import <AVFoundation/AVFoundation.h> #import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h> #import <AudioToolbox/AudioToolbox.h>
@@ -26,8 +26,6 @@
OutputNode *output; OutputNode *output;
double volume; double volume;
double pitch;
double tempo;
NSMutableArray *chainQueue; NSMutableArray *chainQueue;
@@ -35,8 +33,6 @@
id nextStreamUserInfo; id nextStreamUserInfo;
NSDictionary *nextStreamRGInfo; NSDictionary *nextStreamRGInfo;
id previousUserInfo; // Track currently last heard track for play counts
id delegate; id delegate;
BOOL outputLaunched; BOOL outputLaunched;
@@ -63,21 +59,18 @@
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi; - (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi;
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused; - (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused;
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused andSeekTo:(double)time; - (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused andSeekTo:(double)time;
- (void)playBG:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(NSNumber *)paused andSeekTo:(NSNumber *)time;
- (void)stop; - (void)stop;
- (void)pause; - (void)pause;
- (void)resume; - (void)resume;
- (void)seekToTime:(double)time; - (void)seekToTime:(double)time;
- (void)seekToTimeBG:(NSNumber *)time;
- (void)setVolume:(double)v; - (void)setVolume:(double)v;
- (double)volume; - (double)volume;
- (double)volumeUp:(double)amount; - (double)volumeUp:(double)amount;
- (double)volumeDown:(double)amount; - (double)volumeDown:(double)amount;
- (double)amountPlayed; - (double)amountPlayed;
- (double)amountPlayedInterval;
- (void)setNextStream:(NSURL *)url; - (void)setNextStream:(NSURL *)url;
- (void)setNextStream:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi; - (void)setNextStream:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi;
@@ -116,9 +109,7 @@
- (void)setShouldContinue:(BOOL)s; - (void)setShouldContinue:(BOOL)s;
//- (BufferChain *)bufferChain; //- (BufferChain *)bufferChain;
- (void)launchOutputThread; - (void)launchOutputThread;
- (BOOL)selectNextBuffer;
- (void)endOfInputPlayed; - (void)endOfInputPlayed;
- (void)reportPlayCount;
- (void)sendDelegateMethod:(SEL)selector withVoid:(void *)obj waitUntilDone:(BOOL)wait; - (void)sendDelegateMethod:(SEL)selector withVoid:(void *)obj waitUntilDone:(BOOL)wait;
- (void)sendDelegateMethod:(SEL)selector withObject:(id)obj waitUntilDone:(BOOL)wait; - (void)sendDelegateMethod:(SEL)selector withObject:(id)obj waitUntilDone:(BOOL)wait;
- (void)sendDelegateMethod:(SEL)selector withObject:(id)obj withObject:(id)obj2 waitUntilDone:(BOOL)wait; - (void)sendDelegateMethod:(SEL)selector withObject:(id)obj withObject:(id)obj2 waitUntilDone:(BOOL)wait;
@@ -137,7 +128,5 @@
- (void)audioPlayer:(AudioPlayer *)player sustainHDCD:(id)userInfo; - (void)audioPlayer:(AudioPlayer *)player sustainHDCD:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player restartPlaybackAtCurrentPosition:(id)userInfo; - (void)audioPlayer:(AudioPlayer *)player restartPlaybackAtCurrentPosition:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player pushInfo:(NSDictionary *)info toTrack:(id)userInfo; - (void)audioPlayer:(AudioPlayer *)player pushInfo:(NSDictionary *)info toTrack:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player reportPlayCountForTrack:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player updatePosition:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player setError:(NSNumber *)status toTrack:(id)userInfo; - (void)audioPlayer:(AudioPlayer *)player setError:(NSNumber *)status toTrack:(id)userInfo;
@end @end

View file

@@ -25,10 +25,6 @@
outputLaunched = NO; outputLaunched = NO;
endOfInputReached = NO; endOfInputReached = NO;
// Safety
pitch = 1.0;
tempo = 1.0;
chainQueue = [[NSMutableArray alloc] init]; chainQueue = [[NSMutableArray alloc] init];
semaphore = [[Semaphore alloc] init]; semaphore = [[Semaphore alloc] init];
@@ -60,29 +56,16 @@
[self play:url withUserInfo:userInfo withRGInfo:rgi startPaused:paused andSeekTo:0.0]; [self play:url withUserInfo:userInfo withRGInfo:rgi startPaused:paused andSeekTo:0.0];
} }
- (void)playBG:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(NSNumber *)paused andSeekTo:(NSNumber *)time {
@synchronized (self) {
[self play:url withUserInfo:userInfo withRGInfo:rgi startPaused:[paused boolValue] andSeekTo:[time doubleValue]];
}
}
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused andSeekTo:(double)time { - (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused andSeekTo:(double)time {
[self play:url withUserInfo:userInfo withRGInfo:rgi startPaused:paused andSeekTo:time andResumeInterval:NO];
}
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused andSeekTo:(double)time andResumeInterval:(BOOL)resumeInterval {
ALog(@"Opening file for playback: %@ at seek offset %f%@", url, time, (paused) ? @", starting paused" : @""); ALog(@"Opening file for playback: %@ at seek offset %f%@", url, time, (paused) ? @", starting paused" : @"");
[self waitUntilCallbacksExit]; [self waitUntilCallbacksExit];
if(output) { if(output) {
[output fadeOutBackground]; [output setShouldContinue:NO];
output = nil;
} }
if(!output) {
output = [[OutputNode alloc] initWithController:self previous:nil]; output = [[OutputNode alloc] initWithController:self previous:nil];
if(![output setupWithInterval:resumeInterval]) { [output setup];
return;
}
}
[output setVolume:volume]; [output setVolume:volume];
@synchronized(chainQueue) { @synchronized(chainQueue) {
for(id anObject in chainQueue) { for(id anObject in chainQueue) {
@@ -98,19 +81,13 @@
} }
bufferChain = [[BufferChain alloc] initWithController:self]; bufferChain = [[BufferChain alloc] initWithController:self];
if(!resumeInterval) {
[self notifyStreamChanged:userInfo]; [self notifyStreamChanged:userInfo];
}
while(![bufferChain open:url withOutputFormat:[output format] withUserInfo:userInfo withRGInfo:rgi]) { while(![bufferChain open:url withOutputFormat:[output format] withOutputConfig:[output config] withRGInfo:rgi]) {
bufferChain = nil; bufferChain = nil;
[self requestNextStream:userInfo]; [self requestNextStream:userInfo];
if([nextStream isEqualTo:url]) {
return;
}
url = nextStream; url = nextStream;
if(url == nil) { if(url == nil) {
return; return;
@@ -124,6 +101,8 @@
bufferChain = [[BufferChain alloc] initWithController:self]; bufferChain = [[BufferChain alloc] initWithController:self];
} }
[bufferChain setUserInfo:userInfo];
if(time > 0.0) { if(time > 0.0) {
[output seek:time]; [output seek:time];
[bufferChain seek:time]; [bufferChain seek:time];
@ -131,23 +110,14 @@
[self setShouldContinue:YES]; [self setShouldContinue:YES];
if(!resumeInterval) {
outputLaunched = NO; outputLaunched = NO;
}
startedPaused = paused; startedPaused = paused;
initialBufferFilled = NO; initialBufferFilled = NO;
previousUserInfo = userInfo;
[bufferChain launchThreads]; [bufferChain launchThreads];
if(paused) { if(paused)
[self setPlaybackStatus:CogStatusPaused waitUntilDone:YES]; [self setPlaybackStatus:CogStatusPaused waitUntilDone:YES];
if(time > 0.0) {
[self updatePosition:userInfo];
}
} else if(resumeInterval) {
[output fadeIn];
}
} }
- (void)stop { - (void)stop {
@ -165,15 +135,11 @@
bufferChain = nil; bufferChain = nil;
} }
} }
if(output) {
[output setShouldContinue:NO];
[output close];
}
output = nil; output = nil;
} }
- (void)pause { - (void)pause {
[output fadeOut]; [output pause];
[self setPlaybackStatus:CogStatusPaused waitUntilDone:YES]; [self setPlaybackStatus:CogStatusPaused waitUntilDone:YES];
} }
@ -185,38 +151,33 @@
[self launchOutputThread]; [self launchOutputThread];
} }
[output fadeIn];
[output resume]; [output resume];
[self setPlaybackStatus:CogStatusPlaying waitUntilDone:YES]; [self setPlaybackStatus:CogStatusPlaying waitUntilDone:YES];
} }
- (void)seekToTimeBG:(NSNumber *)time {
[self seekToTime:[time doubleValue]];
}
- (void)seekToTime:(double)time { - (void)seekToTime:(double)time {
[output fadeOutBackground]; if(endOfInputReached) {
[output setVolume:volume]; // This is a dirty hack in case the playback has finished with the track
// that the user thinks they're seeking into
[output seek:time];
[bufferChain seek:time];
CogStatus status = (CogStatus)currentPlaybackStatus; CogStatus status = (CogStatus)currentPlaybackStatus;
BOOL paused = status == CogStatusPaused; NSURL *url;
id userInfo; id userInfo;
NSDictionary *rgi;
@synchronized(chainQueue) { @synchronized(chainQueue) {
url = [bufferChain streamURL];
userInfo = [bufferChain userInfo]; userInfo = [bufferChain userInfo];
rgi = [bufferChain rgInfo];
} }
if(paused) { [self stop];
[self setPlaybackStatus:CogStatusPaused waitUntilDone:YES];
if(time > 0.0) { [self play:url withUserInfo:userInfo withRGInfo:rgi startPaused:(status == CogStatusPaused) andSeekTo:time];
[self updatePosition:userInfo];
}
} else { } else {
[output fadeIn]; // Still decoding the current file, safe to seek within it
[output seek:time];
[bufferChain seek:time];
} }
} }
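Flattened into side-by-side rows, the left-hand (newer) -seekToTime: above is hard to follow; reassembled from those same lines, with no new logic added, it reads:

	- (void)seekToTime:(double)time {
		// Duck the output, seek both the output and the decoder chain, then
		// either stay paused (reporting the new position) or fade back in.
		[output fadeOutBackground];
		[output setVolume:volume];

		[output seek:time];
		[bufferChain seek:time];

		CogStatus status = (CogStatus)currentPlaybackStatus;
		BOOL paused = status == CogStatusPaused;
		id userInfo;
		@synchronized(chainQueue) {
			userInfo = [bufferChain userInfo];
		}

		if(paused) {
			[self setPlaybackStatus:CogStatusPaused waitUntilDone:YES];
			if(time > 0.0) {
				[self updatePosition:userInfo];
			}
		} else {
			[output fadeIn];
		}
	}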
@ -260,21 +221,13 @@
} }
- (void)restartPlaybackAtCurrentPosition { - (void)restartPlaybackAtCurrentPosition {
[self sendDelegateMethod:@selector(audioPlayer:restartPlaybackAtCurrentPosition:) withObject:previousUserInfo waitUntilDone:NO]; [self sendDelegateMethod:@selector(audioPlayer:restartPlaybackAtCurrentPosition:) withObject:[bufferChain userInfo] waitUntilDone:NO];
}
- (void)updatePosition:(id)userInfo {
[self sendDelegateMethod:@selector(audioPlayer:updatePosition:) withObject:userInfo waitUntilDone:NO];
} }
- (void)pushInfo:(NSDictionary *)info toTrack:(id)userInfo { - (void)pushInfo:(NSDictionary *)info toTrack:(id)userInfo {
[self sendDelegateMethod:@selector(audioPlayer:pushInfo:toTrack:) withObject:info withObject:userInfo waitUntilDone:NO]; [self sendDelegateMethod:@selector(audioPlayer:pushInfo:toTrack:) withObject:info withObject:userInfo waitUntilDone:NO];
} }
- (void)reportPlayCountForTrack:(id)userInfo {
[self sendDelegateMethod:@selector(audioPlayer:reportPlayCountForTrack:) withObject:userInfo waitUntilDone:NO];
}
- (void)setShouldContinue:(BOOL)s { - (void)setShouldContinue:(BOOL)s {
shouldContinue = s; shouldContinue = s;
@ -289,10 +242,6 @@
return [output amountPlayed]; return [output amountPlayed];
} }
- (double)amountPlayedInterval {
return [output amountPlayedInterval];
}
- (void)launchOutputThread { - (void)launchOutputThread {
initialBufferFilled = YES; initialBufferFilled = YES;
if(outputLaunched == NO && startedPaused == NO) { if(outputLaunched == NO && startedPaused == NO) {
@ -327,6 +276,8 @@
} }
- (void)addChainToQueue:(BufferChain *)newChain { - (void)addChainToQueue:(BufferChain *)newChain {
[newChain setUserInfo:nextStreamUserInfo];
[newChain setShouldContinue:YES]; [newChain setShouldContinue:YES];
[newChain launchThreads]; [newChain launchThreads];
@ -335,8 +286,6 @@
- (BOOL)endOfInputReached:(BufferChain *)sender // Sender is a BufferChain - (BOOL)endOfInputReached:(BufferChain *)sender // Sender is a BufferChain
{ {
previousUserInfo = [sender userInfo];
BufferChain *newChain = nil; BufferChain *newChain = nil;
if(atomic_load_explicit(&resettingNow, memory_order_relaxed)) if(atomic_load_explicit(&resettingNow, memory_order_relaxed))
@ -349,8 +298,6 @@
// if there's already one at the head of chainQueue... r-r-right? // if there's already one at the head of chainQueue... r-r-right?
for(BufferChain *chain in chainQueue) { for(BufferChain *chain in chainQueue) {
if([chain isRunning]) { if([chain isRunning]) {
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1); atomic_fetch_sub(&refCount, 1);
return YES; return YES;
} }
@ -374,8 +321,6 @@
while(duration >= 30.0 && shouldContinue) { while(duration >= 30.0 && shouldContinue) {
[semaphore wait]; [semaphore wait];
if(atomic_load_explicit(&resettingNow, memory_order_relaxed)) { if(atomic_load_explicit(&resettingNow, memory_order_relaxed)) {
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1); atomic_fetch_sub(&refCount, 1);
return YES; return YES;
} }
@ -395,24 +340,19 @@
[self requestNextStream:nextStreamUserInfo]; [self requestNextStream:nextStreamUserInfo];
if(!nextStream) { if(!nextStream) {
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1); atomic_fetch_sub(&refCount, 1);
return YES; return YES;
} }
BufferChain *lastChain;
@synchronized(chainQueue) { @synchronized(chainQueue) {
newChain = [[BufferChain alloc] initWithController:self]; newChain = [[BufferChain alloc] initWithController:self];
endOfInputReached = YES; endOfInputReached = YES;
lastChain = [chainQueue lastObject]; BufferChain *lastChain = [chainQueue lastObject];
if(lastChain == nil) { if(lastChain == nil) {
lastChain = bufferChain; lastChain = bufferChain;
} }
}
BOOL pathsEqual = NO; BOOL pathsEqual = NO;
@ -422,36 +362,14 @@
if([unixPathNext isEqualToString:unixPathPrev]) if([unixPathNext isEqualToString:unixPathPrev])
pathsEqual = YES; pathsEqual = YES;
} else if(![nextStream isFileURL] && ![[lastChain streamURL] isFileURL]) {
@try {
NSURL *lastURL = [lastChain streamURL];
NSString *nextScheme = [nextStream scheme];
NSString *lastScheme = [lastURL scheme];
NSString *nextHost = [nextStream host];
NSString *lastHost = [lastURL host];
NSString *nextPath = [nextStream path];
NSString *lastPath = [lastURL path];
if(nextScheme && lastScheme && [nextScheme isEqualToString:lastScheme]) {
if((!nextHost && !lastHost) ||
(nextHost && lastHost && [nextHost isEqualToString:lastHost])) {
if(nextPath && lastPath && [nextPath isEqualToString:lastPath]) {
pathsEqual = YES;
}
}
}
}
@catch(NSException *e) {
DLog(@"Exception thrown checking file match: %@", e);
}
} }
if(pathsEqual) { if(pathsEqual || ([[nextStream scheme] isEqualToString:[[lastChain streamURL] scheme]] && (([nextStream host] == nil && [[lastChain streamURL] host] == nil) || [[nextStream host] isEqualToString:[[lastChain streamURL] host]]) && [[nextStream path] isEqualToString:[[lastChain streamURL] path]])) {
if([lastChain setTrack:nextStream] && [newChain openWithInput:[lastChain inputNode] withOutputFormat:[output format] withUserInfo:nextStreamUserInfo withRGInfo:nextStreamRGInfo]) { if([lastChain setTrack:nextStream] && [newChain openWithInput:[lastChain inputNode] withOutputFormat:[output format] withOutputConfig:[output config] withRGInfo:nextStreamRGInfo]) {
[newChain setStreamURL:nextStream]; [newChain setStreamURL:nextStream];
[newChain setUserInfo:nextStreamUserInfo];
@synchronized(chainQueue) {
[self addChainToQueue:newChain]; [self addChainToQueue:newChain];
}
DLog(@"TRACK SET!!! %@", newChain); DLog(@"TRACK SET!!! %@", newChain);
// Keep on-playin // Keep on-playin
newChain = nil; newChain = nil;
@ -463,13 +381,9 @@
lastChain = nil; lastChain = nil;
NSURL *url = nextStream; while(shouldContinue && ![newChain open:nextStream withOutputFormat:[output format] withOutputConfig:[output config] withRGInfo:nextStreamRGInfo]) {
while(shouldContinue && ![newChain open:url withOutputFormat:[output format] withUserInfo:nextStreamUserInfo withRGInfo:nextStreamRGInfo]) {
if(nextStream == nil) { if(nextStream == nil) {
newChain = nil; newChain = nil;
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1); atomic_fetch_sub(&refCount, 1);
return YES; return YES;
} }
@ -477,22 +391,10 @@
newChain = nil; newChain = nil;
[self requestNextStream:nextStreamUserInfo]; [self requestNextStream:nextStreamUserInfo];
if([nextStream isEqualTo:url]) {
newChain = nil;
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1);
return YES;
}
url = nextStream;
newChain = [[BufferChain alloc] initWithController:self]; newChain = [[BufferChain alloc] initWithController:self];
} }
@synchronized(chainQueue) {
[self addChainToQueue:newChain]; [self addChainToQueue:newChain];
}
newChain = nil; newChain = nil;
@ -506,31 +408,32 @@
// - self.nextStream == next playlist entry's URL // - self.nextStream == next playlist entry's URL
// - self.nextStreamUserInfo == next playlist entry // - self.nextStreamUserInfo == next playlist entry
// - head of chainQueue is the buffer chain for the next entry (which has launched its threads already) // - head of chainQueue is the buffer chain for the next entry (which has launched its threads already)
}
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1); atomic_fetch_sub(&refCount, 1);
return YES; return YES;
} }
- (void)reportPlayCount { - (void)endOfInputPlayed {
[self reportPlayCountForTrack:previousUserInfo]; // Once we get here:
} // - the buffer chain for the next playlist entry (started in endOfInputReached) has been working for some time
// already, so that there is some decoded and converted data to play
// - the buffer chain for the next entry is the first item in chainQueue
- (BOOL)selectNextBuffer {
BOOL signalStopped = NO;
do {
@synchronized(chainQueue) { @synchronized(chainQueue) {
endOfInputReached = NO; endOfInputReached = NO;
if([chainQueue count] <= 0) { if([chainQueue count] <= 0) {
// End of playlist // End of playlist
signalStopped = YES; [self stop];
break;
bufferChain = nil;
[self notifyPlaybackStopped:nil];
return;
} }
[bufferChain setShouldContinue:NO];
bufferChain = nil; bufferChain = nil;
bufferChain = [chainQueue objectAtIndex:0]; bufferChain = [chainQueue objectAtIndex:0];
@ -539,35 +442,9 @@
[semaphore signal]; [semaphore signal];
} }
} while(0);
if(signalStopped) {
double latency = 0;
if(output) latency = [output latency];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, latency * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
[self stop];
self->bufferChain = nil;
[self notifyPlaybackStopped:nil];
});
return YES;
}
[self notifyStreamChanged:[bufferChain userInfo]];
[output setEndOfStream:NO]; [output setEndOfStream:NO];
return NO;
}
- (void)endOfInputPlayed {
// Once we get here:
// - the buffer chain for the next playlist entry (started in endOfInputReached) has been working for some time
// already, so that there is some decoded and converted data to play
// - the buffer chain for the next entry is the first item in chainQueue
previousUserInfo = [bufferChain userInfo];
[self notifyStreamChanged:previousUserInfo];
} }
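One detail worth calling out in the left-hand -selectNextBuffer: above: when the playlist runs dry it does not stop immediately, but defers the stop by the output's reported latency so the already-buffered tail of the last track plays out. The relevant fragment, reassembled from the lines above:

	if(signalStopped) {
		// Defer the stop by the output latency so the buffered tail
		// of the final track is not cut off.
		double latency = 0;
		if(output) latency = [output latency];
		dispatch_after(dispatch_time(DISPATCH_TIME_NOW, latency * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
			[self stop];
			self->bufferChain = nil;
			[self notifyPlaybackStopped:nil];
		});
		return YES;
	}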
- (BOOL)chainQueueHasTracks { - (BOOL)chainQueueHasTracks {
@ -614,7 +491,7 @@
- (void)setPlaybackStatus:(int)status waitUntilDone:(BOOL)wait { - (void)setPlaybackStatus:(int)status waitUntilDone:(BOOL)wait {
currentPlaybackStatus = status; currentPlaybackStatus = status;
[self sendDelegateMethod:@selector(audioPlayer:didChangeStatus:userInfo:) withObject:@(status) withObject:[bufferChain userInfo] waitUntilDone:wait]; [self sendDelegateMethod:@selector(audioPlayer:didChangeStatus:userInfo:) withObject:[NSNumber numberWithInt:status] withObject:[bufferChain userInfo] waitUntilDone:wait];
} }
- (void)sustainHDCD { - (void)sustainHDCD {
@ -622,7 +499,7 @@
} }
- (void)setError:(BOOL)status { - (void)setError:(BOOL)status {
[self sendDelegateMethod:@selector(audioPlayer:setError:toTrack:) withObject:@(status) withObject:[bufferChain userInfo] waitUntilDone:NO]; [self sendDelegateMethod:@selector(audioPlayer:setError:toTrack:) withObject:[NSNumber numberWithBool:status] withObject:[bufferChain userInfo] waitUntilDone:NO];
} }
- (void)setPlaybackStatus:(int)status { - (void)setPlaybackStatus:(int)status {
@ -704,7 +581,7 @@
while(atomic_load_explicit(&refCount, memory_order_relaxed) != 0) { while(atomic_load_explicit(&refCount, memory_order_relaxed) != 0) {
[semaphore signal]; // Gotta poke this periodically [semaphore signal]; // Gotta poke this periodically
if(mainThread) if(mainThread)
[[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.01]]; [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.001]];
else else
usleep(500); usleep(500);
} }

View file

@ -65,17 +65,12 @@ enum {
AudioStreamBasicDescription format; AudioStreamBasicDescription format;
NSMutableData *chunkData; NSMutableData *chunkData;
uint32_t channelConfig; uint32_t channelConfig;
double streamTimestamp;
double streamTimeRatio;
BOOL formatAssigned; BOOL formatAssigned;
BOOL lossless; BOOL lossless;
BOOL hdcd;
} }
@property AudioStreamBasicDescription format; @property AudioStreamBasicDescription format;
@property uint32_t channelConfig; @property uint32_t channelConfig;
@property double streamTimestamp;
@property double streamTimeRatio;
@property BOOL lossless; @property BOOL lossless;
+ (uint32_t)guessChannelConfig:(uint32_t)channelCount; + (uint32_t)guessChannelConfig:(uint32_t)channelCount;
@ -85,25 +80,16 @@ enum {
+ (uint32_t)findChannelIndex:(uint32_t)flag; + (uint32_t)findChannelIndex:(uint32_t)flag;
- (id)init; - (id)init;
- (id)initWithProperties:(NSDictionary *)properties;
- (void)assignSamples:(const void *_Nonnull)data frameCount:(size_t)count; - (void)assignSamples:(const void *)data frameCount:(size_t)count;
- (void)assignData:(NSData *)data;
- (NSData *)removeSamples:(size_t)frameCount; - (NSData *)removeSamples:(size_t)frameCount;
- (BOOL)isEmpty; - (BOOL)isEmpty;
- (size_t)frameCount; - (size_t)frameCount;
- (void)setFrameCount:(size_t)count; // For truncation only
- (double)duration; - (double)duration;
- (double)durationRatioed;
- (BOOL)isHDCD;
- (void)setHDCD;
- (AudioChunk *)copy;
@end @end
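As a rough, self-contained illustration of the richer left-hand chunk API declared above (MakeSilentChunk is a hypothetical helper; the format constants are standard CoreAudio):

	#import <CogAudio/AudioChunk.h>
	#import <stdlib.h>

	// Illustrative only: build a two-channel, 32-bit float chunk of silence
	// and rely on -duration to report its length.
	static AudioChunk *MakeSilentChunk(double sampleRate, size_t frames) {
		AudioStreamBasicDescription fmt = { 0 };
		fmt.mSampleRate = sampleRate;
		fmt.mFormatID = kAudioFormatLinearPCM;
		fmt.mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
		fmt.mBitsPerChannel = 32;
		fmt.mChannelsPerFrame = 2;
		fmt.mFramesPerPacket = 1;
		fmt.mBytesPerFrame = sizeof(float) * fmt.mChannelsPerFrame;
		fmt.mBytesPerPacket = fmt.mBytesPerFrame * fmt.mFramesPerPacket;

		AudioChunk *chunk = [[AudioChunk alloc] init];
		[chunk setFormat:fmt];
		[chunk setChannelConfig:[AudioChunk guessChannelConfig:2]];

		// Interleaved float silence; one frame = one sample per channel.
		float *silence = calloc(frames * fmt.mChannelsPerFrame, sizeof(float));
		[chunk assignSamples:silence frameCount:frames];
		free(silence);

		return chunk; // [chunk duration] == frames / sampleRate
	}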

View file

@ -7,8 +7,6 @@
#import "AudioChunk.h" #import "AudioChunk.h"
#import "CoreAudioUtils.h"
@implementation AudioChunk @implementation AudioChunk
- (id)init { - (id)init {
@ -18,40 +16,11 @@
chunkData = [[NSMutableData alloc] init]; chunkData = [[NSMutableData alloc] init];
formatAssigned = NO; formatAssigned = NO;
lossless = NO; lossless = NO;
hdcd = NO;
streamTimestamp = 0.0;
streamTimeRatio = 1.0;
} }
return self; return self;
} }
- (id)initWithProperties:(NSDictionary *)properties {
self = [super init];
if(self) {
chunkData = [[NSMutableData alloc] init];
[self setFormat:propertiesToASBD(properties)];
lossless = [[properties objectForKey:@"encoding"] isEqualToString:@"lossless"];
hdcd = NO;
streamTimestamp = 0.0;
streamTimeRatio = 1.0;
}
return self;
}
- (AudioChunk *)copy {
AudioChunk *outputChunk = [[AudioChunk alloc] init];
[outputChunk setFormat:format];
[outputChunk setChannelConfig:channelConfig];
if(hdcd) [outputChunk setHDCD];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:streamTimeRatio];
[outputChunk assignData:chunkData];
return outputChunk;
}
static const uint32_t AudioChannelConfigTable[] = { static const uint32_t AudioChannelConfigTable[] = {
0, 0,
AudioConfigMono, AudioConfigMono,
@ -133,8 +102,6 @@ static const uint32_t AudioChannelConfigTable[] = {
} }
@synthesize lossless; @synthesize lossless;
@synthesize streamTimestamp;
@synthesize streamTimeRatio;
- (AudioStreamBasicDescription)format { - (AudioStreamBasicDescription)format {
return format; return format;
@ -159,30 +126,21 @@ static const uint32_t AudioChannelConfigTable[] = {
channelConfig = config; channelConfig = config;
} }
- (void)assignSamples:(const void *_Nonnull)data frameCount:(size_t)count { - (void)assignSamples:(const void *)data frameCount:(size_t)count {
if(formatAssigned) { if(formatAssigned) {
const size_t bytesPerPacket = format.mBytesPerPacket; const size_t bytesPerPacket = format.mBytesPerPacket;
[chunkData appendBytes:data length:bytesPerPacket * count]; [chunkData appendBytes:data length:bytesPerPacket * count];
} }
} }
- (void)assignData:(NSData *)data {
[chunkData appendData:data];
}
- (NSData *)removeSamples:(size_t)frameCount { - (NSData *)removeSamples:(size_t)frameCount {
if(formatAssigned) { if(formatAssigned) {
@autoreleasepool {
const double secondsDuration = (double)(frameCount) / format.mSampleRate;
const double DSDrate = (format.mBitsPerChannel == 1) ? 8.0 : 1.0;
const size_t bytesPerPacket = format.mBytesPerPacket; const size_t bytesPerPacket = format.mBytesPerPacket;
const size_t byteCount = bytesPerPacket * frameCount; const size_t byteCount = bytesPerPacket * frameCount;
NSData *ret = [chunkData subdataWithRange:NSMakeRange(0, byteCount)]; NSData *ret = [chunkData subdataWithRange:NSMakeRange(0, byteCount)];
[chunkData replaceBytesInRange:NSMakeRange(0, byteCount) withBytes:NULL length:0]; [chunkData replaceBytesInRange:NSMakeRange(0, byteCount) withBytes:NULL length:0];
streamTimestamp += secondsDuration * streamTimeRatio * DSDrate;
return ret; return ret;
} }
}
return [NSData data]; return [NSData data];
} }
@ -198,36 +156,13 @@ static const uint32_t AudioChannelConfigTable[] = {
return 0; return 0;
} }
- (void)setFrameCount:(size_t)count {
if(formatAssigned) {
count *= format.mBytesPerPacket;
size_t currentLength = [chunkData length];
if(count < currentLength) {
[chunkData replaceBytesInRange:NSMakeRange(count, currentLength - count) withBytes:NULL length:0];
}
}
}
- (double)duration { - (double)duration {
if(formatAssigned && [chunkData length]) { if(formatAssigned) {
const size_t bytesPerPacket = format.mBytesPerPacket; const size_t bytesPerPacket = format.mBytesPerPacket;
const double sampleRate = format.mSampleRate; const double sampleRate = format.mSampleRate;
const double DSDrate = (format.mBitsPerChannel == 1) ? 8.0 : 1.0; return (double)([chunkData length] / bytesPerPacket) / sampleRate;
return ((double)([chunkData length] / bytesPerPacket) / sampleRate) * DSDrate;
} }
return 0.0; return 0.0;
} }
- (double)durationRatioed {
return [self duration] * streamTimeRatio;
}
- (BOOL)isHDCD {
return hdcd;
}
- (void)setHDCD {
hdcd = YES;
}
@end @end
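A worked example of the left-hand -duration above, with assumed figures: for DSD64 stereo (mSampleRate = 2822400, mBitsPerChannel = 1, mBytesPerPacket = 2, i.e. one byte of eight 1-bit samples per channel), a chunk holding 1411200 bytes spans 1411200 / 2 = 705600 packets, so its duration is (705600 / 2822400) × 8 = 2.0 seconds; the ×8 DSDrate factor compensates for the eight sample frames packed into each stored byte.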

View file

@ -8,9 +8,9 @@
#import <Cocoa/Cocoa.h> #import <Cocoa/Cocoa.h>
#import <CogAudio/AudioPlayer.h> #import "AudioPlayer.h"
#import <CogAudio/ConverterNode.h> #import "ConverterNode.h"
#import <CogAudio/InputNode.h> #import "InputNode.h"
@interface BufferChain : NSObject { @interface BufferChain : NSObject {
InputNode *inputNode; InputNode *inputNode;
@ -26,17 +26,17 @@
} }
- (id)initWithController:(id)c; - (id)initWithController:(id)c;
- (BOOL)buildChain; - (void)buildChain;
- (BOOL)open:(NSURL *)url withOutputFormat:(AudioStreamBasicDescription)outputFormat withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi; - (BOOL)open:(NSURL *)url withOutputFormat:(AudioStreamBasicDescription)outputFormat withOutputConfig:(uint32_t)outputConfig withRGInfo:(NSDictionary *)rgi;
// Used when changing tracks to reuse the same decoder // Used when changing tracks to reuse the same decoder
- (BOOL)openWithInput:(InputNode *)i withOutputFormat:(AudioStreamBasicDescription)outputFormat withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi; - (BOOL)openWithInput:(InputNode *)i withOutputFormat:(AudioStreamBasicDescription)outputFormat withOutputConfig:(uint32_t)outputConfig withRGInfo:(NSDictionary *)rgi;
// Used when resetting the decoder on seek // Used when resetting the decoder on seek
- (BOOL)openWithDecoder:(id<CogDecoder>)decoder - (BOOL)openWithDecoder:(id<CogDecoder>)decoder
withOutputFormat:(AudioStreamBasicDescription)outputFormat withOutputFormat:(AudioStreamBasicDescription)outputFormat
withUserInfo:(id)userInfo withOutputConfig:(uint32_t)outputConfig
withRGInfo:(NSDictionary *)rgi; withRGInfo:(NSDictionary *)rgi;
- (void)seek:(double)time; - (void)seek:(double)time;

View file

@ -9,11 +9,8 @@
#import "BufferChain.h" #import "BufferChain.h"
#import "AudioSource.h" #import "AudioSource.h"
#import "CoreAudioUtils.h" #import "CoreAudioUtils.h"
#import "DSPDownmixNode.h"
#import "OutputNode.h" #import "OutputNode.h"
#import "AudioPlayer.h"
#import "Logging.h" #import "Logging.h"
@implementation BufferChain @implementation BufferChain
@ -33,36 +30,20 @@
return self; return self;
} }
- (BOOL)buildChain { - (void)buildChain {
// Cut off output source
finalNode = nil;
// Tear them down in reverse
converterNode = nil;
inputNode = nil; inputNode = nil;
converterNode = nil;
inputNode = [[InputNode alloc] initWithController:self previous:nil]; inputNode = [[InputNode alloc] initWithController:self previous:nil];
if(!inputNode) return NO;
converterNode = [[ConverterNode alloc] initWithController:self previous:inputNode]; converterNode = [[ConverterNode alloc] initWithController:self previous:inputNode];
if(!converterNode) return NO;
finalNode = converterNode; finalNode = converterNode;
return YES;
} }
- (BOOL)open:(NSURL *)url withOutputFormat:(AudioStreamBasicDescription)outputFormat withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi { - (BOOL)open:(NSURL *)url withOutputFormat:(AudioStreamBasicDescription)outputFormat withOutputConfig:(uint32_t)outputConfig withRGInfo:(NSDictionary *)rgi {
if(!url) {
DLog(@"Player attempted to play invalid file...");
return NO;
}
[self setStreamURL:url]; [self setStreamURL:url];
[self setUserInfo:userInfo];
if(![self buildChain]) { [self buildChain];
DLog(@"Couldn't build processing chain...");
return NO;
}
id<CogSource> source = [AudioSource audioSourceForURL:url]; id<CogSource> source = [AudioSource audioSourceForURL:url];
DLog(@"Opening: %@", url); DLog(@"Opening: %@", url);
@ -77,9 +58,21 @@
if(![inputNode openWithSource:source]) if(![inputNode openWithSource:source])
return NO; return NO;
if(![self initConverter:outputFormat]) NSDictionary *properties = [inputNode properties];
AudioStreamBasicDescription inputFormat = [inputNode nodeFormat];
uint32_t inputChannelConfig = 0;
if([properties valueForKey:@"channelConfig"])
inputChannelConfig = [[properties valueForKey:@"channelConfig"] unsignedIntValue];
outputFormat.mChannelsPerFrame = inputFormat.mChannelsPerFrame;
outputFormat.mBytesPerFrame = ((outputFormat.mBitsPerChannel + 7) / 8) * outputFormat.mChannelsPerFrame;
outputFormat.mBytesPerPacket = outputFormat.mBytesPerFrame * outputFormat.mFramesPerPacket;
outputConfig = inputChannelConfig;
if(![converterNode setupWithInputFormat:inputFormat withInputConfig:inputChannelConfig outputFormat:outputFormat outputConfig:outputConfig isLossless:[[properties valueForKey:@"encoding"] isEqualToString:@"lossless"]])
return NO; return NO;
[self initDownmixer];
[self setRGInfo:rgi]; [self setRGInfo:rgi];
@ -88,20 +81,29 @@
return YES; return YES;
} }
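For instance (assumed figures), if the decoder reports six channels and the requested output format is 32-bit with mFramesPerPacket = 1, the recalculation above gives mBytesPerFrame = ((32 + 7) / 8) × 6 = 24 and mBytesPerPacket = 24 × 1 = 24; the channel count is taken from the input, while the output's bit depth, sample rate and flags pass through unchanged.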
- (BOOL)openWithInput:(InputNode *)i withOutputFormat:(AudioStreamBasicDescription)outputFormat withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi { - (BOOL)openWithInput:(InputNode *)i withOutputFormat:(AudioStreamBasicDescription)outputFormat withOutputConfig:(uint32_t)outputConfig withRGInfo:(NSDictionary *)rgi {
DLog(@"New buffer chain!"); DLog(@"New buffer chain!");
[self setUserInfo:userInfo]; [self buildChain];
if(![self buildChain]) {
DLog(@"Couldn't build processing chain...");
return NO;
}
if(![inputNode openWithDecoder:[i decoder]]) if(![inputNode openWithDecoder:[i decoder]])
return NO; return NO;
if(![self initConverter:outputFormat]) NSDictionary *properties = [inputNode properties];
AudioStreamBasicDescription inputFormat = [inputNode nodeFormat];
uint32_t inputChannelConfig = 0;
if([properties valueForKey:@"channelConfig"])
inputChannelConfig = [[properties valueForKey:@"channelConfig"] unsignedIntValue];
outputFormat.mChannelsPerFrame = inputFormat.mChannelsPerFrame;
outputFormat.mBytesPerFrame = ((outputFormat.mBitsPerChannel + 7) / 8) * outputFormat.mChannelsPerFrame;
outputFormat.mBytesPerPacket = outputFormat.mBytesPerFrame * outputFormat.mFramesPerPacket;
outputConfig = inputChannelConfig;
DLog(@"Input Properties: %@", properties);
if(![converterNode setupWithInputFormat:inputFormat withInputConfig:inputChannelConfig outputFormat:outputFormat outputConfig:outputConfig isLossless:[[properties objectForKey:@"encoding"] isEqualToString:@"lossless"]])
return NO; return NO;
[self initDownmixer];
[self setRGInfo:rgi]; [self setRGInfo:rgi];
@ -110,29 +112,15 @@
- (BOOL)openWithDecoder:(id<CogDecoder>)decoder - (BOOL)openWithDecoder:(id<CogDecoder>)decoder
withOutputFormat:(AudioStreamBasicDescription)outputFormat withOutputFormat:(AudioStreamBasicDescription)outputFormat
withUserInfo:(id)userInfo withOutputConfig:(uint32_t)outputConfig
withRGInfo:(NSDictionary *)rgi; withRGInfo:(NSDictionary *)rgi;
{ {
DLog(@"New buffer chain!"); DLog(@"New buffer chain!");
[self setUserInfo:userInfo]; [self buildChain];
if(![self buildChain]) {
DLog(@"Couldn't build processing chain...");
return NO;
}
if(![inputNode openWithDecoder:decoder]) if(![inputNode openWithDecoder:decoder])
return NO; return NO;
if(![self initConverter:outputFormat])
return NO;
[self initDownmixer];
[self setRGInfo:rgi];
return YES;
}
- (BOOL)initConverter:(AudioStreamBasicDescription)outputFormat {
NSDictionary *properties = [inputNode properties]; NSDictionary *properties = [inputNode properties];
DLog(@"Input Properties: %@", properties); DLog(@"Input Properties: %@", properties);
@ -146,17 +134,14 @@
outputFormat.mBytesPerFrame = ((outputFormat.mBitsPerChannel + 7) / 8) * outputFormat.mChannelsPerFrame; outputFormat.mBytesPerFrame = ((outputFormat.mBitsPerChannel + 7) / 8) * outputFormat.mChannelsPerFrame;
outputFormat.mBytesPerPacket = outputFormat.mBytesPerFrame * outputFormat.mFramesPerPacket; outputFormat.mBytesPerPacket = outputFormat.mBytesPerFrame * outputFormat.mFramesPerPacket;
if(![converterNode setupWithInputFormat:inputFormat withInputConfig:inputChannelConfig outputFormat:outputFormat isLossless:[[properties valueForKey:@"encoding"] isEqualToString:@"lossless"]]) outputConfig = inputChannelConfig;
if(![converterNode setupWithInputFormat:inputFormat withInputConfig:inputChannelConfig outputFormat:outputFormat outputConfig:outputConfig isLossless:[[properties objectForKey:@"encoding"] isEqualToString:@"lossless"]])
return NO; return NO;
return YES; [self setRGInfo:rgi];
}
- (void)initDownmixer { return YES;
AudioPlayer * audioPlayer = controller;
OutputNode *outputNode = [audioPlayer output];
DSPDownmixNode *downmixNode = [outputNode downmix];
[downmixNode setOutputFormat:[outputNode deviceFormat] withChannelConfig:[outputNode deviceChannelConfig]];
} }
- (void)launchThreads { - (void)launchThreads {
@ -186,8 +171,7 @@
- (void)dealloc { - (void)dealloc {
[inputNode setShouldContinue:NO]; [inputNode setShouldContinue:NO];
[[inputNode exitAtTheEndOfTheStream] signal]; [[inputNode exitAtTheEndOfTheStream] signal];
[[inputNode writeSemaphore] signal]; [[inputNode semaphore] signal];
if(![inputNode threadExited])
[[inputNode exitAtTheEndOfTheStream] wait]; // wait for decoder to be closed (see InputNode's -(void)process ) [[inputNode exitAtTheEndOfTheStream] wait]; // wait for decoder to be closed (see InputNode's -(void)process )
DLog(@"Bufferchain dealloc"); DLog(@"Bufferchain dealloc");

View file

@ -8,57 +8,24 @@
#import <CoreAudio/CoreAudio.h> #import <CoreAudio/CoreAudio.h>
#import <Foundation/Foundation.h> #import <Foundation/Foundation.h>
#import <CogAudio/AudioChunk.h> #import "AudioChunk.h"
#import <CogAudio/CogSemaphore.h> #import "Semaphore.h"
NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_BEGIN
#define DSD_DECIMATE 1
@interface ChunkList : NSObject { @interface ChunkList : NSObject {
NSMutableArray<AudioChunk *> *chunkList; NSMutableArray<AudioChunk *> *chunkList;
double listDuration; double listDuration;
double listDurationRatioed;
double maxDuration; double maxDuration;
BOOL inAdder; BOOL inAdder;
BOOL inRemover; BOOL inRemover;
BOOL inPeeker; BOOL inPeeker;
BOOL inMerger;
BOOL inConverter;
BOOL stopping; BOOL stopping;
// For format converter
void *inputBuffer;
size_t inputBufferSize;
#if DSD_DECIMATE
void **dsd2pcm;
size_t dsd2pcmCount;
int dsd2pcmLatency;
#endif
BOOL observersRegistered;
BOOL halveDSDVolume;
BOOL enableHDCD;
void *hdcd_decoder;
BOOL formatRead;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription floatFormat;
uint32_t inputChannelConfig;
BOOL inputLossless;
uint8_t *tempData;
size_t tempDataSize;
} }
@property(readonly) double listDuration; @property(readonly) double listDuration;
@property(readonly) double listDurationRatioed;
@property(readonly) double maxDuration; @property(readonly) double maxDuration;
- (id)initWithMaximumDuration:(double)duration; - (id)initWithMaximumDuration:(double)duration;
@ -71,16 +38,8 @@ NS_ASSUME_NONNULL_BEGIN
- (void)addChunk:(AudioChunk *)chunk; - (void)addChunk:(AudioChunk *)chunk;
- (AudioChunk *)removeSamples:(size_t)maxFrameCount; - (AudioChunk *)removeSamples:(size_t)maxFrameCount;
- (AudioChunk *)removeSamplesAsFloat32:(size_t)maxFrameCount;
- (BOOL)peekFormat:(nonnull AudioStreamBasicDescription *)format channelConfig:(nonnull uint32_t *)config; - (BOOL)peekFormat:(nonnull AudioStreamBasicDescription *)format channelConfig:(nonnull uint32_t *)config;
- (BOOL)peekTimestamp:(nonnull double *)timestamp timeRatio:(nonnull double *)timeRatio;
// Helpers
- (AudioChunk *)removeAndMergeSamples:(size_t)maxFrameCount callBlock:(BOOL(NS_NOESCAPE ^ _Nonnull)(void))block;
- (AudioChunk *)removeAndMergeSamplesAsFloat32:(size_t)maxFrameCount callBlock:(BOOL(NS_NOESCAPE ^ _Nonnull)(void))block;
@end @end
NS_ASSUME_NONNULL_END NS_ASSUME_NONNULL_END
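A hypothetical consumer of the list API above (the include path and the DrainChunks helper are assumptions, not part of the tree):

	#import <CogAudio/ChunkList.h>

	// Illustrative only: buffer up to three seconds of already-decoded chunks,
	// then drain the list in 1024-frame slices.
	static void DrainChunks(NSArray<AudioChunk *> *decoded) {
		ChunkList *list = [[ChunkList alloc] initWithMaximumDuration:3.0];
		for(AudioChunk *chunk in decoded) {
			if([list listDuration] >= [list maxDuration]) break;
			[list addChunk:chunk];
		}
		while([list listDuration] > 0.0) {
			AudioChunk *slice = [list removeSamples:1024];
			if(![slice frameCount]) break;
			// hand `slice` to the next node or the output device here
		}
	}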

View file

@ -5,371 +5,11 @@
// Created by Christopher Snowhill on 2/5/22. // Created by Christopher Snowhill on 2/5/22.
// //
#import <Accelerate/Accelerate.h>
#import "ChunkList.h" #import "ChunkList.h"
#import "hdcd_decode2.h"
#if !DSD_DECIMATE
#import "dsd2float.h"
#endif
#ifdef _DEBUG
#import "BadSampleCleaner.h"
#endif
static void *kChunkListContext = &kChunkListContext;
#if DSD_DECIMATE
/**
* DSD 2 PCM: Stage 1:
* Decimate by factor 8
* (one byte (8 samples) -> one float sample)
* The bits are processed from least significant to most significant.
* @author Sebastian Gesemann
*/
/**
* This is the 2nd half of an even order symmetric FIR
* lowpass filter (to be used on a signal sampled at 44100*64 Hz)
* Passband is 0-24 kHz (ripples +/- 0.025 dB)
* Stopband starts at 176.4 kHz (rejection: 170 dB)
* The overall gain is 2.0
*/
#define dsd2pcm_FILTER_COEFFS_COUNT 64
static const float dsd2pcm_FILTER_COEFFS[64] = {
0.09712411121659f, 0.09613438994044f, 0.09417884216316f, 0.09130441727307f,
0.08757947648990f, 0.08309142055179f, 0.07794369263673f, 0.07225228745463f,
0.06614191680338f, 0.05974199351302f, 0.05318259916599f, 0.04659059631228f,
0.04008603356890f, 0.03377897290478f, 0.02776684382775f, 0.02213240062966f,
0.01694232798846f, 0.01224650881275f, 0.00807793792573f, 0.00445323755944f,
0.00137370697215f, -0.00117318019994f, -0.00321193033831f, -0.00477694265140f,
-0.00591028841335f, -0.00665946056286f, -0.00707518873201f, -0.00720940203988f,
-0.00711340642819f, -0.00683632603227f, -0.00642384017266f, -0.00591723006715f,
-0.00535273320457f, -0.00476118922548f, -0.00416794965654f, -0.00359301524813f,
-0.00305135909510f, -0.00255339111833f, -0.00210551956895f, -0.00171076760278f,
-0.00136940723130f, -0.00107957856005f, -0.00083786862365f, -0.00063983084245f,
-0.00048043272086f, -0.00035442550015f, -0.00025663481039f, -0.00018217573430f,
-0.00012659899635f, -0.00008597726991f, -0.00005694188820f, -0.00003668060332f,
-0.00002290670286f, -0.00001380895679f, -0.00000799057558f, -0.00000440385083f,
-0.00000228567089f, -0.00000109760778f, -0.00000047286430f, -0.00000017129652f,
-0.00000004282776f, 0.00000000119422f, 0.00000000949179f, 0.00000000747450f
};
struct dsd2pcm_state {
/*
* This is the 2nd half of an even order symmetric FIR
* lowpass filter (to be used on a signal sampled at 44100*64 Hz)
* Passband is 0-24 kHz (ripples +/- 0.025 dB)
* Stopband starts at 176.4 kHz (rejection: 170 dB)
* The overall gain is 2.0
*/
/* These remain constant for the duration */
int FILT_LOOKUP_PARTS;
float *FILT_LOOKUP_TABLE;
uint8_t *REVERSE_BITS;
int FIFO_LENGTH;
int FIFO_OFS_MASK;
/* These are altered */
int *fifo;
int fpos;
};
static void dsd2pcm_free(void *);
static void dsd2pcm_reset(void *);
static void *dsd2pcm_alloc(void) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)calloc(1, sizeof(struct dsd2pcm_state));
float *FILT_LOOKUP_TABLE;
double *temp;
uint8_t *REVERSE_BITS;
if(!state)
return NULL;
state->FILT_LOOKUP_PARTS = (dsd2pcm_FILTER_COEFFS_COUNT + 7) / 8;
const int FILT_LOOKUP_PARTS = state->FILT_LOOKUP_PARTS;
// The current 128 tap FIR leads to an 8 KB lookup table
state->FILT_LOOKUP_TABLE = (float *)calloc(sizeof(float), FILT_LOOKUP_PARTS << 8);
if(!state->FILT_LOOKUP_TABLE)
goto fail;
FILT_LOOKUP_TABLE = state->FILT_LOOKUP_TABLE;
temp = (double *)calloc(sizeof(double), 0x100);
if(!temp)
goto fail;
for(int part = 0, sofs = 0, dofs = 0; part < FILT_LOOKUP_PARTS;) {
memset(temp, 0, 0x100 * sizeof(double));
for(int bit = 0, bitmask = 0x80; bit < 8 && sofs + bit < dsd2pcm_FILTER_COEFFS_COUNT;) {
double coeff = dsd2pcm_FILTER_COEFFS[sofs + bit];
for(int bite = 0; bite < 0x100; bite++) {
if((bite & bitmask) == 0) {
temp[bite] -= coeff;
} else {
temp[bite] += coeff;
}
}
bit++;
bitmask >>= 1;
}
for(int s = 0; s < 0x100;) {
FILT_LOOKUP_TABLE[dofs++] = (float)temp[s++];
}
part++;
sofs += 8;
}
free(temp);
{ // calculate FIFO stuff
int k = 1;
while(k < FILT_LOOKUP_PARTS * 2) k <<= 1;
state->FIFO_LENGTH = k;
state->FIFO_OFS_MASK = k - 1;
}
state->REVERSE_BITS = (uint8_t *)calloc(1, 0x100);
if(!state->REVERSE_BITS)
goto fail;
REVERSE_BITS = state->REVERSE_BITS;
for(int i = 0, j = 0; i < 0x100; i++) {
REVERSE_BITS[i] = (uint8_t)j;
// "reverse-increment" of j
for(int bitmask = 0x80;;) {
if(((j ^= bitmask) & bitmask) != 0) break;
if(bitmask == 1) break;
bitmask >>= 1;
}
}
state->fifo = (int *)calloc(sizeof(int), state->FIFO_LENGTH);
if(!state->fifo)
goto fail;
dsd2pcm_reset(state);
return (void *)state;
fail:
dsd2pcm_free(state);
return NULL;
}
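The loop above precomputes, for each group of eight filter taps, a 256-entry table indexed by one DSD byte, each entry being the sum of that group's coefficients with the sign of each term taken from the corresponding bit of the byte. A small sketch of the two extreme entries (group_sum is a hypothetical helper):

	// For tap group `part`, a byte of 0xFF adds every coefficient and 0x00
	// subtracts every coefficient, so:
	//   FILT_LOOKUP_TABLE[(part << 8) + 0xFF] == +group_sum(part)
	//   FILT_LOOKUP_TABLE[(part << 8) + 0x00] == -group_sum(part)
	static float group_sum(int part) {
		float sum = 0.0f;
		for(int k = 0; k < 8 && 8 * part + k < dsd2pcm_FILTER_COEFFS_COUNT; k++)
			sum += dsd2pcm_FILTER_COEFFS[8 * part + k];
		return sum;
	}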
static void *dsd2pcm_dup(void *_state) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)_state;
if(state) {
struct dsd2pcm_state *newstate = (struct dsd2pcm_state *)calloc(1, sizeof(struct dsd2pcm_state));
if(newstate) {
newstate->FILT_LOOKUP_PARTS = state->FILT_LOOKUP_PARTS;
newstate->FIFO_LENGTH = state->FIFO_LENGTH;
newstate->FIFO_OFS_MASK = state->FIFO_OFS_MASK;
newstate->fpos = state->fpos;
newstate->FILT_LOOKUP_TABLE = (float *)calloc(sizeof(float), state->FILT_LOOKUP_PARTS << 8);
if(!newstate->FILT_LOOKUP_TABLE)
goto fail;
memcpy(newstate->FILT_LOOKUP_TABLE, state->FILT_LOOKUP_TABLE, sizeof(float) * (state->FILT_LOOKUP_PARTS << 8));
newstate->REVERSE_BITS = (uint8_t *)calloc(1, 0x100);
if(!newstate->REVERSE_BITS)
goto fail;
memcpy(newstate->REVERSE_BITS, state->REVERSE_BITS, 0x100);
newstate->fifo = (int *)calloc(sizeof(int), state->FIFO_LENGTH);
if(!newstate->fifo)
goto fail;
memcpy(newstate->fifo, state->fifo, sizeof(int) * state->FIFO_LENGTH);
return (void *)newstate;
}
fail:
dsd2pcm_free(newstate);
return NULL;
}
return NULL;
}
static void dsd2pcm_free(void *_state) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)_state;
if(state) {
free(state->fifo);
free(state->REVERSE_BITS);
free(state->FILT_LOOKUP_TABLE);
free(state);
}
}
static void dsd2pcm_reset(void *_state) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)_state;
const int FILT_LOOKUP_PARTS = state->FILT_LOOKUP_PARTS;
int *fifo = state->fifo;
for(int i = 0; i < FILT_LOOKUP_PARTS; i++) {
fifo[i] = 0x55;
fifo[i + FILT_LOOKUP_PARTS] = 0xAA;
}
state->fpos = FILT_LOOKUP_PARTS;
}
static int dsd2pcm_latency(void *_state) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)_state;
if(state)
return state->FILT_LOOKUP_PARTS * 8;
else
return 0;
}
static void dsd2pcm_process(void *_state, const uint8_t *src, size_t sofs, size_t sinc, float *dest, size_t dofs, size_t dinc, size_t len) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)_state;
int bite1, bite2, temp;
float sample;
int *fifo = state->fifo;
const uint8_t *REVERSE_BITS = state->REVERSE_BITS;
const float *FILT_LOOKUP_TABLE = state->FILT_LOOKUP_TABLE;
const int FILT_LOOKUP_PARTS = state->FILT_LOOKUP_PARTS;
const int FIFO_OFS_MASK = state->FIFO_OFS_MASK;
int fpos = state->fpos;
while(len > 0) {
fifo[fpos] = REVERSE_BITS[fifo[fpos]] & 0xFF;
fifo[(fpos + FILT_LOOKUP_PARTS) & FIFO_OFS_MASK] = src[sofs] & 0xFF;
sofs += sinc;
temp = (fpos + 1) & FIFO_OFS_MASK;
sample = 0;
for(int k = 0, lofs = 0; k < FILT_LOOKUP_PARTS;) {
bite1 = fifo[(fpos - k) & FIFO_OFS_MASK];
bite2 = fifo[(temp + k) & FIFO_OFS_MASK];
sample += FILT_LOOKUP_TABLE[lofs + bite1] + FILT_LOOKUP_TABLE[lofs + bite2];
k++;
lofs += 0x100;
}
fpos = temp;
dest[dofs] = sample;
dofs += dinc;
len--;
}
state->fpos = fpos;
}
static void convert_dsd_to_f32(float *output, const uint8_t *input, size_t count, size_t channels, void **dsd2pcm) {
for(size_t channel = 0; channel < channels; ++channel) {
dsd2pcm_process(dsd2pcm[channel], input, channel, channels, output, channel, channels, count);
}
}
#else
static void convert_dsd_to_f32(float *output, const uint8_t *input, size_t count, size_t channels) {
const uint8_t *iptr = input;
float *optr = output;
for(size_t index = 0; index < count; ++index) {
for(size_t channel = 0; channel < channels; ++channel) {
uint8_t sample = *iptr++;
cblas_scopy(8, &dsd2float[sample][0], 1, optr++, (int)channels);
}
optr += channels * 7;
}
}
#endif
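A rough sketch of how the decimator above is driven for interleaved stereo DSD, assuming DSD_DECIMATE is enabled and the static dsd2pcm_* helpers are visible in the same translation unit; decimate_stereo_dsd is a hypothetical name:

	#include <stdint.h>

	// One byte of DSD = eight 1-bit samples per channel; the decimator emits
	// one float per input byte, i.e. the 8:1 decimation used here.
	static void decimate_stereo_dsd(const uint8_t *dsd, float *pcm, size_t bytesPerChannel) {
		void *state[2];
		state[0] = dsd2pcm_alloc();
		state[1] = dsd2pcm_dup(state[0]); // same filter table, copied FIFO state
		for(size_t ch = 0; ch < 2; ++ch) {
			// source and destination strides both equal the channel count
			dsd2pcm_process(state[ch], dsd, ch, 2, pcm, ch, 2, bytesPerChannel);
		}
		dsd2pcm_free(state[0]);
		dsd2pcm_free(state[1]);
	}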
static void convert_u8_to_s16(int16_t *output, const uint8_t *input, size_t count) {
for(size_t i = 0; i < count; ++i) {
uint16_t sample = (input[i] << 8) | input[i];
sample ^= 0x8080;
output[i] = (int16_t)(sample);
}
}
static void convert_s8_to_s16(int16_t *output, const uint8_t *input, size_t count) {
for(size_t i = 0; i < count; ++i) {
uint16_t sample = (input[i] << 8) | input[i];
output[i] = (int16_t)(sample);
}
}
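The 8-bit conversions above widen by replicating the byte into both halves of the word ((x << 8) | x), which preserves full scale exactly; the unsigned variant then re-centres by XORing 0x8080. A quick standalone check:

	#include <assert.h>
	#include <stdint.h>

	int main(void) {
		uint16_t full = ((0xFFu << 8) | 0xFFu) ^ 0x8080; // near positive full scale
		uint16_t mid  = ((0x80u << 8) | 0x80u) ^ 0x8080; // unsigned midpoint -> silence
		assert((int16_t)full == 0x7F7F);
		assert((int16_t)mid == 0);
		return 0;
	}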
static void convert_u16_to_s16(int16_t *buffer, size_t count) {
for(size_t i = 0; i < count; ++i) {
buffer[i] ^= 0x8000;
}
}
static void convert_s16_to_hdcd_input(int32_t *output, const int16_t *input, size_t count) {
for(size_t i = 0; i < count; ++i) {
output[i] = input[i];
}
}
static void convert_s24_to_s32(int32_t *output, const uint8_t *input, size_t count) {
for(size_t i = 0; i < count; ++i) {
int32_t sample = (input[i * 3] << 8) | (input[i * 3 + 1] << 16) | (input[i * 3 + 2] << 24);
output[i] = sample;
}
}
static void convert_u24_to_s32(int32_t *output, const uint8_t *input, size_t count) {
for(size_t i = 0; i < count; ++i) {
int32_t sample = (input[i * 3] << 8) | (input[i * 3 + 1] << 16) | (input[i * 3 + 2] << 24);
output[i] = sample ^ 0x80000000;
}
}
static void convert_u32_to_s32(int32_t *buffer, size_t count) {
for(size_t i = 0; i < count; ++i) {
buffer[i] ^= 0x80000000;
}
}
static void convert_f64_to_f32(float *output, const double *input, size_t count) {
vDSP_vdpsp(input, 1, output, 1, count);
}
static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes) {
size_t i;
bitsPerSample = (bitsPerSample + 7) / 8;
switch(bitsPerSample) {
case 2:
for(i = 0; i < bytes; i += 2) {
*(int16_t *)buffer = __builtin_bswap16(*(int16_t *)buffer);
buffer += 2;
}
break;
case 3: {
union {
vDSP_int24 int24;
uint32_t int32;
} intval;
intval.int32 = 0;
for(i = 0; i < bytes; i += 3) {
intval.int24 = *(vDSP_int24 *)buffer;
intval.int32 = __builtin_bswap32(intval.int32 << 8);
*(vDSP_int24 *)buffer = intval.int24;
buffer += 3;
}
} break;
case 4:
for(i = 0; i < bytes; i += 4) {
*(uint32_t *)buffer = __builtin_bswap32(*(uint32_t *)buffer);
buffer += 4;
}
break;
case 8:
for(i = 0; i < bytes; i += 8) {
*(uint64_t *)buffer = __builtin_bswap64(*(uint64_t *)buffer);
buffer += 8;
}
break;
}
}
@implementation ChunkList @implementation ChunkList
@synthesize listDuration; @synthesize listDuration;
@synthesize listDurationRatioed;
@synthesize maxDuration; @synthesize maxDuration;
- (id)initWithMaximumDuration:(double)duration { - (id)initWithMaximumDuration:(double)duration {
@ -378,97 +18,28 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
if(self) { if(self) {
chunkList = [[NSMutableArray alloc] init]; chunkList = [[NSMutableArray alloc] init];
listDuration = 0.0; listDuration = 0.0;
listDurationRatioed = 0.0;
maxDuration = duration; maxDuration = duration;
inAdder = NO; inAdder = NO;
inRemover = NO; inRemover = NO;
inPeeker = NO; inPeeker = NO;
inMerger = NO;
inConverter = NO;
stopping = NO; stopping = NO;
formatRead = NO;
inputBuffer = NULL;
inputBufferSize = 0;
#if DSD_DECIMATE
dsd2pcm = NULL;
dsd2pcmCount = 0;
dsd2pcmLatency = 0;
#endif
observersRegistered = NO;
} }
return self; return self;
} }
- (void)addObservers {
if(!observersRegistered) {
halveDSDVolume = NO;
enableHDCD = NO;
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.halveDSDVolume" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kChunkListContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableHDCD" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kChunkListContext];
observersRegistered = YES;
}
}
- (void)removeObservers {
if(observersRegistered) {
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.halveDSDVolume" context:kChunkListContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableHDCD" context:kChunkListContext];
observersRegistered = NO;
}
}
- (void)dealloc { - (void)dealloc {
stopping = YES; stopping = YES;
while(inAdder || inRemover || inPeeker || inMerger || inConverter) { while(inAdder || inRemover || inPeeker) {
usleep(500); usleep(500);
} }
[self removeObservers];
if(hdcd_decoder) {
free(hdcd_decoder);
hdcd_decoder = NULL;
}
#if DSD_DECIMATE
if(dsd2pcm && dsd2pcmCount) {
for(size_t i = 0; i < dsd2pcmCount; ++i) {
dsd2pcm_free(dsd2pcm[i]);
dsd2pcm[i] = NULL;
}
free(dsd2pcm);
dsd2pcm = NULL;
}
#endif
if(tempData) {
free(tempData);
}
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if(context != kChunkListContext) {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
return;
}
if([keyPath isEqualToString:@"values.halveDSDVolume"]) {
halveDSDVolume = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"halveDSDVolume"];
} else if([keyPath isEqualToString:@"values.enableHDCD"]) {
enableHDCD = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"enableHDCD"];
}
} }
- (void)reset { - (void)reset {
@synchronized(chunkList) { @synchronized(chunkList) {
[chunkList removeAllObjects]; [chunkList removeAllObjects];
listDuration = 0.0; listDuration = 0.0;
listDurationRatioed = 0.0;
} }
} }
@ -479,9 +50,7 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
} }
- (BOOL)isFull { - (BOOL)isFull {
@synchronized (chunkList) { return (maxDuration - listDuration) < 0.05;
return (maxDuration - listDuration) < 0.001;
}
} }
- (void)addChunk:(AudioChunk *)chunk { - (void)addChunk:(AudioChunk *)chunk {
@ -490,12 +59,10 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
inAdder = YES; inAdder = YES;
const double chunkDuration = [chunk duration]; const double chunkDuration = [chunk duration];
const double chunkDurationRatioed = [chunk durationRatioed];
@synchronized(chunkList) { @synchronized(chunkList) {
[chunkList addObject:chunk]; [chunkList addObject:chunk];
listDuration += chunkDuration; listDuration += chunkDuration;
listDurationRatioed += chunkDurationRatioed;
} }
inAdder = NO; inAdder = NO;
@ -516,461 +83,30 @@ static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes
if([chunk frameCount] <= maxFrameCount) { if([chunk frameCount] <= maxFrameCount) {
[chunkList removeObjectAtIndex:0]; [chunkList removeObjectAtIndex:0];
listDuration -= [chunk duration]; listDuration -= [chunk duration];
listDurationRatioed -= [chunk durationRatioed];
inRemover = NO; inRemover = NO;
return chunk; return chunk;
} }
double streamTimestamp = [chunk streamTimestamp];
NSData *removedData = [chunk removeSamples:maxFrameCount]; NSData *removedData = [chunk removeSamples:maxFrameCount];
AudioChunk *ret = [[AudioChunk alloc] init]; AudioChunk *ret = [[AudioChunk alloc] init];
[ret setFormat:[chunk format]]; [ret setFormat:[chunk format]];
[ret setChannelConfig:[chunk channelConfig]]; [ret setChannelConfig:[chunk channelConfig]];
[ret setLossless:[chunk lossless]]; [ret assignSamples:[removedData bytes] frameCount:maxFrameCount];
[ret setStreamTimestamp:streamTimestamp];
[ret setStreamTimeRatio:[chunk streamTimeRatio]];
[ret assignData:removedData];
listDuration -= [ret duration]; listDuration -= [ret duration];
listDurationRatioed -= [ret durationRatioed];
inRemover = NO; inRemover = NO;
return ret; return ret;
} }
} }
- (AudioChunk *)removeSamplesAsFloat32:(size_t)maxFrameCount {
if(stopping) {
return [[AudioChunk alloc] init];
}
@synchronized (chunkList) {
inRemover = YES;
if(![chunkList count]) {
inRemover = NO;
return [[AudioChunk alloc] init];
}
AudioChunk *chunk = [chunkList objectAtIndex:0];
#if !DSD_DECIMATE
AudioStreamBasicDescription asbd = [chunk format];
if(asbd.mBitsPerChannel == 1) {
maxFrameCount /= 8;
}
#endif
if([chunk frameCount] <= maxFrameCount) {
[chunkList removeObjectAtIndex:0];
listDuration -= [chunk duration];
listDurationRatioed -= [chunk durationRatioed];
inRemover = NO;
return [self convertChunk:chunk];
}
double streamTimestamp = [chunk streamTimestamp];
NSData *removedData = [chunk removeSamples:maxFrameCount];
AudioChunk *ret = [[AudioChunk alloc] init];
[ret setFormat:[chunk format]];
[ret setChannelConfig:[chunk channelConfig]];
[ret setLossless:[chunk lossless]];
[ret setStreamTimestamp:streamTimestamp];
[ret setStreamTimeRatio:[chunk streamTimeRatio]];
[ret assignData:removedData];
listDuration -= [ret duration];
listDurationRatioed -= [ret durationRatioed];
inRemover = NO;
return [self convertChunk:ret];
}
}
- (AudioChunk *)removeAndMergeSamples:(size_t)maxFrameCount callBlock:(BOOL(NS_NOESCAPE ^ _Nonnull)(void))block {
if(stopping) {
return [[AudioChunk alloc] init];
}
inMerger = YES;
BOOL formatSet = NO;
AudioStreamBasicDescription currentFormat;
uint32_t currentChannelConfig = 0;
double streamTimestamp = 0.0;
double streamTimeRatio = 1.0;
BOOL blocked = NO;
while(![self peekTimestamp:&streamTimestamp timeRatio:&streamTimeRatio]) {
if((blocked = block())) {
break;
}
}
if(blocked) {
inMerger = NO;
return [[AudioChunk alloc] init];
}
AudioChunk *chunk;
size_t totalFrameCount = 0;
AudioChunk *outputChunk = [[AudioChunk alloc] init];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:streamTimeRatio];
while(!stopping && totalFrameCount < maxFrameCount) {
AudioStreamBasicDescription newFormat;
uint32_t newChannelConfig;
if(![self peekFormat:&newFormat channelConfig:&newChannelConfig]) {
if(block()) {
break;
}
continue;
}
if(formatSet &&
(memcmp(&newFormat, &currentFormat, sizeof(newFormat)) != 0 ||
newChannelConfig != currentChannelConfig)) {
break;
} else if(!formatSet) {
[outputChunk setFormat:newFormat];
[outputChunk setChannelConfig:newChannelConfig];
currentFormat = newFormat;
currentChannelConfig = newChannelConfig;
formatSet = YES;
}
chunk = [self removeSamples:maxFrameCount - totalFrameCount];
if(!chunk || ![chunk frameCount]) {
if(block()) {
break;
}
continue;
}
if([chunk isHDCD]) {
[outputChunk setHDCD];
}
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
[outputChunk assignData:sampleData];
totalFrameCount += frameCount;
}
if(!totalFrameCount) {
inMerger = NO;
return [[AudioChunk alloc] init];
}
inMerger = NO;
return outputChunk;
}
- (AudioChunk *)removeAndMergeSamplesAsFloat32:(size_t)maxFrameCount callBlock:(BOOL(NS_NOESCAPE ^ _Nonnull)(void))block {
AudioChunk *ret = [self removeAndMergeSamples:maxFrameCount callBlock:block];
return [self convertChunk:ret];
}
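The merge helpers above coalesce consecutive chunks of identical format until maxFrameCount is reached or the caller's block asks them to stop waiting. A hypothetical caller, sketched against the signature declared in ChunkList.h (PullMergedBlock is not part of the tree):

	#import <CogAudio/ChunkList.h>
	#import <unistd.h>

	// Illustrative consumer: pull up to 4096 merged float frames, pausing
	// briefly between attempts, and bailing out when told to stop.
	static AudioChunk *PullMergedBlock(ChunkList *list, volatile BOOL *stopping) {
		return [list removeAndMergeSamplesAsFloat32:4096
		                                  callBlock:^BOOL {
			if(*stopping) return YES; // YES = give up, return what we have so far
			usleep(500);              // brief pause before peeking again
			return NO;                // NO = keep waiting for more data
		}];
	}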
- (AudioChunk *)convertChunk:(AudioChunk *)inChunk {
if(stopping) return [[AudioChunk alloc] init];
inConverter = YES;
AudioStreamBasicDescription chunkFormat = [inChunk format];
if(![inChunk frameCount] ||
(chunkFormat.mFormatFlags == kAudioFormatFlagsNativeFloatPacked &&
chunkFormat.mBitsPerChannel == 32)) {
inConverter = NO;
return inChunk;
}
uint32_t chunkConfig = [inChunk channelConfig];
BOOL chunkLossless = [inChunk lossless];
if(!formatRead || memcmp(&chunkFormat, &inputFormat, sizeof(chunkFormat)) != 0 ||
chunkConfig != inputChannelConfig || chunkLossless != inputLossless) {
formatRead = YES;
inputFormat = chunkFormat;
inputChannelConfig = chunkConfig;
inputLossless = chunkLossless;
BOOL isFloat = !!(inputFormat.mFormatFlags & kAudioFormatFlagIsFloat);
if((!isFloat && !(inputFormat.mBitsPerChannel >= 1 && inputFormat.mBitsPerChannel <= 32)) || (isFloat && !(inputFormat.mBitsPerChannel == 32 || inputFormat.mBitsPerChannel == 64))) {
inConverter = NO;
return [[AudioChunk alloc] init];
}
// These are really placeholders, as we're doing everything internally now
if(inputLossless &&
inputFormat.mBitsPerChannel == 16 &&
inputFormat.mChannelsPerFrame == 2 &&
inputFormat.mSampleRate == 44100) {
// possibly HDCD, run through decoder
[self addObservers];
if(hdcd_decoder) {
free(hdcd_decoder);
hdcd_decoder = NULL;
}
hdcd_decoder = calloc(1, sizeof(hdcd_state_stereo_t));
hdcd_reset_stereo((hdcd_state_stereo_t *)hdcd_decoder, 44100);
}
floatFormat = inputFormat;
floatFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
floatFormat.mBitsPerChannel = 32;
floatFormat.mBytesPerFrame = (32 / 8) * floatFormat.mChannelsPerFrame;
floatFormat.mBytesPerPacket = floatFormat.mBytesPerFrame * floatFormat.mFramesPerPacket;
#if DSD_DECIMATE
if(inputFormat.mBitsPerChannel == 1) {
// Decimate this for speed
floatFormat.mSampleRate *= 1.0 / 8.0;
if(dsd2pcm && dsd2pcmCount) {
for(size_t i = 0; i < dsd2pcmCount; ++i) {
dsd2pcm_free(dsd2pcm[i]);
dsd2pcm[i] = NULL;
}
free(dsd2pcm);
dsd2pcm = NULL;
}
dsd2pcmCount = floatFormat.mChannelsPerFrame;
dsd2pcm = (void **)calloc(dsd2pcmCount, sizeof(void *));
dsd2pcm[0] = dsd2pcm_alloc();
dsd2pcmLatency = dsd2pcm_latency(dsd2pcm[0]);
for(size_t i = 1; i < dsd2pcmCount; ++i) {
dsd2pcm[i] = dsd2pcm_dup(dsd2pcm[0]);
}
}
#endif
}
NSUInteger samplesRead = [inChunk frameCount];
if(!samplesRead) {
inConverter = NO;
return [[AudioChunk alloc] init];
}
BOOL isFloat = !!(inputFormat.mFormatFlags & kAudioFormatFlagIsFloat);
BOOL isUnsigned = !isFloat && !(inputFormat.mFormatFlags & kAudioFormatFlagIsSignedInteger);
size_t bitsPerSample = inputFormat.mBitsPerChannel;
BOOL isBigEndian = !!(inputFormat.mFormatFlags & kAudioFormatFlagIsBigEndian);
double streamTimestamp = [inChunk streamTimestamp];
NSData *inputData = [inChunk removeSamples:samplesRead];
#if DSD_DECIMATE
const size_t sizeFactor = 3;
#else
const size_t sizeFactor = (bitsPerSample == 1) ? 9 : 3;
#endif
size_t newSize = samplesRead * floatFormat.mBytesPerPacket * sizeFactor + 64;
if(!tempData || tempDataSize < newSize)
tempData = realloc(tempData, tempDataSize = newSize); // Either two buffers plus padding, and/or double precision in case of endian flip
// double buffer system, with alignment
const size_t buffer_adder_base = (samplesRead * floatFormat.mBytesPerPacket + 31) & ~31;
NSUInteger bytesReadFromInput = samplesRead * inputFormat.mBytesPerPacket;
uint8_t *inputBuffer = (uint8_t *)[inputData bytes];
BOOL inputChanged = NO;
BOOL hdcdSustained = NO;
if(bytesReadFromInput && isBigEndian) {
// Time for endian swap!
memcpy(&tempData[0], [inputData bytes], bytesReadFromInput);
convert_be_to_le((uint8_t *)(&tempData[0]), inputFormat.mBitsPerChannel, bytesReadFromInput);
inputBuffer = &tempData[0];
inputChanged = YES;
}
if(bytesReadFromInput && isFloat && bitsPerSample == 64) {
// Time for precision loss from weird inputs
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base * 2 : 0;
samplesRead = bytesReadFromInput / sizeof(double);
convert_f64_to_f32((float *)(&tempData[buffer_adder]), (const double *)inputBuffer, samplesRead);
bytesReadFromInput = samplesRead * sizeof(float);
inputBuffer = &tempData[buffer_adder];
inputChanged = YES;
bitsPerSample = 32;
}
if(bytesReadFromInput && !isFloat) {
float gain = 1.0;
if(bitsPerSample == 1) {
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
samplesRead = bytesReadFromInput / inputFormat.mBytesPerPacket;
convert_dsd_to_f32((float *)(&tempData[buffer_adder]), (const uint8_t *)inputBuffer, samplesRead, inputFormat.mChannelsPerFrame
#if DSD_DECIMATE
,
dsd2pcm
#endif
);
#if !DSD_DECIMATE
samplesRead *= 8;
#endif
bitsPerSample = 32;
bytesReadFromInput = samplesRead * floatFormat.mBytesPerPacket;
isFloat = YES;
inputBuffer = &tempData[buffer_adder];
inputChanged = YES;
[self addObservers];
#if DSD_DECIMATE
if(halveDSDVolume) {
float scaleFactor = 2.0f;
vDSP_vsdiv((float *)inputBuffer, 1, &scaleFactor, (float *)inputBuffer, 1, bytesReadFromInput / sizeof(float));
}
#else
if(!halveDSDVolume) {
float scaleFactor = 2.0f;
vDSP_vsmul((float *)inputBuffer, 1, &scaleFactor, (float *)inputBuffer, 1, bytesReadFromInput / sizeof(float));
}
#endif
} else if(bitsPerSample <= 8) {
samplesRead = bytesReadFromInput;
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
if(!isUnsigned)
convert_s8_to_s16((int16_t *)(&tempData[buffer_adder]), (const uint8_t *)inputBuffer, samplesRead);
else
convert_u8_to_s16((int16_t *)(&tempData[buffer_adder]), (const uint8_t *)inputBuffer, samplesRead);
bitsPerSample = 16;
bytesReadFromInput = samplesRead * 2;
isUnsigned = NO;
inputBuffer = &tempData[buffer_adder];
inputChanged = YES;
}
if(hdcd_decoder) { // implied bits per sample is 16, produces 32 bit int scale
samplesRead = bytesReadFromInput / 2;
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
if(isUnsigned) {
if(!inputChanged) {
memcpy(&tempData[buffer_adder], inputBuffer, samplesRead * 2);
inputBuffer = &tempData[buffer_adder];
inputChanged = YES;
}
convert_u16_to_s16((int16_t *)inputBuffer, samplesRead);
isUnsigned = NO;
}
const size_t buffer_adder2 = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
convert_s16_to_hdcd_input((int32_t *)(&tempData[buffer_adder2]), (int16_t *)inputBuffer, samplesRead);
hdcd_process_stereo((hdcd_state_stereo_t *)hdcd_decoder, (int32_t *)(&tempData[buffer_adder2]), (int)(samplesRead / 2));
if(((hdcd_state_stereo_t *)hdcd_decoder)->channel[0].sustain &&
((hdcd_state_stereo_t *)hdcd_decoder)->channel[1].sustain) {
hdcdSustained = YES;
}
if(enableHDCD) {
gain = 2.0;
bitsPerSample = 32;
bytesReadFromInput = samplesRead * 4;
isUnsigned = NO;
inputBuffer = &tempData[buffer_adder2];
inputChanged = YES;
} else {
// Discard the output of the decoder and process again
goto process16bit;
}
} else if(bitsPerSample <= 16) {
process16bit:
samplesRead = bytesReadFromInput / 2;
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
if(isUnsigned) {
if(!inputChanged) {
memcpy(&tempData[buffer_adder], inputBuffer, samplesRead * 2);
inputBuffer = &tempData[buffer_adder];
//inputChanged = YES;
}
convert_u16_to_s16((int16_t *)inputBuffer, samplesRead);
}
const size_t buffer_adder2 = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
vDSP_vflt16((const short *)inputBuffer, 1, (float *)(&tempData[buffer_adder2]), 1, samplesRead);
float scale = 1ULL << 15;
vDSP_vsdiv((const float *)(&tempData[buffer_adder2]), 1, &scale, (float *)(&tempData[buffer_adder2]), 1, samplesRead);
bitsPerSample = 32;
bytesReadFromInput = samplesRead * sizeof(float);
isUnsigned = NO;
isFloat = YES;
inputBuffer = &tempData[buffer_adder2];
inputChanged = YES;
} else if(bitsPerSample <= 24) {
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
samplesRead = bytesReadFromInput / 3;
if(isUnsigned)
convert_u24_to_s32((int32_t *)(&tempData[buffer_adder]), (uint8_t *)inputBuffer, samplesRead);
else
convert_s24_to_s32((int32_t *)(&tempData[buffer_adder]), (uint8_t *)inputBuffer, samplesRead);
bitsPerSample = 32;
bytesReadFromInput = samplesRead * 4;
isUnsigned = NO;
inputBuffer = &tempData[buffer_adder];
inputChanged = YES;
}
if(!isFloat && bitsPerSample <= 32) {
samplesRead = bytesReadFromInput / 4;
if(isUnsigned) {
if(!inputChanged) {
memcpy(&tempData[0], inputBuffer, bytesReadFromInput);
inputBuffer = &tempData[0];
}
convert_u32_to_s32((int32_t *)inputBuffer, samplesRead);
}
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0; // vDSP functions expect buffers aligned to four elements
vDSP_vflt32((const int *)inputBuffer, 1, (float *)(&tempData[buffer_adder]), 1, samplesRead);
float scale = (1ULL << 31) / gain;
vDSP_vsdiv((const float *)(&tempData[buffer_adder]), 1, &scale, (float *)(&tempData[buffer_adder]), 1, samplesRead);
//bitsPerSample = 32;
bytesReadFromInput = samplesRead * sizeof(float);
//isUnsigned = NO;
//isFloat = YES;
inputBuffer = &tempData[buffer_adder];
}
#ifdef _DEBUG
[BadSampleCleaner cleanSamples:(float *)inputBuffer
amount:bytesReadFromInput / sizeof(float)
location:@"post int to float conversion"];
#endif
}
AudioChunk *outChunk = [[AudioChunk alloc] init];
[outChunk setFormat:floatFormat];
[outChunk setChannelConfig:inputChannelConfig];
[outChunk setLossless:inputLossless];
[outChunk setStreamTimestamp:streamTimestamp];
[outChunk setStreamTimeRatio:[inChunk streamTimeRatio]];
if(hdcdSustained) [outChunk setHDCD];
[outChunk assignSamples:inputBuffer frameCount:bytesReadFromInput / floatFormat.mBytesPerPacket];
inConverter = NO;
return outChunk;
}
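The integer paths above all funnel into the same vDSP widen-and-normalize idiom. A minimal standalone sketch of the 16-bit case follows; the helper name and buffers are illustrative, not part of this diff.

#include <Accelerate/Accelerate.h>

// Widen signed 16-bit PCM to float, then normalize into [-1.0, 1.0) by dividing by 2^15,
// the same vDSP_vflt16 + vDSP_vsdiv pair used in the 16-bit branch above.
static void s16_to_float(const int16_t *in, float *out, size_t count) {
	vDSP_vflt16(in, 1, out, 1, count);
	float scale = 1 << 15; // 2^15 = 32768
	vDSP_vsdiv(out, 1, &scale, out, 1, count);
}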
- (BOOL)peekFormat:(AudioStreamBasicDescription *)format channelConfig:(uint32_t *)config {
if(stopping) return NO;
inPeeker = YES;
@synchronized(chunkList) {
if([chunkList count]) {
AudioChunk *chunk = [chunkList objectAtIndex:0];
*format = [chunk format];
*config = [chunk channelConfig];
inPeeker = NO;
return YES;
}
}
inPeeker = NO;
return NO;
}
- (BOOL)peekTimestamp:(double *)timestamp timeRatio:(double *)timeRatio {
if(stopping) return NO;
inPeeker = YES;
@synchronized (chunkList) {
if([chunkList count]) {
AudioChunk *chunk = [chunkList objectAtIndex:0];
*timestamp = [chunk streamTimestamp];
*timeRatio = [chunk streamTimeRatio];
inPeeker = NO;
return YES;
}
}
*timestamp = 0.0;
*timeRatio = 1.0;
inPeeker = NO;
return NO;
}

View file

@ -12,21 +12,19 @@
#import <AudioUnit/AudioUnit.h>
#import <CoreAudio/AudioHardware.h>
#import <CogAudio/soxr.h> #import "Node.h"
#import <CogAudio/Node.h> #import "HeadphoneFilter.h"
@interface ConverterNode : Node {
NSDictionary *rgInfo;
soxr_t soxr; void *_r8bstate;
void *inputBuffer;
size_t inputBufferSize;
size_t inpSize, inpOffset;
double streamTimestamp, streamTimeRatio;
BOOL stopping;
BOOL convertEntered;
BOOL paused;
@ -37,52 +35,61 @@
unsigned int N_samples_to_add_;
unsigned int N_samples_to_drop_;
BOOL is_preextrapolated_; unsigned int is_preextrapolated_;
int is_postextrapolated_; unsigned int is_postextrapolated_;
int latencyEaten;
int latencyEatenPost;
double sampleRatio;
BOOL observersAdded;
float volumeScale;
void *floatBuffer;
size_t floatBufferSize;
size_t floatSize, floatOffset;
void *extrapolateBuffer;
size_t extrapolateBufferSize;
void **dsd2pcm;
size_t dsd2pcmCount;
int dsd2pcmLatency;
BOOL rememberedLossless;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription floatFormat;
AudioStreamBasicDescription dmFloatFormat; // downmixed/upmixed float format
AudioStreamBasicDescription outputFormat;
uint32_t inputChannelConfig;
uint32_t outputChannelConfig;
BOOL streamFormatChanged;
AudioStreamBasicDescription newInputFormat;
uint32_t newInputChannelConfig;
AudioChunk *lastChunkIn;
void *hdcd_decoder;
HeadphoneFilter *hFilter;
}
@property AudioStreamBasicDescription inputFormat;
- (id)initWithController:(id)c previous:(id)p;
- (BOOL)setupWithInputFormat:(AudioStreamBasicDescription)inputFormat withInputConfig:(uint32_t)inputConfig outputFormat:(AudioStreamBasicDescription)outputFormat isLossless:(BOOL)lossless; - (BOOL)setupWithInputFormat:(AudioStreamBasicDescription)inputFormat withInputConfig:(uint32_t)inputConfig outputFormat:(AudioStreamBasicDescription)outputFormat outputConfig:(uint32_t)outputConfig isLossless:(BOOL)lossless;
- (void)cleanUp;
- (BOOL)paused;
- (void)process;
- (AudioChunk *)convert; - (int)convert:(void *)dest amount:(int)amount;
- (void)setRGInfo:(NSDictionary *)rgi;
- (void)setOutputFormat:(AudioStreamBasicDescription)outputFormat; - (void)setOutputFormat:(AudioStreamBasicDescription)format outputConfig:(uint32_t)outputConfig;
- (void)inputFormatDidChange:(AudioStreamBasicDescription)format inputConfig:(uint32_t)inputConfig;

View file

@ -1,564 +0,0 @@
//
// ConverterNode.m
// Cog
//
// Created by Zaphod Beeblebrox on 8/2/05.
// Copyright 2005 __MyCompanyName__. All rights reserved.
//
#import <Accelerate/Accelerate.h>
#import <Foundation/Foundation.h>
#import "ConverterNode.h"
#import "BufferChain.h"
#import "OutputNode.h"
#import "Logging.h"
#import "lpc.h"
#import "util.h"
#ifdef _DEBUG
#import "BadSampleCleaner.h"
#endif
void PrintStreamDesc(AudioStreamBasicDescription *inDesc) {
if(!inDesc) {
DLog(@"Can't print a NULL desc!\n");
return;
}
DLog(@"- - - - - - - - - - - - - - - - - - - -\n");
DLog(@" Sample Rate:%f\n", inDesc->mSampleRate);
DLog(@" Format ID:%s\n", (char *)&inDesc->mFormatID);
DLog(@" Format Flags:%X\n", inDesc->mFormatFlags);
DLog(@" Bytes per Packet:%d\n", inDesc->mBytesPerPacket);
DLog(@" Frames per Packet:%d\n", inDesc->mFramesPerPacket);
DLog(@" Bytes per Frame:%d\n", inDesc->mBytesPerFrame);
DLog(@" Channels per Frame:%d\n", inDesc->mChannelsPerFrame);
DLog(@" Bits per Channel:%d\n", inDesc->mBitsPerChannel);
DLog(@"- - - - - - - - - - - - - - - - - - - -\n");
}
@implementation ConverterNode
static void *kConverterNodeContext = &kConverterNodeContext;
@synthesize inputFormat;
- (id)initWithController:(id)c previous:(id)p {
self = [super initWithController:c previous:p];
if(self) {
rgInfo = nil;
soxr = 0;
inputBuffer = NULL;
inputBufferSize = 0;
floatBuffer = NULL;
floatBufferSize = 0;
stopping = NO;
convertEntered = NO;
paused = NO;
skipResampler = YES;
extrapolateBuffer = NULL;
extrapolateBufferSize = 0;
#ifdef LOG_CHAINS
[self initLogFiles];
#endif
}
return self;
}
- (void)addObservers {
if(!observersAdded) {
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.volumeScaling" options:(NSKeyValueObservingOptionInitial|NSKeyValueObservingOptionNew) context:kConverterNodeContext];
observersAdded = YES;
}
}
- (void)removeObservers {
if(observersAdded) {
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.volumeScaling" context:kConverterNodeContext];
observersAdded = NO;
}
}
void scale_by_volume(float *buffer, size_t count, float volume) {
if(volume != 1.0) {
size_t unaligned = (uintptr_t)buffer & 15;
if(unaligned) {
size_t count_unaligned = (16 - unaligned) / sizeof(float);
while(count > 0 && count_unaligned > 0) {
*buffer++ *= volume;
count_unaligned--;
count--;
}
}
if(count) {
vDSP_vsmul(buffer, 1, &volume, buffer, 1, count);
}
}
}
- (BOOL)paused {
return paused;
}
- (void)process {
// Removed endOfStream check from here, since we want to be able to flush the converter
// when the end of stream is reached. Convert function instead processes what it can,
// and returns 0 samples when it has nothing more to process at the end of stream.
while([self shouldContinue] == YES) {
while(paused) {
usleep(500);
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
endOfStream = YES;
break;
}
if(paused || !streamFormatChanged) {
continue;
}
usleep(500);
} else {
[self writeChunk:chunk];
chunk = nil;
}
if(streamFormatChanged) {
[self cleanUp];
[self setupWithInputFormat:newInputFormat withInputConfig:newInputChannelConfig outputFormat:self->outputFormat isLossless:rememberedLossless];
}
}
}
endOfStream = YES;
}
- (AudioChunk *)convert {
UInt32 ioNumberPackets;
if(stopping)
return 0;
convertEntered = YES;
if(stopping || [self shouldContinue] == NO) {
convertEntered = NO;
return nil;
}
if(inpOffset == inpSize) {
streamTimestamp = 0.0;
streamTimeRatio = 1.0;
if(![self peekTimestamp:&streamTimestamp timeRatio:&streamTimeRatio]) {
convertEntered = NO;
return nil;
}
}
while(inpOffset == inpSize) {
// Approximately the most we want on input
ioNumberPackets = 4096;
size_t newSize = ioNumberPackets * floatFormat.mBytesPerPacket;
if(!inputBuffer || inputBufferSize < newSize)
inputBuffer = realloc(inputBuffer, inputBufferSize = newSize);
ssize_t amountToWrite = ioNumberPackets * floatFormat.mBytesPerPacket;
ssize_t bytesReadFromInput = 0;
while(bytesReadFromInput < amountToWrite && !stopping && !paused && !streamFormatChanged && [self shouldContinue] == YES && !([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES)) {
AudioStreamBasicDescription inf;
uint32_t config;
if([self peekFormat:&inf channelConfig:&config]) {
if(config != inputChannelConfig || memcmp(&inf, &inputFormat, sizeof(inf)) != 0) {
if(inputChannelConfig == 0 && memcmp(&inf, &inputFormat, sizeof(inf)) == 0) {
inputChannelConfig = config;
continue;
} else {
newInputFormat = inf;
newInputChannelConfig = config;
streamFormatChanged = YES;
break;
}
}
}
AudioChunk *chunk = [self readChunkAsFloat32:((amountToWrite - bytesReadFromInput) / floatFormat.mBytesPerPacket)];
inf = [chunk format];
size_t frameCount = [chunk frameCount];
config = [chunk channelConfig];
size_t bytesRead = frameCount * inf.mBytesPerPacket;
if(frameCount) {
NSData *samples = [chunk removeSamples:frameCount];
memcpy(((uint8_t *)inputBuffer) + bytesReadFromInput, [samples bytes], bytesRead);
if([chunk isHDCD]) {
[controller sustainHDCD];
}
}
bytesReadFromInput += bytesRead;
if(!frameCount) {
usleep(500);
}
}
if(!bytesReadFromInput) {
convertEntered = NO;
return nil;
}
if(stopping || paused || streamFormatChanged || [self shouldContinue] == NO || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES)) {
if(!skipResampler) {
if(!is_postextrapolated_) {
is_postextrapolated_ = 1;
}
} else {
is_postextrapolated_ = 3;
}
}
// Extrapolate start
if(!skipResampler && !is_preextrapolated_) {
size_t inputSamples = bytesReadFromInput / floatFormat.mBytesPerPacket;
size_t prime = MIN(inputSamples, PRIME_LEN_);
size_t _N_samples_to_add_ = N_samples_to_add_;
size_t newSize = _N_samples_to_add_ * floatFormat.mBytesPerPacket;
newSize += bytesReadFromInput;
if(newSize > inputBufferSize) {
inputBuffer = realloc(inputBuffer, inputBufferSize = newSize * 3);
}
memmove(inputBuffer + _N_samples_to_add_ * floatFormat.mBytesPerPacket, inputBuffer, bytesReadFromInput);
lpc_extrapolate_bkwd(inputBuffer + _N_samples_to_add_ * floatFormat.mBytesPerPacket, inputSamples, prime, floatFormat.mChannelsPerFrame, LPC_ORDER, _N_samples_to_add_, &extrapolateBuffer, &extrapolateBufferSize);
bytesReadFromInput += _N_samples_to_add_ * floatFormat.mBytesPerPacket;
latencyEaten = N_samples_to_drop_;
is_preextrapolated_ = YES;
}
if(is_postextrapolated_ == 1) {
size_t inputSamples = bytesReadFromInput / floatFormat.mBytesPerPacket;
size_t prime = MIN(inputSamples, PRIME_LEN_);
size_t _N_samples_to_add_ = N_samples_to_add_;
size_t newSize = bytesReadFromInput;
newSize += _N_samples_to_add_ * floatFormat.mBytesPerPacket;
if(newSize > inputBufferSize) {
inputBuffer = realloc(inputBuffer, inputBufferSize = newSize * 3);
}
lpc_extrapolate_fwd(inputBuffer, inputSamples, prime, floatFormat.mChannelsPerFrame, LPC_ORDER, _N_samples_to_add_, &extrapolateBuffer, &extrapolateBufferSize);
bytesReadFromInput += _N_samples_to_add_ * floatFormat.mBytesPerPacket;
latencyEatenPost = N_samples_to_drop_;
is_postextrapolated_ = 2;
} else if(is_postextrapolated_ == 3) {
latencyEatenPost = 0;
}
// Input now contains bytesReadFromInput worth of floats, in the input sample rate
inpSize = bytesReadFromInput;
inpOffset = 0;
}
ioNumberPackets = (UInt32)(inpSize - inpOffset);
ioNumberPackets -= ioNumberPackets % floatFormat.mBytesPerPacket;
if(ioNumberPackets) {
size_t inputSamples = ioNumberPackets / floatFormat.mBytesPerPacket;
ioNumberPackets = (UInt32)inputSamples;
ioNumberPackets = (UInt32)ceil((float)ioNumberPackets * sampleRatio);
ioNumberPackets += soxr_delay(soxr);
ioNumberPackets = (ioNumberPackets + 255) & ~255;
size_t newSize = ioNumberPackets * floatFormat.mBytesPerPacket;
if(!floatBuffer || floatBufferSize < newSize) {
floatBuffer = realloc(floatBuffer, floatBufferSize = newSize * 3);
}
if(stopping) {
convertEntered = NO;
return nil;
}
size_t inputDone = 0;
size_t outputDone = 0;
if(!skipResampler) {
soxr_process(soxr, (float *)(((uint8_t *)inputBuffer) + inpOffset), inputSamples, &inputDone, floatBuffer, ioNumberPackets, &outputDone);
if(latencyEatenPost) {
// Post file or format change flush
size_t idone = 0, odone = 0;
do {
soxr_process(soxr, NULL, 0, &idone, floatBuffer + outputDone * floatFormat.mBytesPerPacket, ioNumberPackets - outputDone, &odone);
outputDone += odone;
} while(odone > 0);
}
} else {
memcpy(floatBuffer, (((uint8_t *)inputBuffer) + inpOffset), inputSamples * floatFormat.mBytesPerPacket);
inputDone = inputSamples;
outputDone = inputSamples;
}
inpOffset += inputDone * floatFormat.mBytesPerPacket;
if(latencyEaten) {
if(outputDone > latencyEaten) {
outputDone -= latencyEaten;
memmove(floatBuffer, floatBuffer + latencyEaten * floatFormat.mBytesPerPacket, outputDone * floatFormat.mBytesPerPacket);
latencyEaten = 0;
} else {
latencyEaten -= outputDone;
outputDone = 0;
}
}
if(latencyEatenPost) {
if(outputDone > latencyEatenPost) {
outputDone -= latencyEatenPost;
} else {
outputDone = 0;
}
latencyEatenPost = 0;
}
ioNumberPackets = (UInt32)outputDone * floatFormat.mBytesPerPacket;
}
if(ioNumberPackets) {
AudioChunk *chunk = [[AudioChunk alloc] init];
[chunk setFormat:nodeFormat];
if(nodeChannelConfig) {
[chunk setChannelConfig:nodeChannelConfig];
}
[self addObservers];
scale_by_volume(floatBuffer, ioNumberPackets / sizeof(float), volumeScale);
[chunk setStreamTimestamp:streamTimestamp];
[chunk setStreamTimeRatio:streamTimeRatio];
[chunk assignSamples:floatBuffer frameCount:ioNumberPackets / floatFormat.mBytesPerPacket];
streamTimestamp += [chunk durationRatioed];
convertEntered = NO;
return chunk;
}
convertEntered = NO;
return nil;
}
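For readers unfamiliar with libsoxr, the create/process/flush/delete lifecycle that -convert drives above reduces to roughly the following. The rates, channel count, and buffer sizes here are placeholders, not values taken from this file.

#include <soxr.h>

static void resample_example(const float *in, size_t inFrames,
                             float *out, size_t outCapacityFrames) {
	soxr_error_t error;
	soxr_quality_spec_t q = soxr_quality_spec(SOXR_HQ, 0);
	soxr_io_spec_t io = soxr_io_spec(SOXR_FLOAT32_I, SOXR_FLOAT32_I);
	soxr_runtime_spec_t rt = soxr_runtime_spec(0);
	// 44100 -> 48000, stereo interleaved float; the real code uses the stream's own rates.
	soxr_t soxr = soxr_create(44100.0, 48000.0, 2, &error, &io, &q, &rt);
	if(error) return;
	size_t idone = 0, odone = 0;
	soxr_process(soxr, in, inFrames, &idone, out, outCapacityFrames, &odone);
	// End of input: flush the resampler's tail by feeding NULL, as the post-stream flush above does.
	size_t batch;
	do {
		batch = 0;
		soxr_process(soxr, NULL, 0, &idone, out + odone * 2, outCapacityFrames - odone, &batch);
		odone += batch;
	} while(batch > 0);
	soxr_delete(soxr);
}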
- (void)observeValueForKeyPath:(NSString *)keyPath
ofObject:(id)object
change:(NSDictionary *)change
context:(void *)context {
if(context == kConverterNodeContext) {
DLog(@"SOMETHING CHANGED!");
if([keyPath isEqualToString:@"values.volumeScaling"]) {
// User reset the volume scaling option
[self refreshVolumeScaling];
}
} else {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
}
static float db_to_scale(float db) {
return pow(10.0, db / 20);
}
- (void)refreshVolumeScaling {
if(rgInfo == nil) {
volumeScale = 1.0;
return;
}
NSString *scaling = [[NSUserDefaults standardUserDefaults] stringForKey:@"volumeScaling"];
BOOL useAlbum = [scaling hasPrefix:@"albumGain"];
BOOL useTrack = useAlbum || [scaling hasPrefix:@"trackGain"];
BOOL useVolume = useAlbum || useTrack || [scaling isEqualToString:@"volumeScale"];
BOOL usePeak = [scaling hasSuffix:@"WithPeak"];
float scale = 1.0;
float peak = 0.0;
if(useVolume) {
id pVolumeScale = [rgInfo objectForKey:@"volume"];
if(pVolumeScale != nil)
scale = [pVolumeScale floatValue];
}
if(useTrack) {
id trackGain = [rgInfo objectForKey:@"replayGainTrackGain"];
id trackPeak = [rgInfo objectForKey:@"replayGainTrackPeak"];
if(trackGain != nil)
scale = db_to_scale([trackGain floatValue]);
if(trackPeak != nil)
peak = [trackPeak floatValue];
}
if(useAlbum) {
id albumGain = [rgInfo objectForKey:@"replayGainAlbumGain"];
id albumPeak = [rgInfo objectForKey:@"replayGainAlbumPeak"];
if(albumGain != nil)
scale = db_to_scale([albumGain floatValue]);
if(albumPeak != nil)
peak = [albumPeak floatValue];
}
if(usePeak) {
if(scale * peak > 1.0)
scale = 1.0 / peak;
}
volumeScale = scale;
}
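To make the dB-to-linear step concrete: -6.5 dB maps to 10^(-6.5/20) ≈ 0.47, while a positive gain may trip the peak clamp. The values below are illustrative, not from any real tag.

#include <math.h>
#include <stdio.h>

// Same math as db_to_scale / refreshVolumeScaling above, with made-up tag values.
int main(void) {
	float gain_dB = 4.0f;                       // hypothetical ReplayGain tag
	float peak = 0.98f;                         // hypothetical stored peak
	float scale = powf(10.0f, gain_dB / 20.0f); // ≈ 1.585
	if(scale * peak > 1.0f)                     // would clip, so cap at the highest safe gain
		scale = 1.0f / peak;                    // ≈ 1.020
	printf("linear scale = %f\n", scale);
	return 0;
}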
- (BOOL)setupWithInputFormat:(AudioStreamBasicDescription)inf withInputConfig:(uint32_t)inputConfig outputFormat:(AudioStreamBasicDescription)outf isLossless:(BOOL)lossless {
// Make the converter
inputFormat = inf;
outputFormat = outf;
inputChannelConfig = inputConfig;
rememberedLossless = lossless;
// These are the only sample formats we support translating
BOOL isFloat = !!(inputFormat.mFormatFlags & kAudioFormatFlagIsFloat);
if((!isFloat && !(inputFormat.mBitsPerChannel >= 1 && inputFormat.mBitsPerChannel <= 32)) || (isFloat && !(inputFormat.mBitsPerChannel == 32 || inputFormat.mBitsPerChannel == 64)))
return NO;
floatFormat = inputFormat;
floatFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
floatFormat.mBitsPerChannel = 32;
floatFormat.mBytesPerFrame = (32 / 8) * floatFormat.mChannelsPerFrame;
floatFormat.mBytesPerPacket = floatFormat.mBytesPerFrame * floatFormat.mFramesPerPacket;
#if DSD_DECIMATE
if(inputFormat.mBitsPerChannel == 1) {
// Decimate this for speed
floatFormat.mSampleRate *= 1.0 / 8.0;
}
#endif
inpOffset = 0;
inpSize = 0;
// This is a post resampler format
nodeFormat = floatFormat;
nodeFormat.mSampleRate = outputFormat.mSampleRate;
nodeChannelConfig = inputChannelConfig;
sampleRatio = (double)outputFormat.mSampleRate / (double)floatFormat.mSampleRate;
skipResampler = fabs(sampleRatio - 1.0) < 1e-7;
if(!skipResampler) {
soxr_quality_spec_t q_spec = soxr_quality_spec(SOXR_HQ, 0);
soxr_io_spec_t io_spec = soxr_io_spec(SOXR_FLOAT32_I, SOXR_FLOAT32_I);
soxr_runtime_spec_t runtime_spec = soxr_runtime_spec(0);
soxr_error_t error;
soxr = soxr_create(floatFormat.mSampleRate, outputFormat.mSampleRate, floatFormat.mChannelsPerFrame, &error, &io_spec, &q_spec, &runtime_spec);
if(error)
return NO;
PRIME_LEN_ = MAX(floatFormat.mSampleRate / 20, 1024u);
PRIME_LEN_ = MIN(PRIME_LEN_, 16384u);
PRIME_LEN_ = MAX(PRIME_LEN_, (unsigned int)(2 * LPC_ORDER + 1));
N_samples_to_add_ = floatFormat.mSampleRate;
N_samples_to_drop_ = outputFormat.mSampleRate;
samples_len(&N_samples_to_add_, &N_samples_to_drop_, 20, 8192u);
is_preextrapolated_ = NO;
is_postextrapolated_ = 0;
}
latencyEaten = 0;
latencyEatenPost = 0;
PrintStreamDesc(&inf);
PrintStreamDesc(&nodeFormat);
[self refreshVolumeScaling];
// Move this here so process call isn't running the resampler until it's allocated
stopping = NO;
convertEntered = NO;
streamFormatChanged = NO;
paused = NO;
return YES;
}
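The float-format derivation in -setupWithInputFormat: above is the usual CoreAudio bookkeeping. As a standalone sketch (the helper name is made up; the field arithmetic mirrors the code above, with the common one-frame-per-packet PCM layout):

#include <CoreAudio/CoreAudioTypes.h>

// Build an interleaved, packed, native-endian 32-bit float ASBD for a given
// sample rate and channel count; bytes per frame/packet follow from the bit depth.
static AudioStreamBasicDescription makeFloatASBD(double sampleRate, UInt32 channels) {
	AudioStreamBasicDescription asbd = { 0 };
	asbd.mSampleRate = sampleRate;
	asbd.mFormatID = kAudioFormatLinearPCM;
	asbd.mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
	asbd.mBitsPerChannel = 32;
	asbd.mChannelsPerFrame = channels;
	asbd.mFramesPerPacket = 1;
	asbd.mBytesPerFrame = (asbd.mBitsPerChannel / 8) * asbd.mChannelsPerFrame;
	asbd.mBytesPerPacket = asbd.mBytesPerFrame * asbd.mFramesPerPacket;
	return asbd;
}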
- (void)dealloc {
DLog(@"Converter dealloc");
[self removeObservers];
paused = NO;
[self cleanUp];
[super cleanUp];
}
- (void)setOutputFormat:(AudioStreamBasicDescription)format {
DLog(@"SETTING OUTPUT FORMAT!");
outputFormat = format;
}
- (void)inputFormatDidChange:(AudioStreamBasicDescription)format inputConfig:(uint32_t)inputConfig {
DLog(@"FORMAT CHANGED");
paused = YES;
while(convertEntered) {
usleep(500);
}
[self cleanUp];
[self setupWithInputFormat:format withInputConfig:inputConfig outputFormat:self->outputFormat isLossless:rememberedLossless];
}
- (void)setRGInfo:(NSDictionary *)rgi {
DLog(@"Setting ReplayGain info");
rgInfo = rgi;
[self refreshVolumeScaling];
}
- (void)cleanUp {
stopping = YES;
while(convertEntered) {
usleep(500);
}
if(soxr) {
soxr_delete(soxr);
soxr = NULL;
}
if(extrapolateBuffer) {
free(extrapolateBuffer);
extrapolateBuffer = NULL;
extrapolateBufferSize = 0;
}
if(floatBuffer) {
free(floatBuffer);
floatBuffer = NULL;
floatBufferSize = 0;
}
if(inputBuffer) {
free(inputBuffer);
inputBuffer = NULL;
inputBufferSize = 0;
}
inpOffset = 0;
inpSize = 0;
}
- (double)secondsBuffered {
return [buffer listDuration];
}
@end

Audio/Chain/ConverterNode.mm (1073 lines, normal file)

File diff suppressed because it is too large

View file

@ -1,34 +0,0 @@
//
// DSPDownmixNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/13/25.
//
#ifndef DSPDownmixNode_h
#define DSPDownmixNode_h
#import <AudioToolbox/AudioToolbox.h>
#import <CogAudio/DSPNode.h>
@interface DSPDownmixNode : DSPNode {
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
- (BOOL)setup;
- (void)cleanUp;
- (void)resetBuffer;
- (BOOL)paused;
- (void)process;
- (AudioChunk * _Nullable)convert;
- (void)setOutputFormat:(AudioStreamBasicDescription)format withChannelConfig:(uint32_t)config;
@end
#endif /* DSPDownmixNode_h */

View file

@ -1,201 +0,0 @@
//
// DSPDownmixNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/13/25.
//
#import <Foundation/Foundation.h>
#import "Downmix.h"
#import "Logging.h"
#import "DSPDownmixNode.h"
@implementation DSPDownmixNode {
DownmixProcessor *downmix;
BOOL stopping, paused;
BOOL processEntered;
BOOL formatSet;
AudioStreamBasicDescription lastInputFormat;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription outputFormat;
uint32_t lastInputChannelConfig, inputChannelConfig;
uint32_t outputChannelConfig;
float outBuffer[4096 * 32];
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super initWithController:c previous:p latency:latency];
return self;
}
- (void)dealloc {
DLog(@"Downmix dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[super cleanUp];
}
- (BOOL)fullInit {
if(formatSet) {
downmix = [[DownmixProcessor alloc] initWithInputFormat:inputFormat inputConfig:inputChannelConfig andOutputFormat:outputFormat outputConfig:outputChannelConfig];
if(!downmix) {
return NO;
}
}
return YES;
}
- (void)fullShutdown {
downmix = nil;
}
- (BOOL)setup {
if(stopping)
return NO;
[self fullShutdown];
return [self fullInit];
}
- (void)cleanUp {
stopping = YES;
while(processEntered) {
usleep(500);
}
[self fullShutdown];
formatSet = NO;
}
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
paused = NO;
}
- (void)setOutputFormat:(AudioStreamBasicDescription)format withChannelConfig:(uint32_t)config {
if(memcmp(&outputFormat, &format, sizeof(outputFormat)) != 0 ||
outputChannelConfig != config) {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
[self fullShutdown];
paused = NO;
}
outputFormat = format;
outputChannelConfig = config;
formatSet = YES;
}
- (BOOL)paused {
return paused;
}
- (void)process {
while([self shouldContinue] == YES) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
}
usleep(500);
} else {
[self writeChunk:chunk];
chunk = nil;
}
}
}
}
- (AudioChunk *)convert {
if(stopping)
return nil;
processEntered = YES;
if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
processEntered = NO;
return nil;
}
if(![self peekFormat:&inputFormat channelConfig:&inputChannelConfig]) {
processEntered = NO;
return nil;
}
if(!inputFormat.mSampleRate ||
!inputFormat.mBitsPerChannel ||
!inputFormat.mChannelsPerFrame ||
!inputFormat.mBytesPerFrame ||
!inputFormat.mFramesPerPacket ||
!inputFormat.mBytesPerPacket) {
processEntered = NO;
return nil;
}
if((formatSet && !downmix) ||
memcmp(&inputFormat, &lastInputFormat, sizeof(inputFormat)) != 0 ||
inputChannelConfig != lastInputChannelConfig) {
lastInputFormat = inputFormat;
lastInputChannelConfig = inputChannelConfig;
[self fullShutdown];
if(formatSet && ![self setup]) {
processEntered = NO;
return nil;
}
}
if(!downmix) {
processEntered = NO;
return [self readChunk:4096];
}
AudioChunk *chunk = [self readChunkAsFloat32:4096];
if(!chunk || ![chunk frameCount]) {
processEntered = NO;
return nil;
}
double streamTimestamp = [chunk streamTimestamp];
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
[downmix process:[sampleData bytes] frameCount:frameCount output:&outBuffer[0]];
AudioChunk *outputChunk = [[AudioChunk alloc] init];
[outputChunk setFormat:outputFormat];
if(outputChannelConfig) {
[outputChunk setChannelConfig:outputChannelConfig];
}
if([chunk isHDCD]) [outputChunk setHDCD];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:[chunk streamTimeRatio]];
[outputChunk assignSamples:&outBuffer[0] frameCount:frameCount];
processEntered = NO;
return outputChunk;
}
@end

View file

@ -1,31 +0,0 @@
//
// DSPEqualizerNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/11/25.
//
#ifndef DSPEqualizerNode_h
#define DSPEqualizerNode_h
#import <CogAudio/DSPNode.h>
@interface DSPEqualizerNode : DSPNode {
float *samplePtr;
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
- (BOOL)setup;
- (void)cleanUp;
- (void)resetBuffer;
- (BOOL)paused;
- (void)process;
- (AudioChunk * _Nullable)convert;
@end
#endif /* DSPEqualizerNode_h */

View file

@ -1,400 +0,0 @@
//
// DSPEqualizerNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/11/25.
//
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <Accelerate/Accelerate.h>
#import "DSPEqualizerNode.h"
#import "OutputNode.h"
#import "Logging.h"
#import "AudioPlayer.h"
extern void scale_by_volume(float *buffer, size_t count, float volume);
static void * kDSPEqualizerNodeContext = &kDSPEqualizerNodeContext;
@implementation DSPEqualizerNode {
BOOL enableEqualizer;
BOOL equalizerInitialized;
double equalizerPreamp;
__weak AudioPlayer *audioPlayer;
AudioUnit _eq;
AudioTimeStamp timeStamp;
BOOL stopping, paused;
BOOL processEntered;
BOOL observersapplied;
AudioStreamBasicDescription lastInputFormat;
AudioStreamBasicDescription inputFormat;
uint32_t lastInputChannelConfig, inputChannelConfig;
uint32_t outputChannelConfig;
float inBuffer[4096 * 32];
float eqBuffer[4096 * 32];
float outBuffer[4096 * 32];
}
static void fillBuffers(AudioBufferList *ioData, const float *inbuffer, size_t count, size_t offset) {
const size_t channels = ioData->mNumberBuffers;
for(int i = 0; i < channels; ++i) {
const size_t maxCount = (ioData->mBuffers[i].mDataByteSize / sizeof(float)) - offset;
float *output = ((float *)ioData->mBuffers[i].mData) + offset;
const float *input = inbuffer + i;
cblas_scopy((int)((count > maxCount) ? maxCount : count), input, (int)channels, output, 1);
ioData->mBuffers[i].mNumberChannels = 1;
}
}
static void clearBuffers(AudioBufferList *ioData, size_t count, size_t offset) {
for(int i = 0; i < ioData->mNumberBuffers; ++i) {
memset((uint8_t *)ioData->mBuffers[i].mData + offset * sizeof(float), 0, count * sizeof(float));
ioData->mBuffers[i].mNumberChannels = 1;
}
}
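The strided cblas_scopy trick that fillBuffers uses above (and that -convert uses below to re-interleave) is simply the BLAS copy with one stride set to the channel count. An isolated sketch with illustrative names:

#include <Accelerate/Accelerate.h>

// Copy every `channels`-th sample starting at `channel` into a contiguous mono buffer.
static void deinterleave_channel(const float *interleaved, float *mono,
                                 int frames, int channels, int channel) {
	cblas_scopy(frames, interleaved + channel, channels, mono, 1);
}

// Scatter a contiguous mono buffer back into one channel of an interleaved buffer.
static void interleave_channel(const float *mono, float *interleaved,
                               int frames, int channels, int channel) {
	cblas_scopy(frames, mono, 1, interleaved + channel, channels);
}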
static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData) {
if(inNumberFrames > 4096 || !inRefCon) {
clearBuffers(ioData, inNumberFrames, 0);
return 0;
}
DSPEqualizerNode *_self = (__bridge DSPEqualizerNode *)inRefCon;
fillBuffers(ioData, _self->samplePtr, inNumberFrames, 0);
return 0;
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super initWithController:c previous:p latency:latency];
if(self) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableEqualizer = [defaults boolForKey:@"GraphicEQenable"];
float preamp = [defaults floatForKey:@"eqPreamp"];
equalizerPreamp = pow(10.0, preamp / 20.0);
OutputNode *outputNode = c;
audioPlayer = [outputNode controller];
[self addObservers];
}
return self;
}
- (void)dealloc {
DLog(@"Equalizer dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
[super cleanUp];
}
- (void)addObservers {
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.GraphicEQenable" options:0 context:kDSPEqualizerNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.eqPreamp" options:0 context:kDSPEqualizerNodeContext];
observersapplied = YES;
}
- (void)removeObservers {
if(observersapplied) {
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.GraphicEQenable" context:kDSPEqualizerNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.eqPreamp" context:kDSPEqualizerNodeContext];
observersapplied = NO;
}
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if(context != kDSPEqualizerNodeContext) {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
return;
}
if([keyPath isEqualToString:@"values.GraphicEQenable"]) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableEqualizer = [defaults boolForKey:@"GraphicEQenable"];
} else if([keyPath isEqualToString:@"values.eqPreamp"]) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
float preamp = [defaults floatForKey:@"eqPreamp"];
equalizerPreamp = pow(10.0, preamp / 20.0);
}
}
- (AudioPlayer *)audioPlayer {
return audioPlayer;
}
- (BOOL)fullInit {
if(enableEqualizer) {
AudioComponentDescription desc;
desc.componentType = kAudioUnitType_Effect;
desc.componentSubType = kAudioUnitSubType_GraphicEQ;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
AudioComponent comp = NULL;
comp = AudioComponentFindNext(comp, &desc);
if(!comp) {
return NO;
}
OSStatus status = AudioComponentInstanceNew(comp, &_eq);
if(status != noErr) {
return NO;
}
UInt32 value;
UInt32 size = sizeof(value);
value = 4096;
AudioUnitSetProperty(_eq, kAudioUnitProperty_MaximumFramesPerSlice,
kAudioUnitScope_Global, 0, &value, size);
value = 127;
AudioUnitSetProperty(_eq, kAudioUnitProperty_RenderQuality,
kAudioUnitScope_Global, 0, &value, size);
AURenderCallbackStruct callbackStruct;
callbackStruct.inputProcRefCon = (__bridge void *)self;
callbackStruct.inputProc = eqRenderCallback;
AudioUnitSetProperty(_eq, kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Input, 0, &callbackStruct, sizeof(callbackStruct));
AudioUnitReset(_eq, kAudioUnitScope_Input, 0);
AudioUnitReset(_eq, kAudioUnitScope_Output, 0);
AudioUnitReset(_eq, kAudioUnitScope_Global, 0);
AudioStreamBasicDescription asbd = inputFormat;
// Of course, non-interleaved has only one sample per frame/packet, per buffer
asbd.mFormatFlags |= kAudioFormatFlagIsNonInterleaved;
asbd.mBytesPerFrame = sizeof(float);
asbd.mBytesPerPacket = sizeof(float);
asbd.mFramesPerPacket = 1;
UInt32 maximumFrames = 4096;
AudioUnitSetProperty(_eq, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFrames, sizeof(maximumFrames));
AudioUnitSetProperty(_eq, kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input, 0, &asbd, sizeof(asbd));
AudioUnitSetProperty(_eq, kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output, 0, &asbd, sizeof(asbd));
AudioUnitReset(_eq, kAudioUnitScope_Input, 0);
AudioUnitReset(_eq, kAudioUnitScope_Output, 0);
AudioUnitReset(_eq, kAudioUnitScope_Global, 0);
status = AudioUnitInitialize(_eq);
if(status != noErr) {
return NO;
}
bzero(&timeStamp, sizeof(timeStamp));
timeStamp.mFlags = kAudioTimeStampSampleTimeValid;
equalizerInitialized = YES;
[[self audioPlayer] beginEqualizer:_eq];
}
return YES;
}
- (void)fullShutdown {
if(_eq) {
if(equalizerInitialized) {
[[self audioPlayer] endEqualizer:_eq];
AudioUnitUninitialize(_eq);
equalizerInitialized = NO;
}
AudioComponentInstanceDispose(_eq);
_eq = NULL;
}
}
- (BOOL)setup {
if(stopping)
return NO;
[self fullShutdown];
return [self fullInit];
}
- (void)cleanUp {
stopping = YES;
while(processEntered) {
usleep(500);
}
[self fullShutdown];
}
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
[self fullShutdown];
paused = NO;
}
- (BOOL)paused {
return paused;
}
- (void)process {
while([self shouldContinue] == YES) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
}
usleep(500);
} else {
[self writeChunk:chunk];
chunk = nil;
}
if(!enableEqualizer && equalizerInitialized) {
[self fullShutdown];
}
}
}
}
- (AudioChunk *)convert {
if(stopping)
return nil;
processEntered = YES;
if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
processEntered = NO;
return nil;
}
if(![self peekFormat:&inputFormat channelConfig:&inputChannelConfig]) {
processEntered = NO;
return nil;
}
if(!inputFormat.mSampleRate ||
!inputFormat.mBitsPerChannel ||
!inputFormat.mChannelsPerFrame ||
!inputFormat.mBytesPerFrame ||
!inputFormat.mFramesPerPacket ||
!inputFormat.mBytesPerPacket) {
processEntered = NO;
return nil;
}
if((enableEqualizer && !equalizerInitialized) ||
memcmp(&inputFormat, &lastInputFormat, sizeof(inputFormat)) != 0 ||
inputChannelConfig != lastInputChannelConfig) {
lastInputFormat = inputFormat;
lastInputChannelConfig = inputChannelConfig;
[self fullShutdown];
if(enableEqualizer && ![self setup]) {
processEntered = NO;
return nil;
}
}
if(!equalizerInitialized) {
processEntered = NO;
return [self readChunk:4096];
}
AudioChunk *chunk = [self readChunkAsFloat32:4096];
if(!chunk || ![chunk frameCount]) {
processEntered = NO;
return nil;
}
double streamTimestamp = [chunk streamTimestamp];
samplePtr = &inBuffer[0];
size_t channels = inputFormat.mChannelsPerFrame;
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
cblas_scopy((int)(frameCount * channels), [sampleData bytes], 1, &inBuffer[0], 1);
const size_t channelsminusone = channels - 1;
uint8_t tempBuffer[sizeof(AudioBufferList) + sizeof(AudioBuffer) * channelsminusone];
AudioBufferList *ioData = (AudioBufferList *)&tempBuffer[0];
ioData->mNumberBuffers = (UInt32)channels;
for(size_t i = 0; i < channels; ++i) {
ioData->mBuffers[i].mData = &eqBuffer[4096 * i];
ioData->mBuffers[i].mDataByteSize = (UInt32)(frameCount * sizeof(float));
ioData->mBuffers[i].mNumberChannels = 1;
}
OSStatus status = AudioUnitRender(_eq, NULL, &timeStamp, 0, (UInt32)frameCount, ioData);
if(status != noErr) {
processEntered = NO;
return nil;
}
timeStamp.mSampleTime += ((double)frameCount) / inputFormat.mSampleRate;
for(int i = 0; i < channels; ++i) {
cblas_scopy((int)frameCount, &eqBuffer[4096 * i], 1, &outBuffer[i], (int)channels);
}
AudioChunk *outputChunk = nil;
if(frameCount) {
scale_by_volume(&outBuffer[0], frameCount * channels, equalizerPreamp);
outputChunk = [[AudioChunk alloc] init];
[outputChunk setFormat:inputFormat];
if(outputChannelConfig) {
[outputChunk setChannelConfig:inputChannelConfig];
}
if([chunk isHDCD]) [outputChunk setHDCD];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:[chunk streamTimeRatio]];
[outputChunk assignSamples:&outBuffer[0] frameCount:frameCount];
}
processEntered = NO;
return outputChunk;
}
@end

View file

@ -1,30 +0,0 @@
//
// DSPFSurroundNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/11/25.
//
#ifndef DSPFSurroundNode_h
#define DSPFSurroundNode_h
#import <CogAudio/DSPNode.h>
@interface DSPFSurroundNode : DSPNode {
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
- (BOOL)setup;
- (void)cleanUp;
- (void)resetBuffer;
- (BOOL)paused;
- (void)process;
- (AudioChunk * _Nullable)convert;
@end
#endif /* DSPFSurroundNode_h */

View file

@ -1,275 +0,0 @@
//
// DSPFSurroundNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/11/25.
//
#import <Foundation/Foundation.h>
#import <Accelerate/Accelerate.h>
#import "DSPFSurroundNode.h"
#import "Logging.h"
#import "FSurroundFilter.h"
#define OCTAVES 5
static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
@implementation DSPFSurroundNode {
BOOL enableFSurround;
BOOL FSurroundDelayRemoved;
FSurroundFilter *fsurround;
BOOL stopping, paused;
BOOL processEntered;
BOOL observersapplied;
AudioStreamBasicDescription lastInputFormat;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription outputFormat;
uint32_t lastInputChannelConfig, inputChannelConfig;
uint32_t outputChannelConfig;
float inBuffer[4096 * 2];
float outBuffer[8192 * 6];
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super initWithController:c previous:p latency:latency];
if(self) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableFSurround = [defaults boolForKey:@"enableFSurround"];
[self addObservers];
}
return self;
}
- (void)dealloc {
DLog(@"FreeSurround dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
[super cleanUp];
}
- (void)addObservers {
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableFSurround" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPFSurroundNodeContext];
observersapplied = YES;
}
- (void)removeObservers {
if(observersapplied) {
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableFSurround" context:kDSPFSurroundNodeContext];
observersapplied = NO;
}
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if(context != kDSPFSurroundNodeContext) {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
return;
}
if([keyPath isEqualToString:@"values.enableFSurround"]) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableFSurround = [defaults boolForKey:@"enableFSurround"];
}
}
- (BOOL)fullInit {
if(enableFSurround && inputFormat.mChannelsPerFrame == 2) {
fsurround = [[FSurroundFilter alloc] initWithSampleRate:inputFormat.mSampleRate];
if(!fsurround) {
return NO;
}
outputFormat = inputFormat;
outputFormat.mChannelsPerFrame = [fsurround channelCount];
outputFormat.mBytesPerFrame = sizeof(float) * outputFormat.mChannelsPerFrame;
outputFormat.mBytesPerPacket = outputFormat.mBytesPerFrame * outputFormat.mFramesPerPacket;
outputChannelConfig = [fsurround channelConfig];
FSurroundDelayRemoved = NO;
} else {
fsurround = nil;
}
return YES;
}
- (void)fullShutdown {
fsurround = nil;
}
- (BOOL)setup {
if(stopping)
return NO;
[self fullShutdown];
return [self fullInit];
}
- (void)cleanUp {
stopping = YES;
while(processEntered) {
usleep(500);
}
[self fullShutdown];
}
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
[self fullShutdown];
paused = NO;
}
- (BOOL)paused {
return paused;
}
- (void)process {
while([self shouldContinue] == YES) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
}
usleep(500);
} else {
[self writeChunk:chunk];
chunk = nil;
}
if(!enableFSurround && fsurround) {
[self fullShutdown];
}
}
}
}
- (AudioChunk *)convert {
if(stopping)
return nil;
processEntered = YES;
if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
processEntered = NO;
return nil;
}
if(![self peekFormat:&inputFormat channelConfig:&inputChannelConfig]) {
processEntered = NO;
return nil;
}
if(!inputFormat.mSampleRate ||
!inputFormat.mBitsPerChannel ||
!inputFormat.mChannelsPerFrame ||
!inputFormat.mBytesPerFrame ||
!inputFormat.mFramesPerPacket ||
!inputFormat.mBytesPerPacket) {
processEntered = NO;
return nil;
}
if((enableFSurround && !fsurround) ||
memcmp(&inputFormat, &lastInputFormat, sizeof(inputFormat)) != 0 ||
inputChannelConfig != lastInputChannelConfig) {
lastInputFormat = inputFormat;
lastInputChannelConfig = inputChannelConfig;
[self fullShutdown];
if(enableFSurround && ![self setup]) {
processEntered = NO;
return nil;
}
}
if(!fsurround) {
processEntered = NO;
return [self readChunk:4096];
}
size_t totalRequestedSamples = 4096;
size_t totalFrameCount = 0;
AudioChunk *chunk = [self readAndMergeChunksAsFloat32:totalRequestedSamples];
if(!chunk || ![chunk frameCount]) {
processEntered = NO;
return nil;
}
double streamTimestamp = [chunk streamTimestamp];
float *samplePtr = &inBuffer[0];
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
cblas_scopy((int)frameCount * 2, [sampleData bytes], 1, &samplePtr[0], 1);
totalFrameCount = frameCount;
size_t countToProcess = totalFrameCount;
size_t samplesRendered;
if(countToProcess < 4096) {
bzero(&inBuffer[countToProcess * 2], (4096 - countToProcess) * 2 * sizeof(float));
countToProcess = 4096;
}
[fsurround process:&inBuffer[0] output:&outBuffer[0] count:(int)countToProcess];
samplePtr = &outBuffer[0];
samplesRendered = totalFrameCount;
if(totalFrameCount < 4096) {
bzero(&outBuffer[4096 * 6], 4096 * 2 * sizeof(float));
[fsurround process:&outBuffer[4096 * 6] output:&outBuffer[4096 * 6] count:4096];
samplesRendered += 2048;
}
if(!FSurroundDelayRemoved) {
FSurroundDelayRemoved = YES;
if(samplesRendered > 2048) {
samplePtr += 2048 * 6;
samplesRendered -= 2048;
}
}
AudioChunk *outputChunk = nil;
if(samplesRendered) {
outputChunk = [[AudioChunk alloc] init];
[outputChunk setFormat:outputFormat];
if(outputChannelConfig) {
[outputChunk setChannelConfig:outputChannelConfig];
}
if([chunk isHDCD]) [outputChunk setHDCD];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:[chunk streamTimeRatio]];
[outputChunk assignSamples:samplePtr frameCount:samplesRendered];
}
processEntered = NO;
return outputChunk;
}
@end

View file

@ -1,35 +0,0 @@
//
// DSPHRTFNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/11/25.
//
#ifndef DSPHRTFNode_h
#define DSPHRTFNode_h
#import <simd/types.h>
#import <CogAudio/DSPNode.h>
@interface DSPHRTFNode : DSPNode {
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
- (BOOL)setup;
- (void)cleanUp;
- (void)resetBuffer;
- (BOOL)paused;
- (void)process;
- (AudioChunk * _Nullable)convert;
- (void)reportMotion:(simd_float4x4)matrix;
- (void)resetReferencePosition:(NSNotification *_Nullable)notification;
@end
#endif /* DSPHRTFNode_h */

View file

@ -1,435 +0,0 @@
//
// DSPHRTFNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/11/25.
//
#import <Foundation/Foundation.h>
#import <CoreMotion/CoreMotion.h>
#import "Logging.h"
#import "DSPHRTFNode.h"
#import "lpc.h"
#import "HeadphoneFilter.h"
#include <AvailabilityMacros.h>
#if defined(MAC_OS_VERSION_14_0) && MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_VERSION_14_0
#define MOTION_MANAGER 1
#endif
static void * kDSPHRTFNodeContext = &kDSPHRTFNodeContext;
static NSString *CogPlaybackDidResetHeadTracking = @"CogPlaybackDigResetHeadTracking";
#ifdef MOTION_MANAGER
static simd_float4x4 convertMatrix(CMRotationMatrix r) {
simd_float4x4 matrix = {
simd_make_float4(r.m33, -r.m31, r.m32, 0.0f),
simd_make_float4(r.m13, -r.m11, r.m12, 0.0f),
simd_make_float4(r.m23, -r.m21, r.m22, 0.0f),
simd_make_float4(0.0f, 0.0f, 0.0f, 1.0f)
};
return matrix;
}
static NSLock *motionManagerLock = nil;
API_AVAILABLE(macos(14.0)) static CMHeadphoneMotionManager *motionManager = nil;
static DSPHRTFNode __weak *registeredMotionListener = nil;
#endif
static void registerMotionListener(DSPHRTFNode *listener) {
#ifdef MOTION_MANAGER
if(@available(macOS 14, *)) {
[motionManagerLock lock];
if([motionManager isDeviceMotionActive]) {
[motionManager stopDeviceMotionUpdates];
}
if([motionManager isDeviceMotionAvailable]) {
registeredMotionListener = listener;
[motionManager startDeviceMotionUpdatesToQueue:[NSOperationQueue mainQueue] withHandler:^(CMDeviceMotion * _Nullable motion, NSError * _Nullable error) {
if(motion) {
[motionManagerLock lock];
if(registeredMotionListener)
[registeredMotionListener reportMotion:convertMatrix(motion.attitude.rotationMatrix)];
[motionManagerLock unlock];
}
}];
}
[motionManagerLock unlock];
}
#endif
}
static void unregisterMotionListener(void) {
#ifdef MOTION_MANAGER
if(@available(macOS 14, *)) {
[motionManagerLock lock];
if([motionManager isDeviceMotionActive]) {
[motionManager stopDeviceMotionUpdates];
}
registeredMotionListener = nil;
[motionManagerLock unlock];
}
#endif
}
@implementation DSPHRTFNode {
BOOL enableHrtf;
BOOL enableHeadTracking;
BOOL lastEnableHeadTracking;
HeadphoneFilter *hrtf;
BOOL stopping, paused;
BOOL processEntered;
BOOL resetFilter;
size_t needPrefill;
BOOL observersapplied;
AudioStreamBasicDescription lastInputFormat;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription outputFormat;
uint32_t lastInputChannelConfig, inputChannelConfig;
uint32_t outputChannelConfig;
BOOL referenceMatrixSet;
BOOL rotationMatrixUpdated;
simd_float4x4 rotationMatrix;
simd_float4x4 referenceMatrix;
float prefillBuffer[4096 * 32];
float outBuffer[4096 * 2];
void *extrapolate_buffer;
size_t extrapolate_buffer_size;
}
+ (void)initialize {
#ifdef MOTION_MANAGER
motionManagerLock = [[NSLock alloc] init];
if(@available(macOS 14, *)) {
CMAuthorizationStatus status = [CMHeadphoneMotionManager authorizationStatus];
if(status == CMAuthorizationStatusDenied) {
ALog(@"Headphone motion not authorized");
return;
} else if(status == CMAuthorizationStatusAuthorized) {
ALog(@"Headphone motion authorized");
} else if(status == CMAuthorizationStatusRestricted) {
ALog(@"Headphone motion restricted");
} else if(status == CMAuthorizationStatusNotDetermined) {
ALog(@"Headphone motion status not determined; will prompt for access");
}
motionManager = [[CMHeadphoneMotionManager alloc] init];
}
#endif
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super initWithController:c previous:p latency:latency];
if(self) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableHrtf = [defaults boolForKey:@"enableHrtf"];
enableHeadTracking = [defaults boolForKey:@"enableHeadTracking"];
rotationMatrix = matrix_identity_float4x4;
[self addObservers];
}
return self;
}
- (void)dealloc {
DLog(@"HRTF dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
[super cleanUp];
if(extrapolate_buffer) {
free(extrapolate_buffer);
extrapolate_buffer = NULL;
extrapolate_buffer_size = 0;
}
}
- (void)addObservers {
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableHrtf" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPHRTFNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableHeadTracking" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPHRTFNodeContext];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(resetReferencePosition:) name:CogPlaybackDidResetHeadTracking object:nil];
observersapplied = YES;
}
- (void)removeObservers {
if(observersapplied) {
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableHrtf" context:kDSPHRTFNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableHeadTracking" context:kDSPHRTFNodeContext];
[[NSNotificationCenter defaultCenter] removeObserver:self name:CogPlaybackDidResetHeadTracking object:nil];
observersapplied = NO;
}
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if(context != kDSPHRTFNodeContext) {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
return;
}
if([keyPath isEqualToString:@"values.enableHrtf"] ||
[keyPath isEqualToString:@"values.enableHeadTracking"]) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableHrtf = [defaults boolForKey:@"enableHrtf"];
enableHeadTracking = [defaults boolForKey:@"enableHeadTracking"];
resetFilter = YES;
}
}
- (BOOL)fullInit {
if(enableHrtf) {
NSURL *presetUrl = [[NSBundle mainBundle] URLForResource:@"SADIE_D02-96000" withExtension:@"mhr"];
rotationMatrixUpdated = NO;
simd_float4x4 matrix;
if(!referenceMatrixSet || !enableHeadTracking) {
referenceMatrixSet = NO;
matrix = matrix_identity_float4x4;
self->referenceMatrix = matrix;
if(enableHeadTracking) {
lastEnableHeadTracking = YES;
registerMotionListener(self);
} else if(lastEnableHeadTracking) {
lastEnableHeadTracking = NO;
unregisterMotionListener();
}
} else {
simd_float4x4 mirrorTransform = {
simd_make_float4(-1.0, 0.0, 0.0, 0.0),
simd_make_float4(0.0, 1.0, 0.0, 0.0),
simd_make_float4(0.0, 0.0, 1.0, 0.0),
simd_make_float4(0.0, 0.0, 0.0, 1.0)
};
matrix = simd_mul(mirrorTransform, rotationMatrix);
matrix = simd_mul(matrix, referenceMatrix);
}
hrtf = [[HeadphoneFilter alloc] initWithImpulseFile:presetUrl forSampleRate:inputFormat.mSampleRate withInputChannels:inputFormat.mChannelsPerFrame withConfig:inputChannelConfig withMatrix:matrix];
if(!hrtf) {
return NO;
}
outputFormat = inputFormat;
outputFormat.mChannelsPerFrame = 2;
outputFormat.mBytesPerFrame = sizeof(float) * outputFormat.mChannelsPerFrame;
outputFormat.mBytesPerPacket = outputFormat.mBytesPerFrame * outputFormat.mFramesPerPacket;
outputChannelConfig = AudioChannelSideLeft | AudioChannelSideRight;
resetFilter = NO;
needPrefill = [hrtf needPrefill];
} else {
if(lastEnableHeadTracking) {
lastEnableHeadTracking = NO;
unregisterMotionListener();
}
referenceMatrixSet = NO;
hrtf = nil;
}
return YES;
}
- (void)fullShutdown {
hrtf = nil;
if(lastEnableHeadTracking) {
lastEnableHeadTracking = NO;
unregisterMotionListener();
}
resetFilter = NO;
}
- (BOOL)setup {
if(stopping)
return NO;
[self fullShutdown];
return [self fullInit];
}
- (void)cleanUp {
stopping = YES;
while(processEntered) {
usleep(500);
}
[self fullShutdown];
}
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
[self fullShutdown];
paused = NO;
}
- (BOOL)paused {
return paused;
}
- (void)process {
while([self shouldContinue] == YES) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
}
usleep(500);
} else {
[self writeChunk:chunk];
chunk = nil;
}
if(resetFilter || (!enableHrtf && hrtf)) {
[self fullShutdown];
}
}
}
}
- (AudioChunk *)convert {
if(stopping)
return nil;
processEntered = YES;
if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
processEntered = NO;
return nil;
}
if(![self peekFormat:&inputFormat channelConfig:&inputChannelConfig]) {
processEntered = NO;
return nil;
}
if(!inputFormat.mSampleRate ||
!inputFormat.mBitsPerChannel ||
!inputFormat.mChannelsPerFrame ||
!inputFormat.mBytesPerFrame ||
!inputFormat.mFramesPerPacket ||
!inputFormat.mBytesPerPacket) {
processEntered = NO;
return nil;
}
if((enableHrtf && !hrtf) ||
memcmp(&inputFormat, &lastInputFormat, sizeof(inputFormat)) != 0 ||
inputChannelConfig != lastInputChannelConfig) {
lastInputFormat = inputFormat;
lastInputChannelConfig = inputChannelConfig;
[self fullShutdown];
if(enableHrtf && ![self setup]) {
processEntered = NO;
return nil;
}
}
if(!hrtf) {
processEntered = NO;
return [self readChunk:4096];
}
AudioChunk *chunk = [self readChunkAsFloat32:4096];
if(!chunk || ![chunk frameCount]) {
processEntered = NO;
return nil;
}
if(rotationMatrixUpdated) {
rotationMatrixUpdated = NO;
simd_float4x4 mirrorTransform = {
simd_make_float4(-1.0, 0.0, 0.0, 0.0),
simd_make_float4(0.0, 1.0, 0.0, 0.0),
simd_make_float4(0.0, 0.0, 1.0, 0.0),
simd_make_float4(0.0, 0.0, 0.0, 1.0)
};
simd_float4x4 matrix = simd_mul(mirrorTransform, rotationMatrix);
matrix = simd_mul(matrix, referenceMatrix);
[hrtf reloadWithMatrix:matrix];
}
double streamTimestamp = [chunk streamTimestamp];
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
if(needPrefill) {
size_t maxToUse = 4096 - needPrefill;
if(maxToUse > frameCount) {
maxToUse = frameCount;
}
size_t channels = inputFormat.mChannelsPerFrame;
memcpy(&prefillBuffer[needPrefill * channels], [sampleData bytes], maxToUse * sizeof(float) * channels);
lpc_extrapolate_bkwd(&prefillBuffer[needPrefill * channels], maxToUse, maxToUse, (int)channels, LPC_ORDER, needPrefill, &extrapolate_buffer, &extrapolate_buffer_size);
[hrtf process:&prefillBuffer[0] sampleCount:(int)needPrefill toBuffer:&outBuffer[0]];
needPrefill = 0;
}
[hrtf process:(const float *)[sampleData bytes] sampleCount:(int)frameCount toBuffer:&outBuffer[0]];
AudioChunk *outputChunk = [[AudioChunk alloc] init];
[outputChunk setFormat:outputFormat];
if(outputChannelConfig) {
[outputChunk setChannelConfig:outputChannelConfig];
}
if([chunk isHDCD]) [outputChunk setHDCD];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:[chunk streamTimeRatio]];
[outputChunk assignSamples:&outBuffer[0] frameCount:frameCount];
processEntered = NO;
return outputChunk;
}
- (void)reportMotion:(simd_float4x4)matrix {
rotationMatrix = matrix;
if(!referenceMatrixSet) {
referenceMatrix = simd_inverse(matrix);
referenceMatrixSet = YES;
}
rotationMatrixUpdated = YES;
}
- (void)resetReferencePosition:(NSNotification *)notification {
referenceMatrixSet = NO;
}
@end
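A compact restatement of the head-tracking matrix algebra used in -fullInit and -convert above, assuming only <simd/simd.h>; the helper name is illustrative.

#include <simd/simd.h>

// Relative head orientation: the inverse of the first reported rotation is captured as the
// reference (see -reportMotion), then every new rotation is mapped through a left/right
// mirror and composed with that reference, exactly as the code above does.
static simd_float4x4 head_relative(simd_float4x4 rotation, simd_float4x4 reference) {
	simd_float4x4 mirror = {
		simd_make_float4(-1.0f, 0.0f, 0.0f, 0.0f),
		simd_make_float4( 0.0f, 1.0f, 0.0f, 0.0f),
		simd_make_float4( 0.0f, 0.0f, 1.0f, 0.0f),
		simd_make_float4( 0.0f, 0.0f, 0.0f, 1.0f)
	};
	return simd_mul(simd_mul(mirror, rotation), reference); // reference = simd_inverse(first rotation)
}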

View file

@ -1,32 +0,0 @@
//
// DSPRubberbandNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/10/25.
//
#ifndef DSPRubberbandNode_h
#define DSPRubberbandNode_h
#import <CogAudio/DSPNode.h>
@interface DSPRubberbandNode : DSPNode {
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
- (BOOL)setup;
- (void)cleanUp;
- (void)resetBuffer;
- (BOOL)paused;
- (void)process;
- (AudioChunk * _Nullable)convert;
- (double)secondsBuffered;
@end
#endif /* DSPRubberbandNode_h */

View file

@ -1,561 +0,0 @@
//
// DSPRubberbandNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/10/25.
//
#import <Foundation/Foundation.h>
#import <Accelerate/Accelerate.h>
#import "DSPRubberbandNode.h"
#import "Logging.h"
#import <rubberband/rubberband-c.h>
static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
@implementation DSPRubberbandNode {
BOOL enableRubberband;
RubberBandState ts;
RubberBandOptions tslastoptions, tsnewoptions;
size_t tschannels;
ssize_t blockSize, toDrop, samplesBuffered;
BOOL tsapplynewoptions;
BOOL tsrestartengine;
double tempo, pitch;
double lastTempo, lastPitch;
double countIn;
uint64_t countOut;
double streamTimestamp;
double streamTimeRatio;
BOOL isHDCD;
BOOL stopping, paused;
BOOL processEntered;
BOOL flushed;
BOOL observersapplied;
AudioStreamBasicDescription lastInputFormat;
AudioStreamBasicDescription inputFormat;
uint32_t lastInputChannelConfig, inputChannelConfig;
float *rsPtrs[32];
float rsInBuffer[4096 * 32];
float rsOutBuffer[65536 * 32];
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super initWithController:c previous:p latency:latency];
if(self) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableRubberband = ![[defaults stringForKey:@"rubberbandEngine"] isEqualToString:@"disabled"];
pitch = [defaults doubleForKey:@"pitch"];
tempo = [defaults doubleForKey:@"tempo"];
lastPitch = pitch;
lastTempo = tempo;
[self addObservers];
}
return self;
}
- (void)dealloc {
DLog(@"Rubber Band dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
[super cleanUp];
}
- (void)addObservers {
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.pitch" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.tempo" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandEngine" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandTransients" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandDetector" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandPhase" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandWindow" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandSmoothing" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandFormant" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandPitch" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandChannels" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
observersapplied = YES;
}
- (void)removeObservers {
if(observersapplied) {
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.pitch" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.tempo" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandEngine" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandTransients" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandDetector" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandPhase" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandWindow" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandSmoothing" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandFormant" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandPitch" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandChannels" context:kDSPRubberbandNodeContext];
observersapplied = NO;
}
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if(context != kDSPRubberbandNodeContext) {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
return;
}
if([keyPath isEqualToString:@"values.pitch"] ||
[keyPath isEqualToString:@"values.tempo"]) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
pitch = [defaults doubleForKey:@"pitch"];
tempo = [defaults doubleForKey:@"tempo"];
tsapplynewoptions = YES;
} else if([[keyPath substringToIndex:17] isEqualToString:@"values.rubberband"]) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableRubberband = ![[defaults stringForKey:@"rubberbandEngine"] isEqualToString:@"disabled"];
if(enableRubberband && ts) {
RubberBandOptions options = [self getRubberbandOptions];
RubberBandOptions changed = options ^ tslastoptions;
if(changed) {
BOOL engineR3 = !!(options & RubberBandOptionEngineFiner);
// Options which require a restart of the engine
const RubberBandOptions mustRestart = RubberBandOptionEngineFaster | RubberBandOptionEngineFiner | RubberBandOptionWindowStandard | RubberBandOptionWindowShort | RubberBandOptionWindowLong | RubberBandOptionSmoothingOff | RubberBandOptionSmoothingOn | (engineR3 ? RubberBandOptionPitchHighSpeed | RubberBandOptionPitchHighQuality | RubberBandOptionPitchHighConsistency : 0) | RubberBandOptionChannelsApart | RubberBandOptionChannelsTogether;
if(changed & mustRestart) {
tsrestartengine = YES;
} else {
tsnewoptions = options;
tsapplynewoptions = YES;
}
}
}
}
}
- (RubberBandOptions)getRubberbandOptions {
RubberBandOptions options = RubberBandOptionProcessRealTime;
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
NSString *value = [defaults stringForKey:@"rubberbandEngine"];
BOOL engineR3 = NO;
if([value isEqualToString:@"faster"]) {
options |= RubberBandOptionEngineFaster;
} else if([value isEqualToString:@"finer"]) {
options |= RubberBandOptionEngineFiner;
engineR3 = YES;
}
if(!engineR3) {
value = [defaults stringForKey:@"rubberbandTransients"];
if([value isEqualToString:@"crisp"]) {
options |= RubberBandOptionTransientsCrisp;
} else if([value isEqualToString:@"mixed"]) {
options |= RubberBandOptionTransientsMixed;
} else if([value isEqualToString:@"smooth"]) {
options |= RubberBandOptionTransientsSmooth;
}
value = [defaults stringForKey:@"rubberbandDetector"];
if([value isEqualToString:@"compound"]) {
options |= RubberBandOptionDetectorCompound;
} else if([value isEqualToString:@"percussive"]) {
options |= RubberBandOptionDetectorPercussive;
} else if([value isEqualToString:@"soft"]) {
options |= RubberBandOptionDetectorSoft;
}
value = [defaults stringForKey:@"rubberbandPhase"];
if([value isEqualToString:@"laminar"]) {
options |= RubberBandOptionPhaseLaminar;
} else if([value isEqualToString:@"independent"]) {
options |= RubberBandOptionPhaseIndependent;
}
}
value = [defaults stringForKey:@"rubberbandWindow"];
if([value isEqualToString:@"standard"]) {
options |= RubberBandOptionWindowStandard;
} else if([value isEqualToString:@"short"]) {
options |= RubberBandOptionWindowShort;
} else if([value isEqualToString:@"long"]) {
if(engineR3) {
options |= RubberBandOptionWindowStandard;
} else {
options |= RubberBandOptionWindowLong;
}
}
if(!engineR3) {
value = [defaults stringForKey:@"rubberbandSmoothing"];
if([value isEqualToString:@"off"]) {
options |= RubberBandOptionSmoothingOff;
} else if([value isEqualToString:@"on"]) {
options |= RubberBandOptionSmoothingOn;
}
}
value = [defaults stringForKey:@"rubberbandFormant"];
if([value isEqualToString:@"shifted"]) {
options |= RubberBandOptionFormantShifted;
} else if([value isEqualToString:@"preserved"]) {
options |= RubberBandOptionFormantPreserved;
}
value = [defaults stringForKey:@"rubberbandPitch"];
if([value isEqualToString:@"highspeed"]) {
options |= RubberBandOptionPitchHighSpeed;
} else if([value isEqualToString:@"highquality"]) {
options |= RubberBandOptionPitchHighQuality;
} else if([value isEqualToString:@"highconsistency"]) {
options |= RubberBandOptionPitchHighConsistency;
}
value = [defaults stringForKey:@"rubberbandChannels"];
if([value isEqualToString:@"apart"]) {
options |= RubberBandOptionChannelsApart;
} else if([value isEqualToString:@"together"]) {
options |= RubberBandOptionChannelsTogether;
}
return options;
}
- (BOOL)fullInit {
RubberBandOptions options = [self getRubberbandOptions];
tslastoptions = options;
tschannels = inputFormat.mChannelsPerFrame;
ts = rubberband_new(inputFormat.mSampleRate, (int)tschannels, options, 1.0 / tempo, pitch);
if(!ts)
return NO;
blockSize = rubberband_get_process_size_limit(ts);
toDrop = rubberband_get_start_delay(ts);
samplesBuffered = 0;
if(blockSize > 4096)
blockSize = 4096;
rubberband_set_max_process_size(ts, (unsigned int)blockSize);
for(size_t i = 0; i < 32; ++i) {
rsPtrs[i] = &rsInBuffer[4096 * i];
}
ssize_t toPad = rubberband_get_preferred_start_pad(ts);
if(toPad > 0) {
for(size_t i = 0; i < tschannels; ++i) {
memset(rsPtrs[i], 0, 4096 * sizeof(float));
}
while(toPad > 0) {
ssize_t p = toPad;
if(p > blockSize) p = blockSize;
rubberband_process(ts, (const float * const *)rsPtrs, (int)p, false);
toPad -= p;
}
}
tsapplynewoptions = NO;
tsrestartengine = NO;
flushed = NO;
countIn = 0.0;
countOut = 0;
return YES;
}
- (void)partialInit {
if(stopping || paused || !ts) return;
processEntered = YES;
RubberBandOptions changed = tslastoptions ^ tsnewoptions;
if(changed) {
tslastoptions = tsnewoptions;
BOOL engineR3 = !!(tsnewoptions & RubberBandOptionEngineFiner);
const RubberBandOptions transientsmask = RubberBandOptionTransientsCrisp | RubberBandOptionTransientsMixed | RubberBandOptionTransientsSmooth;
const RubberBandOptions detectormask = RubberBandOptionDetectorCompound | RubberBandOptionDetectorPercussive | RubberBandOptionDetectorSoft;
const RubberBandOptions phasemask = RubberBandOptionPhaseLaminar | RubberBandOptionPhaseIndependent;
const RubberBandOptions formantmask = RubberBandOptionFormantShifted | RubberBandOptionFormantPreserved;
const RubberBandOptions pitchmask = RubberBandOptionPitchHighSpeed | RubberBandOptionPitchHighQuality | RubberBandOptionPitchHighConsistency;
if(changed & transientsmask)
rubberband_set_transients_option(ts, tsnewoptions & transientsmask);
if(!engineR3) {
if(changed & detectormask)
rubberband_set_detector_option(ts, tsnewoptions & detectormask);
if(changed & phasemask)
rubberband_set_phase_option(ts, tsnewoptions & phasemask);
}
if(changed & formantmask)
rubberband_set_formant_option(ts, tsnewoptions & formantmask);
if(!engineR3 && (changed & pitchmask))
rubberband_set_pitch_option(ts, tsnewoptions & pitchmask);
}
if(fabs(pitch - lastPitch) > 1e-5 ||
fabs(tempo - lastTempo) > 1e-5) {
lastPitch = pitch;
lastTempo = tempo;
rubberband_set_pitch_scale(ts, pitch);
rubberband_set_time_ratio(ts, 1.0 / tempo);
}
tsapplynewoptions = NO;
processEntered = NO;
}
- (void)fullShutdown {
if(ts) {
rubberband_delete(ts);
ts = NULL;
}
}
- (BOOL)setup {
if(stopping)
return NO;
[self fullShutdown];
return [self fullInit];
}
- (void)cleanUp {
stopping = YES;
while(processEntered) {
usleep(500);
}
[self fullShutdown];
}
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
[self fullShutdown];
paused = NO;
}
- (BOOL)paused {
return paused;
}
- (void)setPreviousNode:(id)p {
if(previousNode != p) {
paused = YES;
while(processEntered);
previousNode = p;
paused = NO;
}
}
- (void)setEndOfStream:(BOOL)e {
if(endOfStream && !e) {
while(processEntered);
[self fullShutdown];
}
[super setEndOfStream:e];
flushed = e;
}
- (void)process {
while([self shouldContinue] == YES) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if(!ts) {
flushed = previousNode && [[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES;
}
if(flushed) {
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
}
usleep(500);
} else {
[self writeChunk:chunk];
chunk = nil;
}
if(!enableRubberband && ts) {
[self fullShutdown];
} else if(tsrestartengine) {
[self fullShutdown];
} else if(tsapplynewoptions) {
[self partialInit];
}
}
}
}
- (AudioChunk *)convert {
if(stopping)
return nil;
processEntered = YES;
if(stopping || flushed || !previousNode || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
processEntered = NO;
return nil;
}
if(![self peekFormat:&inputFormat channelConfig:&inputChannelConfig]) {
processEntered = NO;
return nil;
}
if(!inputFormat.mSampleRate ||
!inputFormat.mBitsPerChannel ||
!inputFormat.mChannelsPerFrame ||
!inputFormat.mBytesPerFrame ||
!inputFormat.mFramesPerPacket ||
!inputFormat.mBytesPerPacket) {
processEntered = NO;
return nil;
}
if((enableRubberband && !ts) ||
memcmp(&inputFormat, &lastInputFormat, sizeof(inputFormat)) != 0 ||
inputChannelConfig != lastInputChannelConfig) {
lastInputFormat = inputFormat;
lastInputChannelConfig = inputChannelConfig;
[self fullShutdown];
if(enableRubberband && ![self setup]) {
processEntered = NO;
return nil;
}
}
if(!ts) {
processEntered = NO;
return [self readChunk:4096];
}
ssize_t samplesToProcess = rubberband_get_samples_required(ts);
if(samplesToProcess > blockSize)
samplesToProcess = blockSize;
int channels = (int)(inputFormat.mChannelsPerFrame);
if(samplesToProcess > 0) {
AudioChunk *chunk = [self readAndMergeChunksAsFloat32:samplesToProcess];
if(!chunk || ![chunk frameCount]) {
processEntered = NO;
return nil;
}
streamTimestamp = [chunk streamTimestamp];
streamTimeRatio = [chunk streamTimeRatio];
isHDCD = [chunk isHDCD];
size_t frameCount = [chunk frameCount];
countIn += ((double)frameCount) / tempo;
NSData *sampleData = [chunk removeSamples:frameCount];
for (size_t i = 0; i < channels; ++i) {
cblas_scopy((int)frameCount, ((const float *)[sampleData bytes]) + i, channels, rsPtrs[i], 1);
}
flushed = [[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES;
int len = (int)frameCount;
rubberband_process(ts, (const float * const *)rsPtrs, len, flushed);
}
ssize_t samplesAvailable;
while(!stopping && (samplesAvailable = rubberband_available(ts)) > 0) {
if(toDrop > 0) {
ssize_t blockDrop = toDrop;
if(blockDrop > samplesAvailable) blockDrop = samplesAvailable;
if(blockDrop > blockSize) blockDrop = blockSize;
rubberband_retrieve(ts, (float * const *)rsPtrs, (int)blockDrop);
toDrop -= blockDrop;
continue;
}
ssize_t maxAvailable = 65536 - samplesBuffered;
ssize_t samplesOut = samplesAvailable;
if(samplesOut > maxAvailable) {
samplesOut = maxAvailable;
if(samplesOut <= 0) {
break;
}
}
if(samplesOut > blockSize) samplesOut = blockSize;
rubberband_retrieve(ts, (float * const *)rsPtrs, (int)samplesOut);
for(size_t i = 0; i < channels; ++i) {
cblas_scopy((int)samplesOut, rsPtrs[i], 1, &rsOutBuffer[samplesBuffered * channels + i], channels);
}
samplesBuffered += samplesOut;
}
if(flushed) {
if(samplesBuffered > 0) {
ssize_t ideal = (ssize_t)floor(countIn + 0.5);
if(countOut + samplesBuffered > ideal) {
// Rubber Band does not account for flushing duration in real time mode
samplesBuffered = ideal - countOut;
}
}
}
AudioChunk *outputChunk = nil;
if(samplesBuffered > 0) {
outputChunk = [[AudioChunk alloc] init];
[outputChunk setFormat:inputFormat];
if(inputChannelConfig) {
[outputChunk setChannelConfig:inputChannelConfig];
}
if(isHDCD) [outputChunk setHDCD];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:streamTimeRatio * tempo];
[outputChunk assignSamples:&rsOutBuffer[0] frameCount:samplesBuffered];
countOut += samplesBuffered;
samplesBuffered = 0;
double chunkDuration = [outputChunk duration];
streamTimestamp += chunkDuration * [outputChunk streamTimeRatio];
}
processEntered = NO;
return outputChunk;
}
- (double)secondsBuffered {
double rbBuffered = 0.0;
if(ts) {
// We don't use Rubber Band's latency function, because at least in Cog's case
// it reports next to nothing by the time we call this function, and also
// because it doesn't account for how much audio will be lopped off at the
// end of the process.
//
// Tested once, this tends to be close to zero when actually called.
rbBuffered = countIn - (double)(countOut);
if(rbBuffered < 0) {
rbBuffered = 0.0;
} else {
rbBuffered /= inputFormat.mSampleRate;
}
}
return [buffer listDuration] + rbBuffered;
}
@end
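
The convert path above drives Rubber Band in its real-time pull mode: it asks rubberband_get_samples_required, clamps the answer to the 4096-frame block size, feeds deinterleaved planar floats, then drains rubberband_available with rubberband_retrieve, discarding rubberband_get_start_delay frames of priming output and truncating the final flush to the ideal length tracked in countIn. A minimal single-channel sketch of that feed-and-drain cycle with the same rubberband-c calls; the helper below is illustrative and not part of Cog:

#include <rubberband/rubberband-c.h>
/* Sketch only: one feed-and-drain pass for a mono stream. */
static int stretch_block(RubberBandState ts, const float *in, int inFrames,
                         float *out, int outCapacity, int final) {
	const float *inPlanes[1] = { in };
	rubberband_process(ts, inPlanes, inFrames, final);
	int produced = 0;
	int avail;
	while((avail = rubberband_available(ts)) > 0 && produced < outCapacity) {
		int n = avail;
		if(n > outCapacity - produced) n = outCapacity - produced;
		float *outPlanes[1] = { out + produced };
		rubberband_retrieve(ts, outPlanes, n);
		produced += n;
	}
	return produced;
}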

View file

@ -1,36 +0,0 @@
//
// FSurroundFilter.h
// CogAudio
//
// Created by Christopher Snowhill on 7/9/22.
//
#ifndef FSurroundFilter_h
#define FSurroundFilter_h
#import <Cocoa/Cocoa.h>
#import <stdint.h>
#define FSurroundChunkSize 4096
@interface FSurroundFilter : NSObject {
void *decoder;
void *params;
double srate;
uint32_t channelCount;
uint32_t channelConfig;
float tempBuffer[4096 * 2];
}
- (id)initWithSampleRate:(double)srate;
- (uint32_t)channelCount;
- (uint32_t)channelConfig;
- (double)srate;
- (void)process:(const float *)samplesIn output:(float *)samplesOut count:(uint32_t)count;
@end
#endif /* FSurroundFilter_h */

View file

@ -1,156 +0,0 @@
//
// FSurroundFilter.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 7/9/22.
//
#import "FSurroundFilter.h"
#import "freesurround_decoder.h"
#import "AudioChunk.h"
#import <Accelerate/Accelerate.h>
#import <map>
#import <vector>
struct freesurround_params {
// the user-configurable parameters
float center_image, shift, depth, circular_wrap, focus, front_sep, rear_sep, bass_lo, bass_hi;
bool use_lfe;
channel_setup channels_fs; // FreeSurround channel setup
std::vector<unsigned> chanmap; // FreeSurround -> WFX channel index translation (derived data for faster lookup)
// construct with defaults
freesurround_params()
: center_image(0.7), shift(0), depth(1), circular_wrap(90), focus(0), front_sep(1), rear_sep(1),
bass_lo(40), bass_hi(90), use_lfe(false) {
set_channels_fs(cs_5point1);
}
// compute the WFX version of the channel setup code
unsigned channel_count() {
return (unsigned)chanmap.size();
}
unsigned channels_wfx() {
unsigned res = 0;
for(unsigned i = 0; i < chanmap.size(); res |= chanmap[i++]) {};
return res;
}
// assign a channel setup & recompute derived data
void set_channels_fs(channel_setup setup) {
channels_fs = setup;
chanmap.clear();
// Note: Because WFX does not define a few of the more exotic channels (side front left&right, side rear left&right, back center left&right),
// the side front/back channel pairs (both left and right sides, resp.) are mapped here onto foobar's top front/back channel pairs and the
// back (off-)center left/right channels are mapped onto foobar's top front center and top back center, respectively.
// Therefore, these speakers should be connected to those outputs instead.
std::map<channel_id, uint32_t> fs2wfx;
fs2wfx[ci_front_left] = AudioChannelFrontLeft;
fs2wfx[ci_front_center_left] = AudioChannelFrontCenterLeft;
fs2wfx[ci_front_center] = AudioChannelFrontCenter;
fs2wfx[ci_front_center_right] = AudioChannelFrontCenterRight;
fs2wfx[ci_front_right] = AudioChannelFrontRight;
fs2wfx[ci_side_front_left] = AudioChannelFrontLeft;
fs2wfx[ci_side_front_right] = AudioChannelTopFrontRight;
fs2wfx[ci_side_center_left] = AudioChannelSideLeft;
fs2wfx[ci_side_center_right] = AudioChannelSideRight;
fs2wfx[ci_side_back_left] = AudioChannelTopBackLeft;
fs2wfx[ci_side_back_right] = AudioChannelTopBackRight;
fs2wfx[ci_back_left] = AudioChannelBackLeft;
fs2wfx[ci_back_center_left] = AudioChannelTopFrontCenter;
fs2wfx[ci_back_center] = AudioChannelBackCenter;
fs2wfx[ci_back_center_right] = AudioChannelTopBackCenter;
fs2wfx[ci_back_right] = AudioChannelBackRight;
fs2wfx[ci_lfe] = AudioChannelLFE;
for(unsigned i = 0; i < freesurround_decoder::num_channels(channels_fs); i++)
chanmap.push_back(fs2wfx[freesurround_decoder::channel_at(channels_fs, i)]);
}
};
@implementation FSurroundFilter
- (id)initWithSampleRate:(double)srate {
self = [super init];
if(!self) return nil;
self->srate = srate;
freesurround_params *_params = new freesurround_params;
params = (void *)_params;
freesurround_decoder *_decoder = new freesurround_decoder(cs_5point1, 4096);
decoder = (void *)_decoder;
_decoder->circular_wrap(_params->circular_wrap);
_decoder->shift(_params->shift);
_decoder->depth(_params->depth);
_decoder->focus(_params->focus);
_decoder->center_image(_params->center_image);
_decoder->front_separation(_params->front_sep);
_decoder->rear_separation(_params->rear_sep);
_decoder->bass_redirection(_params->use_lfe);
_decoder->low_cutoff(_params->bass_lo / (srate / 2.0));
_decoder->high_cutoff(_params->bass_hi / (srate / 2.0));
channelCount = _params->channel_count();
channelConfig = _params->channels_wfx();
return self;
}
- (void)dealloc {
if(decoder) {
freesurround_decoder *_decoder = (freesurround_decoder *)decoder;
delete _decoder;
}
if(params) {
freesurround_params *_params = (freesurround_params *)params;
delete _params;
}
}
- (uint32_t)channelCount {
return channelCount;
}
- (uint32_t)channelConfig {
return channelConfig;
}
- (double)srate {
return srate;
}
- (void)process:(const float *)samplesIn output:(float *)samplesOut count:(uint32_t)count {
freesurround_params *_params = (freesurround_params *)params;
freesurround_decoder *_decoder = (freesurround_decoder *)decoder;
uint32_t zeroCount = 0;
if(count > 4096) {
zeroCount = count - 4096;
count = 4096;
}
if(count < 4096) {
cblas_scopy(count * 2, samplesIn, 1, &tempBuffer[0], 1);
vDSP_vclr(&tempBuffer[count * 2], 1, (4096 - count) * 2);
samplesIn = &tempBuffer[0];
}
float *src = _decoder->decode(samplesIn);
for(unsigned c = 0, num = channelCount; c < num; c++) {
unsigned idx = [AudioChunk channelIndexFromConfig:channelConfig forFlag:_params->chanmap[c]];
cblas_scopy(count, src + c, num, samplesOut + idx, num);
if(zeroCount) {
vDSP_vclr(samplesOut + idx + count, num, zeroCount);
}
}
}
@end
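
FSurroundFilter above decodes interleaved stereo into the multichannel layout reported by channelCount/channelConfig in fixed blocks of at most FSurroundChunkSize (4096) frames; shorter blocks are zero-padded internally and the padded region of the output is cleared, so callers slice their input accordingly. A minimal usage sketch, where stereoIn (interleaved stereo) and totalFrames are assumed caller-side inputs rather than anything from the original sources:

// Usage sketch only; stereoIn and totalFrames are assumed to exist on the
// caller's side.
FSurroundFilter *fs = [[FSurroundFilter alloc] initWithSampleRate:44100.0];
uint32_t outChannels = [fs channelCount];
float *out = calloc(FSurroundChunkSize * outChannels, sizeof(float));
for(size_t offset = 0; offset < totalFrames; offset += FSurroundChunkSize) {
	uint32_t n = (uint32_t)MIN((size_t)FSurroundChunkSize, totalFrames - offset);
	[fs process:stereoIn + offset * 2 output:out count:n];
	// out now holds n frames of interleaved output in the channel order
	// described by [fs channelConfig].
}
free(out);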

View file

@ -1,46 +0,0 @@
//
// HeadphoneFilter.h
// CogAudio Framework
//
// Created by Christopher Snowhill on 1/24/22.
//
#ifndef HeadphoneFilter_h
#define HeadphoneFilter_h
#import <Accelerate/Accelerate.h>
#import <Cocoa/Cocoa.h>
#import <simd/simd.h>
@interface HeadphoneFilter : NSObject {
NSURL *URL;
int bufferSize;
int paddedBufferSize;
double sampleRate;
int channelCount;
uint32_t config;
float **mirroredImpulseResponses;
float **prevInputs;
float *paddedSignal[2];
}
+ (BOOL)validateImpulseFile:(NSURL *)url;
- (id)initWithImpulseFile:(NSURL *)url forSampleRate:(double)sampleRate withInputChannels:(int)channels withConfig:(uint32_t)config withMatrix:(simd_float4x4)matrix;
- (void)reloadWithMatrix:(simd_float4x4)matrix;
- (void)process:(const float *)inBuffer sampleCount:(int)count toBuffer:(float *)outBuffer;
- (void)reset;
- (size_t)needPrefill;
@end
#endif /* HeadphoneFilter_h */

View file

@ -1,386 +0,0 @@
//
// HeadphoneFilter.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 1/24/22.
//
#import "HeadphoneFilter.h"
#import "AudioChunk.h"
#import "AudioDecoder.h"
#import "AudioSource.h"
#import <stdlib.h>
#import <fstream>
#import <soxr.h>
#import "HrtfData.h"
#import "Logging.h"
typedef struct speakerPosition {
float elevation;
float azimuth;
float distance;
} speakerPosition;
#define DEGREES(x) ((x)*M_PI / 180.0)
static const speakerPosition speakerPositions[18] = {
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-30.0), .distance = 1.0 },
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+30.0), .distance = 1.0 },
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-135.0), .distance = 1.0 },
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+135.0), .distance = 1.0 },
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-15.0), .distance = 1.0 },
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+15.0), .distance = 1.0 },
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-180.0), .distance = 1.0 },
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-90.0), .distance = 1.0 },
{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+90.0), .distance = 1.0 },
{ .elevation = DEGREES(+90.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(-30.0), .distance = 1.0 },
{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(+30.0), .distance = 1.0 },
{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(-135.0), .distance = 1.0 },
{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(+135.0), .distance = 1.0 }
};
static simd_float4x4 matX(float theta) {
simd_float4x4 mat = {
simd_make_float4(1.0f, 0.0f, 0.0f, 0.0f),
simd_make_float4(0.0f, cosf(theta), -sinf(theta), 0.0f),
simd_make_float4(0.0f, sinf(theta), cosf(theta), 0.0f),
simd_make_float4(0.0f, 0.0f, 0.0f, 1.0f)
};
return mat;
};
static simd_float4x4 matY(float theta) {
simd_float4x4 mat = {
simd_make_float4(cosf(theta), 0.0f, sinf(theta), 0.0f),
simd_make_float4(0.0f, 1.0f, 0.0f, 0.0f),
simd_make_float4(-sinf(theta), 0.0f, cosf(theta), 0.0f),
simd_make_float4(0.0f, 0.0f, 0.0f, 1.0f)
};
return mat;
}
#if 0
static simd_float4x4 matZ(float theta) {
simd_float4x4 mat = {
simd_make_float4(cosf(theta), -sinf(theta), 0.0f, 0.0f),
simd_make_float4(sinf(theta), cosf(theta), 0.0f, 0.0f),
simd_make_float4(0.0f, 0.0f, 1.0f, 0.0f),
simd_make_float4(0.0f, 0.0f, 0.0f, 1.0f)
};
return mat;
};
#endif
static void transformPosition(float &elevation, float &azimuth, const simd_float4x4 &matrix) {
simd_float4x4 mat_x = matX(azimuth);
simd_float4x4 mat_y = matY(elevation);
//simd_float4x4 mat_z = matrix_identity_float4x4;
simd_float4x4 offsetMatrix = simd_mul(mat_x, mat_y);
//offsetMatrix = simd_mul(offsetMatrix, mat_z);
offsetMatrix = simd_mul(offsetMatrix, matrix);
double sy = sqrt(offsetMatrix.columns[0].x * offsetMatrix.columns[0].x + offsetMatrix.columns[1].x * offsetMatrix.columns[1].x);
bool singular = sy < 1e-6; // If sy is near zero, the matrix is close to gimbal lock
float x, y/*, z*/;
if(!singular) {
x = atan2(offsetMatrix.columns[2].y, offsetMatrix.columns[2].z);
y = atan2(-offsetMatrix.columns[2].x, sy);
//z = atan2(offsetMatrix.columns[1].x, offsetMatrix.columns[0].x);
} else {
x = atan2(-offsetMatrix.columns[1].z, offsetMatrix.columns[1].y);
y = atan2(-offsetMatrix.columns[2].x, sy);
//z = 0;
}
elevation = y;
azimuth = x;
if(elevation < (M_PI * (-0.5))) {
elevation = (M_PI * (-0.5));
} else if(elevation > M_PI * 0.5) {
elevation = M_PI * 0.5;
}
while(azimuth < (M_PI * (-2.0))) {
azimuth += M_PI * 2.0;
}
while(azimuth > M_PI * 2.0) {
azimuth -= M_PI * 2.0;
}
}
@interface impulseSetCache : NSObject {
NSURL *URL;
HrtfData *data;
}
+ (impulseSetCache *)sharedController;
- (void)getImpulse:(NSURL *)url outImpulse:(float **)outImpulse outSampleCount:(int *)outSampleCount sampleRate:(double)sampleRate channelCount:(int)channelCount channelConfig:(uint32_t)channelConfig withMatrix:(simd_float4x4)matrix;
@end
@implementation impulseSetCache
static impulseSetCache *_sharedController = nil;
+ (impulseSetCache *)sharedController {
@synchronized(self) {
if(!_sharedController) {
_sharedController = [[impulseSetCache alloc] init];
}
}
return _sharedController;
}
- (id)init {
self = [super init];
if(self) {
data = NULL;
}
return self;
}
- (void)dealloc {
delete data;
}
- (void)getImpulse:(NSURL *)url outImpulse:(float **)outImpulse outSampleCount:(int *)outSampleCount sampleRate:(double)sampleRate channelCount:(int)channelCount channelConfig:(uint32_t)channelConfig withMatrix:(simd_float4x4)matrix {
double sampleRateOfSource = 0;
int sampleCount = 0;
if(!data || ![url isEqualTo:URL]) {
delete data;
data = NULL;
URL = url;
NSString *filePath = [url path];
try {
std::ifstream file([filePath UTF8String], std::fstream::binary);
if(!file.is_open()) {
throw std::logic_error("Cannot open file.");
}
data = new HrtfData(file);
file.close();
} catch(std::exception &e) {
ALog(@"Exception caught: %s", e.what());
}
}
try {
soxr_quality_spec_t q_spec = soxr_quality_spec(SOXR_HQ, 0);
soxr_io_spec_t io_spec = soxr_io_spec(SOXR_FLOAT32_I, SOXR_FLOAT32_I);
soxr_runtime_spec_t runtime_spec = soxr_runtime_spec(0);
bool resampling;
sampleRateOfSource = data->get_sample_rate();
resampling = !!(fabs(sampleRateOfSource - sampleRate) > 1e-6);
uint32_t sampleCountResampled;
uint32_t sampleCountExact = data->get_response_length();
sampleCount = sampleCountExact + ((data->get_longest_delay() + 2) >> 2);
uint32_t actualSampleCount = sampleCount;
if(resampling) {
sampleCountResampled = (uint32_t)(((double)sampleCountExact) * sampleRate / sampleRateOfSource);
actualSampleCount = (uint32_t)(((double)actualSampleCount) * sampleRate / sampleRateOfSource);
io_spec.scale = sampleRateOfSource / sampleRate;
}
actualSampleCount = (actualSampleCount + 15) & ~15;
*outImpulse = (float *)calloc(sizeof(float), actualSampleCount * channelCount * 2);
if(!*outImpulse) {
throw std::bad_alloc();
}
float *hrtfData = *outImpulse;
for(uint32_t i = 0; i < channelCount; ++i) {
uint32_t channelFlag = [AudioChunk extractChannelFlag:i fromConfig:channelConfig];
uint32_t channelNumber = [AudioChunk findChannelIndex:channelFlag];
if(channelNumber < 18) {
const speakerPosition &speaker = speakerPositions[channelNumber];
DirectionData hrtfLeft;
DirectionData hrtfRight;
float azimuth = speaker.azimuth;
float elevation = speaker.elevation;
transformPosition(elevation, azimuth, matrix);
data->get_direction_data(elevation, azimuth, speaker.distance, hrtfLeft, hrtfRight);
if(resampling) {
ssize_t leftDelay = (ssize_t)((double)(hrtfLeft.delay) * 0.25 * sampleRate / sampleRateOfSource);
ssize_t rightDelay = (ssize_t)((double)(hrtfRight.delay) * 0.25 * sampleRate / sampleRateOfSource);
soxr_oneshot(sampleRateOfSource, sampleRate, 1, &hrtfLeft.impulse_response[0], sampleCountExact, NULL, &hrtfData[leftDelay + actualSampleCount * i * 2], sampleCountResampled, NULL, &io_spec, &q_spec, &runtime_spec);
soxr_oneshot(sampleRateOfSource, sampleRate, 1, &hrtfRight.impulse_response[0], sampleCountExact, NULL, &hrtfData[rightDelay + actualSampleCount * (i * 2 + 1)], sampleCountResampled, NULL, &io_spec, &q_spec, &runtime_spec);
} else {
cblas_scopy(sampleCountExact, &hrtfLeft.impulse_response[0], 1, &hrtfData[((hrtfLeft.delay + 2) >> 2) + actualSampleCount * i * 2], 1);
cblas_scopy(sampleCountExact, &hrtfRight.impulse_response[0], 1, &hrtfData[((hrtfRight.delay + 2) >> 2) + actualSampleCount * (i * 2 + 1)], 1);
}
}
}
*outSampleCount = actualSampleCount;
} catch(std::exception &e) {
ALog(@"Exception caught: %s", e.what());
}
}
@end
@implementation HeadphoneFilter
+ (BOOL)validateImpulseFile:(NSURL *)url {
NSString *filePath = [url path];
try {
std::ifstream file([filePath UTF8String], std::fstream::binary);
if(!file.is_open()) {
throw std::logic_error("Cannot open file.");
}
HrtfData data(file);
file.close();
return YES;
} catch(std::exception &e) {
ALog(@"Exception thrown: %s", e.what());
return NO;
}
}
- (id)initWithImpulseFile:(NSURL *)url forSampleRate:(double)sampleRate withInputChannels:(int)channels withConfig:(uint32_t)config withMatrix:(simd_float4x4)matrix {
self = [super init];
if(self) {
URL = url;
self->sampleRate = sampleRate;
channelCount = channels;
self->config = config;
float *impulseBuffer = NULL;
int sampleCount = 0;
[[impulseSetCache sharedController] getImpulse:url outImpulse:&impulseBuffer outSampleCount:&sampleCount sampleRate:sampleRate channelCount:channels channelConfig:config withMatrix:matrix];
if(!impulseBuffer) {
return nil;
}
mirroredImpulseResponses = (float **)calloc(sizeof(float *), channelCount * 2);
if(!mirroredImpulseResponses) {
free(impulseBuffer);
return nil;
}
for(int i = 0; i < channelCount * 2; ++i) {
mirroredImpulseResponses[i] = &impulseBuffer[sampleCount * i];
vDSP_vrvrs(mirroredImpulseResponses[i], 1, sampleCount);
}
paddedBufferSize = sampleCount;
paddedSignal[0] = (float *)calloc(sizeof(float), paddedBufferSize * 2);
if(!paddedSignal[0]) {
return nil;
}
paddedSignal[1] = paddedSignal[0] + paddedBufferSize;
prevInputs = (float **)calloc(channels, sizeof(float *));
if(!prevInputs)
return nil;
prevInputs[0] = (float *)calloc(sizeof(float), sampleCount * channelCount);
if(!prevInputs[0])
return nil;
for(int i = 1; i < channels; ++i) {
prevInputs[i] = prevInputs[i - 1] + sampleCount;
}
}
return self;
}
- (void)dealloc {
if(paddedSignal[0]) {
free(paddedSignal[0]);
}
if(prevInputs) {
if(prevInputs[0]) {
free(prevInputs[0]);
}
free(prevInputs);
}
if(mirroredImpulseResponses) {
if(mirroredImpulseResponses[0]) {
free(mirroredImpulseResponses[0]);
}
free(mirroredImpulseResponses);
}
}
- (void)reloadWithMatrix:(simd_float4x4)matrix {
@synchronized (self) {
if(!mirroredImpulseResponses[0]) {
return;
}
free(mirroredImpulseResponses[0]);
float *impulseBuffer = NULL;
int sampleCount = 0;
[[impulseSetCache sharedController] getImpulse:URL outImpulse:&impulseBuffer outSampleCount:&sampleCount sampleRate:sampleRate channelCount:channelCount channelConfig:config withMatrix:matrix];
for(int i = 0; i < channelCount * 2; ++i) {
mirroredImpulseResponses[i] = &impulseBuffer[sampleCount * i];
vDSP_vrvrs(mirroredImpulseResponses[i], 1, sampleCount);
}
}
}
- (void)process:(const float *)inBuffer sampleCount:(int)count toBuffer:(float *)outBuffer {
@synchronized (self) {
int sampleCount = paddedBufferSize;
while(count > 0) {
float left = 0, right = 0;
for(int i = 0; i < channelCount; ++i) {
float thisleft, thisright;
vDSP_vmul(prevInputs[i], 1, mirroredImpulseResponses[i * 2], 1, paddedSignal[0], 1, sampleCount);
vDSP_vmul(prevInputs[i], 1, mirroredImpulseResponses[i * 2 + 1], 1, paddedSignal[1], 1, sampleCount);
vDSP_sve(paddedSignal[0], 1, &thisleft, sampleCount);
vDSP_sve(paddedSignal[1], 1, &thisright, sampleCount);
left += thisleft;
right += thisright;
memmove(prevInputs[i], prevInputs[i] + 1, sizeof(float) * (sampleCount - 1));
prevInputs[i][sampleCount - 1] = *inBuffer++;
}
outBuffer[0] = left;
outBuffer[1] = right;
outBuffer += 2;
--count;
}
}
}
- (void)reset {
for(int i = 0; i < channelCount; ++i) {
vDSP_vclr(prevInputs[i], 1, paddedBufferSize);
}
}
- (size_t)needPrefill {
return paddedBufferSize;
}
@end
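
This original HeadphoneFilter evaluates the HRTF by direct time-domain convolution: the impulse responses are stored reversed (vDSP_vrvrs), each input channel keeps a rolling history of the last paddedBufferSize samples, and every output sample is one vDSP_vmul/vDSP_sve dot product per channel per ear, so the cost grows with frames × channels × impulse length. In plain C, one of those dot products looks like the sketch below (hypothetical helper name, not part of the original file):

/* Sketch only: what one vDSP_vmul + vDSP_sve pair above computes for one
 * input channel and one ear. Because the stored response was reversed, a
 * straight dot product of the input history against it is a direct
 * convolution evaluated at the newest sample. */
static float convolve_one_sample(const float *history, const float *reversedIR, int length) {
	float acc = 0.0f;
	for(int k = 0; k < length; ++k) {
		acc += history[k] * reversedIR[k];
	}
	return acc;
}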

View file

@ -1,26 +0,0 @@
//
// DSPNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/10/25.
//
#ifndef DSPNode_h
#define DSPNode_h
#import <CogAudio/Node.h>
@interface DSPNode : Node {
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
- (void)threadEntry:(id _Nullable)arg;
- (void)setShouldContinue:(BOOL)s;
- (double)secondsBuffered;
@end
#endif /* DSPNode_h */

View file

@ -1,76 +0,0 @@
//
// DSPNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/10/25.
//
#import <Foundation/Foundation.h>
#import "DSPNode.h"
@implementation DSPNode {
BOOL threadTerminated;
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super init];
if(self) {
buffer = [[ChunkList alloc] initWithMaximumDuration:latency];
writeSemaphore = [[Semaphore alloc] init];
readSemaphore = [[Semaphore alloc] init];
accessLock = [[NSLock alloc] init];
initialBufferFilled = NO;
controller = c;
endOfStream = NO;
shouldContinue = YES;
nodeChannelConfig = 0;
nodeLossless = NO;
durationPrebuffer = latency * 0.25;
inWrite = NO;
inPeek = NO;
inRead = NO;
inMerge = NO;
[self setPreviousNode:p];
#ifdef LOG_CHAINS
[self initLogFiles];
#endif
}
return self;
}
// DSP threads buffer for low latency, and therefore should have high priority
- (void)threadEntry:(id _Nullable)arg {
@autoreleasepool {
NSThread *currentThread = [NSThread currentThread];
[currentThread setThreadPriority:0.75];
[currentThread setQualityOfService:NSQualityOfServiceUserInitiated];
threadTerminated = NO;
[self process];
threadTerminated = YES;
}
}
- (void)setShouldContinue:(BOOL)s {
BOOL currentShouldContinue = shouldContinue;
shouldContinue = s;
if(!currentShouldContinue && s && threadTerminated) {
[self launchThread];
}
}
- (double)secondsBuffered {
return [buffer listDuration];
}
@end
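
DSPNode's worker thread can be torn down and brought back: setting shouldContinue to NO lets the process loop return and marks threadTerminated, and a later setShouldContinue:YES relaunches the thread through -launchThread only when the previous one has already exited. A short caller-side sketch, where node is assumed to be an already-initialized DSPNode subclass instance:

// Usage sketch only; "node" is an assumed DSPNode subclass instance.
[node setShouldContinue:NO];  // process loop exits; threadTerminated becomes YES
// ... later, to resume processing ...
[node setShouldContinue:YES]; // relaunches the worker via -launchThread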

View file

@ -9,7 +9,11 @@
#import <CoreAudio/CoreAudio.h> #import <CoreAudio/CoreAudio.h>
#import <Foundation/Foundation.h> #import <Foundation/Foundation.h>
#import "HeadphoneFilter.h"
@interface DownmixProcessor : NSObject { @interface DownmixProcessor : NSObject {
HeadphoneFilter *hFilter;
AudioStreamBasicDescription inputFormat; AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription outputFormat; AudioStreamBasicDescription outputFormat;

View file

@ -12,8 +12,6 @@
#import "AudioChunk.h" #import "AudioChunk.h"
#import <Accelerate/Accelerate.h>
static void downmix_to_stereo(const float *inBuffer, int channels, uint32_t config, float *outBuffer, size_t count) { static void downmix_to_stereo(const float *inBuffer, int channels, uint32_t config, float *outBuffer, size_t count) {
float FrontRatios[2] = { 0.0F, 0.0F }; float FrontRatios[2] = { 0.0F, 0.0F };
float FrontCenterRatio = 0.0F; float FrontCenterRatio = 0.0F;
@ -73,42 +71,39 @@ static void downmix_to_stereo(const float *inBuffer, int channels, uint32_t conf
channelIndexes[i] = [AudioChunk findChannelIndex:[AudioChunk extractChannelFlag:i fromConfig:config]]; channelIndexes[i] = [AudioChunk findChannelIndex:[AudioChunk extractChannelFlag:i fromConfig:config]];
} }
vDSP_vclr(outBuffer, 1, count * 2); for(size_t i = 0; i < count; ++i) {
float left = 0.0F, right = 0.0F;
float tempBuffer[count * 2]; for(uint32_t j = 0; j < channels; ++j) {
float inSample = inBuffer[i * channels + j];
for(uint32_t i = 0; i < channels; ++i) { switch(channelIndexes[j]) {
float leftRatio = 0.0F;
float rightRatio = 0.0F;
switch(channelIndexes[i]) {
case 0: case 0:
leftRatio = FrontRatios[0]; left += inSample * FrontRatios[0];
rightRatio = FrontRatios[1]; right += inSample * FrontRatios[1];
break; break;
case 1: case 1:
leftRatio = FrontRatios[1]; left += inSample * FrontRatios[1];
rightRatio = FrontRatios[0]; right += inSample * FrontRatios[0];
break; break;
case 2: case 2:
leftRatio = FrontCenterRatio; left += inSample * FrontCenterRatio;
rightRatio = FrontCenterRatio; right += inSample * FrontCenterRatio;
break; break;
case 3: case 3:
leftRatio = LFERatio; left += inSample * LFERatio;
rightRatio = LFERatio; right += inSample * LFERatio;
break; break;
case 4: case 4:
leftRatio = BackRatios[0]; left += inSample * BackRatios[0];
rightRatio = BackRatios[1]; right += inSample * BackRatios[1];
break; break;
case 5: case 5:
leftRatio = BackRatios[1]; left += inSample * BackRatios[1];
rightRatio = BackRatios[0]; right += inSample * BackRatios[0];
break; break;
case 6: case 6:
@ -116,18 +111,18 @@ static void downmix_to_stereo(const float *inBuffer, int channels, uint32_t conf
break; break;
case 8: case 8:
leftRatio = BackCenterRatio; left += inSample * BackCenterRatio;
rightRatio = BackCenterRatio; right += inSample * BackCenterRatio;
break; break;
case 9: case 9:
leftRatio = SideRatios[0]; left += inSample * SideRatios[0];
rightRatio = SideRatios[1]; right += inSample * SideRatios[1];
break; break;
case 10: case 10:
leftRatio = SideRatios[1]; left += inSample * SideRatios[1];
rightRatio = SideRatios[0]; right += inSample * SideRatios[0];
break; break;
case 11: case 11:
@ -140,25 +135,25 @@ static void downmix_to_stereo(const float *inBuffer, int channels, uint32_t conf
default: default:
break; break;
} }
vDSP_vsmul(inBuffer + i, channels, &leftRatio, tempBuffer, 1, count); }
vDSP_vsmul(inBuffer + i, channels, &rightRatio, tempBuffer + count, 1, count); outBuffer[i * 2 + 0] = left;
vDSP_vadd(outBuffer, 2, tempBuffer, 1, outBuffer, 2, count); outBuffer[i * 2 + 1] = right;
vDSP_vadd(outBuffer + 1, 2, tempBuffer + count, 1, outBuffer + 1, 2, count);
} }
} }
static void downmix_to_mono(const float *inBuffer, int channels, uint32_t config, float *outBuffer, size_t count) { static void downmix_to_mono(const float *inBuffer, int channels, uint32_t config, float *outBuffer, size_t count) {
float tempBuffer[count * 2]; float tempBuffer[count * 2];
if(channels > 2 || config != AudioConfigStereo) {
downmix_to_stereo(inBuffer, channels, config, tempBuffer, count); downmix_to_stereo(inBuffer, channels, config, tempBuffer, count);
inBuffer = tempBuffer; inBuffer = tempBuffer;
channels = 2; channels = 2;
config = AudioConfigStereo; config = AudioConfigStereo;
for(size_t i = 0; i < count; ++i) {
float sample = 0;
for(int j = 0; j < channels; ++j) {
sample += inBuffer[i * channels + j];
}
outBuffer[i] = sample;
} }
cblas_scopy((int)count, inBuffer, 2, outBuffer, 1);
vDSP_vadd(outBuffer, 1, inBuffer + 1, 2, outBuffer, 1, count);
const float scale = 0.5f;
vDSP_vsmul(outBuffer, 1, &scale, outBuffer, 1, count);
} }
static void upmix(const float *inBuffer, int inchannels, uint32_t inconfig, float *outBuffer, int outchannels, uint32_t outconfig, size_t count) { static void upmix(const float *inBuffer, int inchannels, uint32_t inconfig, float *outBuffer, int outchannels, uint32_t outconfig, size_t count) {
@ -254,8 +249,6 @@ static void upmix(const float *inBuffer, int inchannels, uint32_t inconfig, floa
@implementation DownmixProcessor @implementation DownmixProcessor
static void *kDownmixProcessorContext = &kDownmixProcessorContext;
- (id)initWithInputFormat:(AudioStreamBasicDescription)inf inputConfig:(uint32_t)iConfig andOutputFormat:(AudioStreamBasicDescription)outf outputConfig:(uint32_t)oConfig { - (id)initWithInputFormat:(AudioStreamBasicDescription)inf inputConfig:(uint32_t)iConfig andOutputFormat:(AudioStreamBasicDescription)outf outputConfig:(uint32_t)oConfig {
self = [super init]; self = [super init];
@ -279,17 +272,89 @@ static void *kDownmixProcessorContext = &kDownmixProcessorContext;
inConfig = iConfig; inConfig = iConfig;
outConfig = oConfig; outConfig = oConfig;
[self setupVirt];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.headphoneVirtualization" options:0 context:nil];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.hrirPath" options:0 context:nil];
} }
return self; return self;
} }
- (void)dealloc {
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.headphoneVirtualization"];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.hrirPath"];
}
- (void)setupVirt {
@synchronized(hFilter) {
hFilter = nil;
}
BOOL hVirt = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"headphoneVirtualization"];
if(hVirt &&
outputFormat.mChannelsPerFrame >= 2 &&
(outConfig & AudioConfigStereo) == AudioConfigStereo &&
inputFormat.mChannelsPerFrame >= 1 &&
(inConfig & (AudioConfig7Point1 | AudioChannelBackCenter)) != 0) {
NSString *userPreset = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] stringForKey:@"hrirPath"];
NSURL *presetUrl = nil;
if(userPreset && ![userPreset isEqualToString:@""]) {
presetUrl = [NSURL fileURLWithPath:userPreset];
if(![HeadphoneFilter validateImpulseFile:presetUrl])
presetUrl = nil;
}
if(!presetUrl) {
presetUrl = [[NSBundle mainBundle] URLForResource:@"gsx" withExtension:@"wv"];
if(![HeadphoneFilter validateImpulseFile:presetUrl])
presetUrl = nil;
}
if(presetUrl) {
@synchronized(hFilter) {
hFilter = [[HeadphoneFilter alloc] initWithImpulseFile:presetUrl forSampleRate:outputFormat.mSampleRate withInputChannels:inputFormat.mChannelsPerFrame withConfig:inConfig];
}
}
}
}
- (void)observeValueForKeyPath:(NSString *)keyPath
ofObject:(id)object
change:(NSDictionary *)change
context:(void *)context {
DLog(@"SOMETHING CHANGED!");
if([keyPath isEqualToString:@"values.headphoneVirtualization"] ||
[keyPath isEqualToString:@"values.hrirPath"]) {
// Reset the converter, without rebuffering
[self setupVirt];
}
}
- (void)process:(const void *)inBuffer frameCount:(size_t)frames output:(void *)outBuffer { - (void)process:(const void *)inBuffer frameCount:(size_t)frames output:(void *)outBuffer {
if(inputFormat.mChannelsPerFrame == 2 && outConfig == AudioConfigStereo && @synchronized(hFilter) {
inConfig == (AudioChannelSideLeft | AudioChannelSideRight)) { if(hFilter) {
// Workaround for HRTF output uint32_t outChannels = outputFormat.mChannelsPerFrame;
memcpy(outBuffer, inBuffer, frames * outputFormat.mBytesPerPacket); if(outChannels > 2) {
} else if(inputFormat.mChannelsPerFrame > 2 && outConfig == AudioConfigStereo) { float tempBuffer[frames * 2];
[hFilter process:(const float *)inBuffer sampleCount:frames toBuffer:&tempBuffer[0]];
cblas_scopy((int)frames, tempBuffer, 2, (float *)outBuffer, outChannels);
cblas_scopy((int)frames, tempBuffer + 1, 2, ((float *)outBuffer) + 1, outChannels);
for(size_t i = 2; i < outChannels; ++i) {
vDSP_vclr(((float *)outBuffer) + i, outChannels, (int)frames);
}
} else {
[hFilter process:(const float *)inBuffer sampleCount:frames toBuffer:(float *)outBuffer];
}
return;
}
}
if(inputFormat.mChannelsPerFrame > 2 && outConfig == AudioConfigStereo) {
downmix_to_stereo((const float *)inBuffer, inputFormat.mChannelsPerFrame, inConfig, (float *)outBuffer, frames); downmix_to_stereo((const float *)inBuffer, inputFormat.mChannelsPerFrame, inConfig, (float *)outBuffer, frames);
} else if(inputFormat.mChannelsPerFrame > 1 && outConfig == AudioConfigMono) { } else if(inputFormat.mChannelsPerFrame > 1 && outConfig == AudioConfigMono) {
downmix_to_mono((const float *)inBuffer, inputFormat.mChannelsPerFrame, inConfig, (float *)outBuffer, frames); downmix_to_mono((const float *)inBuffer, inputFormat.mChannelsPerFrame, inConfig, (float *)outBuffer, frames);

View file

@ -0,0 +1,53 @@
//
// HeadphoneFilter.h
// CogAudio Framework
//
// Created by Christopher Snowhill on 1/24/22.
//
#ifndef HeadphoneFilter_h
#define HeadphoneFilter_h
#import <Accelerate/Accelerate.h>
#import <Cocoa/Cocoa.h>
@interface HeadphoneFilter : NSObject {
FFTSetup fftSetup;
size_t fftSize;
size_t fftSizeOver2;
size_t log2n;
size_t log2nhalf;
size_t bufferSize;
size_t paddedBufferSize;
size_t channelCount;
COMPLEX_SPLIT signal_fft;
COMPLEX_SPLIT input_filtered_signal_per_channel[2];
COMPLEX_SPLIT *impulse_responses;
float *left_result;
float *right_result;
float *left_mix_result;
float *right_mix_result;
float *paddedSignal;
float *prevOverlapLeft;
float *prevOverlapRight;
int prevOverlapLength;
}
+ (BOOL)validateImpulseFile:(NSURL *)url;
- (id)initWithImpulseFile:(NSURL *)url forSampleRate:(double)sampleRate withInputChannels:(size_t)channels withConfig:(uint32_t)config;
- (void)process:(const float *)inBuffer sampleCount:(size_t)count toBuffer:(float *)outBuffer;
- (void)reset;
@end
#endif /* HeadphoneFilter_h */
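
The replacement filter declared here trades direct convolution for block FFT convolution with overlap-add: input is taken in bufferSize-frame blocks, zero-padded to fftSize, multiplied against the pre-transformed impulse_responses, and the convolution tail that extends past the block is carried in prevOverlapLeft/prevOverlapRight and folded into the next block. A plain-C sketch of that bookkeeping, assuming convResult already holds one block's full linear-convolution result; the names are illustrative, not the actual implementation:

/* Sketch only: overlap-add bookkeeping for one block. convResult holds
 * blockLen + irLen - 1 samples of linear convolution; the first blockLen are
 * emitted after adding the saved overlap, the remainder becomes the overlap
 * carried into the next block. */
static void overlap_add_block(const float *convResult, int blockLen, int irLen,
                              float *overlap, int *overlapLen, float *out) {
	int tail = irLen - 1;
	for(int i = 0; i < blockLen; ++i) {
		float s = convResult[i];
		if(i < *overlapLen) s += overlap[i];
		out[i] = s;
	}
	for(int i = 0; i < tail; ++i) {
		float s = convResult[blockLen + i];
		if(blockLen + i < *overlapLen) s += overlap[blockLen + i];
		overlap[i] = s; /* safe: write index i stays behind the read index blockLen + i */
	}
	*overlapLen = tail;
}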

View file

@ -0,0 +1,539 @@
//
// HeadphoneFilter.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 1/24/22.
//
#import "HeadphoneFilter.h"
#import "AudioChunk.h"
#import "AudioDecoder.h"
#import "AudioSource.h"
#import <stdlib.h>
#import "r8bstate.h"
#import "lpc.h"
#import "util.h"
// Apparently _mm_malloc is Intel-only on newer macOS targets, so use supported posix_memalign
static void *_memalign_malloc(size_t size, size_t align) {
void *ret = NULL;
if(posix_memalign(&ret, align, size) != 0) {
return NULL;
}
return ret;
}
@implementation HeadphoneFilter
enum {
speaker_is_back_center = -1,
speaker_not_present = -2,
};
static const uint32_t max_speaker_index = 10;
static const int8_t speakers_to_hesuvi_7[11][2] = {
// front left
{ 0, 1 },
// front right
{ 1, 0 },
// front center
{ 6, 6 },
// lfe
{ 6, 6 },
// back left
{ 4, 5 },
// back right
{ 5, 4 },
// front center left
{ speaker_not_present, speaker_not_present },
// front center right
{ speaker_not_present, speaker_not_present },
// back center
{ speaker_is_back_center, speaker_is_back_center },
// side left
{ 2, 3 },
// side right
{ 3, 2 }
};
static const int8_t speakers_to_hesuvi_14[11][2] = {
// front left
{ 0, 1 },
// front right
{ 8, 7 },
// front center
{ 6, 13 },
// lfe
{ 6, 13 },
// back left
{ 4, 5 },
// back right
{ 12, 11 },
// front center left
{ speaker_not_present, speaker_not_present },
// front center right
{ speaker_not_present, speaker_not_present },
// back center
{ speaker_is_back_center, speaker_is_back_center },
// side left
{ 2, 3 },
// side right
{ 10, 9 }
};
+ (BOOL)validateImpulseFile:(NSURL *)url {
id<CogSource> source = [AudioSource audioSourceForURL:url];
if(!source)
return NO;
if(![source open:url])
return NO;
id<CogDecoder> decoder = [AudioDecoder audioDecoderForSource:source];
if(decoder == nil) {
[source close];
source = nil;
return NO;
}
if(![decoder open:source]) {
decoder = nil;
[source close];
source = nil;
return NO;
}
NSDictionary *properties = [decoder properties];
[decoder close];
decoder = nil;
[source close];
source = nil;
int impulseChannels = [[properties objectForKey:@"channels"] intValue];
if([[properties objectForKey:@"floatingPoint"] boolValue] != YES ||
[[properties objectForKey:@"bitsPerSample"] intValue] != 32 ||
!([[properties objectForKey:@"endian"] isEqualToString:@"host"] ||
[[properties objectForKey:@"endian"] isEqualToString:@"little"]) ||
(impulseChannels != 14 && impulseChannels != 7))
return NO;
return YES;
}
- (id)initWithImpulseFile:(NSURL *)url forSampleRate:(double)sampleRate withInputChannels:(size_t)channels withConfig:(uint32_t)config {
self = [super init];
if(self) {
id<CogSource> source = [AudioSource audioSourceForURL:url];
if(!source)
return nil;
if(![source open:url])
return nil;
id<CogDecoder> decoder = [AudioDecoder audioDecoderForSource:source];
if(decoder == nil) {
[source close];
source = nil;
return nil;
}
if(![decoder open:source]) {
decoder = nil;
[source close];
source = nil;
return nil;
}
NSDictionary *properties = [decoder properties];
double sampleRateOfSource = [[properties objectForKey:@"sampleRate"] floatValue];
int sampleCount = [[properties objectForKey:@"totalFrames"] intValue];
int impulseChannels = [[properties objectForKey:@"channels"] intValue];
if([[properties objectForKey:@"floatingPoint"] boolValue] != YES ||
[[properties objectForKey:@"bitsPerSample"] intValue] != 32 ||
!([[properties objectForKey:@"endian"] isEqualToString:@"host"] ||
[[properties objectForKey:@"endian"] isEqualToString:@"little"]) ||
(impulseChannels != 14 && impulseChannels != 7)) {
[decoder close];
decoder = nil;
[source close];
source = nil;
return nil;
}
float *impulseBuffer = (float *)malloc(sampleCount * sizeof(float) * impulseChannels);
if(!impulseBuffer) {
[decoder close];
decoder = nil;
[source close];
source = nil;
return nil;
}
if([decoder readAudio:impulseBuffer frames:sampleCount] != sampleCount) {
[decoder close];
decoder = nil;
[source close];
source = nil;
return nil;
}
[decoder close];
decoder = nil;
[source close];
source = nil;
if(sampleRateOfSource != sampleRate) {
double sampleRatio = sampleRate / sampleRateOfSource;
int resampledCount = (int)ceil((double)sampleCount * sampleRatio);
r8bstate *_r8bstate = new r8bstate(impulseChannels, 1024, sampleRateOfSource, sampleRate);
unsigned long PRIME_LEN_ = MAX(sampleRateOfSource / 20, 1024u);
PRIME_LEN_ = MIN(PRIME_LEN_, 16384u);
PRIME_LEN_ = MAX(PRIME_LEN_, 2 * LPC_ORDER + 1);
unsigned int N_samples_to_add_ = sampleRateOfSource;
unsigned int N_samples_to_drop_ = sampleRate;
samples_len(&N_samples_to_add_, &N_samples_to_drop_, 20, 8192u);
int resamplerLatencyIn = (int)N_samples_to_add_;
int resamplerLatencyOut = (int)N_samples_to_drop_;
float *tempImpulse = (float *)realloc(impulseBuffer, (sampleCount + resamplerLatencyIn * 2 + 1024) * sizeof(float) * impulseChannels);
if(!tempImpulse) {
free(impulseBuffer);
return nil;
}
impulseBuffer = tempImpulse;
resampledCount += resamplerLatencyOut * 2 + 1024;
float *resampledImpulse = (float *)malloc(resampledCount * sizeof(float) * impulseChannels);
if(!resampledImpulse) {
free(impulseBuffer);
return nil;
}
size_t prime = MIN(sampleCount, PRIME_LEN_);
void *extrapolate_buffer = NULL;
size_t extrapolate_buffer_size = 0;
memmove(impulseBuffer + resamplerLatencyIn * impulseChannels, impulseBuffer, sampleCount * sizeof(float) * impulseChannels);
lpc_extrapolate_bkwd(impulseBuffer + N_samples_to_add_ * impulseChannels, sampleCount, prime, impulseChannels, LPC_ORDER, N_samples_to_add_, &extrapolate_buffer, &extrapolate_buffer_size);
lpc_extrapolate_fwd(impulseBuffer + N_samples_to_add_ * impulseChannels, sampleCount, prime, impulseChannels, LPC_ORDER, N_samples_to_add_, &extrapolate_buffer, &extrapolate_buffer_size);
free(extrapolate_buffer);
size_t inputDone = 0;
size_t outputDone = 0;
outputDone = _r8bstate->resample(impulseBuffer, sampleCount + N_samples_to_add_ * 2, &inputDone, resampledImpulse, resampledCount);
if (outputDone < resampledCount) {
outputDone += _r8bstate->flush(resampledImpulse + outputDone * impulseChannels, resampledCount - outputDone);
}
delete _r8bstate;
outputDone -= N_samples_to_drop_ * 2;
memmove(resampledImpulse, resampledImpulse + N_samples_to_drop_ * impulseChannels, outputDone * sizeof(float) * impulseChannels);
free(impulseBuffer);
impulseBuffer = resampledImpulse;
sampleCount = (int)outputDone;
}
channelCount = channels;
bufferSize = 512;
fftSize = sampleCount + bufferSize;
int pow = 1;
while(fftSize > 2) {
pow++;
fftSize /= 2;
}
fftSize = 2 << pow;
float *deinterleavedImpulseBuffer = (float *)_memalign_malloc(fftSize * sizeof(float) * (impulseChannels + 1), 16);
if(!deinterleavedImpulseBuffer) {
free(impulseBuffer);
return nil;
}
for(size_t i = 0; i < impulseChannels; ++i) {
cblas_scopy(sampleCount, impulseBuffer + i, impulseChannels, deinterleavedImpulseBuffer + i * fftSize, 1);
vDSP_vclr(deinterleavedImpulseBuffer + i * fftSize + sampleCount, 1, fftSize - sampleCount);
}
free(impulseBuffer);
// Null impulse
vDSP_vclr(deinterleavedImpulseBuffer + impulseChannels * fftSize, 1, fftSize);
paddedBufferSize = fftSize;
fftSizeOver2 = (fftSize + 1) / 2;
log2n = log2f(fftSize);
log2nhalf = log2n / 2;
fftSetup = vDSP_create_fftsetup(log2n, FFT_RADIX2);
if(!fftSetup) {
free(deinterleavedImpulseBuffer);
return nil;
}
paddedSignal = (float *)_memalign_malloc(sizeof(float) * paddedBufferSize, 16);
if(!paddedSignal) {
free(deinterleavedImpulseBuffer);
return nil;
}
signal_fft.realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2, 16);
signal_fft.imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2, 16);
if(!signal_fft.realp || !signal_fft.imagp) {
free(deinterleavedImpulseBuffer);
return nil;
}
input_filtered_signal_per_channel[0].realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2, 16);
input_filtered_signal_per_channel[0].imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2, 16);
if(!input_filtered_signal_per_channel[0].realp ||
!input_filtered_signal_per_channel[0].imagp) {
free(deinterleavedImpulseBuffer);
return nil;
}
input_filtered_signal_per_channel[1].realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2, 16);
input_filtered_signal_per_channel[1].imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2, 16);
if(!input_filtered_signal_per_channel[1].realp ||
!input_filtered_signal_per_channel[1].imagp) {
free(deinterleavedImpulseBuffer);
return nil;
}
impulse_responses = (COMPLEX_SPLIT *)calloc(sizeof(COMPLEX_SPLIT), channels * 2);
if(!impulse_responses) {
free(deinterleavedImpulseBuffer);
return nil;
}
for(size_t i = 0; i < channels; ++i) {
impulse_responses[i * 2 + 0].realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2, 16);
impulse_responses[i * 2 + 0].imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2, 16);
impulse_responses[i * 2 + 1].realp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2, 16);
impulse_responses[i * 2 + 1].imagp = (float *)_memalign_malloc(sizeof(float) * fftSizeOver2, 16);
if(!impulse_responses[i * 2 + 0].realp || !impulse_responses[i * 2 + 0].imagp ||
!impulse_responses[i * 2 + 1].realp || !impulse_responses[i * 2 + 1].imagp) {
free(deinterleavedImpulseBuffer);
return nil;
}
uint32_t channelFlag = [AudioChunk extractChannelFlag:(uint32_t)i fromConfig:config];
uint32_t channelIndex = [AudioChunk findChannelIndex:channelFlag];
int leftInChannel = speaker_not_present;
int rightInChannel = speaker_not_present;
if(impulseChannels == 7) {
if(channelIndex <= max_speaker_index) {
leftInChannel = speakers_to_hesuvi_7[channelIndex][0];
rightInChannel = speakers_to_hesuvi_7[channelIndex][1];
}
} else {
if(channelIndex <= max_speaker_index) {
leftInChannel = speakers_to_hesuvi_14[channelIndex][0];
rightInChannel = speakers_to_hesuvi_14[channelIndex][1];
}
}
if(leftInChannel == speaker_is_back_center || rightInChannel == speaker_is_back_center) {
float *temp;
if(impulseChannels == 7) {
temp = (float *)malloc(sizeof(float) * fftSize);
if(!temp) {
free(deinterleavedImpulseBuffer);
return nil;
}
cblas_scopy((int)fftSize, deinterleavedImpulseBuffer + 4 * fftSize, 1, temp, 1);
vDSP_vadd(temp, 1, deinterleavedImpulseBuffer + 5 * fftSize, 1, temp, 1, fftSize);
vDSP_ctoz((DSPComplex *)temp, 2, &impulse_responses[i * 2 + 0], 1, fftSizeOver2);
vDSP_ctoz((DSPComplex *)temp, 2, &impulse_responses[i * 2 + 1], 1, fftSizeOver2);
} else {
temp = (float *)malloc(sizeof(float) * fftSize * 2);
if(!temp) {
free(deinterleavedImpulseBuffer);
return nil;
}
cblas_scopy((int)fftSize, deinterleavedImpulseBuffer + 4 * fftSize, 1, temp, 1);
vDSP_vadd(temp, 1, deinterleavedImpulseBuffer + 12 * fftSize, 1, temp, 1, fftSize);
cblas_scopy((int)fftSize, deinterleavedImpulseBuffer + 5 * fftSize, 1, temp + fftSize, 1);
vDSP_vadd(temp + fftSize, 1, deinterleavedImpulseBuffer + 11 * fftSize, 1, temp + fftSize, 1, fftSize);
vDSP_ctoz((DSPComplex *)temp, 2, &impulse_responses[i * 2 + 0], 1, fftSizeOver2);
vDSP_ctoz((DSPComplex *)(temp + fftSize), 2, &impulse_responses[i * 2 + 1], 1, fftSizeOver2);
}
free(temp);
} else if(leftInChannel == speaker_not_present || rightInChannel == speaker_not_present) {
vDSP_ctoz((DSPComplex *)(deinterleavedImpulseBuffer + impulseChannels * fftSize), 2, &impulse_responses[i * 2 + 0], 1, fftSizeOver2);
vDSP_ctoz((DSPComplex *)(deinterleavedImpulseBuffer + impulseChannels * fftSize), 2, &impulse_responses[i * 2 + 1], 1, fftSizeOver2);
} else {
vDSP_ctoz((DSPComplex *)(deinterleavedImpulseBuffer + leftInChannel * fftSize), 2, &impulse_responses[i * 2 + 0], 1, fftSizeOver2);
vDSP_ctoz((DSPComplex *)(deinterleavedImpulseBuffer + rightInChannel * fftSize), 2, &impulse_responses[i * 2 + 1], 1, fftSizeOver2);
}
vDSP_fft_zrip(fftSetup, &impulse_responses[i * 2 + 0], 1, log2n, FFT_FORWARD);
vDSP_fft_zrip(fftSetup, &impulse_responses[i * 2 + 1], 1, log2n, FFT_FORWARD);
}
free(deinterleavedImpulseBuffer);
left_result = (float *)_memalign_malloc(sizeof(float) * fftSize, 16);
right_result = (float *)_memalign_malloc(sizeof(float) * fftSize, 16);
if(!left_result || !right_result)
return nil;
prevOverlapLeft = (float *)_memalign_malloc(sizeof(float) * fftSize, 16);
prevOverlapRight = (float *)_memalign_malloc(sizeof(float) * fftSize, 16);
if(!prevOverlapLeft || !prevOverlapRight)
return nil;
left_mix_result = (float *)_memalign_malloc(sizeof(float) * fftSize, 16);
right_mix_result = (float *)_memalign_malloc(sizeof(float) * fftSize, 16);
if(!left_mix_result || !right_mix_result)
return nil;
prevOverlapLength = 0;
}
return self;
}
- (void)dealloc {
if(fftSetup) vDSP_destroy_fftsetup(fftSetup);
free(paddedSignal);
free(signal_fft.realp);
free(signal_fft.imagp);
free(input_filtered_signal_per_channel[0].realp);
free(input_filtered_signal_per_channel[0].imagp);
free(input_filtered_signal_per_channel[1].realp);
free(input_filtered_signal_per_channel[1].imagp);
if(impulse_responses) {
for(size_t i = 0; i < channelCount * 2; ++i) {
free(impulse_responses[i].realp);
free(impulse_responses[i].imagp);
}
free(impulse_responses);
}
free(left_result);
free(right_result);
free(prevOverlapLeft);
free(prevOverlapRight);
free(left_mix_result);
free(right_mix_result);
}
- (void)process:(const float *)inBuffer sampleCount:(size_t)count toBuffer:(float *)outBuffer {
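// Overlap-add convolution: each input block is zero-padded to fftSize, filtered in the
// frequency domain, and the tail that extends past the block is carried into the next block.
// The 1/(4*fftSize) scale undoes vDSP's 2x forward FFT scaling on both operands and the
// unnormalized inverse FFT.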
const float scale = 1.0 / (4.0 * (float)fftSize);
while(count > 0) {
size_t countToDo = (count > bufferSize) ? bufferSize : count;
vDSP_vclr(left_mix_result, 1, fftSize);
vDSP_vclr(right_mix_result, 1, fftSize);
for(size_t i = 0; i < channelCount; ++i) {
cblas_scopy((int)countToDo, inBuffer + i, (int)channelCount, paddedSignal, 1);
vDSP_vclr(paddedSignal + countToDo, 1, paddedBufferSize - countToDo);
vDSP_ctoz((DSPComplex *)paddedSignal, 2, &signal_fft, 1, fftSizeOver2);
vDSP_fft_zrip(fftSetup, &signal_fft, 1, log2n, FFT_FORWARD);
// One channel forward, then multiply and back twice.
// vDSP packs DC into realp[0] and Nyquist into imagp[0], so the Nyquist bins
// are pulled out before each complex multiply and handled separately.
float preserveIRNyq = impulse_responses[i * 2 + 0].imagp[0];
float preserveSigNyq = signal_fft.imagp[0];
impulse_responses[i * 2 + 0].imagp[0] = 0;
signal_fft.imagp[0] = 0;
vDSP_zvmul(&signal_fft, 1, &impulse_responses[i * 2 + 0], 1, &input_filtered_signal_per_channel[0], 1, fftSizeOver2, 1);
input_filtered_signal_per_channel[0].imagp[0] = preserveIRNyq * preserveSigNyq;
impulse_responses[i * 2 + 0].imagp[0] = preserveIRNyq;
preserveIRNyq = impulse_responses[i * 2 + 1].imagp[0];
impulse_responses[i * 2 + 1].imagp[0] = 0;
vDSP_zvmul(&signal_fft, 1, &impulse_responses[i * 2 + 1], 1, &input_filtered_signal_per_channel[1], 1, fftSizeOver2, 1);
input_filtered_signal_per_channel[1].imagp[0] = preserveIRNyq * preserveSigNyq;
impulse_responses[i * 2 + 1].imagp[0] = preserveIRNyq;
vDSP_fft_zrip(fftSetup, &input_filtered_signal_per_channel[0], 1, log2n, FFT_INVERSE);
vDSP_fft_zrip(fftSetup, &input_filtered_signal_per_channel[1], 1, log2n, FFT_INVERSE);
vDSP_ztoc(&input_filtered_signal_per_channel[0], 1, (DSPComplex *)left_result, 2, fftSizeOver2);
vDSP_ztoc(&input_filtered_signal_per_channel[1], 1, (DSPComplex *)right_result, 2, fftSizeOver2);
vDSP_vadd(left_mix_result, 1, left_result, 1, left_mix_result, 1, fftSize);
vDSP_vadd(right_mix_result, 1, right_result, 1, right_mix_result, 1, fftSize);
}
// Integrate previous overlap
if(prevOverlapLength) {
vDSP_vadd(prevOverlapLeft, 1, left_mix_result, 1, left_mix_result, 1, prevOverlapLength);
vDSP_vadd(prevOverlapRight, 1, right_mix_result, 1, right_mix_result, 1, prevOverlapLength);
}
prevOverlapLength = (int)(fftSize - countToDo);
cblas_scopy(prevOverlapLength, left_mix_result + countToDo, 1, prevOverlapLeft, 1);
cblas_scopy(prevOverlapLength, right_mix_result + countToDo, 1, prevOverlapRight, 1);
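// The carried tail is stored unscaled; it gets scaled when the block it is added to is emitted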
vDSP_vsmul(left_mix_result, 1, &scale, left_mix_result, 1, countToDo);
vDSP_vsmul(right_mix_result, 1, &scale, right_mix_result, 1, countToDo);
cblas_scopy((int)countToDo, left_mix_result, 1, outBuffer + 0, 2);
cblas_scopy((int)countToDo, right_mix_result, 1, outBuffer + 1, 2);
inBuffer += countToDo * channelCount;
outBuffer += countToDo * 2;
count -= countToDo;
}
}
- (void)reset {
prevOverlapLength = 0;
}
@end
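
For reference, the overlap-add bookkeeping in -process:sampleCount:toBuffer: above can be sanity-checked against a plain time-domain version. The sketch below is a minimal, unoptimized C equivalent (hypothetical names, no vDSP): it convolves one channel block by block and carries the convolution tail into the next block, which is the same bookkeeping prevOverlapLeft/prevOverlapRight perform in the FFT path.

// --- sketch only, not part of this diff ---
#include <stdlib.h>
#include <string.h>

// One channel of time-domain overlap-add convolution. `overlap` must be
// zero-initialized and hold irLength - 1 floats.
typedef struct {
	const float *ir; // impulse response, irLength samples
	int irLength;
	float *overlap;  // carried tail from the previous block
} OverlapAddState;

static void overlap_add_block(OverlapAddState *st, const float *in, int count, float *out) {
	int tail = st->irLength - 1;
	float *mix = (float *)calloc(count + tail, sizeof(float));
	if(!mix) return;
	// Direct convolution of this block with the impulse response
	for(int n = 0; n < count; n++)
		for(int k = 0; k < st->irLength; k++)
			mix[n + k] += in[n] * st->ir[k];
	// Add the tail carried over from the previous block
	for(int n = 0; n < tail; n++)
		mix[n] += st->overlap[n];
	// The first `count` samples are finished output; the rest becomes the new tail
	memcpy(out, mix, count * sizeof(float));
	memcpy(st->overlap, mix + count, tail * sizeof(float));
	free(mix);
}
// --- end sketch ---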

View file

@ -12,9 +12,9 @@
#import <AudioUnit/AudioUnit.h> #import <AudioUnit/AudioUnit.h>
#import <CoreAudio/AudioHardware.h> #import <CoreAudio/AudioHardware.h>
#import <CogAudio/AudioDecoder.h> #import "AudioDecoder.h"
#import <CogAudio/Node.h> #import "Node.h"
#import <CogAudio/Plugin.h> #import "Plugin.h"
#define INPUT_NODE_SEEK #define INPUT_NODE_SEEK
@ -33,22 +33,19 @@
Semaphore *exitAtTheEndOfTheStream; Semaphore *exitAtTheEndOfTheStream;
} }
@property(readonly) Semaphore * _Nonnull exitAtTheEndOfTheStream; @property(readonly) Semaphore *exitAtTheEndOfTheStream;
@property(readonly) BOOL threadExited;
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p; - (BOOL)openWithSource:(id<CogSource>)source;
- (BOOL)openWithDecoder:(id<CogDecoder>)d;
- (BOOL)openWithSource:(id<CogSource>_Nonnull)source;
- (BOOL)openWithDecoder:(id<CogDecoder>_Nonnull)d;
- (void)process; - (void)process;
- (NSDictionary *_Nonnull)properties; - (NSDictionary *)properties;
- (void)seek:(long)frame; - (void)seek:(long)frame;
- (void)registerObservers; - (void)registerObservers;
- (BOOL)setTrack:(NSURL *_Nonnull)track; - (BOOL)setTrack:(NSURL *)track;
- (id<CogDecoder>_Nonnull)decoder; - (id<CogDecoder>)decoder;
@end @end

View file

@ -18,25 +18,18 @@
#import "Logging.h" #import "Logging.h"
@implementation InputNode @implementation InputNode
static void *kInputNodeContext = &kInputNodeContext;
@synthesize threadExited;
@synthesize exitAtTheEndOfTheStream; @synthesize exitAtTheEndOfTheStream;
- (id)initWithController:(id)c previous:(id)p { - (id)initWithController:(id)c previous:(id)p {
self = [super initWithController:c previous:p]; self = [super initWithController:c previous:p];
if(self) { if(self) {
exitAtTheEndOfTheStream = [[Semaphore alloc] init]; exitAtTheEndOfTheStream = [[Semaphore alloc] init];
threadExited = NO;
} }
return self; return self;
} }
- (BOOL)openWithSource:(id<CogSource>)source { - (BOOL)openWithSource:(id<CogSource>)source {
[self removeObservers];
decoder = [AudioDecoder audioDecoderForSource:source]; decoder = [AudioDecoder audioDecoderForSource:source];
if(decoder == nil) if(decoder == nil)
@ -67,8 +60,6 @@ static void *kInputNodeContext = &kInputNodeContext;
} }
- (BOOL)openWithDecoder:(id<CogDecoder>)d { - (BOOL)openWithDecoder:(id<CogDecoder>)d {
[self removeObservers];
DLog(@"Opening with old decoder: %@", d); DLog(@"Opening with old decoder: %@", d);
decoder = d; decoder = d;
@ -93,27 +84,24 @@ static void *kInputNodeContext = &kInputNodeContext;
} }
- (void)registerObservers { - (void)registerObservers {
if(!observersAdded) {
DLog(@"REGISTERING OBSERVERS"); DLog(@"REGISTERING OBSERVERS");
[decoder addObserver:self [decoder addObserver:self
forKeyPath:@"properties" forKeyPath:@"properties"
options:(NSKeyValueObservingOptionNew) options:(NSKeyValueObservingOptionNew)
context:kInputNodeContext]; context:NULL];
[decoder addObserver:self [decoder addObserver:self
forKeyPath:@"metadata" forKeyPath:@"metadata"
options:(NSKeyValueObservingOptionNew) options:(NSKeyValueObservingOptionNew)
context:kInputNodeContext]; context:NULL];
observersAdded = YES; observersAdded = YES;
}
} }
- (void)observeValueForKeyPath:(NSString *)keyPath - (void)observeValueForKeyPath:(NSString *)keyPath
ofObject:(id)object ofObject:(id)object
change:(NSDictionary *)change change:(NSDictionary *)change
context:(void *)context { context:(void *)context {
if(context == kInputNodeContext) {
DLog(@"SOMETHING CHANGED!"); DLog(@"SOMETHING CHANGED!");
if([keyPath isEqual:@"properties"]) { if([keyPath isEqual:@"properties"]) {
DLog(@"Input format changed"); DLog(@"Input format changed");
@ -138,12 +126,13 @@ static void *kInputNodeContext = &kInputNodeContext;
[controller pushInfo:entryInfo]; [controller pushInfo:entryInfo];
} }
} else {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
} }
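
One side of this hunk registers its observers with a file-static context pointer instead of NULL, so the node can tell its own notifications apart from any registered elsewhere in the class hierarchy and forward the rest to super. A minimal sketch of that pattern (MyNode and its decoder ivar are illustrative only):

// --- sketch only, not part of this diff ---
#import <Foundation/Foundation.h>

static void *kMyNodeContext = &kMyNodeContext;

@interface MyNode : NSObject {
	NSObject *decoder;
}
@end

@implementation MyNode
- (void)registerObservers {
	[decoder addObserver:self
	          forKeyPath:@"properties"
	             options:NSKeyValueObservingOptionNew
	             context:kMyNodeContext];
}
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
	if(context == kMyNodeContext) {
		// One of ours; react to the change here.
	} else {
		// Not registered by this class; pass it up the chain.
		[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
	}
}
- (void)removeObservers {
	[decoder removeObserver:self forKeyPath:@"properties" context:kMyNodeContext];
}
@end
// --- end sketch ---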
- (void)process { - (void)process {
int amountInBuffer = 0;
int bytesInBuffer = 0;
void *inputBuffer = malloc(CHUNK_SIZE * 8 * 18); // Maximum 18 channels, dunno what we'll receive
BOOL shouldClose = YES; BOOL shouldClose = YES;
BOOL seekError = NO; BOOL seekError = NO;
@ -159,19 +148,20 @@ static void *kInputNodeContext = &kInputNodeContext;
while([self shouldContinue] == YES && [self endOfStream] == NO) { while([self shouldContinue] == YES && [self endOfStream] == NO) {
if(shouldSeek == YES) { if(shouldSeek == YES) {
BufferChain *bufferChain = controller; BufferChain *bufferChain = [[controller controller] bufferChain];
AudioPlayer *audioPlayer = [bufferChain controller]; ConverterNode *converter = [bufferChain converter];
OutputNode *outputNode = [audioPlayer output];
DLog(@"SEEKING! Resetting Buffer"); DLog(@"SEEKING! Resetting Buffer");
[outputNode resetBackwards];
amountInBuffer = 0;
// This resets the converter's buffer
[self resetBuffer];
[converter resetBuffer];
[converter inputFormatDidChange:[bufferChain inputFormat] inputConfig:[bufferChain inputConfig]];
DLog(@"Reset buffer!"); DLog(@"Reset buffer!");
DLog(@"SEEKING!"); DLog(@"SEEKING!");
@autoreleasepool {
seekError = [decoder seek:seekFrame] < 0; seekError = [decoder seek:seekFrame] < 0;
}
shouldSeek = NO; shouldSeek = NO;
DLog(@"Seeked! Resetting Buffer"); DLog(@"Seeked! Resetting Buffer");
@ -182,22 +172,20 @@ static void *kInputNodeContext = &kInputNodeContext;
} }
} }
AudioChunk *chunk; if(amountInBuffer < CHUNK_SIZE) {
int framesToRead = CHUNK_SIZE - amountInBuffer;
int framesRead;
@autoreleasepool { @autoreleasepool {
chunk = [decoder readAudio]; framesRead = [decoder readAudio:((char *)inputBuffer) + bytesInBuffer frames:framesToRead];
} }
if(chunk && [chunk frameCount]) { if(framesRead > 0 && !seekError) {
@autoreleasepool { amountInBuffer += framesRead;
[self writeChunk:chunk]; bytesInBuffer += framesRead * bytesPerFrame;
chunk = nil; [self writeData:inputBuffer amount:bytesInBuffer];
} amountInBuffer = 0;
bytesInBuffer = 0;
} else { } else {
if(chunk) {
@autoreleasepool {
chunk = nil;
}
}
DLog(@"End of stream? %@", [self properties]); DLog(@"End of stream? %@", [self properties]);
endOfStream = YES; endOfStream = YES;
@ -213,22 +201,24 @@ static void *kInputNodeContext = &kInputNodeContext;
// wait before exiting, as we might still get seeking request // wait before exiting, as we might still get seeking request
DLog("InputNode: Before wait") DLog("InputNode: Before wait")
[exitAtTheEndOfTheStream waitIndefinitely]; [exitAtTheEndOfTheStream waitIndefinitely];
DLog("InputNode: After wait, should seek = %d", shouldSeek); DLog("InputNode: After wait, should seek = %d", shouldSeek) if(shouldSeek) {
if(shouldSeek) {
endOfStream = NO; endOfStream = NO;
shouldClose = NO; shouldClose = NO;
continue; continue;
} else { }
else {
break; break;
} }
} }
} }
}
if(shouldClose) if(shouldClose)
[decoder close]; [decoder close];
free(inputBuffer);
[exitAtTheEndOfTheStream signal]; [exitAtTheEndOfTheStream signal];
threadExited = YES;
DLog("Input node thread stopping"); DLog("Input node thread stopping");
} }
@ -237,8 +227,7 @@ static void *kInputNodeContext = &kInputNodeContext;
seekFrame = frame; seekFrame = frame;
shouldSeek = YES; shouldSeek = YES;
DLog(@"Should seek!"); DLog(@"Should seek!");
[self resetBuffer]; [semaphore signal];
[writeSemaphore signal];
if(endOfStream) { if(endOfStream) {
[exitAtTheEndOfTheStream signal]; [exitAtTheEndOfTheStream signal];
@ -257,8 +246,8 @@ static void *kInputNodeContext = &kInputNodeContext;
- (void)removeObservers { - (void)removeObservers {
if(observersAdded) { if(observersAdded) {
[decoder removeObserver:self forKeyPath:@"properties" context:kInputNodeContext]; [decoder removeObserver:self forKeyPath:@"properties"];
[decoder removeObserver:self forKeyPath:@"metadata" context:kInputNodeContext]; [decoder removeObserver:self forKeyPath:@"metadata"];
observersAdded = NO; observersAdded = NO;
} }
} }
@ -272,7 +261,6 @@ static void *kInputNodeContext = &kInputNodeContext;
- (void)dealloc { - (void)dealloc {
DLog(@"Input Node dealloc"); DLog(@"Input Node dealloc");
[self removeObservers]; [self removeObservers];
[super cleanUp];
} }
- (NSDictionary *)properties { - (NSDictionary *)properties {

View file

@ -6,34 +6,24 @@
// Copyright 2006 Vincent Spader. All rights reserved. // Copyright 2006 Vincent Spader. All rights reserved.
// //
#import <CogAudio/ChunkList.h> #import "ChunkList.h"
#import <CogAudio/CogSemaphore.h> #import "Semaphore.h"
#import <Cocoa/Cocoa.h> #import <Cocoa/Cocoa.h>
#import <os/workgroup.h>
#define BUFFER_SIZE 1024 * 1024 #define BUFFER_SIZE 1024 * 1024
#define CHUNK_SIZE 16 * 1024 #define CHUNK_SIZE 16 * 1024
//#define LOG_CHAINS 1
@interface Node : NSObject { @interface Node : NSObject {
ChunkList *buffer; ChunkList *buffer;
Semaphore *writeSemaphore; Semaphore *semaphore;
Semaphore *readSemaphore;
NSLock *accessLock; NSRecursiveLock *accessLock;
id __weak previousNode; id __weak previousNode;
id __weak controller; id __weak controller;
BOOL shouldReset; BOOL shouldReset;
BOOL inWrite;
BOOL inPeek;
BOOL inRead;
BOOL inMerge;
BOOL shouldContinue; BOOL shouldContinue;
BOOL endOfStream; // All data is now in buffer BOOL endOfStream; // All data is now in buffer
BOOL initialBufferFilled; BOOL initialBufferFilled;
@ -41,34 +31,13 @@
AudioStreamBasicDescription nodeFormat; AudioStreamBasicDescription nodeFormat;
uint32_t nodeChannelConfig; uint32_t nodeChannelConfig;
BOOL nodeLossless; BOOL nodeLossless;
double durationPrebuffer;
#ifdef LOG_CHAINS
NSFileHandle *logFileOut;
NSFileHandle *logFileIn;
#endif
} }
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p; - (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p;
#ifdef LOG_CHAINS
- (void)initLogFiles;
#endif
- (void)cleanUp;
- (BOOL)paused;
- (void)writeData:(const void *_Nonnull)ptr amount:(size_t)a; - (void)writeData:(const void *_Nonnull)ptr amount:(size_t)a;
- (void)writeChunk:(AudioChunk *_Nonnull)chunk;
- (AudioChunk *_Nonnull)readChunk:(size_t)maxFrames; - (AudioChunk *_Nonnull)readChunk:(size_t)maxFrames;
- (AudioChunk *_Nonnull)readChunkAsFloat32:(size_t)maxFrames;
- (AudioChunk *_Nonnull)readAndMergeChunks:(size_t)maxFrames;
- (AudioChunk *_Nonnull)readAndMergeChunksAsFloat32:(size_t)maxFrames;
- (BOOL)peekFormat:(AudioStreamBasicDescription *_Nonnull)format channelConfig:(uint32_t *_Nonnull)config; - (BOOL)peekFormat:(AudioStreamBasicDescription *_Nonnull)format channelConfig:(uint32_t *_Nonnull)config;
- (BOOL)peekTimestamp:(double *_Nonnull)timestamp timeRatio:(double *_Nonnull)timeRatio;
- (void)process; // Should be overwriten by subclass - (void)process; // Should be overwriten by subclass
- (void)threadEntry:(id _Nullable)arg; - (void)threadEntry:(id _Nullable)arg;
@ -77,7 +46,6 @@
- (void)setShouldReset:(BOOL)s; - (void)setShouldReset:(BOOL)s;
- (BOOL)shouldReset; - (BOOL)shouldReset;
- (void)resetBackwards;
- (void)setPreviousNode:(id _Nullable)p; - (void)setPreviousNode:(id _Nullable)p;
- (id _Nullable)previousNode; - (id _Nullable)previousNode;
@ -92,8 +60,7 @@
- (uint32_t)nodeChannelConfig; - (uint32_t)nodeChannelConfig;
- (BOOL)nodeLossless; - (BOOL)nodeLossless;
- (Semaphore *_Nonnull)writeSemaphore; - (Semaphore *_Nonnull)semaphore;
- (Semaphore *_Nonnull)readSemaphore;
//-(void)resetBuffer; //-(void)resetBuffer;

View file

@ -11,47 +11,15 @@
#import "BufferChain.h" #import "BufferChain.h"
#import "Logging.h" #import "Logging.h"
#import "OutputCoreAudio.h"
#import <pthread.h>
#import <mach/mach_time.h>
#ifdef LOG_CHAINS
#import "NSFileHandle+CreateFile.h"
static NSLock * _Node_lock = nil;
static uint64_t _Node_serial;
#endif
@implementation Node @implementation Node
#ifdef LOG_CHAINS
+ (void)initialize {
@synchronized (_Node_lock) {
if(!_Node_lock) {
_Node_lock = [[NSLock alloc] init];
_Node_serial = 0;
}
}
}
- (void)initLogFiles {
[_Node_lock lock];
logFileOut = [NSFileHandle fileHandleForWritingAtPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%@_output_%08lld.raw", [self className], _Node_serial++]] createFile:YES];
logFileIn = [NSFileHandle fileHandleForWritingAtPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%@_input_%08lld.raw", [self className], _Node_serial++]] createFile:YES];
[_Node_lock unlock];
}
#endif
- (id)initWithController:(id)c previous:(id)p { - (id)initWithController:(id)c previous:(id)p {
self = [super init]; self = [super init];
if(self) { if(self) {
buffer = [[ChunkList alloc] initWithMaximumDuration:10.0]; buffer = [[ChunkList alloc] initWithMaximumDuration:3.0];
writeSemaphore = [[Semaphore alloc] init]; semaphore = [[Semaphore alloc] init];
readSemaphore = [[Semaphore alloc] init];
accessLock = [[NSLock alloc] init]; accessLock = [[NSRecursiveLock alloc] init];
initialBufferFilled = NO; initialBufferFilled = NO;
@ -62,38 +30,12 @@ static uint64_t _Node_serial;
nodeChannelConfig = 0; nodeChannelConfig = 0;
nodeLossless = NO; nodeLossless = NO;
durationPrebuffer = 2.0;
inWrite = NO;
inPeek = NO;
inRead = NO;
inMerge = NO;
[self setPreviousNode:p]; [self setPreviousNode:p];
#ifdef LOG_CHAINS
[self initLogFiles];
#endif
} }
return self; return self;
} }
- (void)dealloc {
[self cleanUp];
}
- (void)cleanUp {
[self setShouldContinue:NO];
while(inWrite || inPeek || inRead || inMerge) {
[writeSemaphore signal];
if(previousNode) {
[[previousNode readSemaphore] signal];
}
usleep(500);
}
}
- (AudioStreamBasicDescription)nodeFormat { - (AudioStreamBasicDescription)nodeFormat {
return nodeFormat; return nodeFormat;
} }
@ -107,12 +49,6 @@ static uint64_t _Node_serial;
} }
- (void)writeData:(const void *)ptr amount:(size_t)amount { - (void)writeData:(const void *)ptr amount:(size_t)amount {
inWrite = YES;
if(!shouldContinue || [self paused]) {
inWrite = NO;
return;
}
[accessLock lock]; [accessLock lock];
AudioChunk *chunk = [[AudioChunk alloc] init]; AudioChunk *chunk = [[AudioChunk alloc] init];
@ -123,16 +59,11 @@ static uint64_t _Node_serial;
[chunk setLossless:nodeLossless]; [chunk setLossless:nodeLossless];
[chunk assignSamples:ptr frameCount:amount / nodeFormat.mBytesPerPacket]; [chunk assignSamples:ptr frameCount:amount / nodeFormat.mBytesPerPacket];
#ifdef LOG_CHAINS const double chunkDuration = [chunk duration];
if(logFileOut) { double durationLeft = [buffer maxDuration] - [buffer listDuration];
[logFileOut writeData:[NSData dataWithBytes:ptr length:amount]];
}
#endif
double durationList = [buffer listDuration]; while(shouldContinue == YES && chunkDuration > durationLeft) {
double durationLeft = [buffer maxDuration] - durationList; if(durationLeft < chunkDuration) {
if(shouldContinue == YES && durationList >= durationPrebuffer) {
if(initialBufferFilled == NO) { if(initialBufferFilled == NO) {
initialBufferFilled = YES; initialBufferFilled = YES;
if([controller respondsToSelector:@selector(initialBufferFilled:)]) if([controller respondsToSelector:@selector(initialBufferFilled:)])
@ -140,87 +71,18 @@ static uint64_t _Node_serial;
} }
} }
while(shouldContinue == YES && ![self paused] && durationLeft < 0.0) { if(durationLeft < chunkDuration || shouldReset) {
if(durationLeft < 0.0 || shouldReset) {
[accessLock unlock]; [accessLock unlock];
[writeSemaphore timedWait:2000]; [semaphore wait];
[accessLock lock]; [accessLock lock];
} }
durationLeft = [buffer maxDuration] - [buffer listDuration]; durationLeft = [buffer maxDuration] - [buffer listDuration];
} }
BOOL doSignal = NO;
if([chunk frameCount]) {
[buffer addChunk:chunk]; [buffer addChunk:chunk];
doSignal = YES;
}
[accessLock unlock]; [accessLock unlock];
if(doSignal) {
[readSemaphore signal];
}
inWrite = NO;
}
- (void)writeChunk:(AudioChunk *)chunk {
inWrite = YES;
if(!shouldContinue || [self paused]) {
inWrite = NO;
return;
}
[accessLock lock];
double durationList = [buffer listDuration];
double durationLeft = [buffer maxDuration] - durationList;
if(shouldContinue == YES && durationList >= durationPrebuffer) {
if(initialBufferFilled == NO) {
initialBufferFilled = YES;
if([controller respondsToSelector:@selector(initialBufferFilled:)])
[controller performSelector:@selector(initialBufferFilled:) withObject:self];
}
}
while(shouldContinue == YES && ![self paused] && durationLeft < 0.0) {
if(previousNode && [previousNode shouldContinue] == NO) {
shouldContinue = NO;
break;
}
if(durationLeft < 0.0 || shouldReset) {
[accessLock unlock];
[writeSemaphore timedWait:2000];
[accessLock lock];
}
durationLeft = [buffer maxDuration] - [buffer listDuration];
}
BOOL doSignal = NO;
if([chunk frameCount]) {
#ifdef LOG_CHAINS
if(logFileOut) {
AudioChunk *chunkCopy = [chunk copy];
size_t frameCount = [chunkCopy frameCount];
NSData *chunkData = [chunkCopy removeSamples:frameCount];
[logFileOut writeData:chunkData];
}
#endif
[buffer addChunk:chunk];
doSignal = YES;
}
[accessLock unlock];
if(doSignal) {
[readSemaphore signal];
}
inWrite = NO;
} }
// Should be overwriten by subclass. // Should be overwriten by subclass.
@ -234,110 +96,21 @@ static uint64_t _Node_serial;
} }
- (BOOL)peekFormat:(nonnull AudioStreamBasicDescription *)format channelConfig:(nonnull uint32_t *)config { - (BOOL)peekFormat:(nonnull AudioStreamBasicDescription *)format channelConfig:(nonnull uint32_t *)config {
inPeek = YES;
if(!shouldContinue || [self paused]) {
inPeek = NO;
return NO;
}
[accessLock lock]; [accessLock lock];
while(shouldContinue && ![self paused] &&
[[previousNode buffer] isEmpty] && [previousNode endOfStream] == NO) {
[accessLock unlock];
[writeSemaphore signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
}
if(!shouldContinue || [self paused]) {
[accessLock unlock];
inPeek = NO;
return NO;
}
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
[accessLock unlock];
inPeek = NO;
return NO;
}
BOOL ret = [[previousNode buffer] peekFormat:format channelConfig:config]; BOOL ret = [[previousNode buffer] peekFormat:format channelConfig:config];
[accessLock unlock]; [accessLock unlock];
inPeek = NO;
return ret;
}
- (BOOL)peekTimestamp:(double *_Nonnull)timestamp timeRatio:(double *_Nonnull)timeRatio {
inPeek = YES;
if(!shouldContinue || [self paused]) {
inPeek = NO;
return NO;
}
[accessLock lock];
while(shouldContinue && ![self paused] &&
[[previousNode buffer] isEmpty] && [previousNode endOfStream] == NO) {
[accessLock unlock];
[writeSemaphore signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
}
if(!shouldContinue || [self paused]) {
[accessLock unlock];
inPeek = NO;
return NO;
}
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
[accessLock unlock];
inPeek = NO;
return NO;
}
BOOL ret = [[previousNode buffer] peekTimestamp:timestamp timeRatio:timeRatio];
[accessLock unlock];
inPeek = NO;
return ret; return ret;
} }
- (AudioChunk *)readChunk:(size_t)maxFrames { - (AudioChunk *)readChunk:(size_t)maxFrames {
inRead = YES;
if(!shouldContinue || [self paused]) {
inRead = NO;
return [[AudioChunk alloc] init];
}
[accessLock lock]; [accessLock lock];
while(shouldContinue && ![self paused] &&
[[previousNode buffer] isEmpty] && [previousNode endOfStream] == NO) {
[accessLock unlock];
[writeSemaphore signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
if([previousNode shouldReset] == YES) {
break;
}
}
if(!shouldContinue || [self paused]) {
[accessLock unlock];
inRead = NO;
return [[AudioChunk alloc] init];
}
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) { if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
endOfStream = YES;
[accessLock unlock]; [accessLock unlock];
inRead = NO;
return [[AudioChunk alloc] init]; return [[AudioChunk alloc] init];
} }
@ -349,7 +122,7 @@ static uint64_t _Node_serial;
shouldReset = YES; shouldReset = YES;
[previousNode setShouldReset:NO]; [previousNode setShouldReset:NO];
[[previousNode writeSemaphore] signal]; [[previousNode semaphore] signal];
} }
AudioChunk *ret; AudioChunk *ret;
@ -361,203 +134,9 @@ static uint64_t _Node_serial;
[accessLock unlock]; [accessLock unlock];
if([ret frameCount]) { if([ret frameCount]) {
[[previousNode writeSemaphore] signal]; [[previousNode semaphore] signal];
} }
#ifdef LOG_CHAINS
if(logFileIn) {
AudioChunk *chunkCopy = [ret copy];
size_t frameCount = [chunkCopy frameCount];
NSData *chunkData = [chunkCopy removeSamples:frameCount];
[logFileIn writeData:chunkData];
}
#endif
inRead = NO;
return ret;
}
- (AudioChunk *)readChunkAsFloat32:(size_t)maxFrames {
inRead = YES;
if(!shouldContinue || [self paused]) {
inRead = NO;
return [[AudioChunk alloc] init];
}
[accessLock lock];
while(shouldContinue && ![self paused] &&
[[previousNode buffer] isEmpty] && [previousNode endOfStream] == NO) {
[accessLock unlock];
[writeSemaphore signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
if([previousNode shouldReset] == YES) {
break;
}
}
if(!shouldContinue || [self paused]) {
[accessLock unlock];
inRead = NO;
return [[AudioChunk alloc] init];
}
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
[accessLock unlock];
inRead = NO;
return [[AudioChunk alloc] init];
}
if([previousNode shouldReset] == YES) {
@autoreleasepool {
[buffer reset];
}
shouldReset = YES;
[previousNode setShouldReset:NO];
[[previousNode writeSemaphore] signal];
}
AudioChunk *ret;
@autoreleasepool {
ret = [[previousNode buffer] removeSamplesAsFloat32:maxFrames];
}
[accessLock unlock];
if([ret frameCount]) {
[[previousNode writeSemaphore] signal];
}
#ifdef LOG_CHAINS
if(logFileIn) {
AudioChunk *chunkCopy = [ret copy];
size_t frameCount = [chunkCopy frameCount];
NSData *chunkData = [chunkCopy removeSamples:frameCount];
[logFileIn writeData:chunkData];
}
#endif
inRead = NO;
return ret;
}
- (AudioChunk *)readAndMergeChunks:(size_t)maxFrames {
inMerge = YES;
if(!shouldContinue || [self paused]) {
inMerge = NO;
return [[AudioChunk alloc] init];
}
[accessLock lock];
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
[accessLock unlock];
inMerge = NO;
return [[AudioChunk alloc] init];
}
AudioChunk *ret;
@autoreleasepool {
ret = [[previousNode buffer] removeAndMergeSamples:maxFrames callBlock:^BOOL{
if([previousNode shouldReset] == YES) {
@autoreleasepool {
[buffer reset];
}
shouldReset = YES;
[previousNode setShouldReset:NO];
}
[accessLock unlock];
[[previousNode writeSemaphore] signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
return !shouldContinue || [self paused] || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES);
}];
}
[accessLock unlock];
if([ret frameCount]) {
[[previousNode writeSemaphore] signal];
#ifdef LOG_CHAINS
if(logFileIn) {
AudioChunk *chunkCopy = [ret copy];
size_t frameCount = [chunkCopy frameCount];
NSData *chunkData = [chunkCopy removeSamples:frameCount];
[logFileIn writeData:chunkData];
}
#endif
}
inMerge = NO;
return ret;
}
- (AudioChunk *)readAndMergeChunksAsFloat32:(size_t)maxFrames {
inMerge = YES;
if(!shouldContinue || [self paused]) {
inMerge = NO;
return [[AudioChunk alloc] init];
}
[accessLock lock];
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
[accessLock unlock];
inMerge = NO;
return [[AudioChunk alloc] init];
}
AudioChunk *ret;
@autoreleasepool {
ret = [[previousNode buffer] removeAndMergeSamplesAsFloat32:maxFrames callBlock:^BOOL{
if([previousNode shouldReset] == YES) {
@autoreleasepool {
[buffer reset];
}
shouldReset = YES;
[previousNode setShouldReset:NO];
}
[accessLock unlock];
[[previousNode writeSemaphore] signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
return !shouldContinue || [self paused] || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES);
}];
}
[accessLock unlock];
if([ret frameCount]) {
[[previousNode writeSemaphore] signal];
#ifdef LOG_CHAINS
if(logFileIn) {
AudioChunk *chunkCopy = [ret copy];
size_t frameCount = [chunkCopy frameCount];
NSData *chunkData = [chunkCopy removeSamples:frameCount];
[logFileIn writeData:chunkData];
}
#endif
}
inMerge = NO;
return ret; return ret;
} }
@ -596,31 +175,8 @@ static uint64_t _Node_serial;
} }
} }
- (void)lockedResetBuffer { - (Semaphore *)semaphore {
@autoreleasepool { return semaphore;
[buffer reset];
}
}
- (void)unlockedResetBuffer {
@autoreleasepool {
[accessLock lock];
[buffer reset];
[accessLock unlock];
}
}
// Implementations should override
- (BOOL)paused {
return NO;
}
- (Semaphore *)writeSemaphore {
return writeSemaphore;
}
- (Semaphore *)readSemaphore {
return readSemaphore;
} }
- (BOOL)endOfStream { - (BOOL)endOfStream {
@ -643,23 +199,4 @@ static uint64_t _Node_serial;
return 0.0; return 0.0;
} }
// Reset everything in the chain
- (void)resetBackwards {
[accessLock lock];
if(buffer) {
[self lockedResetBuffer];
[writeSemaphore signal];
[readSemaphore signal];
}
Node *node = previousNode;
while(node) {
[node unlockedResetBuffer];
[node setShouldReset:YES];
[[node writeSemaphore] signal];
[[node readSemaphore] signal];
node = [node previousNode];
}
[accessLock unlock];
}
@end @end
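
The Node changes above swap a single semaphore for separate read and write semaphores, bounding the buffer by duration: the writer blocks while the chunk list is full, the reader blocks while it is empty, and each side wakes the other after it makes progress. A simplified, count-bounded sketch of that handshake using libdispatch semaphores (ChunkQueue is illustrative only, not the real ChunkList, which also uses timed waits):

// --- sketch only, not part of this diff ---
#import <Foundation/Foundation.h>
#import <dispatch/dispatch.h>

@interface ChunkQueue : NSObject {
	NSMutableArray *chunks;
	NSLock *lock;
	dispatch_semaphore_t freeSlots; // counts space left for the writer
	dispatch_semaphore_t usedSlots; // counts chunks ready for the reader
}
- (instancetype)initWithCapacity:(long)capacity;
- (void)push:(id)chunk;
- (id)pop;
@end

@implementation ChunkQueue
- (instancetype)initWithCapacity:(long)capacity {
	if((self = [super init])) {
		chunks = [[NSMutableArray alloc] init];
		lock = [[NSLock alloc] init];
		freeSlots = dispatch_semaphore_create(capacity);
		usedSlots = dispatch_semaphore_create(0);
	}
	return self;
}
- (void)push:(id)chunk {
	dispatch_semaphore_wait(freeSlots, DISPATCH_TIME_FOREVER); // block while full
	[lock lock];
	[chunks addObject:chunk];
	[lock unlock];
	dispatch_semaphore_signal(usedSlots); // wake a waiting reader
}
- (id)pop {
	dispatch_semaphore_wait(usedSlots, DISPATCH_TIME_FOREVER); // block while empty
	[lock lock];
	id chunk = [chunks firstObject];
	[chunks removeObjectAtIndex:0];
	[lock unlock];
	dispatch_semaphore_signal(freeSlots); // wake a waiting writer
	return chunk;
}
@end
// --- end sketch ---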

View file

@ -12,63 +12,50 @@
#import <AudioUnit/AudioUnit.h> #import <AudioUnit/AudioUnit.h>
#import <CoreAudio/AudioHardware.h> #import <CoreAudio/AudioHardware.h>
#import <CogAudio/Node.h> #import "Node.h"
#import <CogAudio/OutputCoreAudio.h> #import "OutputCoreAudio.h"
@interface OutputNode : Node { @interface OutputNode : Node {
AudioStreamBasicDescription format; AudioStreamBasicDescription format;
uint32_t config; uint32_t config;
double amountPlayed; double amountPlayed;
double amountPlayedInterval;
OutputCoreAudio *output; OutputCoreAudio *output;
BOOL paused; BOOL paused;
BOOL started; BOOL started;
BOOL intervalReported;
} }
- (void)beginEqualizer:(AudioUnit)eq;
- (void)refreshEqualizer:(AudioUnit)eq;
- (void)endEqualizer:(AudioUnit)eq;
- (double)amountPlayed; - (double)amountPlayed;
- (double)amountPlayedInterval;
- (void)incrementAmountPlayed:(double)seconds; - (void)incrementAmountPlayed:(double)seconds;
- (void)setAmountPlayed:(double)seconds;
- (void)resetAmountPlayed; - (void)resetAmountPlayed;
- (void)resetAmountPlayedInterval;
- (BOOL)selectNextBuffer;
- (void)endOfInputPlayed; - (void)endOfInputPlayed;
- (BOOL)chainQueueHasTracks; - (BOOL)chainQueueHasTracks;
- (double)secondsBuffered; - (double)secondsBuffered;
- (BOOL)setup; - (void)setup;
- (BOOL)setupWithInterval:(BOOL)resumeInterval;
- (void)process; - (void)process;
- (void)close; - (void)close;
- (void)seek:(double)time; - (void)seek:(double)time;
- (void)fadeOut;
- (void)fadeOutBackground;
- (void)fadeIn;
- (AudioChunk *)readChunk:(size_t)amount; - (AudioChunk *)readChunk:(size_t)amount;
- (void)setFormat:(AudioStreamBasicDescription *)f channelConfig:(uint32_t)channelConfig; - (void)setFormat:(AudioStreamBasicDescription *)f channelConfig:(uint32_t)channelConfig;
- (AudioStreamBasicDescription)format; - (AudioStreamBasicDescription)format;
- (uint32_t)config; - (uint32_t)config;
- (AudioStreamBasicDescription)deviceFormat;
- (uint32_t)deviceChannelConfig;
- (double)volume;
- (void)setVolume:(double)v; - (void)setVolume:(double)v;
- (void)setShouldContinue:(BOOL)s; - (void)setShouldContinue:(BOOL)s;
- (void)setShouldPlayOutBuffer:(BOOL)s;
- (void)pause; - (void)pause;
- (void)resume; - (void)resume;
@ -78,12 +65,4 @@
- (void)restartPlaybackAtCurrentPosition; - (void)restartPlaybackAtCurrentPosition;
- (double)latency;
- (double)getVisLatency;
- (double)getTotalLatency;
- (id)controller;
- (id)downmix;
@end @end

View file

@ -11,75 +11,19 @@
#import "BufferChain.h" #import "BufferChain.h"
#import "OutputCoreAudio.h" #import "OutputCoreAudio.h"
#import "DSPRubberbandNode.h"
#import "DSPFSurroundNode.h"
#import "DSPHRTFNode.h"
#import "DSPEqualizerNode.h"
#import "VisualizationNode.h"
#import "DSPDownmixNode.h"
#import "Logging.h" #import "Logging.h"
@implementation OutputNode { @implementation OutputNode
BOOL DSPsLaunched;
Node *previousInput; - (void)setup {
DSPRubberbandNode *rubberbandNode;
DSPFSurroundNode *fsurroundNode;
DSPHRTFNode *hrtfNode;
DSPEqualizerNode *equalizerNode;
DSPDownmixNode *downmixNode;
VisualizationNode *visualizationNode;
}
- (BOOL)setup {
return [self setupWithInterval:NO];
}
- (BOOL)setupWithInterval:(BOOL)resumeInterval {
if(!resumeInterval) {
amountPlayed = 0.0; amountPlayed = 0.0;
amountPlayedInterval = 0.0;
intervalReported = NO;
}
paused = YES; paused = YES;
started = NO; started = NO;
output = [[OutputCoreAudio alloc] initWithController:self]; output = [[OutputCoreAudio alloc] initWithController:self];
if(![output setup]) { [output setup];
output = nil;
return NO;
}
if(!DSPsLaunched) {
rubberbandNode = [[DSPRubberbandNode alloc] initWithController:self previous:nil latency:0.1];
if(!rubberbandNode) return NO;
fsurroundNode = [[DSPFSurroundNode alloc] initWithController:self previous:rubberbandNode latency:0.03];
if(!fsurroundNode) return NO;
equalizerNode = [[DSPEqualizerNode alloc] initWithController:self previous:fsurroundNode latency:0.03];
if(!equalizerNode) return NO;
hrtfNode = [[DSPHRTFNode alloc] initWithController:self previous:equalizerNode latency:0.03];
if(!hrtfNode) return NO;
downmixNode = [[DSPDownmixNode alloc] initWithController:self previous:hrtfNode latency:0.03];
if(!downmixNode) return NO;
// Approximately double the chunk size for Vis at 44100Hz
visualizationNode = [[VisualizationNode alloc] initWithController:self previous:downmixNode latency:8192.0 / 44100.0];
if(!visualizationNode) return NO;
[self setPreviousNode:visualizationNode];
DSPsLaunched = YES;
[self launchDSPs];
previousInput = nil;
}
return YES;
} }
- (void)seek:(double)time { - (void)seek:(double)time {
@ -104,61 +48,16 @@
[output resume]; [output resume];
} }
- (void)fadeOut {
[output fadeOut];
}
- (void)fadeOutBackground {
[output fadeOutBackground];
}
- (void)fadeIn {
[self reconnectInputAndReplumb];
[output fadeIn];
}
- (void)incrementAmountPlayed:(double)seconds { - (void)incrementAmountPlayed:(double)seconds {
amountPlayed += seconds; amountPlayed += seconds;
amountPlayedInterval += seconds;
if(!intervalReported && amountPlayedInterval >= 60.0) {
intervalReported = YES;
[controller reportPlayCount];
}
}
- (void)setAmountPlayed:(double)seconds {
double delta = seconds - amountPlayed;
if(delta > 0.0 && delta < 5.0) {
[self incrementAmountPlayed:delta];
} else if(delta) {
amountPlayed = seconds;
}
} }
- (void)resetAmountPlayed { - (void)resetAmountPlayed {
amountPlayed = 0; amountPlayed = 0;
} }
- (void)resetAmountPlayedInterval {
amountPlayedInterval = 0;
intervalReported = NO;
}
- (BOOL)selectNextBuffer {
BOOL ret = [controller selectNextBuffer];
if(!ret) {
[self reconnectInputAndReplumb];
}
return ret;
}
- (void)endOfInputPlayed { - (void)endOfInputPlayed {
if(!intervalReported) {
intervalReported = YES;
[controller reportPlayCount];
}
[controller endOfInputPlayed]; [controller endOfInputPlayed];
[self resetAmountPlayedInterval];
} }
- (BOOL)chainQueueHasTracks { - (BOOL)chainQueueHasTracks {
@ -169,78 +68,17 @@
return [buffer listDuration]; return [buffer listDuration];
} }
- (NSArray *)DSPs {
if(DSPsLaunched) {
return @[rubberbandNode, fsurroundNode, equalizerNode, hrtfNode, downmixNode, visualizationNode];
} else {
return @[];
}
}
- (BOOL)reconnectInput {
Node *finalNode = nil;
if(rubberbandNode) {
finalNode = [[controller bufferChain] finalNode];
if(finalNode) {
[rubberbandNode setPreviousNode:finalNode];
}
}
return !!finalNode;
}
- (void)reconnectInputAndReplumb {
Node *finalNode = nil;
if(DSPsLaunched) {
finalNode = [[controller bufferChain] finalNode];
if(finalNode) {
[rubberbandNode setPreviousNode:finalNode];
}
}
NSArray *DSPs = [self DSPs];
for (Node *node in DSPs) {
[node setEndOfStream:NO];
[node setShouldContinue:YES];
}
}
- (void)launchDSPs {
NSArray *DSPs = [self DSPs];
for (Node *node in DSPs) {
[node launchThread];
}
}
- (AudioChunk *)readChunk:(size_t)amount { - (AudioChunk *)readChunk:(size_t)amount {
@autoreleasepool { @autoreleasepool {
if([self reconnectInput]) { [self setPreviousNode:[[controller bufferChain] finalNode]];
AudioChunk *ret = [super readChunk:amount]; AudioChunk *ret = [super readChunk:amount];
if((!ret || ![ret frameCount]) && [previousNode endOfStream]) { /* if (n == 0) {
endOfStream = YES; DLog(@"Output Buffer dry!");
} }
*/
return ret; return ret;
} else {
return [[AudioChunk alloc] init];
}
}
}
- (BOOL)peekFormat:(nonnull AudioStreamBasicDescription *)format channelConfig:(nonnull uint32_t *)config {
@autoreleasepool {
if([self reconnectInput]) {
BOOL ret = [super peekFormat:format channelConfig:config];
if(!ret && [previousNode endOfStream]) {
endOfStream = YES;
}
return ret;
} else {
return NO;
}
} }
} }
@ -248,10 +86,6 @@
return amountPlayed; return amountPlayed;
} }
- (double)amountPlayedInterval {
return amountPlayedInterval;
}
- (AudioStreamBasicDescription)format { - (AudioStreamBasicDescription)format {
return format; return format;
} }
@ -260,59 +94,31 @@
return config; return config;
} }
- (AudioStreamBasicDescription)deviceFormat {
return [output deviceFormat];
}
- (uint32_t)deviceChannelConfig {
return [output deviceChannelConfig];
}
- (void)setFormat:(AudioStreamBasicDescription *)f channelConfig:(uint32_t)channelConfig { - (void)setFormat:(AudioStreamBasicDescription *)f channelConfig:(uint32_t)channelConfig {
if(!shouldContinue) return;
format = *f; format = *f;
config = channelConfig; config = channelConfig;
// Calculate a ratio and add to double(seconds) instead, as format may change // Calculate a ratio and add to double(seconds) instead, as format may change
// double oldSampleRatio = sampleRatio; // double oldSampleRatio = sampleRatio;
AudioPlayer *audioPlayer = controller; BufferChain *bufferChain = [controller bufferChain];
BufferChain *bufferChain = [audioPlayer bufferChain];
if(bufferChain) { if(bufferChain) {
ConverterNode *converter = [bufferChain converter]; ConverterNode *converter = [bufferChain converter];
AudioStreamBasicDescription outputFormat;
uint32_t outputChannelConfig;
BOOL formatChanged = NO;
if(converter) { if(converter) {
AudioStreamBasicDescription converterFormat = [converter nodeFormat]; // This clears the resampler buffer, but not the input buffer
if(memcmp(&converterFormat, &format, sizeof(converterFormat)) != 0) { // We also have to jump the play position ahead accounting for
formatChanged = YES; // the data we are flushing
} amountPlayed += [[converter buffer] listDuration];
}
if(downmixNode && output && !formatChanged) { AudioStreamBasicDescription inf = [bufferChain inputFormat];
outputFormat = [output deviceFormat]; uint32_t config = [bufferChain inputConfig];
outputChannelConfig = [output deviceChannelConfig];
AudioStreamBasicDescription currentOutputFormat = [downmixNode nodeFormat]; format.mChannelsPerFrame = inf.mChannelsPerFrame;
uint32_t currentOutputChannelConfig = [downmixNode nodeChannelConfig]; format.mBytesPerFrame = ((inf.mBitsPerChannel + 7) / 8) * format.mChannelsPerFrame;
if(memcmp(&currentOutputFormat, &outputFormat, sizeof(currentOutputFormat)) != 0 || format.mBytesPerPacket = format.mBytesPerFrame * format.mFramesPerPacket;
currentOutputChannelConfig != outputChannelConfig) { channelConfig = config;
formatChanged = YES;
} [converter setOutputFormat:format
} outputConfig:channelConfig];
if(formatChanged) { [converter inputFormatDidChange:[bufferChain inputFormat] inputConfig:[bufferChain inputConfig]];
InputNode *inputNode = [bufferChain inputNode];
if(converter) {
[converter setOutputFormat:format];
}
if(downmixNode && output) {
[downmixNode setOutputFormat:[output deviceFormat] withChannelConfig:[output deviceChannelConfig]];
}
if(inputNode) {
AudioStreamBasicDescription inputFormat = [inputNode nodeFormat];
if(converter) {
[converter inputFormatDidChange:inputFormat inputConfig:[inputNode nodeChannelConfig]];
}
[inputNode seek:(long)(amountPlayed * inputFormat.mSampleRate)];
}
} }
} }
} }
@ -320,24 +126,6 @@
- (void)close { - (void)close {
[output stop]; [output stop];
output = nil; output = nil;
if(DSPsLaunched) {
NSArray *DSPs = [self DSPs];
for(Node *node in DSPs) {
[node setShouldContinue:NO];
}
previousNode = nil;
visualizationNode = nil;
downmixNode = nil;
hrtfNode = nil;
fsurroundNode = nil;
rubberbandNode = nil;
previousInput = nil;
DSPsLaunched = NO;
}
}
- (double)volume {
return [output volume];
} }
- (void)setVolume:(double)v { - (void)setVolume:(double)v {
@ -347,22 +135,26 @@
- (void)setShouldContinue:(BOOL)s { - (void)setShouldContinue:(BOOL)s {
[super setShouldContinue:s]; [super setShouldContinue:s];
NSArray *DSPs = [self DSPs];
for(Node *node in DSPs) {
[node setShouldContinue:s];
}
// if (s == NO) // if (s == NO)
// [output stop]; // [output stop];
} }
- (void)setShouldPlayOutBuffer:(BOOL)s {
[output setShouldPlayOutBuffer:s];
}
- (BOOL)isPaused { - (BOOL)isPaused {
return paused; return paused;
} }
- (void)beginEqualizer:(AudioUnit)eq {
[controller beginEqualizer:eq];
}
- (void)refreshEqualizer:(AudioUnit)eq {
[controller refreshEqualizer:eq];
}
- (void)endEqualizer:(AudioUnit)eq {
[controller endEqualizer:eq];
}
- (void)sustainHDCD { - (void)sustainHDCD {
[output sustainHDCD]; [output sustainHDCD];
} }
@ -371,29 +163,4 @@
[controller restartPlaybackAtCurrentPosition]; [controller restartPlaybackAtCurrentPosition];
} }
- (double)latency {
double latency = 0.0;
NSArray *DSPs = [self DSPs];
for(Node *node in DSPs) {
latency += [node secondsBuffered];
}
return [output latency] + latency;
}
- (double)getVisLatency {
return [output latency] + [visualizationNode secondsBuffered];
}
- (double)getTotalLatency {
return [[controller bufferChain] secondsBuffered] + [self latency];
}
- (id)controller {
return controller;
}
- (id)downmix {
return downmixNode;
}
@end @end

View file

@ -1,35 +0,0 @@
//
// VisualizationNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/12/25.
//
#ifndef VisualizationNode_h
#define VisualizationNode_h
#import <CogAudio/Node.h>
@interface VisualizationNode : Node {
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
- (void)threadEntry:(id _Nullable)arg;
- (BOOL)setup;
- (void)cleanUp;
- (BOOL)paused;
- (void)resetBuffer;
- (void)setShouldContinue:(BOOL)s;
- (void)process;
- (double)secondsBuffered;
@end
#endif /* VisualizationNode_h */

View file

@ -1,273 +0,0 @@
//
// VisualizationNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/12/25.
//
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <Accelerate/Accelerate.h>
#import "Downmix.h"
#import <CogAudio/VisualizationController.h>
#import "BufferChain.h"
#import "Logging.h"
#import "rsstate.h"
#import "VisualizationNode.h"
@implementation VisualizationNode {
void *rs;
double lastVisRate;
BOOL processEntered;
BOOL stopping;
BOOL paused;
BOOL threadTerminated;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription visFormat; // Mono format for vis
uint32_t inputChannelConfig;
uint32_t visChannelConfig;
size_t resamplerRemain;
DownmixProcessor *downmixer;
VisualizationController *visController;
float visAudio[512];
float resamplerInput[8192];
float visTemp[8192];
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super init];
if(self) {
buffer = [[ChunkList alloc] initWithMaximumDuration:latency];
writeSemaphore = [[Semaphore alloc] init];
readSemaphore = [[Semaphore alloc] init];
accessLock = [[NSLock alloc] init];
initialBufferFilled = NO;
controller = c;
endOfStream = NO;
shouldContinue = YES;
nodeChannelConfig = 0;
nodeLossless = NO;
durationPrebuffer = latency * 0.25;
visController = [VisualizationController sharedController];
inWrite = NO;
inPeek = NO;
inRead = NO;
inMerge = NO;
[self setPreviousNode:p];
}
return self;
}
- (void)dealloc {
DLog(@"Visualization node dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[super cleanUp];
}
// Visualization thread should be fairly high priority, too
- (void)threadEntry:(id _Nullable)arg {
@autoreleasepool {
NSThread *currentThread = [NSThread currentThread];
[currentThread setThreadPriority:0.75];
[currentThread setQualityOfService:NSQualityOfServiceUserInitiated];
threadTerminated = NO;
[self process];
threadTerminated = YES;
}
}
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
[self fullShutdown];
paused = NO;
}
- (double)secondsBuffered {
return [buffer listDuration];
}
- (void)setShouldContinue:(BOOL)s {
BOOL currentShouldContinue = shouldContinue;
shouldContinue = s;
if(!currentShouldContinue && s && threadTerminated) {
[self launchThread];
}
}
- (BOOL)setup {
if(fabs(inputFormat.mSampleRate - 44100.0) > 1e-6) {
rs = rsstate_new(1, inputFormat.mSampleRate, 44100.0);
if(!rs) {
return NO;
}
resamplerRemain = 0;
}
visFormat = inputFormat;
visFormat.mChannelsPerFrame = 1;
visFormat.mBytesPerFrame = sizeof(float);
visFormat.mBytesPerPacket = visFormat.mBytesPerFrame * visFormat.mFramesPerPacket;
visChannelConfig = AudioChannelFrontCenter;
downmixer = [[DownmixProcessor alloc] initWithInputFormat:inputFormat inputConfig:inputChannelConfig andOutputFormat:visFormat outputConfig:visChannelConfig];
if(!downmixer) {
return NO;
}
return YES;
}
- (void)cleanUp {
stopping = YES;
while(processEntered) {
usleep(500);
}
[self fullShutdown];
}
- (void)fullShutdown {
if(rs) {
rsstate_delete(rs);
rs = NULL;
}
downmixer = nil;
}
- (BOOL)paused {
return paused;
}
- (void)process {
while([self shouldContinue] == YES) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self readAndMergeChunksAsFloat32:512];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
usleep(500);
endOfStream = YES;
continue;
}
} else {
[self processVis:[chunk copy]];
[self writeChunk:chunk];
chunk = nil;
}
}
}
endOfStream = YES;
}
- (void)postVisPCM:(const float *)visTemp amount:(size_t)samples {
[visController postVisPCM:visTemp amount:(int)samples];
}
- (void)processVis:(AudioChunk *)chunk {
processEntered = YES;
if(paused) {
processEntered = NO;
return;
}
AudioStreamBasicDescription format = [chunk format];
uint32_t channelConfig = [chunk channelConfig];
[visController postSampleRate:44100.0];
if(!rs || !downmixer ||
memcmp(&format, &inputFormat, sizeof(format)) != 0 ||
channelConfig != inputChannelConfig) {
if(rs) {
while(!stopping) {
int samplesFlushed;
samplesFlushed = (int)rsstate_flush(rs, &visTemp[0], 8192);
if(samplesFlushed > 1) {
[self postVisPCM:visTemp amount:samplesFlushed];
} else {
break;
}
}
}
[self fullShutdown];
inputFormat = format;
inputChannelConfig = channelConfig;
if(![self setup]) {
processEntered = NO;
return;
}
}
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
[downmixer process:[sampleData bytes] frameCount:frameCount output:&visAudio[0]];
if(rs) {
int samplesProcessed;
size_t totalDone = 0;
size_t inDone = 0;
size_t visFrameCount = frameCount;
do {
if(stopping) {
break;
}
int visTodo = (int)MIN(visFrameCount, resamplerRemain + visFrameCount - 8192);
if(visTodo) {
cblas_scopy(visTodo, &visAudio[0], 1, &resamplerInput[resamplerRemain], 1);
}
visTodo += resamplerRemain;
resamplerRemain = 0;
samplesProcessed = (int)rsstate_resample(rs, &resamplerInput[0], visTodo, &inDone, &visTemp[0], 8192);
resamplerRemain = (int)(visTodo - inDone);
if(resamplerRemain && inDone) {
memmove(&resamplerInput[0], &resamplerInput[inDone], resamplerRemain * sizeof(float));
}
if(samplesProcessed) {
[self postVisPCM:&visTemp[0] amount:samplesProcessed];
}
totalDone += inDone;
visFrameCount -= inDone;
} while(samplesProcessed && visFrameCount);
} else {
[self postVisPCM:&visAudio[0] amount:frameCount];
}
processEntered = NO;
}
@end
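
The VisualizationNode above downmixes each chunk to mono and resamples it to 44100 Hz, carrying whatever input the resampler did not consume over to the next call (resamplerRemain / resamplerInput). The sketch below shows just that carry bookkeeping, with a toy linear-interpolation resampler standing in for rsstate_resample(); the names and the 8192-sample block assumption are illustrative only, and the caller is assumed to pass an output buffer large enough to drain each block.

// --- sketch only, not part of this diff ---
#include <stddef.h>
#include <string.h>

// Carries unconsumed input between calls, like resamplerRemain/resamplerInput above.
// Assumes blocks of at most 8192 input samples.
typedef struct {
	double ratio;      // output rate / input rate, e.g. 44100.0 / 96000.0
	double phase;      // fractional read position carried between calls
	float carry[8192]; // input left over from the previous call
	size_t carryCount;
} VisResampler;

static size_t vis_resample_block(VisResampler *rs, const float *in, size_t inCount,
                                 float *out, size_t outMax) {
	// Prepend the carried samples to this block.
	float work[16384];
	memcpy(work, rs->carry, rs->carryCount * sizeof(float));
	memcpy(work + rs->carryCount, in, inCount * sizeof(float));
	size_t total = rs->carryCount + inCount;

	// Toy linear interpolation; a real resampler (such as rsstate) goes here.
	size_t produced = 0;
	double pos = rs->phase;
	while(produced < outMax && pos + 1.0 < (double)total) {
		size_t idx = (size_t)pos;
		double frac = pos - (double)idx;
		out[produced++] = (float)(work[idx] * (1.0 - frac) + work[idx + 1] * frac);
		pos += 1.0 / rs->ratio;
	}

	// Keep everything from the first still-needed input sample onward.
	size_t consumed = (size_t)pos;
	if(consumed > total) consumed = total;
	rs->carryCount = total - consumed;
	memmove(rs->carry, work + consumed, rs->carryCount * sizeof(float));
	rs->phase = pos - (double)consumed;
	return produced;
}
// --- end sketch ---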

View file

@ -1 +0,0 @@
#import "ThirdParty/deadbeef/fft.h"

View file

@ -3,7 +3,7 @@
archiveVersion = 1; archiveVersion = 1;
classes = { classes = {
}; };
objectVersion = 54; objectVersion = 46;
objects = { objects = {
/* Begin PBXBuildFile section */ /* Begin PBXBuildFile section */
@ -25,9 +25,11 @@
17D21CA80B8BE4BA00D1EBDE /* Node.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21C7D0B8BE4BA00D1EBDE /* Node.m */; }; 17D21CA80B8BE4BA00D1EBDE /* Node.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21C7D0B8BE4BA00D1EBDE /* Node.m */; };
17D21CA90B8BE4BA00D1EBDE /* OutputNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21C7E0B8BE4BA00D1EBDE /* OutputNode.h */; settings = {ATTRIBUTES = (Public, ); }; }; 17D21CA90B8BE4BA00D1EBDE /* OutputNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21C7E0B8BE4BA00D1EBDE /* OutputNode.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21CAA0B8BE4BA00D1EBDE /* OutputNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21C7F0B8BE4BA00D1EBDE /* OutputNode.m */; }; 17D21CAA0B8BE4BA00D1EBDE /* OutputNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21C7F0B8BE4BA00D1EBDE /* OutputNode.m */; };
17D21CC50B8BE4BA00D1EBDE /* OutputCoreAudio.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21C9C0B8BE4BA00D1EBDE /* OutputCoreAudio.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21CC60B8BE4BA00D1EBDE /* OutputCoreAudio.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21C9D0B8BE4BA00D1EBDE /* OutputCoreAudio.m */; };
17D21CC70B8BE4BA00D1EBDE /* Status.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21C9E0B8BE4BA00D1EBDE /* Status.h */; settings = {ATTRIBUTES = (Public, ); }; }; 17D21CC70B8BE4BA00D1EBDE /* Status.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21C9E0B8BE4BA00D1EBDE /* Status.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21CF30B8BE5EF00D1EBDE /* CogSemaphore.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21CF10B8BE5EF00D1EBDE /* CogSemaphore.h */; settings = {ATTRIBUTES = (Public, ); }; }; 17D21CF30B8BE5EF00D1EBDE /* Semaphore.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21CF10B8BE5EF00D1EBDE /* Semaphore.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21CF40B8BE5EF00D1EBDE /* CogSemaphore.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21CF20B8BE5EF00D1EBDE /* CogSemaphore.m */; }; 17D21CF40B8BE5EF00D1EBDE /* Semaphore.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21CF20B8BE5EF00D1EBDE /* Semaphore.m */; };
17D21DAD0B8BE76800D1EBDE /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17D21DA90B8BE76800D1EBDE /* AudioToolbox.framework */; }; 17D21DAD0B8BE76800D1EBDE /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17D21DA90B8BE76800D1EBDE /* AudioToolbox.framework */; };
17D21DAE0B8BE76800D1EBDE /* AudioUnit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17D21DAA0B8BE76800D1EBDE /* AudioUnit.framework */; }; 17D21DAE0B8BE76800D1EBDE /* AudioUnit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17D21DAA0B8BE76800D1EBDE /* AudioUnit.framework */; };
17D21DAF0B8BE76800D1EBDE /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17D21DAB0B8BE76800D1EBDE /* CoreAudio.framework */; }; 17D21DAF0B8BE76800D1EBDE /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17D21DAB0B8BE76800D1EBDE /* CoreAudio.framework */; };
@ -39,88 +41,88 @@
17F94DD50B8D0F7000A34E87 /* PluginController.h in Headers */ = {isa = PBXBuildFile; fileRef = 17F94DD30B8D0F7000A34E87 /* PluginController.h */; settings = {ATTRIBUTES = (Public, ); }; }; 17F94DD50B8D0F7000A34E87 /* PluginController.h in Headers */ = {isa = PBXBuildFile; fileRef = 17F94DD30B8D0F7000A34E87 /* PluginController.h */; settings = {ATTRIBUTES = (Public, ); }; };
17F94DD60B8D0F7000A34E87 /* PluginController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 17F94DD40B8D0F7000A34E87 /* PluginController.mm */; }; 17F94DD60B8D0F7000A34E87 /* PluginController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 17F94DD40B8D0F7000A34E87 /* PluginController.mm */; };
17F94DDD0B8D101100A34E87 /* Plugin.h in Headers */ = {isa = PBXBuildFile; fileRef = 17F94DDC0B8D101100A34E87 /* Plugin.h */; settings = {ATTRIBUTES = (Public, ); }; }; 17F94DDD0B8D101100A34E87 /* Plugin.h in Headers */ = {isa = PBXBuildFile; fileRef = 17F94DDC0B8D101100A34E87 /* Plugin.h */; settings = {ATTRIBUTES = (Public, ); }; };
831A50142865A7FD0049CFE4 /* rsstate.hpp in Headers */ = {isa = PBXBuildFile; fileRef = 831A50132865A7FD0049CFE4 /* rsstate.hpp */; };
831A50162865A8800049CFE4 /* rsstate.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 831A50152865A8800049CFE4 /* rsstate.cpp */; };
831A50182865A8B30049CFE4 /* rsstate.h in Headers */ = {isa = PBXBuildFile; fileRef = 831A50172865A8B30049CFE4 /* rsstate.h */; };
8328995327CB511000D7F028 /* RedundantPlaylistDataStore.m in Sources */ = {isa = PBXBuildFile; fileRef = 8328995127CB510F00D7F028 /* RedundantPlaylistDataStore.m */; }; 8328995327CB511000D7F028 /* RedundantPlaylistDataStore.m in Sources */ = {isa = PBXBuildFile; fileRef = 8328995127CB510F00D7F028 /* RedundantPlaylistDataStore.m */; };
8328995427CB511000D7F028 /* RedundantPlaylistDataStore.h in Headers */ = {isa = PBXBuildFile; fileRef = 8328995227CB511000D7F028 /* RedundantPlaylistDataStore.h */; }; 8328995427CB511000D7F028 /* RedundantPlaylistDataStore.h in Headers */ = {isa = PBXBuildFile; fileRef = 8328995227CB511000D7F028 /* RedundantPlaylistDataStore.h */; };
8328995727CB51B700D7F028 /* SHA256Digest.h in Headers */ = {isa = PBXBuildFile; fileRef = 8328995527CB51B700D7F028 /* SHA256Digest.h */; }; 8328995727CB51B700D7F028 /* SHA256Digest.h in Headers */ = {isa = PBXBuildFile; fileRef = 8328995527CB51B700D7F028 /* SHA256Digest.h */; };
8328995827CB51B700D7F028 /* SHA256Digest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8328995627CB51B700D7F028 /* SHA256Digest.m */; }; 8328995827CB51B700D7F028 /* SHA256Digest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8328995627CB51B700D7F028 /* SHA256Digest.m */; };
8328995A27CB51C900D7F028 /* Security.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8328995927CB51C900D7F028 /* Security.framework */; }; 8328995A27CB51C900D7F028 /* Security.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8328995927CB51C900D7F028 /* Security.framework */; };
833442422D6EFA6700C51D38 /* VisualizationController.h in Headers */ = {isa = PBXBuildFile; fileRef = 833442402D6EFA6700C51D38 /* VisualizationController.h */; settings = {ATTRIBUTES = (Public, ); }; };
833442432D6EFA6700C51D38 /* VisualizationController.m in Sources */ = {isa = PBXBuildFile; fileRef = 833442412D6EFA6700C51D38 /* VisualizationController.m */; };
833738EA2D5EA52500278628 /* DSPDownmixNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 833738E92D5EA52500278628 /* DSPDownmixNode.h */; settings = {ATTRIBUTES = (Public, ); }; };
833738EC2D5EA53500278628 /* DSPDownmixNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 833738EB2D5EA53500278628 /* DSPDownmixNode.m */; };
833738EF2D5EA5B700278628 /* Downmix.m in Sources */ = {isa = PBXBuildFile; fileRef = 833738EE2D5EA5B700278628 /* Downmix.m */; };
833738F02D5EA5B700278628 /* Downmix.h in Headers */ = {isa = PBXBuildFile; fileRef = 833738ED2D5EA5B700278628 /* Downmix.h */; settings = {ATTRIBUTES = (Public, ); }; };
8347C7412796C58800FA8A7D /* NSFileHandle+CreateFile.h in Headers */ = {isa = PBXBuildFile; fileRef = 8347C73F2796C58800FA8A7D /* NSFileHandle+CreateFile.h */; }; 8347C7412796C58800FA8A7D /* NSFileHandle+CreateFile.h in Headers */ = {isa = PBXBuildFile; fileRef = 8347C73F2796C58800FA8A7D /* NSFileHandle+CreateFile.h */; };
8347C7422796C58800FA8A7D /* NSFileHandle+CreateFile.m in Sources */ = {isa = PBXBuildFile; fileRef = 8347C7402796C58800FA8A7D /* NSFileHandle+CreateFile.m */; }; 8347C7422796C58800FA8A7D /* NSFileHandle+CreateFile.m in Sources */ = {isa = PBXBuildFile; fileRef = 8347C7402796C58800FA8A7D /* NSFileHandle+CreateFile.m */; };
834A41A9287A90AB00EB9D9B /* freesurround_decoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 834A41A5287A90AB00EB9D9B /* freesurround_decoder.h */; }; 834FD4EB27AF8F380063BC83 /* AudioChunk.h in Headers */ = {isa = PBXBuildFile; fileRef = 834FD4EA27AF8F380063BC83 /* AudioChunk.h */; };
834A41AA287A90AB00EB9D9B /* freesurround_decoder.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 834A41A6287A90AB00EB9D9B /* freesurround_decoder.cpp */; };
834A41AB287A90AB00EB9D9B /* channelmaps.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 834A41A7287A90AB00EB9D9B /* channelmaps.cpp */; };
834A41AC287A90AB00EB9D9B /* channelmaps.h in Headers */ = {isa = PBXBuildFile; fileRef = 834A41A8287A90AB00EB9D9B /* channelmaps.h */; };
834FD4EB27AF8F380063BC83 /* AudioChunk.h in Headers */ = {isa = PBXBuildFile; fileRef = 834FD4EA27AF8F380063BC83 /* AudioChunk.h */; settings = {ATTRIBUTES = (Public, ); }; };
834FD4ED27AF91220063BC83 /* AudioChunk.m in Sources */ = {isa = PBXBuildFile; fileRef = 834FD4EC27AF91220063BC83 /* AudioChunk.m */; }; 834FD4ED27AF91220063BC83 /* AudioChunk.m in Sources */ = {isa = PBXBuildFile; fileRef = 834FD4EC27AF91220063BC83 /* AudioChunk.m */; };
834FD4F027AF93680063BC83 /* ChunkList.h in Headers */ = {isa = PBXBuildFile; fileRef = 834FD4EE27AF93680063BC83 /* ChunkList.h */; settings = {ATTRIBUTES = (Public, ); }; }; 834FD4F027AF93680063BC83 /* ChunkList.h in Headers */ = {isa = PBXBuildFile; fileRef = 834FD4EE27AF93680063BC83 /* ChunkList.h */; };
834FD4F127AF93680063BC83 /* ChunkList.m in Sources */ = {isa = PBXBuildFile; fileRef = 834FD4EF27AF93680063BC83 /* ChunkList.m */; }; 834FD4F127AF93680063BC83 /* ChunkList.m in Sources */ = {isa = PBXBuildFile; fileRef = 834FD4EF27AF93680063BC83 /* ChunkList.m */; };
8350416D28646149006B32CC /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8350416C28646149006B32CC /* CoreMedia.framework */; }; 834FD4F427AFA2150063BC83 /* Downmix.h in Headers */ = {isa = PBXBuildFile; fileRef = 834FD4F227AFA2150063BC83 /* Downmix.h */; };
834FD4F527AFA2150063BC83 /* Downmix.m in Sources */ = {isa = PBXBuildFile; fileRef = 834FD4F327AFA2150063BC83 /* Downmix.m */; };
835C88A82797D4D400E28EAE /* LICENSE.LGPL in Resources */ = {isa = PBXBuildFile; fileRef = 835C88A42797D4D400E28EAE /* LICENSE.LGPL */; };
835C88A92797D4D400E28EAE /* License.txt in Resources */ = {isa = PBXBuildFile; fileRef = 835C88A52797D4D400E28EAE /* License.txt */; };
835C88AA2797D4D400E28EAE /* lpc.c in Sources */ = {isa = PBXBuildFile; fileRef = 835C88A62797D4D400E28EAE /* lpc.c */; };
835C88AB2797D4D400E28EAE /* lpc.h in Headers */ = {isa = PBXBuildFile; fileRef = 835C88A72797D4D400E28EAE /* lpc.h */; };
835C88AD2797DA5800E28EAE /* util.h in Headers */ = {isa = PBXBuildFile; fileRef = 835C88AC2797DA5800E28EAE /* util.h */; };
835C88B1279811A500E28EAE /* hdcd_decode2.h in Headers */ = {isa = PBXBuildFile; fileRef = 835C88AF279811A500E28EAE /* hdcd_decode2.h */; }; 835C88B1279811A500E28EAE /* hdcd_decode2.h in Headers */ = {isa = PBXBuildFile; fileRef = 835C88AF279811A500E28EAE /* hdcd_decode2.h */; };
835C88B2279811A500E28EAE /* hdcd_decode2.c in Sources */ = {isa = PBXBuildFile; fileRef = 835C88B0279811A500E28EAE /* hdcd_decode2.c */; }; 835C88B2279811A500E28EAE /* hdcd_decode2.c in Sources */ = {isa = PBXBuildFile; fileRef = 835C88B0279811A500E28EAE /* hdcd_decode2.c */; };
835DD2672ACAF1D90057E319 /* OutputCoreAudio.m in Sources */ = {isa = PBXBuildFile; fileRef = 835DD2652ACAF1D90057E319 /* OutputCoreAudio.m */; }; 835EDD7B279FE23A001EDCCE /* HeadphoneFilter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 835EDD7A279FE23A001EDCCE /* HeadphoneFilter.mm */; };
835DD2682ACAF1D90057E319 /* OutputCoreAudio.h in Headers */ = {isa = PBXBuildFile; fileRef = 835DD2662ACAF1D90057E319 /* OutputCoreAudio.h */; settings = {ATTRIBUTES = (Public, ); }; }; 835EDD7D279FE307001EDCCE /* HeadphoneFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 835EDD7C279FE307001EDCCE /* HeadphoneFilter.h */; };
835DD2722ACAF5AD0057E319 /* lpc.h in Headers */ = {isa = PBXBuildFile; fileRef = 835DD26D2ACAF5AD0057E319 /* lpc.h */; };
835DD2732ACAF5AD0057E319 /* util.h in Headers */ = {isa = PBXBuildFile; fileRef = 835DD26E2ACAF5AD0057E319 /* util.h */; };
835DD2742ACAF5AD0057E319 /* lpc.c in Sources */ = {isa = PBXBuildFile; fileRef = 835DD26F2ACAF5AD0057E319 /* lpc.c */; };
835FAC5E27BCA14D00BA8562 /* BadSampleCleaner.h in Headers */ = {isa = PBXBuildFile; fileRef = 835FAC5C27BCA14D00BA8562 /* BadSampleCleaner.h */; }; 835FAC5E27BCA14D00BA8562 /* BadSampleCleaner.h in Headers */ = {isa = PBXBuildFile; fileRef = 835FAC5C27BCA14D00BA8562 /* BadSampleCleaner.h */; };
835FAC5F27BCA14D00BA8562 /* BadSampleCleaner.m in Sources */ = {isa = PBXBuildFile; fileRef = 835FAC5D27BCA14D00BA8562 /* BadSampleCleaner.m */; }; 835FAC5F27BCA14D00BA8562 /* BadSampleCleaner.m in Sources */ = {isa = PBXBuildFile; fileRef = 835FAC5D27BCA14D00BA8562 /* BadSampleCleaner.m */; };
83725A9027AA16C90003F694 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 83725A7B27AA0D8A0003F694 /* Accelerate.framework */; }; 83725A9027AA16C90003F694 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 83725A7B27AA0D8A0003F694 /* Accelerate.framework */; };
83725A9127AA16D50003F694 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 83725A7C27AA0D8E0003F694 /* AVFoundation.framework */; }; 83725A9127AA16D50003F694 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 83725A7C27AA0D8E0003F694 /* AVFoundation.framework */; };
8377C64C27B8C51500E8BC0F /* fft_accelerate.c in Sources */ = {isa = PBXBuildFile; fileRef = 8377C64B27B8C51500E8BC0F /* fft_accelerate.c */; }; 8377C65227B8CAD100E8BC0F /* VisualizationController.h in Headers */ = {isa = PBXBuildFile; fileRef = 8377C65027B8CAD100E8BC0F /* VisualizationController.h */; };
8377C64E27B8C54400E8BC0F /* fft.h in Headers */ = {isa = PBXBuildFile; fileRef = 8377C64D27B8C54400E8BC0F /* fft.h */; }; 8377C65327B8CAD100E8BC0F /* VisualizationController.m in Sources */ = {isa = PBXBuildFile; fileRef = 8377C65127B8CAD100E8BC0F /* VisualizationController.m */; };
8384912718080FF100E7332D /* Logging.h in Headers */ = {isa = PBXBuildFile; fileRef = 8384912618080FF100E7332D /* Logging.h */; }; 8384912718080FF100E7332D /* Logging.h in Headers */ = {isa = PBXBuildFile; fileRef = 8384912618080FF100E7332D /* Logging.h */; };
838A33722D06A97D00D0D770 /* librubberband.3.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 838A33712D06A97D00D0D770 /* librubberband.3.dylib */; };
839065F32853338700636FBB /* dsd2float.h in Headers */ = {isa = PBXBuildFile; fileRef = 839065F22853338700636FBB /* dsd2float.h */; };
839366671815923C006DD712 /* CogPluginMulti.h in Headers */ = {isa = PBXBuildFile; fileRef = 839366651815923C006DD712 /* CogPluginMulti.h */; }; 839366671815923C006DD712 /* CogPluginMulti.h in Headers */ = {isa = PBXBuildFile; fileRef = 839366651815923C006DD712 /* CogPluginMulti.h */; };
839366681815923C006DD712 /* CogPluginMulti.m in Sources */ = {isa = PBXBuildFile; fileRef = 839366661815923C006DD712 /* CogPluginMulti.m */; }; 839366681815923C006DD712 /* CogPluginMulti.m in Sources */ = {isa = PBXBuildFile; fileRef = 839366661815923C006DD712 /* CogPluginMulti.m */; };
8399CF2C27B5D1D5008751F1 /* NSDictionary+Merge.h in Headers */ = {isa = PBXBuildFile; fileRef = 8399CF2A27B5D1D4008751F1 /* NSDictionary+Merge.h */; }; 8399CF2C27B5D1D5008751F1 /* NSDictionary+Merge.h in Headers */ = {isa = PBXBuildFile; fileRef = 8399CF2A27B5D1D4008751F1 /* NSDictionary+Merge.h */; };
8399CF2D27B5D1D5008751F1 /* NSDictionary+Merge.m in Sources */ = {isa = PBXBuildFile; fileRef = 8399CF2B27B5D1D4008751F1 /* NSDictionary+Merge.m */; }; 8399CF2D27B5D1D5008751F1 /* NSDictionary+Merge.m in Sources */ = {isa = PBXBuildFile; fileRef = 8399CF2B27B5D1D4008751F1 /* NSDictionary+Merge.m */; };
839E56E52879450300DFB5F4 /* HrtfData.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E12879450300DFB5F4 /* HrtfData.h */; }; 83D44DC02839C60A00D4DD10 /* cqt.h in Headers */ = {isa = PBXBuildFile; fileRef = 83D44DBE2839C60A00D4DD10 /* cqt.h */; };
839E56E62879450300DFB5F4 /* Endianness.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E22879450300DFB5F4 /* Endianness.h */; }; 83D44DC12839C60A00D4DD10 /* cqt.c in Sources */ = {isa = PBXBuildFile; fileRef = 83D44DBF2839C60A00D4DD10 /* cqt.c */; };
839E56E72879450300DFB5F4 /* HrtfData.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 839E56E32879450300DFB5F4 /* HrtfData.cpp */; }; 83F18B1E27D1E8EF00385946 /* CDSPHBDownsampler.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18ADF27D1E8EF00385946 /* CDSPHBDownsampler.h */; };
839E56E82879450300DFB5F4 /* IHrtfData.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E42879450300DFB5F4 /* IHrtfData.h */; }; 83F18B1F27D1E8EF00385946 /* pffft_double.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AE127D1E8EF00385946 /* pffft_double.h */; };
839E56EA28794F6300DFB5F4 /* HrtfTypes.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E928794F6300DFB5F4 /* HrtfTypes.h */; }; 83F18B2027D1E8EF00385946 /* pf_neon_double_from_avx.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AE327D1E8EF00385946 /* pf_neon_double_from_avx.h */; };
839E56F7287974A100DFB5F4 /* SandboxBroker.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56F6287974A100DFB5F4 /* SandboxBroker.h */; }; 83F18B2127D1E8EF00385946 /* pf_double.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AE427D1E8EF00385946 /* pf_double.h */; };
839E899E2D5DB9D500A13526 /* VisualizationNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E899D2D5DB9D500A13526 /* VisualizationNode.h */; settings = {ATTRIBUTES = (Public, ); }; }; 83F18B2227D1E8EF00385946 /* pf_neon_double.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AE527D1E8EF00385946 /* pf_neon_double.h */; };
839E89A02D5DBA1700A13526 /* VisualizationNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 839E899F2D5DBA1700A13526 /* VisualizationNode.m */; }; 83F18B2327D1E8EF00385946 /* pf_sse2_double.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AE627D1E8EF00385946 /* pf_sse2_double.h */; };
83A3496A2D5C3F430096D530 /* DSPRubberbandNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83A349682D5C3F430096D530 /* DSPRubberbandNode.m */; }; 83F18B2427D1E8EF00385946 /* pf_avx_double.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AE727D1E8EF00385946 /* pf_avx_double.h */; };
83A3496B2D5C3F430096D530 /* DSPRubberbandNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A349672D5C3F430096D530 /* DSPRubberbandNode.h */; settings = {ATTRIBUTES = (Public, ); }; }; 83F18B2527D1E8EF00385946 /* pf_scalar_double.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AE827D1E8EF00385946 /* pf_scalar_double.h */; };
83A3496D2D5C40490096D530 /* DSPFSurroundNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A3496C2D5C40490096D530 /* DSPFSurroundNode.h */; settings = {ATTRIBUTES = (Public, ); }; }; 83F18B2627D1E8EF00385946 /* pffft_priv_impl.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AE927D1E8EF00385946 /* pffft_priv_impl.h */; };
83A3496F2D5C405E0096D530 /* DSPFSurroundNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83A3496E2D5C405E0096D530 /* DSPFSurroundNode.m */; }; 83F18B2727D1E8EF00385946 /* pffft_double.c in Sources */ = {isa = PBXBuildFile; fileRef = 83F18AEA27D1E8EF00385946 /* pffft_double.c */; };
83A349722D5C41810096D530 /* FSurroundFilter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 83A349712D5C41810096D530 /* FSurroundFilter.mm */; }; 83F18B3327D1E8EF00385946 /* CDSPSincFilterGen.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AF827D1E8EF00385946 /* CDSPSincFilterGen.h */; };
83A349732D5C41810096D530 /* FSurroundFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A349702D5C41810096D530 /* FSurroundFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; 83F18B3427D1E8EF00385946 /* r8butil.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AF927D1E8EF00385946 /* r8butil.h */; };
83A349752D5C50A10096D530 /* DSPHRTFNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A349742D5C50A10096D530 /* DSPHRTFNode.h */; settings = {ATTRIBUTES = (Public, ); }; }; 83F18B3627D1E8EF00385946 /* LICENSE in Resources */ = {isa = PBXBuildFile; fileRef = 83F18AFB27D1E8EF00385946 /* LICENSE */; };
83A349772D5C50B20096D530 /* DSPHRTFNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83A349762D5C50B20096D530 /* DSPHRTFNode.m */; }; 83F18B3727D1E8EF00385946 /* r8bbase.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AFC27D1E8EF00385946 /* r8bbase.h */; };
83B74281289E027F005AAC28 /* CogAudio-Bridging-Header.h in Headers */ = {isa = PBXBuildFile; fileRef = 83B74280289E027F005AAC28 /* CogAudio-Bridging-Header.h */; }; 83F18B3827D1E8EF00385946 /* CDSPFIRFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18AFD27D1E8EF00385946 /* CDSPFIRFilter.h */; };
83CB56652E06464D003DD379 /* NSDictionary+Optional.h in Headers */ = {isa = PBXBuildFile; fileRef = 83CB56632E06464D003DD379 /* NSDictionary+Optional.h */; }; 83F18B4227D1E8EF00385946 /* CDSPProcessor.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18B0827D1E8EF00385946 /* CDSPProcessor.h */; };
83CB56662E06464D003DD379 /* NSDictionary+Optional.m in Sources */ = {isa = PBXBuildFile; fileRef = 83CB56642E06464D003DD379 /* NSDictionary+Optional.m */; }; 83F18B4327D1E8EF00385946 /* README.md in Resources */ = {isa = PBXBuildFile; fileRef = 83F18B0927D1E8EF00385946 /* README.md */; };
83F843202D5C6272008C123B /* HeadphoneFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F8431E2D5C6272008C123B /* HeadphoneFilter.h */; settings = {ATTRIBUTES = (Public, ); }; }; 83F18B4427D1E8EF00385946 /* fft4g.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18B0A27D1E8EF00385946 /* fft4g.h */; };
83F843212D5C6272008C123B /* HeadphoneFilter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 83F8431F2D5C6272008C123B /* HeadphoneFilter.mm */; }; 83F18B4527D1E8EF00385946 /* CDSPRealFFT.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18B0B27D1E8EF00385946 /* CDSPRealFFT.h */; };
83F843232D5C66DA008C123B /* DSPEqualizerNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F843222D5C66DA008C123B /* DSPEqualizerNode.h */; settings = {ATTRIBUTES = (Public, ); }; }; 83F18B4627D1E8EF00385946 /* CDSPFracInterpolator.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18B0C27D1E8EF00385946 /* CDSPFracInterpolator.h */; };
83F843252D5C66E9008C123B /* DSPEqualizerNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83F843242D5C66E9008C123B /* DSPEqualizerNode.m */; }; 83F18B4E27D1E8F000385946 /* CDSPBlockConvolver.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18B1727D1E8EF00385946 /* CDSPBlockConvolver.h */; };
83F9FFF62D6EC43900026576 /* soxr.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F9FFF02D6EC43900026576 /* soxr.h */; settings = {ATTRIBUTES = (Public, ); }; }; 83F18B4F27D1E8F000385946 /* CDSPHBUpsampler.inc in Sources */ = {isa = PBXBuildFile; fileRef = 83F18B1827D1E8EF00385946 /* CDSPHBUpsampler.inc */; };
83F9FFF82D6EC43900026576 /* libsoxr.0.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 83F9FFF22D6EC43900026576 /* libsoxr.0.dylib */; }; 83F18B5027D1E8F000385946 /* r8bconf.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18B1927D1E8EF00385946 /* r8bconf.h */; };
83FFED512D5B08BC0044CCAF /* DSPNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83FFED502D5B08BC0044CCAF /* DSPNode.h */; settings = {ATTRIBUTES = (Public, ); }; }; 83F18B5127D1E8F000385946 /* r8bbase.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 83F18B1A27D1E8EF00385946 /* r8bbase.cpp */; };
83FFED532D5B09320044CCAF /* DSPNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83FFED522D5B09320044CCAF /* DSPNode.m */; }; 83F18B5227D1E8F000385946 /* pffft.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18B1B27D1E8EF00385946 /* pffft.h */; };
83F18B5327D1E8F000385946 /* CDSPResampler.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18B1C27D1E8EF00385946 /* CDSPResampler.h */; };
83F18B5427D1E8F000385946 /* CDSPHBUpsampler.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18B1D27D1E8EF00385946 /* CDSPHBUpsampler.h */; };
83F18B5627D1F5E900385946 /* r8bstate.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F18B5527D1F5E900385946 /* r8bstate.h */; };
8DC2EF570486A6940098B216 /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1058C7B1FEA5585E11CA2CBB /* Cocoa.framework */; }; 8DC2EF570486A6940098B216 /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1058C7B1FEA5585E11CA2CBB /* Cocoa.framework */; };
8E8D3D2F0CBAEE6E00135C1B /* AudioContainer.h in Headers */ = {isa = PBXBuildFile; fileRef = 8E8D3D2D0CBAEE6E00135C1B /* AudioContainer.h */; settings = {ATTRIBUTES = (Public, ); }; }; 8E8D3D2F0CBAEE6E00135C1B /* AudioContainer.h in Headers */ = {isa = PBXBuildFile; fileRef = 8E8D3D2D0CBAEE6E00135C1B /* AudioContainer.h */; settings = {ATTRIBUTES = (Public, ); }; };
8E8D3D300CBAEE6E00135C1B /* AudioContainer.m in Sources */ = {isa = PBXBuildFile; fileRef = 8E8D3D2E0CBAEE6E00135C1B /* AudioContainer.m */; }; 8E8D3D300CBAEE6E00135C1B /* AudioContainer.m in Sources */ = {isa = PBXBuildFile; fileRef = 8E8D3D2E0CBAEE6E00135C1B /* AudioContainer.m */; };
8EC1225F0B993BD500C5B3AD /* ConverterNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 8EC1225D0B993BD500C5B3AD /* ConverterNode.h */; settings = {ATTRIBUTES = (Public, ); }; }; 8EC1225F0B993BD500C5B3AD /* ConverterNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 8EC1225D0B993BD500C5B3AD /* ConverterNode.h */; };
8EC122600B993BD500C5B3AD /* ConverterNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 8EC1225E0B993BD500C5B3AD /* ConverterNode.m */; }; 8EC122600B993BD500C5B3AD /* ConverterNode.mm in Sources */ = {isa = PBXBuildFile; fileRef = 8EC1225E0B993BD500C5B3AD /* ConverterNode.mm */; };
B0575F2D0D687A0800411D77 /* Helper.h in Headers */ = {isa = PBXBuildFile; fileRef = B0575F2C0D687A0800411D77 /* Helper.h */; settings = {ATTRIBUTES = (Public, ); }; }; B0575F2D0D687A0800411D77 /* Helper.h in Headers */ = {isa = PBXBuildFile; fileRef = B0575F2C0D687A0800411D77 /* Helper.h */; settings = {ATTRIBUTES = (Public, ); }; };
B0575F300D687A4000411D77 /* Helper.m in Sources */ = {isa = PBXBuildFile; fileRef = B0575F2F0D687A4000411D77 /* Helper.m */; }; B0575F300D687A4000411D77 /* Helper.m in Sources */ = {isa = PBXBuildFile; fileRef = B0575F2F0D687A4000411D77 /* Helper.m */; };
/* End PBXBuildFile section */ /* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */ /* Begin PBXCopyFilesBuildPhase section */
17D21D2B0B8BE6A200D1EBDE /* CopyFiles */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = "";
dstSubfolderSpec = 10;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
83725A8D27AA0DDB0003F694 /* CopyFiles */ = { 83725A8D27AA0DDB0003F694 /* CopyFiles */ = {
isa = PBXCopyFilesBuildPhase; isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647; buildActionMask = 2147483647;
@@ -154,9 +156,11 @@
17D21C7D0B8BE4BA00D1EBDE /* Node.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = Node.m; sourceTree = "<group>"; }; 17D21C7D0B8BE4BA00D1EBDE /* Node.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = Node.m; sourceTree = "<group>"; };
17D21C7E0B8BE4BA00D1EBDE /* OutputNode.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = OutputNode.h; sourceTree = "<group>"; }; 17D21C7E0B8BE4BA00D1EBDE /* OutputNode.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = OutputNode.h; sourceTree = "<group>"; };
17D21C7F0B8BE4BA00D1EBDE /* OutputNode.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = OutputNode.m; sourceTree = "<group>"; }; 17D21C7F0B8BE4BA00D1EBDE /* OutputNode.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = OutputNode.m; sourceTree = "<group>"; };
17D21C9C0B8BE4BA00D1EBDE /* OutputCoreAudio.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = OutputCoreAudio.h; sourceTree = "<group>"; };
17D21C9D0B8BE4BA00D1EBDE /* OutputCoreAudio.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = OutputCoreAudio.m; sourceTree = "<group>"; };
17D21C9E0B8BE4BA00D1EBDE /* Status.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = Status.h; sourceTree = "<group>"; }; 17D21C9E0B8BE4BA00D1EBDE /* Status.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = Status.h; sourceTree = "<group>"; };
17D21CF10B8BE5EF00D1EBDE /* CogSemaphore.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = CogSemaphore.h; sourceTree = "<group>"; }; 17D21CF10B8BE5EF00D1EBDE /* Semaphore.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = Semaphore.h; sourceTree = "<group>"; };
17D21CF20B8BE5EF00D1EBDE /* CogSemaphore.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = CogSemaphore.m; sourceTree = "<group>"; }; 17D21CF20B8BE5EF00D1EBDE /* Semaphore.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = Semaphore.m; sourceTree = "<group>"; };
17D21DA90B8BE76800D1EBDE /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = /System/Library/Frameworks/AudioToolbox.framework; sourceTree = "<absolute>"; }; 17D21DA90B8BE76800D1EBDE /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = /System/Library/Frameworks/AudioToolbox.framework; sourceTree = "<absolute>"; };
17D21DAA0B8BE76800D1EBDE /* AudioUnit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioUnit.framework; path = /System/Library/Frameworks/AudioUnit.framework; sourceTree = "<absolute>"; }; 17D21DAA0B8BE76800D1EBDE /* AudioUnit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioUnit.framework; path = /System/Library/Frameworks/AudioUnit.framework; sourceTree = "<absolute>"; };
17D21DAB0B8BE76800D1EBDE /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = /System/Library/Frameworks/CoreAudio.framework; sourceTree = "<absolute>"; }; 17D21DAB0B8BE76800D1EBDE /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = /System/Library/Frameworks/CoreAudio.framework; sourceTree = "<absolute>"; };
@@ -169,87 +173,75 @@
17F94DD40B8D0F7000A34E87 /* PluginController.mm */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.cpp.objcpp; path = PluginController.mm; sourceTree = "<group>"; }; 17F94DD40B8D0F7000A34E87 /* PluginController.mm */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.cpp.objcpp; path = PluginController.mm; sourceTree = "<group>"; };
17F94DDC0B8D101100A34E87 /* Plugin.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = Plugin.h; sourceTree = "<group>"; }; 17F94DDC0B8D101100A34E87 /* Plugin.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = Plugin.h; sourceTree = "<group>"; };
32DBCF5E0370ADEE00C91783 /* CogAudio_Prefix.pch */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CogAudio_Prefix.pch; sourceTree = "<group>"; }; 32DBCF5E0370ADEE00C91783 /* CogAudio_Prefix.pch */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CogAudio_Prefix.pch; sourceTree = "<group>"; };
831A50132865A7FD0049CFE4 /* rsstate.hpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = rsstate.hpp; sourceTree = "<group>"; };
831A50152865A8800049CFE4 /* rsstate.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = rsstate.cpp; sourceTree = "<group>"; };
831A50172865A8B30049CFE4 /* rsstate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = rsstate.h; sourceTree = "<group>"; };
8328995127CB510F00D7F028 /* RedundantPlaylistDataStore.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = RedundantPlaylistDataStore.m; path = ../../Utils/RedundantPlaylistDataStore.m; sourceTree = "<group>"; }; 8328995127CB510F00D7F028 /* RedundantPlaylistDataStore.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = RedundantPlaylistDataStore.m; path = ../../Utils/RedundantPlaylistDataStore.m; sourceTree = "<group>"; };
8328995227CB511000D7F028 /* RedundantPlaylistDataStore.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = RedundantPlaylistDataStore.h; path = ../../Utils/RedundantPlaylistDataStore.h; sourceTree = "<group>"; }; 8328995227CB511000D7F028 /* RedundantPlaylistDataStore.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = RedundantPlaylistDataStore.h; path = ../../Utils/RedundantPlaylistDataStore.h; sourceTree = "<group>"; };
8328995527CB51B700D7F028 /* SHA256Digest.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = SHA256Digest.h; path = ../../Utils/SHA256Digest.h; sourceTree = "<group>"; }; 8328995527CB51B700D7F028 /* SHA256Digest.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = SHA256Digest.h; path = ../../Utils/SHA256Digest.h; sourceTree = "<group>"; };
8328995627CB51B700D7F028 /* SHA256Digest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = SHA256Digest.m; path = ../../Utils/SHA256Digest.m; sourceTree = "<group>"; }; 8328995627CB51B700D7F028 /* SHA256Digest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = SHA256Digest.m; path = ../../Utils/SHA256Digest.m; sourceTree = "<group>"; };
8328995927CB51C900D7F028 /* Security.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Security.framework; path = System/Library/Frameworks/Security.framework; sourceTree = SDKROOT; }; 8328995927CB51C900D7F028 /* Security.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Security.framework; path = System/Library/Frameworks/Security.framework; sourceTree = SDKROOT; };
833442402D6EFA6700C51D38 /* VisualizationController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisualizationController.h; sourceTree = "<group>"; };
833442412D6EFA6700C51D38 /* VisualizationController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisualizationController.m; sourceTree = "<group>"; };
833738E92D5EA52500278628 /* DSPDownmixNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPDownmixNode.h; sourceTree = "<group>"; };
833738EB2D5EA53500278628 /* DSPDownmixNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPDownmixNode.m; sourceTree = "<group>"; };
833738ED2D5EA5B700278628 /* Downmix.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Downmix.h; sourceTree = "<group>"; };
833738EE2D5EA5B700278628 /* Downmix.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Downmix.m; sourceTree = "<group>"; };
8347C73F2796C58800FA8A7D /* NSFileHandle+CreateFile.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSFileHandle+CreateFile.h"; path = "../../Utils/NSFileHandle+CreateFile.h"; sourceTree = "<group>"; }; 8347C73F2796C58800FA8A7D /* NSFileHandle+CreateFile.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSFileHandle+CreateFile.h"; path = "../../Utils/NSFileHandle+CreateFile.h"; sourceTree = "<group>"; };
8347C7402796C58800FA8A7D /* NSFileHandle+CreateFile.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSFileHandle+CreateFile.m"; path = "../../Utils/NSFileHandle+CreateFile.m"; sourceTree = "<group>"; }; 8347C7402796C58800FA8A7D /* NSFileHandle+CreateFile.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSFileHandle+CreateFile.m"; path = "../../Utils/NSFileHandle+CreateFile.m"; sourceTree = "<group>"; };
834A41A5287A90AB00EB9D9B /* freesurround_decoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = freesurround_decoder.h; sourceTree = "<group>"; };
834A41A6287A90AB00EB9D9B /* freesurround_decoder.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = freesurround_decoder.cpp; sourceTree = "<group>"; };
834A41A7287A90AB00EB9D9B /* channelmaps.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = channelmaps.cpp; sourceTree = "<group>"; };
834A41A8287A90AB00EB9D9B /* channelmaps.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = channelmaps.h; sourceTree = "<group>"; };
834FD4EA27AF8F380063BC83 /* AudioChunk.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AudioChunk.h; sourceTree = "<group>"; }; 834FD4EA27AF8F380063BC83 /* AudioChunk.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AudioChunk.h; sourceTree = "<group>"; };
834FD4EC27AF91220063BC83 /* AudioChunk.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AudioChunk.m; sourceTree = "<group>"; }; 834FD4EC27AF91220063BC83 /* AudioChunk.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AudioChunk.m; sourceTree = "<group>"; };
834FD4EE27AF93680063BC83 /* ChunkList.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ChunkList.h; sourceTree = "<group>"; }; 834FD4EE27AF93680063BC83 /* ChunkList.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ChunkList.h; sourceTree = "<group>"; };
834FD4EF27AF93680063BC83 /* ChunkList.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ChunkList.m; sourceTree = "<group>"; }; 834FD4EF27AF93680063BC83 /* ChunkList.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ChunkList.m; sourceTree = "<group>"; };
8350416C28646149006B32CC /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; }; 834FD4F227AFA2150063BC83 /* Downmix.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Downmix.h; sourceTree = "<group>"; };
834FD4F327AFA2150063BC83 /* Downmix.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = Downmix.m; sourceTree = "<group>"; };
835C88A42797D4D400E28EAE /* LICENSE.LGPL */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = LICENSE.LGPL; sourceTree = "<group>"; };
835C88A52797D4D400E28EAE /* License.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = License.txt; sourceTree = "<group>"; };
835C88A62797D4D400E28EAE /* lpc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = lpc.c; sourceTree = "<group>"; };
835C88A72797D4D400E28EAE /* lpc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = lpc.h; sourceTree = "<group>"; };
835C88AC2797DA5800E28EAE /* util.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = util.h; path = ThirdParty/lvqcl/util.h; sourceTree = SOURCE_ROOT; };
835C88AF279811A500E28EAE /* hdcd_decode2.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = hdcd_decode2.h; sourceTree = "<group>"; }; 835C88AF279811A500E28EAE /* hdcd_decode2.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = hdcd_decode2.h; sourceTree = "<group>"; };
835C88B0279811A500E28EAE /* hdcd_decode2.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = hdcd_decode2.c; sourceTree = "<group>"; }; 835C88B0279811A500E28EAE /* hdcd_decode2.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = hdcd_decode2.c; sourceTree = "<group>"; };
835DD2652ACAF1D90057E319 /* OutputCoreAudio.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OutputCoreAudio.m; sourceTree = "<group>"; }; 835EDD7A279FE23A001EDCCE /* HeadphoneFilter.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = HeadphoneFilter.mm; sourceTree = "<group>"; };
835DD2662ACAF1D90057E319 /* OutputCoreAudio.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OutputCoreAudio.h; sourceTree = "<group>"; }; 835EDD7C279FE307001EDCCE /* HeadphoneFilter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = HeadphoneFilter.h; sourceTree = "<group>"; };
835DD26B2ACAF5AD0057E319 /* LICENSE.LGPL */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = LICENSE.LGPL; sourceTree = "<group>"; };
835DD26C2ACAF5AD0057E319 /* License.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = License.txt; sourceTree = "<group>"; };
835DD26D2ACAF5AD0057E319 /* lpc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = lpc.h; sourceTree = "<group>"; };
835DD26E2ACAF5AD0057E319 /* util.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = util.h; sourceTree = "<group>"; };
835DD26F2ACAF5AD0057E319 /* lpc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = lpc.c; sourceTree = "<group>"; };
835FAC5C27BCA14D00BA8562 /* BadSampleCleaner.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = BadSampleCleaner.h; path = Utils/BadSampleCleaner.h; sourceTree = SOURCE_ROOT; }; 835FAC5C27BCA14D00BA8562 /* BadSampleCleaner.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = BadSampleCleaner.h; path = Utils/BadSampleCleaner.h; sourceTree = SOURCE_ROOT; };
835FAC5D27BCA14D00BA8562 /* BadSampleCleaner.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; name = BadSampleCleaner.m; path = Utils/BadSampleCleaner.m; sourceTree = SOURCE_ROOT; }; 835FAC5D27BCA14D00BA8562 /* BadSampleCleaner.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; name = BadSampleCleaner.m; path = Utils/BadSampleCleaner.m; sourceTree = SOURCE_ROOT; };
83725A7B27AA0D8A0003F694 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; 83725A7B27AA0D8A0003F694 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
83725A7C27AA0D8E0003F694 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; 83725A7C27AA0D8E0003F694 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
8377C64B27B8C51500E8BC0F /* fft_accelerate.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = fft_accelerate.c; sourceTree = "<group>"; }; 8377C65027B8CAD100E8BC0F /* VisualizationController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisualizationController.h; sourceTree = "<group>"; };
8377C64D27B8C54400E8BC0F /* fft.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = fft.h; sourceTree = "<group>"; }; 8377C65127B8CAD100E8BC0F /* VisualizationController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisualizationController.m; sourceTree = "<group>"; };
8384912618080FF100E7332D /* Logging.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = Logging.h; path = ../../Utils/Logging.h; sourceTree = "<group>"; }; 8384912618080FF100E7332D /* Logging.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = Logging.h; path = ../../Utils/Logging.h; sourceTree = "<group>"; };
838A33712D06A97D00D0D770 /* librubberband.3.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = librubberband.3.dylib; path = ../ThirdParty/rubberband/lib/librubberband.3.dylib; sourceTree = SOURCE_ROOT; };
839065F22853338700636FBB /* dsd2float.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = dsd2float.h; sourceTree = "<group>"; };
839366651815923C006DD712 /* CogPluginMulti.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CogPluginMulti.h; sourceTree = "<group>"; }; 839366651815923C006DD712 /* CogPluginMulti.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CogPluginMulti.h; sourceTree = "<group>"; };
839366661815923C006DD712 /* CogPluginMulti.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CogPluginMulti.m; sourceTree = "<group>"; }; 839366661815923C006DD712 /* CogPluginMulti.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CogPluginMulti.m; sourceTree = "<group>"; };
8399CF2A27B5D1D4008751F1 /* NSDictionary+Merge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSDictionary+Merge.h"; path = "../Utils/NSDictionary+Merge.h"; sourceTree = SOURCE_ROOT; }; 8399CF2A27B5D1D4008751F1 /* NSDictionary+Merge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSDictionary+Merge.h"; path = "../../Utils/NSDictionary+Merge.h"; sourceTree = "<group>"; };
8399CF2B27B5D1D4008751F1 /* NSDictionary+Merge.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSDictionary+Merge.m"; path = "../Utils/NSDictionary+Merge.m"; sourceTree = SOURCE_ROOT; }; 8399CF2B27B5D1D4008751F1 /* NSDictionary+Merge.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSDictionary+Merge.m"; path = "../../Utils/NSDictionary+Merge.m"; sourceTree = "<group>"; };
839E56E12879450300DFB5F4 /* HrtfData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HrtfData.h; sourceTree = "<group>"; }; 83D44DBE2839C60A00D4DD10 /* cqt.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = cqt.h; sourceTree = "<group>"; };
839E56E22879450300DFB5F4 /* Endianness.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Endianness.h; sourceTree = "<group>"; }; 83D44DBF2839C60A00D4DD10 /* cqt.c */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.c; path = cqt.c; sourceTree = "<group>"; };
839E56E32879450300DFB5F4 /* HrtfData.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = HrtfData.cpp; sourceTree = "<group>"; }; 83F18ADF27D1E8EF00385946 /* CDSPHBDownsampler.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CDSPHBDownsampler.h; sourceTree = "<group>"; };
839E56E42879450300DFB5F4 /* IHrtfData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = IHrtfData.h; sourceTree = "<group>"; }; 83F18AE127D1E8EF00385946 /* pffft_double.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = pffft_double.h; sourceTree = "<group>"; };
839E56E928794F6300DFB5F4 /* HrtfTypes.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HrtfTypes.h; sourceTree = "<group>"; }; 83F18AE327D1E8EF00385946 /* pf_neon_double_from_avx.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = pf_neon_double_from_avx.h; sourceTree = "<group>"; };
839E56F6287974A100DFB5F4 /* SandboxBroker.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = SandboxBroker.h; path = ../Utils/SandboxBroker.h; sourceTree = "<group>"; }; 83F18AE427D1E8EF00385946 /* pf_double.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = pf_double.h; sourceTree = "<group>"; };
839E899D2D5DB9D500A13526 /* VisualizationNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisualizationNode.h; sourceTree = "<group>"; }; 83F18AE527D1E8EF00385946 /* pf_neon_double.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = pf_neon_double.h; sourceTree = "<group>"; };
839E899F2D5DBA1700A13526 /* VisualizationNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisualizationNode.m; sourceTree = "<group>"; }; 83F18AE627D1E8EF00385946 /* pf_sse2_double.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = pf_sse2_double.h; sourceTree = "<group>"; };
83A349672D5C3F430096D530 /* DSPRubberbandNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPRubberbandNode.h; sourceTree = "<group>"; }; 83F18AE727D1E8EF00385946 /* pf_avx_double.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = pf_avx_double.h; sourceTree = "<group>"; };
83A349682D5C3F430096D530 /* DSPRubberbandNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPRubberbandNode.m; sourceTree = "<group>"; }; 83F18AE827D1E8EF00385946 /* pf_scalar_double.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = pf_scalar_double.h; sourceTree = "<group>"; };
83A3496C2D5C40490096D530 /* DSPFSurroundNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPFSurroundNode.h; sourceTree = "<group>"; }; 83F18AE927D1E8EF00385946 /* pffft_priv_impl.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = pffft_priv_impl.h; sourceTree = "<group>"; };
83A3496E2D5C405E0096D530 /* DSPFSurroundNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPFSurroundNode.m; sourceTree = "<group>"; }; 83F18AEA27D1E8EF00385946 /* pffft_double.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = pffft_double.c; sourceTree = "<group>"; };
83A349702D5C41810096D530 /* FSurroundFilter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FSurroundFilter.h; sourceTree = "<group>"; }; 83F18AF827D1E8EF00385946 /* CDSPSincFilterGen.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CDSPSincFilterGen.h; sourceTree = "<group>"; };
83A349712D5C41810096D530 /* FSurroundFilter.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FSurroundFilter.mm; sourceTree = "<group>"; }; 83F18AF927D1E8EF00385946 /* r8butil.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = r8butil.h; sourceTree = "<group>"; };
83A349742D5C50A10096D530 /* DSPHRTFNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPHRTFNode.h; sourceTree = "<group>"; }; 83F18AFB27D1E8EF00385946 /* LICENSE */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = LICENSE; sourceTree = "<group>"; };
83A349762D5C50B20096D530 /* DSPHRTFNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPHRTFNode.m; sourceTree = "<group>"; }; 83F18AFC27D1E8EF00385946 /* r8bbase.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = r8bbase.h; sourceTree = "<group>"; };
83B74280289E027F005AAC28 /* CogAudio-Bridging-Header.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "CogAudio-Bridging-Header.h"; sourceTree = "<group>"; }; 83F18AFD27D1E8EF00385946 /* CDSPFIRFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CDSPFIRFilter.h; sourceTree = "<group>"; };
83CB56632E06464D003DD379 /* NSDictionary+Optional.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "NSDictionary+Optional.h"; path = "../Utils/NSDictionary+Optional.h"; sourceTree = SOURCE_ROOT; }; 83F18B0827D1E8EF00385946 /* CDSPProcessor.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CDSPProcessor.h; sourceTree = "<group>"; };
83CB56642E06464D003DD379 /* NSDictionary+Optional.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; name = "NSDictionary+Optional.m"; path = "../Utils/NSDictionary+Optional.m"; sourceTree = SOURCE_ROOT; }; 83F18B0927D1E8EF00385946 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = "<group>"; };
83F8431E2D5C6272008C123B /* HeadphoneFilter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = HeadphoneFilter.h; sourceTree = "<group>"; }; 83F18B0A27D1E8EF00385946 /* fft4g.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = fft4g.h; sourceTree = "<group>"; };
83F8431F2D5C6272008C123B /* HeadphoneFilter.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = HeadphoneFilter.mm; sourceTree = "<group>"; }; 83F18B0B27D1E8EF00385946 /* CDSPRealFFT.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CDSPRealFFT.h; sourceTree = "<group>"; };
83F843222D5C66DA008C123B /* DSPEqualizerNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPEqualizerNode.h; sourceTree = "<group>"; }; 83F18B0C27D1E8EF00385946 /* CDSPFracInterpolator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CDSPFracInterpolator.h; sourceTree = "<group>"; };
83F843242D5C66E9008C123B /* DSPEqualizerNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPEqualizerNode.m; sourceTree = "<group>"; }; 83F18B1727D1E8EF00385946 /* CDSPBlockConvolver.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CDSPBlockConvolver.h; sourceTree = "<group>"; };
83F9FFF02D6EC43900026576 /* soxr.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = soxr.h; sourceTree = "<group>"; }; 83F18B1827D1E8EF00385946 /* CDSPHBUpsampler.inc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.pascal; path = CDSPHBUpsampler.inc; sourceTree = "<group>"; };
83F9FFF22D6EC43900026576 /* libsoxr.0.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; path = libsoxr.0.dylib; sourceTree = "<group>"; }; 83F18B1927D1E8EF00385946 /* r8bconf.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = r8bconf.h; sourceTree = "<group>"; };
83F9FFF42D6EC43900026576 /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = "<group>"; }; 83F18B1A27D1E8EF00385946 /* r8bbase.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = r8bbase.cpp; sourceTree = "<group>"; };
83FFED502D5B08BC0044CCAF /* DSPNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPNode.h; sourceTree = "<group>"; }; 83F18B1B27D1E8EF00385946 /* pffft.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = pffft.h; sourceTree = "<group>"; };
83FFED522D5B09320044CCAF /* DSPNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPNode.m; sourceTree = "<group>"; }; 83F18B1C27D1E8EF00385946 /* CDSPResampler.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CDSPResampler.h; sourceTree = "<group>"; };
83F18B1D27D1E8EF00385946 /* CDSPHBUpsampler.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CDSPHBUpsampler.h; sourceTree = "<group>"; };
83F18B5527D1F5E900385946 /* r8bstate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = r8bstate.h; path = ThirdParty/r8bstate.h; sourceTree = SOURCE_ROOT; };
8DC2EF5A0486A6940098B216 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; }; 8DC2EF5A0486A6940098B216 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; };
8DC2EF5B0486A6940098B216 /* CogAudio.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = CogAudio.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 8DC2EF5B0486A6940098B216 /* CogAudio.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = CogAudio.framework; sourceTree = BUILT_PRODUCTS_DIR; };
8E8D3D2D0CBAEE6E00135C1B /* AudioContainer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioContainer.h; sourceTree = "<group>"; }; 8E8D3D2D0CBAEE6E00135C1B /* AudioContainer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioContainer.h; sourceTree = "<group>"; };
8E8D3D2E0CBAEE6E00135C1B /* AudioContainer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AudioContainer.m; sourceTree = "<group>"; }; 8E8D3D2E0CBAEE6E00135C1B /* AudioContainer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AudioContainer.m; sourceTree = "<group>"; };
8EC1225D0B993BD500C5B3AD /* ConverterNode.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = ConverterNode.h; sourceTree = "<group>"; }; 8EC1225D0B993BD500C5B3AD /* ConverterNode.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = ConverterNode.h; sourceTree = "<group>"; };
8EC1225E0B993BD500C5B3AD /* ConverterNode.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = ConverterNode.m; sourceTree = "<group>"; }; 8EC1225E0B993BD500C5B3AD /* ConverterNode.mm */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.cpp.objcpp; path = ConverterNode.mm; sourceTree = "<group>"; };
B0575F2C0D687A0800411D77 /* Helper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Helper.h; sourceTree = "<group>"; }; B0575F2C0D687A0800411D77 /* Helper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Helper.h; sourceTree = "<group>"; };
B0575F2F0D687A4000411D77 /* Helper.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = Helper.m; sourceTree = "<group>"; }; B0575F2F0D687A4000411D77 /* Helper.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = Helper.m; sourceTree = "<group>"; };
D2F7E79907B2D74100F64583 /* CoreData.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreData.framework; path = /System/Library/Frameworks/CoreData.framework; sourceTree = "<absolute>"; }; D2F7E79907B2D74100F64583 /* CoreData.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreData.framework; path = /System/Library/Frameworks/CoreData.framework; sourceTree = "<absolute>"; };
@@ -262,15 +254,12 @@
files = ( files = (
8328995A27CB51C900D7F028 /* Security.framework in Frameworks */, 8328995A27CB51C900D7F028 /* Security.framework in Frameworks */,
83725A9127AA16D50003F694 /* AVFoundation.framework in Frameworks */, 83725A9127AA16D50003F694 /* AVFoundation.framework in Frameworks */,
83F9FFF82D6EC43900026576 /* libsoxr.0.dylib in Frameworks */,
8DC2EF570486A6940098B216 /* Cocoa.framework in Frameworks */, 8DC2EF570486A6940098B216 /* Cocoa.framework in Frameworks */,
8350416D28646149006B32CC /* CoreMedia.framework in Frameworks */,
83725A9027AA16C90003F694 /* Accelerate.framework in Frameworks */, 83725A9027AA16C90003F694 /* Accelerate.framework in Frameworks */,
17D21DAD0B8BE76800D1EBDE /* AudioToolbox.framework in Frameworks */, 17D21DAD0B8BE76800D1EBDE /* AudioToolbox.framework in Frameworks */,
17D21DAE0B8BE76800D1EBDE /* AudioUnit.framework in Frameworks */, 17D21DAE0B8BE76800D1EBDE /* AudioUnit.framework in Frameworks */,
17D21DAF0B8BE76800D1EBDE /* CoreAudio.framework in Frameworks */, 17D21DAF0B8BE76800D1EBDE /* CoreAudio.framework in Frameworks */,
17D21DB00B8BE76800D1EBDE /* CoreAudioKit.framework in Frameworks */, 17D21DB00B8BE76800D1EBDE /* CoreAudioKit.framework in Frameworks */,
838A33722D06A97D00D0D770 /* librubberband.3.dylib in Frameworks */,
); );
runOnlyForDeploymentPostprocessing = 0; runOnlyForDeploymentPostprocessing = 0;
}; };
@@ -318,7 +307,6 @@
08FB77AEFE84172EC02AAC07 /* Classes */ = { 08FB77AEFE84172EC02AAC07 /* Classes */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
83B74280289E027F005AAC28 /* CogAudio-Bridging-Header.h */,
8377C64F27B8CAAB00E8BC0F /* Visualization */, 8377C64F27B8CAAB00E8BC0F /* Visualization */,
17F94DDC0B8D101100A34E87 /* Plugin.h */, 17F94DDC0B8D101100A34E87 /* Plugin.h */,
17D21EBB0B8BF44000D1EBDE /* AudioPlayer.h */, 17D21EBB0B8BF44000D1EBDE /* AudioPlayer.h */,
@@ -341,7 +329,6 @@
17F94DD40B8D0F7000A34E87 /* PluginController.mm */, 17F94DD40B8D0F7000A34E87 /* PluginController.mm */,
17D21C750B8BE4BA00D1EBDE /* Chain */, 17D21C750B8BE4BA00D1EBDE /* Chain */,
17D21C9B0B8BE4BA00D1EBDE /* Output */, 17D21C9B0B8BE4BA00D1EBDE /* Output */,
839E56F6287974A100DFB5F4 /* SandboxBroker.h */,
17D21C9E0B8BE4BA00D1EBDE /* Status.h */, 17D21C9E0B8BE4BA00D1EBDE /* Status.h */,
B0575F2C0D687A0800411D77 /* Helper.h */, B0575F2C0D687A0800411D77 /* Helper.h */,
B0575F2F0D687A4000411D77 /* Helper.m */, B0575F2F0D687A4000411D77 /* Helper.m */,
@@ -362,7 +349,6 @@
1058C7B2FEA5585E11CA2CBB /* Other Frameworks */ = { 1058C7B2FEA5585E11CA2CBB /* Other Frameworks */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
838A33712D06A97D00D0D770 /* librubberband.3.dylib */,
83725A7B27AA0D8A0003F694 /* Accelerate.framework */, 83725A7B27AA0D8A0003F694 /* Accelerate.framework */,
17D21DAA0B8BE76800D1EBDE /* AudioUnit.framework */, 17D21DAA0B8BE76800D1EBDE /* AudioUnit.framework */,
17D21DA90B8BE76800D1EBDE /* AudioToolbox.framework */, 17D21DA90B8BE76800D1EBDE /* AudioToolbox.framework */,
@@ -379,25 +365,24 @@
17D21C750B8BE4BA00D1EBDE /* Chain */ = { 17D21C750B8BE4BA00D1EBDE /* Chain */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
83A349692D5C3F430096D530 /* DSP */,
834FD4EA27AF8F380063BC83 /* AudioChunk.h */, 834FD4EA27AF8F380063BC83 /* AudioChunk.h */,
834FD4EC27AF91220063BC83 /* AudioChunk.m */, 834FD4EC27AF91220063BC83 /* AudioChunk.m */,
17D21C760B8BE4BA00D1EBDE /* BufferChain.h */,
17D21C770B8BE4BA00D1EBDE /* BufferChain.m */,
834FD4EE27AF93680063BC83 /* ChunkList.h */, 834FD4EE27AF93680063BC83 /* ChunkList.h */,
834FD4EF27AF93680063BC83 /* ChunkList.m */, 834FD4EF27AF93680063BC83 /* ChunkList.m */,
834FD4F227AFA2150063BC83 /* Downmix.h */,
834FD4F327AFA2150063BC83 /* Downmix.m */,
17D21C760B8BE4BA00D1EBDE /* BufferChain.h */,
17D21C770B8BE4BA00D1EBDE /* BufferChain.m */,
8EC1225D0B993BD500C5B3AD /* ConverterNode.h */, 8EC1225D0B993BD500C5B3AD /* ConverterNode.h */,
8EC1225E0B993BD500C5B3AD /* ConverterNode.m */, 8EC1225E0B993BD500C5B3AD /* ConverterNode.mm */,
17D21C7A0B8BE4BA00D1EBDE /* InputNode.h */, 17D21C7A0B8BE4BA00D1EBDE /* InputNode.h */,
17D21C7B0B8BE4BA00D1EBDE /* InputNode.m */, 17D21C7B0B8BE4BA00D1EBDE /* InputNode.m */,
17D21C7C0B8BE4BA00D1EBDE /* Node.h */, 17D21C7C0B8BE4BA00D1EBDE /* Node.h */,
17D21C7D0B8BE4BA00D1EBDE /* Node.m */, 17D21C7D0B8BE4BA00D1EBDE /* Node.m */,
17D21C7E0B8BE4BA00D1EBDE /* OutputNode.h */, 17D21C7E0B8BE4BA00D1EBDE /* OutputNode.h */,
17D21C7F0B8BE4BA00D1EBDE /* OutputNode.m */, 17D21C7F0B8BE4BA00D1EBDE /* OutputNode.m */,
83FFED502D5B08BC0044CCAF /* DSPNode.h */, 835EDD7C279FE307001EDCCE /* HeadphoneFilter.h */,
83FFED522D5B09320044CCAF /* DSPNode.m */, 835EDD7A279FE23A001EDCCE /* HeadphoneFilter.mm */,
839E899D2D5DB9D500A13526 /* VisualizationNode.h */,
839E899F2D5DBA1700A13526 /* VisualizationNode.m */,
); );
path = Chain; path = Chain;
sourceTree = "<group>"; sourceTree = "<group>";
@ -405,8 +390,8 @@
17D21C9B0B8BE4BA00D1EBDE /* Output */ = { 17D21C9B0B8BE4BA00D1EBDE /* Output */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
835DD2662ACAF1D90057E319 /* OutputCoreAudio.h */, 17D21C9C0B8BE4BA00D1EBDE /* OutputCoreAudio.h */,
835DD2652ACAF1D90057E319 /* OutputCoreAudio.m */, 17D21C9D0B8BE4BA00D1EBDE /* OutputCoreAudio.m */,
); );
path = Output; path = Output;
sourceTree = "<group>"; sourceTree = "<group>";
@ -414,15 +399,9 @@
17D21CD80B8BE5B400D1EBDE /* ThirdParty */ = { 17D21CD80B8BE5B400D1EBDE /* ThirdParty */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
83F9FFF52D6EC43900026576 /* soxr */, 83F18ADE27D1E8EF00385946 /* r8brain-free-src */,
835DD2692ACAF5AD0057E319 /* lvqcl */,
834A41A4287A90AB00EB9D9B /* fsurround */,
839E56E02879450300DFB5F4 /* hrtf */,
831A50152865A8800049CFE4 /* rsstate.cpp */,
831A50172865A8B30049CFE4 /* rsstate.h */,
831A50132865A7FD0049CFE4 /* rsstate.hpp */,
8377C64A27B8C51500E8BC0F /* deadbeef */,
835C88AE279811A500E28EAE /* hdcd */, 835C88AE279811A500E28EAE /* hdcd */,
835C88A22797D4D400E28EAE /* lvqcl */,
17D21DC40B8BE79700D1EBDE /* CoreAudioUtils */, 17D21DC40B8BE79700D1EBDE /* CoreAudioUtils */,
); );
path = ThirdParty; path = ThirdParty;
@ -431,9 +410,6 @@
17D21CDC0B8BE5B400D1EBDE /* Utils */ = { 17D21CDC0B8BE5B400D1EBDE /* Utils */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
83CB56632E06464D003DD379 /* NSDictionary+Optional.h */,
83CB56642E06464D003DD379 /* NSDictionary+Optional.m */,
839065F22853338700636FBB /* dsd2float.h */,
8328995527CB51B700D7F028 /* SHA256Digest.h */, 8328995527CB51B700D7F028 /* SHA256Digest.h */,
8328995627CB51B700D7F028 /* SHA256Digest.m */, 8328995627CB51B700D7F028 /* SHA256Digest.m */,
8328995227CB511000D7F028 /* RedundantPlaylistDataStore.h */, 8328995227CB511000D7F028 /* RedundantPlaylistDataStore.h */,
@ -445,8 +421,8 @@
8347C73F2796C58800FA8A7D /* NSFileHandle+CreateFile.h */, 8347C73F2796C58800FA8A7D /* NSFileHandle+CreateFile.h */,
8347C7402796C58800FA8A7D /* NSFileHandle+CreateFile.m */, 8347C7402796C58800FA8A7D /* NSFileHandle+CreateFile.m */,
8384912618080FF100E7332D /* Logging.h */, 8384912618080FF100E7332D /* Logging.h */,
17D21CF10B8BE5EF00D1EBDE /* CogSemaphore.h */, 17D21CF10B8BE5EF00D1EBDE /* Semaphore.h */,
17D21CF20B8BE5EF00D1EBDE /* CogSemaphore.m */, 17D21CF20B8BE5EF00D1EBDE /* Semaphore.m */,
); );
path = Utils; path = Utils;
sourceTree = "<group>"; sourceTree = "<group>";
@ -468,15 +444,24 @@
name = "Other Sources"; name = "Other Sources";
sourceTree = "<group>"; sourceTree = "<group>";
}; };
834A41A4287A90AB00EB9D9B /* fsurround */ = { 835C88A22797D4D400E28EAE /* lvqcl */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
834A41A5287A90AB00EB9D9B /* freesurround_decoder.h */, 835C88A32797D4D400E28EAE /* License */,
834A41A6287A90AB00EB9D9B /* freesurround_decoder.cpp */, 835C88A62797D4D400E28EAE /* lpc.c */,
834A41A7287A90AB00EB9D9B /* channelmaps.cpp */, 835C88A72797D4D400E28EAE /* lpc.h */,
834A41A8287A90AB00EB9D9B /* channelmaps.h */, 835C88AC2797DA5800E28EAE /* util.h */,
); );
path = fsurround; path = lvqcl;
sourceTree = "<group>";
};
835C88A32797D4D400E28EAE /* License */ = {
isa = PBXGroup;
children = (
835C88A42797D4D400E28EAE /* LICENSE.LGPL */,
835C88A52797D4D400E28EAE /* License.txt */,
);
path = License;
sourceTree = "<group>"; sourceTree = "<group>";
}; };
835C88AE279811A500E28EAE /* hdcd */ = { 835C88AE279811A500E28EAE /* hdcd */ = {
@ -488,115 +473,76 @@
path = hdcd; path = hdcd;
sourceTree = "<group>"; sourceTree = "<group>";
}; };
835DD2692ACAF5AD0057E319 /* lvqcl */ = {
isa = PBXGroup;
children = (
835DD26A2ACAF5AD0057E319 /* License */,
835DD26D2ACAF5AD0057E319 /* lpc.h */,
835DD26E2ACAF5AD0057E319 /* util.h */,
835DD26F2ACAF5AD0057E319 /* lpc.c */,
);
path = lvqcl;
sourceTree = "<group>";
};
835DD26A2ACAF5AD0057E319 /* License */ = {
isa = PBXGroup;
children = (
835DD26B2ACAF5AD0057E319 /* LICENSE.LGPL */,
835DD26C2ACAF5AD0057E319 /* License.txt */,
);
path = License;
sourceTree = "<group>";
};
83725A8F27AA16C90003F694 /* Frameworks */ = { 83725A8F27AA16C90003F694 /* Frameworks */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
8350416C28646149006B32CC /* CoreMedia.framework */,
8328995927CB51C900D7F028 /* Security.framework */, 8328995927CB51C900D7F028 /* Security.framework */,
); );
name = Frameworks; name = Frameworks;
sourceTree = "<group>"; sourceTree = "<group>";
}; };
8377C64A27B8C51500E8BC0F /* deadbeef */ = {
isa = PBXGroup;
children = (
8377C64D27B8C54400E8BC0F /* fft.h */,
8377C64B27B8C51500E8BC0F /* fft_accelerate.c */,
);
path = deadbeef;
sourceTree = "<group>";
};
8377C64F27B8CAAB00E8BC0F /* Visualization */ = { 8377C64F27B8CAAB00E8BC0F /* Visualization */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
833442402D6EFA6700C51D38 /* VisualizationController.h */, 8377C65027B8CAD100E8BC0F /* VisualizationController.h */,
833442412D6EFA6700C51D38 /* VisualizationController.m */, 8377C65127B8CAD100E8BC0F /* VisualizationController.m */,
83D44DBE2839C60A00D4DD10 /* cqt.h */,
83D44DBF2839C60A00D4DD10 /* cqt.c */,
); );
path = Visualization; path = Visualization;
sourceTree = "<group>"; sourceTree = "<group>";
}; };
839E56E02879450300DFB5F4 /* hrtf */ = { 83F18ADE27D1E8EF00385946 /* r8brain-free-src */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
839E56E22879450300DFB5F4 /* Endianness.h */, 83F18ADF27D1E8EF00385946 /* CDSPHBDownsampler.h */,
839E56E32879450300DFB5F4 /* HrtfData.cpp */, 83F18AE027D1E8EF00385946 /* pffft_double */,
839E56E12879450300DFB5F4 /* HrtfData.h */, 83F18AF827D1E8EF00385946 /* CDSPSincFilterGen.h */,
839E56E928794F6300DFB5F4 /* HrtfTypes.h */, 83F18AF927D1E8EF00385946 /* r8butil.h */,
839E56E42879450300DFB5F4 /* IHrtfData.h */, 83F18AFB27D1E8EF00385946 /* LICENSE */,
83F18AFC27D1E8EF00385946 /* r8bbase.h */,
83F18AFD27D1E8EF00385946 /* CDSPFIRFilter.h */,
83F18B0827D1E8EF00385946 /* CDSPProcessor.h */,
83F18B0927D1E8EF00385946 /* README.md */,
83F18B0A27D1E8EF00385946 /* fft4g.h */,
83F18B0B27D1E8EF00385946 /* CDSPRealFFT.h */,
83F18B0C27D1E8EF00385946 /* CDSPFracInterpolator.h */,
83F18B1727D1E8EF00385946 /* CDSPBlockConvolver.h */,
83F18B1827D1E8EF00385946 /* CDSPHBUpsampler.inc */,
83F18B1927D1E8EF00385946 /* r8bconf.h */,
83F18B1A27D1E8EF00385946 /* r8bbase.cpp */,
83F18B1B27D1E8EF00385946 /* pffft.h */,
83F18B1C27D1E8EF00385946 /* CDSPResampler.h */,
83F18B1D27D1E8EF00385946 /* CDSPHBUpsampler.h */,
83F18B5527D1F5E900385946 /* r8bstate.h */,
); );
path = hrtf; path = "r8brain-free-src";
sourceTree = "<group>"; sourceTree = "<group>";
}; };
83A349692D5C3F430096D530 /* DSP */ = { 83F18AE027D1E8EF00385946 /* pffft_double */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
833738ED2D5EA5B700278628 /* Downmix.h */, 83F18AE127D1E8EF00385946 /* pffft_double.h */,
833738EE2D5EA5B700278628 /* Downmix.m */, 83F18AE227D1E8EF00385946 /* simd */,
83F8431E2D5C6272008C123B /* HeadphoneFilter.h */, 83F18AE927D1E8EF00385946 /* pffft_priv_impl.h */,
83F8431F2D5C6272008C123B /* HeadphoneFilter.mm */, 83F18AEA27D1E8EF00385946 /* pffft_double.c */,
83A349702D5C41810096D530 /* FSurroundFilter.h */,
83A349712D5C41810096D530 /* FSurroundFilter.mm */,
83A349672D5C3F430096D530 /* DSPRubberbandNode.h */,
83A349682D5C3F430096D530 /* DSPRubberbandNode.m */,
83A3496C2D5C40490096D530 /* DSPFSurroundNode.h */,
83A3496E2D5C405E0096D530 /* DSPFSurroundNode.m */,
83A349742D5C50A10096D530 /* DSPHRTFNode.h */,
83A349762D5C50B20096D530 /* DSPHRTFNode.m */,
83F843222D5C66DA008C123B /* DSPEqualizerNode.h */,
83F843242D5C66E9008C123B /* DSPEqualizerNode.m */,
833738E92D5EA52500278628 /* DSPDownmixNode.h */,
833738EB2D5EA53500278628 /* DSPDownmixNode.m */,
); );
path = DSP; path = pffft_double;
sourceTree = "<group>"; sourceTree = "<group>";
}; };
83F9FFF12D6EC43900026576 /* include */ = { 83F18AE227D1E8EF00385946 /* simd */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
83F9FFF02D6EC43900026576 /* soxr.h */, 83F18AE327D1E8EF00385946 /* pf_neon_double_from_avx.h */,
83F18AE427D1E8EF00385946 /* pf_double.h */,
83F18AE527D1E8EF00385946 /* pf_neon_double.h */,
83F18AE627D1E8EF00385946 /* pf_sse2_double.h */,
83F18AE727D1E8EF00385946 /* pf_avx_double.h */,
83F18AE827D1E8EF00385946 /* pf_scalar_double.h */,
); );
path = include; path = simd;
sourceTree = "<group>"; sourceTree = "<group>";
}; };
83F9FFF32D6EC43900026576 /* lib */ = {
isa = PBXGroup;
children = (
83F9FFF22D6EC43900026576 /* libsoxr.0.dylib */,
);
path = lib;
sourceTree = "<group>";
};
83F9FFF52D6EC43900026576 /* soxr */ = {
isa = PBXGroup;
children = (
83F9FFF12D6EC43900026576 /* include */,
83F9FFF32D6EC43900026576 /* lib */,
83F9FFF42D6EC43900026576 /* README.md */,
);
name = soxr;
path = ../ThirdParty/soxr;
sourceTree = SOURCE_ROOT;
};
/* End PBXGroup section */ /* End PBXGroup section */
/* Begin PBXHeadersBuildPhase section */ /* Begin PBXHeadersBuildPhase section */
@ -604,72 +550,75 @@
isa = PBXHeadersBuildPhase; isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647; buildActionMask = 2147483647;
files = ( files = (
833442422D6EFA6700C51D38 /* VisualizationController.h in Headers */,
833738F02D5EA5B700278628 /* Downmix.h in Headers */,
834FD4EB27AF8F380063BC83 /* AudioChunk.h in Headers */,
83F843202D5C6272008C123B /* HeadphoneFilter.h in Headers */,
83A349732D5C41810096D530 /* FSurroundFilter.h in Headers */,
839E56E82879450300DFB5F4 /* IHrtfData.h in Headers */,
17D21CA10B8BE4BA00D1EBDE /* BufferChain.h in Headers */, 17D21CA10B8BE4BA00D1EBDE /* BufferChain.h in Headers */,
831A50142865A7FD0049CFE4 /* rsstate.hpp in Headers */,
835DD2682ACAF1D90057E319 /* OutputCoreAudio.h in Headers */,
834A41AC287A90AB00EB9D9B /* channelmaps.h in Headers */,
83A3496D2D5C40490096D530 /* DSPFSurroundNode.h in Headers */,
83A3496B2D5C3F430096D530 /* DSPRubberbandNode.h in Headers */,
17D21CA50B8BE4BA00D1EBDE /* InputNode.h in Headers */, 17D21CA50B8BE4BA00D1EBDE /* InputNode.h in Headers */,
83CB56652E06464D003DD379 /* NSDictionary+Optional.h in Headers */,
833738EA2D5EA52500278628 /* DSPDownmixNode.h in Headers */,
83F843232D5C66DA008C123B /* DSPEqualizerNode.h in Headers */,
834A41A9287A90AB00EB9D9B /* freesurround_decoder.h in Headers */,
834FD4F027AF93680063BC83 /* ChunkList.h in Headers */,
835DD2732ACAF5AD0057E319 /* util.h in Headers */,
17D21CA70B8BE4BA00D1EBDE /* Node.h in Headers */, 17D21CA70B8BE4BA00D1EBDE /* Node.h in Headers */,
83D44DC02839C60A00D4DD10 /* cqt.h in Headers */,
83F18B3427D1E8EF00385946 /* r8butil.h in Headers */,
8399CF2C27B5D1D5008751F1 /* NSDictionary+Merge.h in Headers */, 8399CF2C27B5D1D5008751F1 /* NSDictionary+Merge.h in Headers */,
17D21CA90B8BE4BA00D1EBDE /* OutputNode.h in Headers */, 17D21CA90B8BE4BA00D1EBDE /* OutputNode.h in Headers */,
8EC1225F0B993BD500C5B3AD /* ConverterNode.h in Headers */, 83F18B4527D1E8EF00385946 /* CDSPRealFFT.h in Headers */,
83F18B3827D1E8EF00385946 /* CDSPFIRFilter.h in Headers */,
8328995427CB511000D7F028 /* RedundantPlaylistDataStore.h in Headers */, 8328995427CB511000D7F028 /* RedundantPlaylistDataStore.h in Headers */,
839E56E52879450300DFB5F4 /* HrtfData.h in Headers */, 17D21CC50B8BE4BA00D1EBDE /* OutputCoreAudio.h in Headers */,
83FFED512D5B08BC0044CCAF /* DSPNode.h in Headers */, 83F18B2427D1E8EF00385946 /* pf_avx_double.h in Headers */,
839E899E2D5DB9D500A13526 /* VisualizationNode.h in Headers */, 834FD4F427AFA2150063BC83 /* Downmix.h in Headers */,
83A349752D5C50A10096D530 /* DSPHRTFNode.h in Headers */,
83F9FFF62D6EC43900026576 /* soxr.h in Headers */,
17D21CC70B8BE4BA00D1EBDE /* Status.h in Headers */, 17D21CC70B8BE4BA00D1EBDE /* Status.h in Headers */,
17D21CF30B8BE5EF00D1EBDE /* CogSemaphore.h in Headers */, 83F18B1F27D1E8EF00385946 /* pffft_double.h in Headers */,
839E56E62879450300DFB5F4 /* Endianness.h in Headers */, 835C88AB2797D4D400E28EAE /* lpc.h in Headers */,
17D21CF30B8BE5EF00D1EBDE /* Semaphore.h in Headers */,
17D21DC70B8BE79700D1EBDE /* CoreAudioUtils.h in Headers */, 17D21DC70B8BE79700D1EBDE /* CoreAudioUtils.h in Headers */,
835DD2722ACAF5AD0057E319 /* lpc.h in Headers */, 83F18B3327D1E8EF00385946 /* CDSPSincFilterGen.h in Headers */,
17D21EBD0B8BF44000D1EBDE /* AudioPlayer.h in Headers */, 17D21EBD0B8BF44000D1EBDE /* AudioPlayer.h in Headers */,
831A50182865A8B30049CFE4 /* rsstate.h in Headers */, 83F18B4427D1E8EF00385946 /* fft4g.h in Headers */,
83F18B1E27D1E8EF00385946 /* CDSPHBDownsampler.h in Headers */,
83F18B5627D1F5E900385946 /* r8bstate.h in Headers */,
8377C65227B8CAD100E8BC0F /* VisualizationController.h in Headers */,
834FD4F027AF93680063BC83 /* ChunkList.h in Headers */,
83F18B2127D1E8EF00385946 /* pf_double.h in Headers */,
17F94DD50B8D0F7000A34E87 /* PluginController.h in Headers */, 17F94DD50B8D0F7000A34E87 /* PluginController.h in Headers */,
17F94DDD0B8D101100A34E87 /* Plugin.h in Headers */, 17F94DDD0B8D101100A34E87 /* Plugin.h in Headers */,
83F18B2027D1E8EF00385946 /* pf_neon_double_from_avx.h in Headers */,
83F18B5227D1E8F000385946 /* pffft.h in Headers */,
83F18B2627D1E8EF00385946 /* pffft_priv_impl.h in Headers */,
8328995727CB51B700D7F028 /* SHA256Digest.h in Headers */, 8328995727CB51B700D7F028 /* SHA256Digest.h in Headers */,
83F18B4627D1E8EF00385946 /* CDSPFracInterpolator.h in Headers */,
834FD4EB27AF8F380063BC83 /* AudioChunk.h in Headers */,
83F18B4E27D1E8F000385946 /* CDSPBlockConvolver.h in Headers */,
83F18B4227D1E8EF00385946 /* CDSPProcessor.h in Headers */,
17A2D3C50B8D1D37000778C4 /* AudioDecoder.h in Headers */, 17A2D3C50B8D1D37000778C4 /* AudioDecoder.h in Headers */,
8347C7412796C58800FA8A7D /* NSFileHandle+CreateFile.h in Headers */, 8347C7412796C58800FA8A7D /* NSFileHandle+CreateFile.h in Headers */,
83B74281289E027F005AAC28 /* CogAudio-Bridging-Header.h in Headers */,
17C940230B900909008627D6 /* AudioMetadataReader.h in Headers */, 17C940230B900909008627D6 /* AudioMetadataReader.h in Headers */,
839E56F7287974A100DFB5F4 /* SandboxBroker.h in Headers */,
839065F32853338700636FBB /* dsd2float.h in Headers */,
17B619300B909BC300BC003F /* AudioPropertiesReader.h in Headers */, 17B619300B909BC300BC003F /* AudioPropertiesReader.h in Headers */,
83F18B5427D1E8F000385946 /* CDSPHBUpsampler.h in Headers */,
835EDD7D279FE307001EDCCE /* HeadphoneFilter.h in Headers */,
839366671815923C006DD712 /* CogPluginMulti.h in Headers */, 839366671815923C006DD712 /* CogPluginMulti.h in Headers */,
83F18B2227D1E8EF00385946 /* pf_neon_double.h in Headers */,
83F18B2527D1E8EF00385946 /* pf_scalar_double.h in Headers */,
83F18B5327D1E8F000385946 /* CDSPResampler.h in Headers */,
17ADB13C0B97926D00257CA2 /* AudioSource.h in Headers */, 17ADB13C0B97926D00257CA2 /* AudioSource.h in Headers */,
835C88B1279811A500E28EAE /* hdcd_decode2.h in Headers */, 835C88B1279811A500E28EAE /* hdcd_decode2.h in Headers */,
8EC1225F0B993BD500C5B3AD /* ConverterNode.h in Headers */,
8384912718080FF100E7332D /* Logging.h in Headers */, 8384912718080FF100E7332D /* Logging.h in Headers */,
8377C64E27B8C54400E8BC0F /* fft.h in Headers */, 83F18B5027D1E8F000385946 /* r8bconf.h in Headers */,
835FAC5E27BCA14D00BA8562 /* BadSampleCleaner.h in Headers */, 835FAC5E27BCA14D00BA8562 /* BadSampleCleaner.h in Headers */,
83F18B2327D1E8EF00385946 /* pf_sse2_double.h in Headers */,
8E8D3D2F0CBAEE6E00135C1B /* AudioContainer.h in Headers */, 8E8D3D2F0CBAEE6E00135C1B /* AudioContainer.h in Headers */,
83F18B3727D1E8EF00385946 /* r8bbase.h in Headers */,
B0575F2D0D687A0800411D77 /* Helper.h in Headers */, B0575F2D0D687A0800411D77 /* Helper.h in Headers */,
835C88AD2797DA5800E28EAE /* util.h in Headers */,
07DB5F3E0ED353A900C2E3EF /* AudioMetadataWriter.h in Headers */, 07DB5F3E0ED353A900C2E3EF /* AudioMetadataWriter.h in Headers */,
839E56EA28794F6300DFB5F4 /* HrtfTypes.h in Headers */,
); );
runOnlyForDeploymentPostprocessing = 0; runOnlyForDeploymentPostprocessing = 0;
}; };
/* End PBXHeadersBuildPhase section */ /* End PBXHeadersBuildPhase section */
/* Begin PBXNativeTarget section */ /* Begin PBXNativeTarget section */
8DC2EF4F0486A6940098B216 /* CogAudio */ = { 8DC2EF4F0486A6940098B216 /* CogAudio Framework */ = {
isa = PBXNativeTarget; isa = PBXNativeTarget;
buildConfigurationList = 1DEB91AD08733DA50010E9CD /* Build configuration list for PBXNativeTarget "CogAudio" */; buildConfigurationList = 1DEB91AD08733DA50010E9CD /* Build configuration list for PBXNativeTarget "CogAudio Framework" */;
buildPhases = ( buildPhases = (
17D21D2B0B8BE6A200D1EBDE /* CopyFiles */,
8DC2EF500486A6940098B216 /* Headers */, 8DC2EF500486A6940098B216 /* Headers */,
8DC2EF540486A6940098B216 /* Sources */, 8DC2EF540486A6940098B216 /* Sources */,
8DC2EF560486A6940098B216 /* Frameworks */, 8DC2EF560486A6940098B216 /* Frameworks */,
@ -680,7 +629,7 @@
); );
dependencies = ( dependencies = (
); );
name = CogAudio; name = "CogAudio Framework";
productInstallPath = "$(HOME)/Library/Frameworks"; productInstallPath = "$(HOME)/Library/Frameworks";
productName = CogAudio; productName = CogAudio;
productReference = 8DC2EF5B0486A6940098B216 /* CogAudio.framework */; productReference = 8DC2EF5B0486A6940098B216 /* CogAudio.framework */;
@ -692,11 +641,16 @@
0867D690FE84028FC02AAC07 /* Project object */ = { 0867D690FE84028FC02AAC07 /* Project object */ = {
isa = PBXProject; isa = PBXProject;
attributes = { attributes = {
BuildIndependentTargetsInParallel = YES; LastUpgradeCheck = 1250;
LastUpgradeCheck = 1620; TargetAttributes = {
8DC2EF4F0486A6940098B216 = {
DevelopmentTeam = "";
ProvisioningStyle = Automatic;
};
};
}; };
buildConfigurationList = 1DEB91B108733DA50010E9CD /* Build configuration list for PBXProject "CogAudio" */; buildConfigurationList = 1DEB91B108733DA50010E9CD /* Build configuration list for PBXProject "CogAudio" */;
compatibilityVersion = "Xcode 12.0"; compatibilityVersion = "Xcode 3.2";
developmentRegion = en; developmentRegion = en;
hasScannedForEncodings = 1; hasScannedForEncodings = 1;
knownRegions = ( knownRegions = (
@ -708,7 +662,7 @@
projectDirPath = ""; projectDirPath = "";
projectRoot = ""; projectRoot = "";
targets = ( targets = (
8DC2EF4F0486A6940098B216 /* CogAudio */, 8DC2EF4F0486A6940098B216 /* CogAudio Framework */,
); );
}; };
/* End PBXProject section */ /* End PBXProject section */
@ -718,6 +672,10 @@
isa = PBXResourcesBuildPhase; isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647; buildActionMask = 2147483647;
files = ( files = (
83F18B3627D1E8EF00385946 /* LICENSE in Resources */,
835C88A92797D4D400E28EAE /* License.txt in Resources */,
835C88A82797D4D400E28EAE /* LICENSE.LGPL in Resources */,
83F18B4327D1E8EF00385946 /* README.md in Resources */,
); );
runOnlyForDeploymentPostprocessing = 0; runOnlyForDeploymentPostprocessing = 0;
}; };
@ -728,49 +686,39 @@
isa = PBXSourcesBuildPhase; isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647; buildActionMask = 2147483647;
files = ( files = (
835EDD7B279FE23A001EDCCE /* HeadphoneFilter.mm in Sources */,
17D21CA20B8BE4BA00D1EBDE /* BufferChain.m in Sources */, 17D21CA20B8BE4BA00D1EBDE /* BufferChain.m in Sources */,
83A349772D5C50B20096D530 /* DSPHRTFNode.m in Sources */,
17D21CA60B8BE4BA00D1EBDE /* InputNode.m in Sources */, 17D21CA60B8BE4BA00D1EBDE /* InputNode.m in Sources */,
83A3496A2D5C3F430096D530 /* DSPRubberbandNode.m in Sources */,
8399CF2D27B5D1D5008751F1 /* NSDictionary+Merge.m in Sources */, 8399CF2D27B5D1D5008751F1 /* NSDictionary+Merge.m in Sources */,
83F843252D5C66E9008C123B /* DSPEqualizerNode.m in Sources */,
834A41AB287A90AB00EB9D9B /* channelmaps.cpp in Sources */,
833738EC2D5EA53500278628 /* DSPDownmixNode.m in Sources */,
833442432D6EFA6700C51D38 /* VisualizationController.m in Sources */,
831A50162865A8800049CFE4 /* rsstate.cpp in Sources */,
17D21CA80B8BE4BA00D1EBDE /* Node.m in Sources */, 17D21CA80B8BE4BA00D1EBDE /* Node.m in Sources */,
17D21CAA0B8BE4BA00D1EBDE /* OutputNode.m in Sources */, 17D21CAA0B8BE4BA00D1EBDE /* OutputNode.m in Sources */,
8377C65327B8CAD100E8BC0F /* VisualizationController.m in Sources */,
834FD4F527AFA2150063BC83 /* Downmix.m in Sources */,
17D21CC60B8BE4BA00D1EBDE /* OutputCoreAudio.m in Sources */,
835C88B2279811A500E28EAE /* hdcd_decode2.c in Sources */, 835C88B2279811A500E28EAE /* hdcd_decode2.c in Sources */,
835FAC5F27BCA14D00BA8562 /* BadSampleCleaner.m in Sources */, 835FAC5F27BCA14D00BA8562 /* BadSampleCleaner.m in Sources */,
834FD4ED27AF91220063BC83 /* AudioChunk.m in Sources */, 834FD4ED27AF91220063BC83 /* AudioChunk.m in Sources */,
833738EF2D5EA5B700278628 /* Downmix.m in Sources */, 83F18B5127D1E8F000385946 /* r8bbase.cpp in Sources */,
17D21CF40B8BE5EF00D1EBDE /* CogSemaphore.m in Sources */, 17D21CF40B8BE5EF00D1EBDE /* Semaphore.m in Sources */,
839E89A02D5DBA1700A13526 /* VisualizationNode.m in Sources */,
8347C7422796C58800FA8A7D /* NSFileHandle+CreateFile.m in Sources */, 8347C7422796C58800FA8A7D /* NSFileHandle+CreateFile.m in Sources */,
83A3496F2D5C405E0096D530 /* DSPFSurroundNode.m in Sources */,
17D21DC80B8BE79700D1EBDE /* CoreAudioUtils.m in Sources */, 17D21DC80B8BE79700D1EBDE /* CoreAudioUtils.m in Sources */,
83F18B2727D1E8EF00385946 /* pffft_double.c in Sources */,
8328995327CB511000D7F028 /* RedundantPlaylistDataStore.m in Sources */, 8328995327CB511000D7F028 /* RedundantPlaylistDataStore.m in Sources */,
8377C64C27B8C51500E8BC0F /* fft_accelerate.c in Sources */,
839366681815923C006DD712 /* CogPluginMulti.m in Sources */, 839366681815923C006DD712 /* CogPluginMulti.m in Sources */,
835C88AA2797D4D400E28EAE /* lpc.c in Sources */,
17D21EBE0B8BF44000D1EBDE /* AudioPlayer.m in Sources */, 17D21EBE0B8BF44000D1EBDE /* AudioPlayer.m in Sources */,
17F94DD60B8D0F7000A34E87 /* PluginController.mm in Sources */, 17F94DD60B8D0F7000A34E87 /* PluginController.mm in Sources */,
839E56E72879450300DFB5F4 /* HrtfData.cpp in Sources */, 83D44DC12839C60A00D4DD10 /* cqt.c in Sources */,
17A2D3C60B8D1D37000778C4 /* AudioDecoder.m in Sources */, 17A2D3C60B8D1D37000778C4 /* AudioDecoder.m in Sources */,
8328995827CB51B700D7F028 /* SHA256Digest.m in Sources */, 8328995827CB51B700D7F028 /* SHA256Digest.m in Sources */,
17C940240B900909008627D6 /* AudioMetadataReader.m in Sources */, 17C940240B900909008627D6 /* AudioMetadataReader.m in Sources */,
17B619310B909BC300BC003F /* AudioPropertiesReader.m in Sources */, 17B619310B909BC300BC003F /* AudioPropertiesReader.m in Sources */,
83F843212D5C6272008C123B /* HeadphoneFilter.mm in Sources */,
17ADB13D0B97926D00257CA2 /* AudioSource.m in Sources */, 17ADB13D0B97926D00257CA2 /* AudioSource.m in Sources */,
834FD4F127AF93680063BC83 /* ChunkList.m in Sources */, 834FD4F127AF93680063BC83 /* ChunkList.m in Sources */,
83FFED532D5B09320044CCAF /* DSPNode.m in Sources */, 83F18B4F27D1E8F000385946 /* CDSPHBUpsampler.inc in Sources */,
8EC122600B993BD500C5B3AD /* ConverterNode.m in Sources */, 8EC122600B993BD500C5B3AD /* ConverterNode.mm in Sources */,
835DD2672ACAF1D90057E319 /* OutputCoreAudio.m in Sources */,
83A349722D5C41810096D530 /* FSurroundFilter.mm in Sources */,
83CB56662E06464D003DD379 /* NSDictionary+Optional.m in Sources */,
8E8D3D300CBAEE6E00135C1B /* AudioContainer.m in Sources */, 8E8D3D300CBAEE6E00135C1B /* AudioContainer.m in Sources */,
B0575F300D687A4000411D77 /* Helper.m in Sources */, B0575F300D687A4000411D77 /* Helper.m in Sources */,
835DD2742ACAF5AD0057E319 /* lpc.c in Sources */,
834A41AA287A90AB00EB9D9B /* freesurround_decoder.cpp in Sources */,
07DB5F3F0ED353A900C2E3EF /* AudioMetadataWriter.m in Sources */, 07DB5F3F0ED353A900C2E3EF /* AudioMetadataWriter.m in Sources */,
); );
runOnlyForDeploymentPostprocessing = 0; runOnlyForDeploymentPostprocessing = 0;
@ -781,78 +729,67 @@
1DEB91AE08733DA50010E9CD /* Debug */ = { 1DEB91AE08733DA50010E9CD /* Debug */ = {
isa = XCBuildConfiguration; isa = XCBuildConfiguration;
buildSettings = { buildSettings = {
CLANG_CXX_LANGUAGE_STANDARD = "c++17";
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = ""; CODE_SIGN_IDENTITY = "";
CODE_SIGN_STYLE = Automatic;
COMBINE_HIDPI_IMAGES = YES; COMBINE_HIDPI_IMAGES = YES;
COPY_PHASE_STRIP = NO; COPY_PHASE_STRIP = NO;
DEAD_CODE_STRIPPING = YES; DEVELOPMENT_TEAM = "";
DYLIB_COMPATIBILITY_VERSION = 1; DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1; DYLIB_CURRENT_VERSION = 1;
ENABLE_MODULE_VERIFIER = YES;
FRAMEWORK_VERSION = A; FRAMEWORK_VERSION = A;
GCC_DYNAMIC_NO_PIC = NO; GCC_DYNAMIC_NO_PIC = NO;
GCC_ENABLE_OBJC_EXCEPTIONS = YES; GCC_ENABLE_OBJC_EXCEPTIONS = YES;
GCC_MODEL_TUNING = G5;
GCC_OPTIMIZATION_LEVEL = 0; GCC_OPTIMIZATION_LEVEL = 0;
GCC_PRECOMPILE_PREFIX_HEADER = YES; GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = CogAudio_Prefix.pch; GCC_PREFIX_HEADER = CogAudio_Prefix.pch;
GCC_PREPROCESSOR_DEFINITIONS = "DEBUG=1"; GCC_PREPROCESSOR_DEFINITIONS = (
HEADER_SEARCH_PATHS = ( "DEBUG=1",
../ThirdParty/soxr/include, "R8B_EXTFFT=1",
../ThirdParty/rubberband/include, "R8B_PFFFT_DOUBLE=1",
); );
INFOPLIST_FILE = Info.plist; INFOPLIST_FILE = Info.plist;
INSTALL_PATH = "@executable_path/../Frameworks"; INSTALL_PATH = "@executable_path/../Frameworks";
LD_RUNPATH_SEARCH_PATHS = "@loader_path/Frameworks"; LD_RUNPATH_SEARCH_PATHS = "@loader_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
../ThirdParty/soxr/lib,
../ThirdParty/rubberband/lib,
);
MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 c++17";
OTHER_LDFLAGS = ""; OTHER_LDFLAGS = "";
PRODUCT_BUNDLE_IDENTIFIER = org.cogx.cogaudio; PRODUCT_BUNDLE_IDENTIFIER = org.cogx.cogaudio;
PRODUCT_NAME = CogAudio; PRODUCT_NAME = CogAudio;
PROVISIONING_PROFILE_SPECIFIER = "";
SDKROOT = macosx;
SKIP_INSTALL = YES; SKIP_INSTALL = YES;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.0;
WARNING_LDFLAGS = ""; WARNING_LDFLAGS = "";
WRAPPER_EXTENSION = framework; WRAPPER_EXTENSION = framework;
ZERO_LINK = YES;
}; };
name = Debug; name = Debug;
}; };
1DEB91AF08733DA50010E9CD /* Release */ = { 1DEB91AF08733DA50010E9CD /* Release */ = {
isa = XCBuildConfiguration; isa = XCBuildConfiguration;
buildSettings = { buildSettings = {
CLANG_CXX_LANGUAGE_STANDARD = "c++17";
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = ""; CODE_SIGN_IDENTITY = "";
CODE_SIGN_STYLE = Automatic;
COMBINE_HIDPI_IMAGES = YES; COMBINE_HIDPI_IMAGES = YES;
DEAD_CODE_STRIPPING = YES; DEVELOPMENT_TEAM = "";
DYLIB_COMPATIBILITY_VERSION = 1; DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1; DYLIB_CURRENT_VERSION = 1;
ENABLE_MODULE_VERIFIER = YES;
FRAMEWORK_VERSION = A; FRAMEWORK_VERSION = A;
GCC_ENABLE_OBJC_EXCEPTIONS = YES; GCC_ENABLE_OBJC_EXCEPTIONS = YES;
GCC_MODEL_TUNING = G5;
GCC_PRECOMPILE_PREFIX_HEADER = YES; GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = CogAudio_Prefix.pch; GCC_PREFIX_HEADER = CogAudio_Prefix.pch;
GCC_PREPROCESSOR_DEFINITIONS = ""; GCC_PREPROCESSOR_DEFINITIONS = (
HEADER_SEARCH_PATHS = ( "R8B_EXTFFT=1",
../ThirdParty/soxr/include, "R8B_PFFFT_DOUBLE=1",
../ThirdParty/rubberband/include,
); );
INFOPLIST_FILE = Info.plist; INFOPLIST_FILE = Info.plist;
INSTALL_PATH = "@executable_path/../Frameworks"; INSTALL_PATH = "@executable_path/../Frameworks";
LD_RUNPATH_SEARCH_PATHS = "@loader_path/Frameworks"; LD_RUNPATH_SEARCH_PATHS = "@loader_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
../ThirdParty/soxr/lib,
../ThirdParty/rubberband/lib,
);
MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 c++17";
OTHER_LDFLAGS = ""; OTHER_LDFLAGS = "";
PRODUCT_BUNDLE_IDENTIFIER = org.cogx.cogaudio; PRODUCT_BUNDLE_IDENTIFIER = org.cogx.cogaudio;
PRODUCT_NAME = CogAudio; PRODUCT_NAME = CogAudio;
PROVISIONING_PROFILE_SPECIFIER = "";
SDKROOT = macosx;
SKIP_INSTALL = YES; SKIP_INSTALL = YES;
SWIFT_VERSION = 5.0;
WARNING_LDFLAGS = ""; WARNING_LDFLAGS = "";
WRAPPER_EXTENSION = framework; WRAPPER_EXTENSION = framework;
}; };
@ -863,7 +800,6 @@
buildSettings = { buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO; ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES; CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES; CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_BOOL_CONVERSION = YES;
@ -884,10 +820,8 @@
CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO; COPY_PHASE_STRIP = NO;
DEAD_CODE_STRIPPING = YES;
ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES; ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_NO_COMMON_BLOCKS = YES; GCC_NO_COMMON_BLOCKS = YES;
GCC_PREPROCESSOR_DEFINITIONS = ( GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1", "DEBUG=1",
@ -899,13 +833,9 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES;
GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES; GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.13; MACOSX_DEPLOYMENT_TARGET = 10.12;
ONLY_ACTIVE_ARCH = YES; ONLY_ACTIVE_ARCH = YES;
OTHER_CFLAGS = "-Wframe-larger-than=4000";
OTHER_CPLUSPLUSFLAGS = "-Wframe-larger-than=16000";
PRODUCT_MODULE_NAME = CogAudio;
SDKROOT = macosx; SDKROOT = macosx;
SWIFT_OBJC_BRIDGING_HEADER = "CogAudio-Bridging-Header.h";
SYMROOT = ../build; SYMROOT = ../build;
}; };
name = Debug; name = Debug;
@ -915,7 +845,6 @@
buildSettings = { buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO; ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES; CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES; CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_BOOL_CONVERSION = YES;
@ -935,10 +864,8 @@
CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
DEAD_CODE_STRIPPING = YES;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_NO_COMMON_BLOCKS = YES; GCC_NO_COMMON_BLOCKS = YES;
GCC_PREPROCESSOR_DEFINITIONS = "HAVE_CONFIG_H=1"; GCC_PREPROCESSOR_DEFINITIONS = "HAVE_CONFIG_H=1";
GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
@ -947,13 +874,8 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES;
GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES; GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.13; MACOSX_DEPLOYMENT_TARGET = 10.12;
OTHER_CFLAGS = "-Wframe-larger-than=4000";
OTHER_CPLUSPLUSFLAGS = "-Wframe-larger-than=16000";
PRODUCT_MODULE_NAME = CogAudio;
SDKROOT = macosx; SDKROOT = macosx;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_OBJC_BRIDGING_HEADER = "CogAudio-Bridging-Header.h";
SYMROOT = ../build; SYMROOT = ../build;
}; };
name = Release; name = Release;
@ -961,7 +883,7 @@
/* End XCBuildConfiguration section */ /* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */ /* Begin XCConfigurationList section */
1DEB91AD08733DA50010E9CD /* Build configuration list for PBXNativeTarget "CogAudio" */ = { 1DEB91AD08733DA50010E9CD /* Build configuration list for PBXNativeTarget "CogAudio Framework" */ = {
isa = XCConfigurationList; isa = XCConfigurationList;
buildConfigurations = ( buildConfigurations = (
1DEB91AE08733DA50010E9CD /* Debug */, 1DEB91AE08733DA50010E9CD /* Debug */,


@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<Scheme <Scheme
LastUpgradeVersion = "1640" LastUpgradeVersion = "1250"
version = "1.3"> version = "1.3">
<BuildAction <BuildAction
parallelizeBuildables = "YES" parallelizeBuildables = "YES"
@ -16,7 +16,7 @@
BuildableIdentifier = "primary" BuildableIdentifier = "primary"
BlueprintIdentifier = "8DC2EF4F0486A6940098B216" BlueprintIdentifier = "8DC2EF4F0486A6940098B216"
BuildableName = "CogAudio.framework" BuildableName = "CogAudio.framework"
BlueprintName = "CogAudio" BlueprintName = "CogAudio Framework"
ReferencedContainer = "container:CogAudio.xcodeproj"> ReferencedContainer = "container:CogAudio.xcodeproj">
</BuildableReference> </BuildableReference>
</BuildActionEntry> </BuildActionEntry>
@ -45,7 +45,7 @@
BuildableIdentifier = "primary" BuildableIdentifier = "primary"
BlueprintIdentifier = "8DC2EF4F0486A6940098B216" BlueprintIdentifier = "8DC2EF4F0486A6940098B216"
BuildableName = "CogAudio.framework" BuildableName = "CogAudio.framework"
BlueprintName = "CogAudio" BlueprintName = "CogAudio Framework"
ReferencedContainer = "container:CogAudio.xcodeproj"> ReferencedContainer = "container:CogAudio.xcodeproj">
</BuildableReference> </BuildableReference>
</MacroExpansion> </MacroExpansion>
@ -61,7 +61,7 @@
BuildableIdentifier = "primary" BuildableIdentifier = "primary"
BlueprintIdentifier = "8DC2EF4F0486A6940098B216" BlueprintIdentifier = "8DC2EF4F0486A6940098B216"
BuildableName = "CogAudio.framework" BuildableName = "CogAudio.framework"
BlueprintName = "CogAudio" BlueprintName = "CogAudio Framework"
ReferencedContainer = "container:CogAudio.xcodeproj"> ReferencedContainer = "container:CogAudio.xcodeproj">
</BuildableReference> </BuildableReference>
</MacroExpansion> </MacroExpansion>


@ -12,7 +12,6 @@
@interface CogDecoderMulti : NSObject <CogDecoder> { @interface CogDecoderMulti : NSObject <CogDecoder> {
NSArray *theDecoders; NSArray *theDecoders;
id<CogDecoder> theDecoder; id<CogDecoder> theDecoder;
BOOL observersAdded;
} }
- (id)initWithDecoders:(NSArray *)decoders; - (id)initWithDecoders:(NSArray *)decoders;
@ -23,7 +22,6 @@
} }
+ (NSArray *)urlsForContainerURL:(NSURL *)url containers:(NSArray *)containers; + (NSArray *)urlsForContainerURL:(NSURL *)url containers:(NSArray *)containers;
+ (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url containers:(NSArray *)containers;
@end @end


@ -42,8 +42,6 @@ NSArray *sortClassesByPriority(NSArray *theClasses) {
@implementation CogDecoderMulti @implementation CogDecoderMulti
static void *kCogDecoderMultiContext = &kCogDecoderMultiContext;
+ (NSArray *)mimeTypes { + (NSArray *)mimeTypes {
return nil; return nil;
} }
@ -79,9 +77,9 @@ static void *kCogDecoderMultiContext = &kCogDecoderMultiContext;
return @{}; return @{};
} }
- (AudioChunk *)readAudio { - (int)readAudio:(void *)buffer frames:(UInt32)frames {
if(theDecoder != nil) return [theDecoder readAudio]; if(theDecoder != nil) return [theDecoder readAudio:buffer frames:frames];
return nil; return 0;
} }
- (BOOL)open:(id<CogSource>)source { - (BOOL)open:(id<CogSource>)source {
@ -112,31 +110,21 @@ static void *kCogDecoderMultiContext = &kCogDecoderMultiContext;
} }
} }
- (void)dealloc {
[self close];
}
- (void)registerObservers { - (void)registerObservers {
if(!observersAdded) {
[theDecoder addObserver:self [theDecoder addObserver:self
forKeyPath:@"properties" forKeyPath:@"properties"
options:(NSKeyValueObservingOptionNew) options:(NSKeyValueObservingOptionNew)
context:kCogDecoderMultiContext]; context:NULL];
[theDecoder addObserver:self [theDecoder addObserver:self
forKeyPath:@"metadata" forKeyPath:@"metadata"
options:(NSKeyValueObservingOptionNew) options:(NSKeyValueObservingOptionNew)
context:kCogDecoderMultiContext]; context:NULL];
observersAdded = YES;
}
} }
- (void)removeObservers { - (void)removeObservers {
if(observersAdded) { [theDecoder removeObserver:self forKeyPath:@"properties"];
observersAdded = NO; [theDecoder removeObserver:self forKeyPath:@"metadata"];
[theDecoder removeObserver:self forKeyPath:@"properties" context:kCogDecoderMultiContext];
[theDecoder removeObserver:self forKeyPath:@"metadata" context:kCogDecoderMultiContext];
}
} }
- (BOOL)setTrack:(NSURL *)track { - (BOOL)setTrack:(NSURL *)track {
@ -148,12 +136,8 @@ static void *kCogDecoderMultiContext = &kCogDecoderMultiContext;
ofObject:(id)object ofObject:(id)object
change:(NSDictionary *)change change:(NSDictionary *)change
context:(void *)context { context:(void *)context {
if(context == kCogDecoderMultiContext) {
[self willChangeValueForKey:keyPath]; [self willChangeValueForKey:keyPath];
[self didChangeValueForKey:keyPath]; [self didChangeValueForKey:keyPath];
} else {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
} }
@end @end
@ -171,19 +155,6 @@ static void *kCogDecoderMultiContext = &kCogDecoderMultiContext;
return nil; return nil;
} }
+ (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url containers:(NSArray *)containers {
NSArray *sortedContainers = sortClassesByPriority(containers);
for(NSString *classString in sortedContainers) {
Class container = NSClassFromString(classString);
if([container respondsToSelector:@selector(dependencyUrlsForContainerURL:)]) {
NSArray *urls = [container dependencyUrlsForContainerURL:url];
if([urls count])
return urls;
}
}
return nil;
}
@end @end
@implementation CogMetadataReaderMulti @implementation CogMetadataReaderMulti


@ -7,5 +7,5 @@
* *
*/ */
double logarithmicToLinear(const double logarithmic, double MAX_VOLUME); double logarithmicToLinear(double logarithmic, double MAX_VOLUME);
double linearToLogarithmic(const double linear, double MAX_VOLUME); double linearToLogarithmic(double linear, double MAX_VOLUME);


@ -13,13 +13,13 @@
// These functions are helpers for the process of converting volume from a linear to logarithmic scale. // These functions are helpers for the process of converting volume from a linear to logarithmic scale.
// Numbers that goes in to audioPlayer should be logarithmic. Numbers that are displayed to the user should be linear. // Numbers that goes in to audioPlayer should be logarithmic. Numbers that are displayed to the user should be linear.
// Here's why: http://www.dr-lex.34sp.com/info-stuff/volumecontrols.html // Here's why: http://www.dr-lex.34sp.com/info-stuff/volumecontrols.html
// We are using the approximation of X^2 when volume is limited to 100% and X^4 when volume is limited to 800%. // We are using the approximation of X^4.
// Input/Output values are in percents. // Input/Output values are in percents.
double logarithmicToLinear(const double logarithmic, double MAX_VOLUME) { double logarithmicToLinear(double logarithmic, double MAX_VOLUME) {
return (MAX_VOLUME == 100.0) ? pow((logarithmic / MAX_VOLUME), 0.5) * 100.0 : pow((logarithmic / MAX_VOLUME), 0.25) * 100.0; return (MAX_VOLUME == 100.0) ? logarithmic : pow((logarithmic / MAX_VOLUME), 0.25) * 100.0;
} }
double linearToLogarithmic(const double linear, double MAX_VOLUME) { double linearToLogarithmic(double linear, double MAX_VOLUME) {
return (MAX_VOLUME == 100.0) ? (linear / 100.0) * (linear / 100.0) * MAX_VOLUME : (linear / 100.0) * (linear / 100.0) * (linear / 100.0) * (linear / 100.0) * MAX_VOLUME; return (MAX_VOLUME == 100.0) ? linear : (linear / 100.0) * (linear / 100.0) * (linear / 100.0) * (linear / 100.0) * MAX_VOLUME;
} }
// End helper volume function thingies. ONWARDS TO GLORY! // End helper volume function thingies. ONWARDS TO GLORY!
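A quick sanity check of the mapping described above: with the volume cap at 800%, a displayed slider value of 50% maps to (50/100)^4 * 800 = 50 on the logarithmic side, and converting that value back recovers 50%. The sketch below is a standalone re-implementation of the two helpers, written only to verify that round trip; it mirrors the declarations above but is illustrative rather than the shipped Cog code, and the 800% cap is an assumed example value.

#include <math.h>
#include <stdio.h>

/* Display (linear) percent <-> player (logarithmic) percent, same shape as the helpers above. */
static double linearToLogarithmic(double linear, double maxVolume) {
	double x = linear / 100.0;
	return (maxVolume == 100.0) ? x * x * maxVolume : x * x * x * x * maxVolume;
}

static double logarithmicToLinear(double logarithmic, double maxVolume) {
	double exponent = (maxVolume == 100.0) ? 0.5 : 0.25;
	return pow(logarithmic / maxVolume, exponent) * 100.0;
}

int main(void) {
	const double maxVolume = 800.0; /* assumed cap; the X^4 branch applies */
	for (double slider = 0.0; slider <= 100.0; slider += 25.0) {
		double player = linearToLogarithmic(slider, maxVolume);
		double back = logarithmicToLinear(player, maxVolume);
		printf("slider %6.2f%% -> player %8.3f -> back %6.2f%%\n", slider, player, back);
	}
	return 0;
}

Compiled with, say, cc example.c -lm, this prints an identity round trip for every slider position, which is the property the volume UI relies on.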


@ -1,160 +0,0 @@
//
// OutputAVFoundation.h
// Cog
//
// Created by Christopher Snowhill on 6/23/22.
// Copyright 2022 Christopher Snowhill. All rights reserved.
//
#import <AssertMacros.h>
#import <Cocoa/Cocoa.h>
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <CoreAudio/AudioHardware.h>
#import <CoreAudio/CoreAudioTypes.h>
#ifdef __cplusplus
#import <atomic>
using std::atomic_long;
#else
#import <stdatomic.h>
#endif
#import "Downmix.h"
#import <CogAudio/CogAudio-Swift.h>
#import "HeadphoneFilter.h"
//#define OUTPUT_LOG
#ifdef OUTPUT_LOG
#import <stdio.h>
#endif
@class OutputNode;
@class FSurroundFilter;
@interface OutputAVFoundation : NSObject {
OutputNode *outputController;
BOOL rsDone;
void *rsstate, *rsold;
double lastClippedSampleRate;
void *rsvis;
double lastVisRate;
BOOL stopInvoked;
BOOL stopCompleted;
BOOL running;
BOOL stopping;
BOOL stopped;
BOOL started;
BOOL paused;
BOOL restarted;
BOOL commandStop;
BOOL eqEnabled;
BOOL eqInitialized;
BOOL streamFormatStarted;
BOOL streamFormatChanged;
double secondsHdcdSustained;
BOOL defaultdevicelistenerapplied;
BOOL currentdevicelistenerapplied;
BOOL devicealivelistenerapplied;
BOOL observersapplied;
BOOL outputdevicechanged;
float volume;
float eqPreamp;
AudioDeviceID outputDeviceID;
AudioStreamBasicDescription realStreamFormat; // stream format pre-hrtf
AudioStreamBasicDescription streamFormat; // stream format last seen in render callback
AudioStreamBasicDescription realNewFormat; // in case of resampler flush
AudioStreamBasicDescription newFormat; // in case of resampler flush
AudioStreamBasicDescription visFormat; // Mono format for vis
uint32_t realStreamChannelConfig;
uint32_t streamChannelConfig;
uint32_t realNewChannelConfig;
uint32_t newChannelConfig;
AVSampleBufferAudioRenderer *audioRenderer;
AVSampleBufferRenderSynchronizer *renderSynchronizer;
CMAudioFormatDescriptionRef audioFormatDescription;
id currentPtsObserver;
NSLock *currentPtsLock;
CMTime currentPts, lastPts;
double secondsLatency;
CMTime outputPts, trackPts, lastCheckpointPts;
AudioTimeStamp timeStamp;
size_t _bufferSize;
AudioUnit _eq;
DownmixProcessor *downmixerForVis;
VisualizationController *visController;
BOOL enableHrtf;
HeadphoneFilter *hrtf;
BOOL enableFSurround;
BOOL FSurroundDelayRemoved;
int inputBufferLastTime;
FSurroundFilter *fsurround;
BOOL resetStreamFormat;
BOOL shouldPlayOutBuffer;
float *samplePtr;
float tempBuffer[512 * 32];
float rsTempBuffer[4096 * 32];
float inputBuffer[4096 * 32]; // 4096 samples times maximum supported channel count
float fsurroundBuffer[8192 * 6];
float hrtfBuffer[4096 * 2];
float eqBuffer[4096 * 32];
float visAudio[4096];
float visTemp[8192];
#ifdef OUTPUT_LOG
FILE *_logFile;
#endif
}
- (id)initWithController:(OutputNode *)c;
- (BOOL)setup;
- (OSStatus)setOutputDeviceByID:(AudioDeviceID)deviceID;
- (BOOL)setOutputDeviceWithDeviceDict:(NSDictionary *)deviceDict;
- (void)start;
- (void)pause;
- (void)resume;
- (void)stop;
- (double)latency;
- (void)setVolume:(double)v;
- (void)setEqualizerEnabled:(BOOL)enabled;
- (void)setShouldPlayOutBuffer:(BOOL)enabled;
- (void)sustainHDCD;
@end

File diff suppressed because it is too large.


@ -2,8 +2,8 @@
// OutputCoreAudio.h // OutputCoreAudio.h
// Cog // Cog
// //
// Created by Christopher Snowhill on 7/25/23. // Created by Vincent Spader on 8/2/05.
// Copyright 2023-2024 Christopher Snowhill. All rights reserved. // Copyright 2005 Vincent Spader. All rights reserved.
// //
#import <AssertMacros.h> #import <AssertMacros.h>
@ -15,64 +15,45 @@
#import <CoreAudio/AudioHardware.h> #import <CoreAudio/AudioHardware.h>
#import <CoreAudio/CoreAudioTypes.h> #import <CoreAudio/CoreAudioTypes.h>
#ifdef __cplusplus
#import <atomic>
using std::atomic_long;
#else
#import <stdatomic.h> #import <stdatomic.h>
#endif
#import <simd/simd.h> #import "Downmix.h"
#import <CogAudio/ChunkList.h> #import "VisualizationController.h"
#import <CogAudio/HeadphoneFilter.h>
#import "Semaphore.h"
//#define OUTPUT_LOG //#define OUTPUT_LOG
#ifdef OUTPUT_LOG
#import <stdio.h>
#endif
@class OutputNode; @class OutputNode;
@class AudioChunk;
@interface OutputCoreAudio : NSObject { @interface OutputCoreAudio : NSObject {
OutputNode *outputController; OutputNode *outputController;
dispatch_semaphore_t writeSemaphore; Semaphore *writeSemaphore;
dispatch_semaphore_t readSemaphore; Semaphore *readSemaphore;
NSLock *outputLock;
double streamTimestamp;
BOOL stopInvoked; BOOL stopInvoked;
BOOL stopCompleted;
BOOL running; BOOL running;
BOOL stopping; BOOL stopping;
BOOL stopped; BOOL stopped;
BOOL started; BOOL started;
BOOL paused; BOOL paused;
BOOL stopNext;
BOOL restarted; BOOL restarted;
BOOL commandStop;
BOOL resetting;
BOOL cutOffInput;
BOOL fading, faded;
float fadeLevel;
float fadeStep;
float fadeTarget;
BOOL eqEnabled; BOOL eqEnabled;
BOOL eqInitialized;
BOOL streamFormatStarted; BOOL streamFormatStarted;
BOOL streamFormatChanged;
double secondsHdcdSustained; atomic_long bytesRendered;
atomic_long bytesHdcdSustained;
BOOL defaultdevicelistenerapplied; BOOL listenerapplied;
BOOL currentdevicelistenerapplied;
BOOL devicealivelistenerapplied;
BOOL observersapplied; BOOL observersapplied;
BOOL outputdevicechanged;
float volume; float volume;
float eqPreamp; float eqPreamp;
@ -80,53 +61,43 @@ using std::atomic_long;
AVAudioFormat *_deviceFormat; AVAudioFormat *_deviceFormat;
AudioDeviceID outputDeviceID; AudioDeviceID outputDeviceID;
AudioStreamBasicDescription deviceFormat; AudioStreamBasicDescription deviceFormat; // info about the default device
AudioStreamBasicDescription realStreamFormat; // stream format pre-hrtf
AudioStreamBasicDescription streamFormat; // stream format last seen in render callback AudioStreamBasicDescription streamFormat; // stream format last seen in render callback
AudioStreamBasicDescription visFormat; // Mono format for vis
uint32_t deviceChannelConfig; uint32_t deviceChannelConfig;
uint32_t realStreamChannelConfig;
uint32_t streamChannelConfig; uint32_t streamChannelConfig;
AUAudioUnit *_au; AUAudioUnit *_au;
size_t _bufferSize; size_t _bufferSize;
BOOL resetStreamFormat; AudioUnit _eq;
BOOL shouldPlayOutBuffer; DownmixProcessor *downmixer;
DownmixProcessor *downmixerForVis;
ChunkList *outputBuffer; VisualizationController *visController;
#ifdef OUTPUT_LOG #ifdef OUTPUT_LOG
NSFileHandle *_logFile; FILE *_logFile;
#endif #endif
} }
- (id)initWithController:(OutputNode *)c; - (id)initWithController:(OutputNode *)c;
- (BOOL)setup; - (BOOL)setup;
- (OSStatus)setOutputDeviceByID:(int)deviceID; - (OSStatus)setOutputDeviceByID:(AudioDeviceID)deviceID;
- (BOOL)setOutputDeviceWithDeviceDict:(NSDictionary *)deviceDict; - (BOOL)setOutputDeviceWithDeviceDict:(NSDictionary *)deviceDict;
- (void)start; - (void)start;
- (void)pause; - (void)pause;
- (void)resume; - (void)resume;
- (void)stop; - (void)stop;
- (void)fadeOut;
- (void)fadeOutBackground;
- (void)fadeIn;
- (double)latency;
- (double)volume;
- (void)setVolume:(double)v; - (void)setVolume:(double)v;
- (void)setShouldPlayOutBuffer:(BOOL)enabled; - (void)setEqualizerEnabled:(BOOL)enabled;
- (void)sustainHDCD; - (void)sustainHDCD;
- (AudioStreamBasicDescription)deviceFormat;
- (uint32_t)deviceChannelConfig;
@end @end

File diff suppressed because it is too large.


@ -1,11 +1,5 @@
// Plugins! HOORAY! // Plugins! HOORAY!
#if __has_include(<CogAudio/AudioChunk.h>)
# import <CogAudio/AudioChunk.h>
#else
# import "AudioChunk.h"
#endif
@protocol CogSource <NSObject> @protocol CogSource <NSObject>
+ (NSArray *)schemes; // http, file, etc + (NSArray *)schemes; // http, file, etc
@ -31,9 +25,6 @@
+ (float)priority; + (float)priority;
+ (NSArray *)urlsForContainerURL:(NSURL *)url; + (NSArray *)urlsForContainerURL:(NSURL *)url;
@optional
+ (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url;
@end @end
@protocol CogDecoder <NSObject> @protocol CogDecoder <NSObject>
@ -48,7 +39,7 @@
- (NSDictionary *)properties; - (NSDictionary *)properties;
- (NSDictionary *)metadata; // Only to be implemented for dynamic metadata, send events on change - (NSDictionary *)metadata; // Only to be implemented for dynamic metadata, send events on change
- (AudioChunk *)readAudio; - (int)readAudio:(void *)buffer frames:(UInt32)frames;
- (BOOL)open:(id<CogSource>)source; - (BOOL)open:(id<CogSource>)source;
- (long)seek:(long)frame; - (long)seek:(long)frame;
@ -62,7 +53,6 @@
// These are in NSObject, so as long as you are a subclass of that, you are ok. // These are in NSObject, so as long as you are a subclass of that, you are ok.
- (void)addObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath options:(NSKeyValueObservingOptions)options context:(void *)context; - (void)addObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath options:(NSKeyValueObservingOptions)options context:(void *)context;
- (void)removeObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath; - (void)removeObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath;
- (void)removeObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath context:(void *)context;
- (BOOL)isSilence; - (BOOL)isSilence;
@end @end
@ -102,18 +92,9 @@
- (id<CogSource>)audioSourceForURL:(NSURL *)url; - (id<CogSource>)audioSourceForURL:(NSURL *)url;
- (NSArray *)urlsForContainerURL:(NSURL *)url; - (NSArray *)urlsForContainerURL:(NSURL *)url;
- (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url;
- (NSDictionary *)metadataForURL:(NSURL *)url skipCue:(BOOL)skip; - (NSDictionary *)metadataForURL:(NSURL *)url skipCue:(BOOL)skip;
- (NSDictionary *)propertiesForURL:(NSURL *)url skipCue:(BOOL)skip; - (NSDictionary *)propertiesForURL:(NSURL *)url skipCue:(BOOL)skip;
- (id<CogDecoder>)audioDecoderForSource:(id<CogSource>)source skipCue:(BOOL)skip; - (id<CogDecoder>)audioDecoderForSource:(id<CogSource>)source skipCue:(BOOL)skip;
- (int)putMetadataInURL:(NSURL *)url; - (int)putMetadataInURL:(NSURL *)url;
@end @end
#ifdef __cplusplus
extern "C" {
#endif
extern NSString *guess_encoding_of_string(const char *input);
#ifdef __cplusplus
}
#endif


@ -2,7 +2,7 @@
#import <Cocoa/Cocoa.h> #import <Cocoa/Cocoa.h>
#import <CogAudio/Plugin.h> #import "Plugin.h"
// Singletonish // Singletonish
@interface PluginController : NSObject <CogPluginController> { @interface PluginController : NSObject <CogPluginController> {


@ -31,7 +31,6 @@ static std::map<std::string, Cached_Metadata> Cache_List;
static RedundantPlaylistDataStore *Cache_Data_Store = nil; static RedundantPlaylistDataStore *Cache_Data_Store = nil;
static bool Cache_Running = false; static bool Cache_Running = false;
static bool Cache_Stopped = false;
static std::thread *Cache_Thread = NULL; static std::thread *Cache_Thread = NULL;
@ -45,15 +44,11 @@ static void cache_init() {
static void cache_deinit() { static void cache_deinit() {
Cache_Running = false; Cache_Running = false;
Cache_Thread->join(); Cache_Thread->join();
while(!Cache_Stopped)
usleep(500);
delete Cache_Thread; delete Cache_Thread;
Cache_Data_Store = nil; Cache_Data_Store = nil;
} }
static void cache_insert_properties(NSURL *url, NSDictionary *properties) { static void cache_insert_properties(NSURL *url, NSDictionary *properties) {
if(properties == nil) return;
std::lock_guard<std::mutex> lock(Cache_Lock); std::lock_guard<std::mutex> lock(Cache_Lock);
std::string path = [[url absoluteString] UTF8String]; std::string path = [[url absoluteString] UTF8String];
@ -66,8 +61,6 @@ static void cache_insert_properties(NSURL *url, NSDictionary *properties) {
} }
static void cache_insert_metadata(NSURL *url, NSDictionary *metadata) { static void cache_insert_metadata(NSURL *url, NSDictionary *metadata) {
if(metadata == nil) return;
std::lock_guard<std::mutex> lock(Cache_Lock); std::lock_guard<std::mutex> lock(Cache_Lock);
std::string path = [[url absoluteString] UTF8String]; std::string path = [[url absoluteString] UTF8String];
@ -112,16 +105,11 @@ static NSDictionary *cache_access_metadata(NSURL *url) {
static void cache_run() { static void cache_run() {
std::chrono::milliseconds dura(250); std::chrono::milliseconds dura(250);
Cache_Running = true;
while(Cache_Running) { while(Cache_Running) {
std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now();
@autoreleasepool { @autoreleasepool {
std::lock_guard<std::mutex> lock(Cache_Lock); std::lock_guard<std::mutex> lock(Cache_Lock);
size_t cacheListOriginalSize = Cache_List.size();
for(auto it = Cache_List.begin(); it != Cache_List.end();) { for(auto it = Cache_List.begin(); it != Cache_List.end();) {
auto elapsed = std::chrono::duration_cast<std::chrono::seconds>(now - it->second.time_accessed); auto elapsed = std::chrono::duration_cast<std::chrono::seconds>(now - it->second.time_accessed);
if(elapsed.count() >= 10) { if(elapsed.count() >= 10) {
@ -131,15 +119,12 @@ static void cache_run() {
++it; ++it;
} }
if(cacheListOriginalSize && Cache_List.size() == 0) { if(Cache_List.size() == 0)
[Cache_Data_Store reset]; [Cache_Data_Store reset];
} }
}
std::this_thread::sleep_for(dura); std::this_thread::sleep_for(dura);
} }
Cache_Stopped = true;
} }
@implementation PluginController @implementation PluginController
@ -201,9 +186,6 @@ static PluginController *sharedPluginController = nil;
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(bundleDidLoad:) name:NSBundleDidLoadNotification object:nil]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(bundleDidLoad:) name:NSBundleDidLoadNotification object:nil];
[self loadPlugins]; [self loadPlugins];
[[NSNotificationCenter defaultCenter] removeObserver:self name:NSBundleDidLoadNotification object:nil];
[self printPluginInfo]; [self printPluginInfo];
} }
} }
@ -362,16 +344,6 @@ static PluginController *sharedPluginController = nil;
} }
} }
static NSString *xmlEscapeString(NSString * string) {
CFStringRef textXML = CFXMLCreateStringByEscapingEntities(kCFAllocatorDefault, (CFStringRef)string, nil);
if(textXML) {
NSString *textString = (__bridge NSString *)textXML;
CFRelease(textXML);
return textString;
}
return @"";
}
- (void)printPluginInfo { - (void)printPluginInfo {
ALog(@"Sources: %@", self.sources); ALog(@"Sources: %@", self.sources);
ALog(@"Containers: %@", self.containers); ALog(@"Containers: %@", self.containers);
@ -389,10 +361,8 @@ static NSString *xmlEscapeString(NSString * string) {
<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n\ <!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n\
<plist version=\"1.0\">\n\ <plist version=\"1.0\">\n\
<dict>\n\ <dict>\n\
\t<key>SUEnableInstallerLauncherService</key>\n\
\t<true/>\n\
\t<key>CFBundleDevelopmentRegion</key>\n\ \t<key>CFBundleDevelopmentRegion</key>\n\
\t<string>en_US</string>\n\ \t<string>English</string>\n\
\t<key>CFBundleDocumentTypes</key>\n\ \t<key>CFBundleDocumentTypes</key>\n\
\t<array>\n\ \t<array>\n\
\t\t<dict>\n\ \t\t<dict>\n\
@ -404,8 +374,6 @@ static NSString *xmlEscapeString(NSString * string) {
\t\t\t<string>song.icns</string>\n\ \t\t\t<string>song.icns</string>\n\
\t\t\t<key>CFBundleTypeIconSystemGenerated</key>\n\ \t\t\t<key>CFBundleTypeIconSystemGenerated</key>\n\
\t\t\t<integer>1</integer>\n\ \t\t\t<integer>1</integer>\n\
\t\t\t<key>CFBundleTypeName</key>\n\
\t\t\t<string>Folder</string>\n\
\t\t\t<key>CFBundleTypeOSTypes</key>\n\ \t\t\t<key>CFBundleTypeOSTypes</key>\n\
\t\t\t<array>\n\ \t\t\t<array>\n\
\t\t\t\t<string>****</string>\n\ \t\t\t\t<string>****</string>\n\
@ -420,16 +388,14 @@ static NSString *xmlEscapeString(NSString * string) {
NSString * plistFooter = @"\t</array>\n\ NSString * plistFooter = @"\t</array>\n\
\t<key>CFBundleExecutable</key>\n\ \t<key>CFBundleExecutable</key>\n\
\t<string>Cog</string>\n\ \t<string>Cog</string>\n\
\t<key>CFBundleHelpBookFolder</key>\n\
\t<string>Cog.help</string>\n\
\t<key>CFBundleHelpBookName</key>\n\ \t<key>CFBundleHelpBookName</key>\n\
\t<string>org.cogx.cog.help</string>\n\ \t<string>org.cogx.cog.help</string>\n\
\t<key>CFBundleIdentifier</key>\n\ \t<key>CFBundleIdentifier</key>\n\
\t<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>\n\ \t<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>\n\
\t<key>CFBundleInfoDictionaryVersion</key>\n\ \t<key>CFBundleInfoDictionaryVersion</key>\n\
\t<string>6.0</string>\n\ \t<string>6.0</string>\n\
\t<key>CFBundleName</key>\n\
\t<string>$(PRODUCT_NAME)</string>\n\
\t<key>CFBundleDisplayName</key>\n\
\t<string>$(PRODUCT_NAME)</string>\n\
\t<key>CFBundlePackageType</key>\n\ \t<key>CFBundlePackageType</key>\n\
\t<string>APPL</string>\n\ \t<string>APPL</string>\n\
\t<key>CFBundleShortVersionString</key>\n\ \t<key>CFBundleShortVersionString</key>\n\
@ -467,22 +433,12 @@ static NSString *xmlEscapeString(NSString * string) {
\t<string>MediaKeysApplication</string>\n\ \t<string>MediaKeysApplication</string>\n\
\t<key>NSRemindersUsageDescription</key>\n\ \t<key>NSRemindersUsageDescription</key>\n\
\t<string>Cog has no use for your reminders. Why are you trying to access them with an audio player?</string>\n\ \t<string>Cog has no use for your reminders. Why are you trying to access them with an audio player?</string>\n\
\t<key>NSDownloadsFolderUsageDescription</key>\n\
\t<string>We may request related audio files from this folder for playback purposes. We will only play back files you specifically add, unless you enable the option to add an entire folder. Granting permission either for individual files or for parent folders ensures their contents will remain playable in future sessions.</string>\n\
\t<key>NSDocumentsFolderUsageDescription</key>\n\
\t<string>We may request related audio files from this folder for playback purposes. We will only play back files you specifically add, unless you enable the option to add an entire folder. Granting permission either for individual files or for parent folders ensures their contents will remain playable in future sessions.</string>\n\
\t<key>NSDesktopFolderUsageDescription</key>\n\
\t<string>We may request related audio files from this folder for playback purposes. We will only play back files you specifically add, unless you enable the option to add an entire folder. Granting permission either for individual files or for parent folders ensures their contents will remain playable in future sessions.</string>\n\
\t<key>NSMotionUsageDescription</key>\n\
\t<string>Cog optionally supports motion tracking headphones for head tracked positional audio, using its own low latency positioning model.</string>\n\
\t<key>OSAScriptingDefinition</key>\n\ \t<key>OSAScriptingDefinition</key>\n\
\t<string>Cog.sdef</string>\n\ \t<string>Cog.sdef</string>\n\
\t<key>SUFeedURL</key>\n\ \t<key>SUFeedURL</key>\n\
\t<string>https://cogcdn.cog.losno.co/mercury.xml</string>\n\ \t<string>https://cogcdn.cog.losno.co/mercury.xml</string>\n\
\t<key>SUPublicEDKey</key>\n\ \t<key>SUPublicEDKey</key>\n\
\t<string>omxG7Rp0XK9/YEvKbVy7cd44eVAh1LJB6CmjQwjOJz4=</string>\n\ \t<string>omxG7Rp0XK9/YEvKbVy7cd44eVAh1LJB6CmjQwjOJz4=</string>\n\
\t<key>ITSAppUsesNonExemptEncryption</key>\n\
\t<false/>\n\
</dict>\n\ </dict>\n\
</plist>\n"; </plist>\n";
NSMutableArray * decodersRegistered = [[NSMutableArray alloc] init]; NSMutableArray * decodersRegistered = [[NSMutableArray alloc] init];
@ -540,7 +496,7 @@ static NSString *xmlEscapeString(NSString * string) {
\t\t\t<integer>1</integer>\n\ \t\t\t<integer>1</integer>\n\
\t\t\t<key>CFBundleTypeName</key>\n\ \t\t\t<key>CFBundleTypeName</key>\n\
\t\t\t<string>"]; \t\t\t<string>"];
[stringList addObject:xmlEscapeString([type objectAtIndex:0])]; [stringList addObject:[type objectAtIndex:0]];
[stringList addObject:@"</string>\n\ [stringList addObject:@"</string>\n\
\t\t\t<key>CFBundleTypeRole</key>\n\ \t\t\t<key>CFBundleTypeRole</key>\n\
\t\t\t<string>Viewer</string>\n\ \t\t\t<string>Viewer</string>\n\
@ -553,7 +509,7 @@ static NSString *xmlEscapeString(NSString * string) {
[stringList addObject:plistFooter]; [stringList addObject:plistFooter];
NSFileHandle *fileHandle = [NSFileHandle fileHandleForWritingAtPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"Cog_Info.plist"] createFile:YES]; NSFileHandle *fileHandle = [NSFileHandle fileHandleForWritingAtPath:@"/tmp/Cog_Info.plist" createFile:YES];
if (!fileHandle) { if (!fileHandle) {
DLog(@"Error saving Info.plist!"); DLog(@"Error saving Info.plist!");
return; return;
@ -591,29 +547,6 @@ static NSString *xmlEscapeString(NSString * string) {
return [container urlsForContainerURL:url]; return [container urlsForContainerURL:url];
} }
- (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url {
NSString *ext = [url pathExtension];
NSArray *containerSet = [containers objectForKey:[ext lowercaseString]];
NSString *classString;
if(containerSet) {
if([containerSet count] > 1) {
return [CogContainerMulti dependencyUrlsForContainerURL:url containers:containerSet];
} else {
classString = [containerSet objectAtIndex:0];
}
} else {
return nil;
}
Class container = NSClassFromString(classString);
if([container respondsToSelector:@selector(dependencyUrlsForContainerURL:)]) {
return [container dependencyUrlsForContainerURL:url];
} else {
return nil;
}
}
// Note: Source is assumed to already be opened. // Note: Source is assumed to already be opened.
- (id<CogDecoder>)audioDecoderForSource:(id<CogSource>)source skipCue:(BOOL)skip { - (id<CogDecoder>)audioDecoderForSource:(id<CogSource>)source skipCue:(BOOL)skip {
NSString *ext = [[source url] pathExtension]; NSString *ext = [[source url] pathExtension];
@ -648,28 +581,11 @@ static NSString *xmlEscapeString(NSString * string) {
} }
} }
if(skip && [classString isEqualToString:@"CueSheetDecoder"]) {
classString = @"SilenceDecoder";
}
Class decoder = NSClassFromString(classString); Class decoder = NSClassFromString(classString);
return [[decoder alloc] init]; return [[decoder alloc] init];
} }
+ (BOOL)isCoverFile:(NSString *)fileName {
for(NSString *coverFileName in [PluginController coverNames]) {
if([[[[fileName lastPathComponent] stringByDeletingPathExtension] lowercaseString] hasSuffix:coverFileName]) {
return true;
}
}
return false;
}
+ (NSArray *)coverNames {
return @[@"cover", @"folder", @"album", @"front"];
}
- (NSDictionary *)metadataForURL:(NSURL *)url skipCue:(BOOL)skip { - (NSDictionary *)metadataForURL:(NSURL *)url skipCue:(BOOL)skip {
NSString *urlScheme = [url scheme]; NSString *urlScheme = [url scheme];
if([urlScheme isEqualToString:@"http"] || if([urlScheme isEqualToString:@"http"] ||
@ -679,7 +595,6 @@ static NSString *xmlEscapeString(NSString * string) {
NSDictionary *cacheData = cache_access_metadata(url); NSDictionary *cacheData = cache_access_metadata(url);
if(cacheData) return cacheData; if(cacheData) return cacheData;
do {
NSString *ext = [url pathExtension]; NSString *ext = [url pathExtension];
NSArray *readers = [metadataReaders objectForKey:[ext lowercaseString]]; NSArray *readers = [metadataReaders objectForKey:[ext lowercaseString]];
NSString *classString; NSString *classString;
@ -694,61 +609,22 @@ static NSString *xmlEscapeString(NSString * string) {
++i; ++i;
} }
cacheData = [CogMetadataReaderMulti metadataForURL:url readers:_readers]; cacheData = [CogMetadataReaderMulti metadataForURL:url readers:_readers];
break; cache_insert_metadata(url, cacheData);
return cacheData;
} }
cacheData = [CogMetadataReaderMulti metadataForURL:url readers:readers]; cacheData = [CogMetadataReaderMulti metadataForURL:url readers:readers];
break; cache_insert_metadata(url, cacheData);
return cacheData;
} else { } else {
classString = [readers objectAtIndex:0]; classString = [readers objectAtIndex:0];
} }
} else { } else {
cacheData = nil; return nil;
break;
}
if(skip && [classString isEqualToString:@"CueSheetMetadataReader"]) {
cacheData = nil;
break;
} }
Class metadataReader = NSClassFromString(classString); Class metadataReader = NSClassFromString(classString);
cacheData = [metadataReader metadataForURL:url]; cacheData = [metadataReader metadataForURL:url];
} while(0);
if(cacheData == nil) {
cacheData = [NSDictionary dictionary];
}
if(cacheData) {
NSData *image = [cacheData objectForKey:@"albumArt"];
if(nil == image) {
// Try to load image from external file
NSString *path = [[url path] stringByDeletingLastPathComponent];
// Gather list of candidate image files
NSArray *fileNames = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:path error:nil];
NSArray *types = @[@"jpg", @"jpeg", @"png", @"gif", @"webp", @"avif", @"heic"];
NSArray *imageFileNames = [fileNames pathsMatchingExtensions:types];
for(NSString *fileName in imageFileNames) {
if([PluginController isCoverFile:fileName]) {
image = [NSData dataWithContentsOfFile:[path stringByAppendingPathComponent:fileName]];
break;
}
}
if(image) {
NSMutableDictionary *data = [cacheData mutableCopy];
[data setValue:image forKey:@"albumArt"];
cacheData = data;
}
}
}
cache_insert_metadata(url, cacheData); cache_insert_metadata(url, cacheData);
return cacheData; return cacheData;
} }
@ -830,24 +706,3 @@ static NSString *xmlEscapeString(NSString * string) {
} }
@end @end
NSString *guess_encoding_of_string(const char *input) {
NSString *ret = @"";
if(input && *input) {
@try {
ret = [NSString stringWithUTF8String:input];
}
@catch(NSException *e) {
ret = nil;
}
if(!ret) {
// This method is incredibly slow
NSData *stringData = [NSData dataWithBytes:input length:strlen(input)];
[NSString stringEncodingForData:stringData encodingOptions:nil convertedString:&ret usedLossyConversion:nil];
if(!ret) {
ret = @"";
}
}
}
return ret;
}

View file

@ -56,7 +56,7 @@ AudioStreamBasicDescription propertiesToASBD(NSDictionary *properties)
asbd.mFormatFlags |= kLinearPCMFormatFlagIsAlignedHigh; asbd.mFormatFlags |= kLinearPCMFormatFlagIsAlignedHigh;
} }
if(isFloat == NO && [[properties objectForKey:@"unSigned"] boolValue] == NO) { if (isFloat == NO && [[properties objectForKey:@"Unsigned"] boolValue] == NO) {
asbd.mFormatFlags |= kLinearPCMFormatFlagIsSignedInteger; asbd.mFormatFlags |= kLinearPCMFormatFlagIsSignedInteger;
} }

View file

@ -1,40 +0,0 @@
/*
DeaDBeeF -- the music player
Copyright (C) 2009-2021 Alexey Yakovenko and other contributors
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
#ifndef FFT_H
#define FFT_H
#ifdef __cplusplus
extern "C" {
#endif
void fft_calculate(const float *data, float *freq, int fft_size);
void fft_free(void);
#ifdef __cplusplus
}
#endif
#endif
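
For orientation, a minimal caller sketch of the interface this removed header declared. This is not code from the tree: the helper name and the buffer sizing (2*fft_size mono input samples, fft_size output bins, decibel values in the vDSP variant) are assumptions inferred from the two implementations that follow.

#include <cstddef>
#include <vector>
#include "fft.h"

// Hypothetical caller: hand the analysis window to fft_calculate() and collect one value per bin.
static std::vector<float> example_spectrum(const std::vector<float> &samples, int fft_size) {
	std::vector<float> freq((size_t)fft_size);
	if(samples.size() >= (size_t)fft_size * 2) {
		fft_calculate(samples.data(), freq.data(), fft_size);
	}
	return freq;
}

Calling fft_free() once at shutdown would release the cached FFT setup and scratch buffers.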

View file

@ -1,139 +0,0 @@
/*
DeaDBeeF -- the music player
Copyright (C) 2009-2021 Alexey Yakovenko and other contributors
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
#include "fft.h"
#include <Accelerate/Accelerate.h>
// Some newer spectrum calculation methodology, adapted but not copied wholesale
// Mostly about a dozen or two lines of Cocoa and vDSP code
// AudioSpectrum: A sample app using Audio Unit and vDSP
// By Keijiro Takahashi, 2013, 2014
// https://github.com/keijiro/AudioSpectrum
struct SpectrumData
{
unsigned long length;
Float32 data[0];
};
static int _fft_size = 0;
static vDSP_DFT_Setup _dftSetup = NULL;
static DSPSplitComplex _dftBuffer = {0};
static Float32 *_window = NULL;
static struct SpectrumData *_rawSpectrum = NULL;
// Apparently _mm_malloc is Intel-only on newer macOS targets, so use supported posix_memalign
// malloc() is allegedly aligned on macOS, but I don't know for sure
static void *_memalign_calloc(size_t count, size_t size, size_t align) {
size *= count;
void *ret = NULL;
if(posix_memalign(&ret, align, size) != 0) {
return NULL;
}
bzero(ret, size);
return ret;
}
static void
_init_buffers(int fft_size) {
if(fft_size != _fft_size) {
fft_free();
_dftSetup = vDSP_DFT_zrop_CreateSetup(NULL, fft_size * 2, vDSP_DFT_FORWARD);
if(!_dftSetup) return;
_dftBuffer.realp = _memalign_calloc(fft_size, sizeof(Float32), 16);
_dftBuffer.imagp = _memalign_calloc(fft_size, sizeof(Float32), 16);
if(!_dftBuffer.realp || !_dftBuffer.imagp) return;
_window = _memalign_calloc(fft_size * 2, sizeof(Float32), 16);
if(!_window) return;
vDSP_blkman_window(_window, fft_size * 2, 0);
Float32 normFactor = 2.0f / (fft_size * 2);
vDSP_vsmul(_window, 1, &normFactor, _window, 1, fft_size * 2);
_rawSpectrum = (struct SpectrumData *) _memalign_calloc(sizeof(struct SpectrumData) + sizeof(Float32) * fft_size, 1, 16);
if(!_rawSpectrum) return;
_rawSpectrum->length = fft_size;
_fft_size = fft_size;
}
}
void fft_calculate(const float *data, float *freq, int fft_size) {
if(!freq || !fft_size) return;
_init_buffers(fft_size);
if(!_fft_size || !data) {
// Decibels
float kZeroLevel = -128.0;
vDSP_vfill(&kZeroLevel, freq, 1, fft_size);
return;
}
// Split the waveform
DSPSplitComplex dest = { _dftBuffer.realp, _dftBuffer.imagp };
vDSP_ctoz((const DSPComplex*)data, 2, &dest, 1, fft_size);
// Apply the window function
vDSP_vmul(_dftBuffer.realp, 1, _window, 2, _dftBuffer.realp, 1, fft_size);
vDSP_vmul(_dftBuffer.imagp, 1, _window + 1, 2, _dftBuffer.imagp, 1, fft_size);
// DFT
vDSP_DFT_Execute(_dftSetup, _dftBuffer.realp, _dftBuffer.imagp, _dftBuffer.realp, _dftBuffer.imagp);
// Zero out the Nyquist value
_dftBuffer.imagp[0] = 0.0;
// Calculate power spectrum
Float32 *rawSpectrum = _rawSpectrum->data;
vDSP_zvmags(&_dftBuffer, 1, rawSpectrum, 1, fft_size);
// Add -128dB offset to avoid log(0)
float kZeroOffset = 1.5849e-13;
vDSP_vsadd(rawSpectrum, 1, &kZeroOffset, rawSpectrum, 1, fft_size);
// Convert power to decibel
float kZeroDB = 0.70710678118f; // 1/sqrt(2)
vDSP_vdbcon(rawSpectrum, 1, &kZeroDB, rawSpectrum, 1, fft_size, 0);
cblas_scopy(fft_size, rawSpectrum, 1, freq, 1);
}
void __attribute__((destructor)) fft_free(void) {
free(_dftBuffer.realp);
free(_dftBuffer.imagp);
free(_window);
free(_rawSpectrum);
if(_dftSetup != NULL) {
vDSP_DFT_DestroySetup(_dftSetup);
}
_dftBuffer.realp = NULL;
_dftBuffer.imagp = NULL;
_window = NULL;
_rawSpectrum = NULL;
_dftSetup = NULL;
_fft_size = 0;
}

View file

@ -1,101 +0,0 @@
/*
DeaDBeeF -- the music player
Copyright (C) 2009-2021 Alexey Yakovenko and other contributors
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
#include "fft.h"
#include "pffft.h"
#include <Accelerate/Accelerate.h>
static int _fft_size;
static float *_input;
static float *_output;
static float *_output_real;
static float *_output_imaginary;
static float *_work;
static float *_hamming;
static float *_sq_mags;
static PFFFT_Setup *_fft_setup;
static void
_init_buffers(int fft_size) {
if(fft_size != _fft_size) {
fft_free();
_input = pffft_aligned_malloc(fft_size * 4 * sizeof(float));
_hamming = pffft_aligned_malloc(fft_size * 2 * sizeof(float));
_sq_mags = pffft_aligned_malloc(fft_size * sizeof(float));
_output = pffft_aligned_malloc(fft_size * 4 * sizeof(float));
_output_real = pffft_aligned_malloc(fft_size * 2 * sizeof(float));
_output_imaginary = pffft_aligned_malloc(fft_size * 2 * sizeof(float));
_work = pffft_aligned_malloc(fft_size * 4 * sizeof(float));
bzero(_input, fft_size * 4 * sizeof(float));
_fft_setup = pffft_new_setup(fft_size * 2, PFFFT_COMPLEX);
vDSP_hamm_window(_hamming, fft_size * 2, 0);
_fft_size = fft_size;
}
}
void fft_calculate(const float *data, float *freq, int fft_size) {
int dft_size = fft_size * 2;
_init_buffers(fft_size);
vDSP_vmul(data, 1, _hamming, 1, _input, 2, dft_size);
pffft_transform_ordered(_fft_setup, _input, _output, _work, PFFFT_FORWARD);
DSPSplitComplex split_complex = {
.realp = _output_real,
.imagp = _output_imaginary
};
vDSP_ctoz((const DSPComplex *)_output, 2, &split_complex, 1, dft_size);
vDSP_zvmags(&split_complex, 1, _sq_mags, 1, fft_size);
int sq_count = fft_size;
vvsqrtf(_sq_mags, _sq_mags, &sq_count);
float mult = 2.f / fft_size;
vDSP_vsmul(_sq_mags, 1, &mult, freq, 1, fft_size);
}
void fft_free(void) {
pffft_aligned_free(_input);
pffft_aligned_free(_hamming);
pffft_aligned_free(_sq_mags);
pffft_aligned_free(_output);
pffft_aligned_free(_output_real);
pffft_aligned_free(_output_imaginary);
if(_fft_setup != NULL) {
pffft_destroy_setup(_fft_setup);
}
_input = NULL;
_hamming = NULL;
_sq_mags = NULL;
_fft_setup = NULL;
_output = NULL;
_output_real = NULL;
_output_imaginary = NULL;
}

File diff suppressed because it is too large

View file

@ -1,36 +0,0 @@
/*
Copyright (C) 2010 Christian Kothe
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
#ifndef CHANNELMAPS_H
#define CHANNELMAPS_H
#include "freesurround_decoder.h"
#include <map>
#include <vector>
const int grid_res = 21; // resolution of the lookup grid
// channel allocation maps (per setup)
typedef std::vector<std::vector<float*> > alloc_lut;
extern std::map<unsigned, alloc_lut> chn_alloc;
// channel metadata maps (per setup)
extern std::map<unsigned, std::vector<float> > chn_angle;
extern std::map<unsigned, std::vector<float> > chn_xsf;
extern std::map<unsigned, std::vector<float> > chn_ysf;
extern std::map<unsigned, std::vector<channel_id> > chn_id;
#endif

View file

@ -1,413 +0,0 @@
/*
Copyright (C) 2007-2010 Christian Kothe
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
#include "freesurround_decoder.h"
#include "channelmaps.h"
#include <Accelerate/Accelerate.h>
#include <cmath>
#include <vector>
#pragma warning(disable : 4244)
#define pi _pi
const float _pi = 3.141592654f;
const float epsilon = 0.000001f;
using namespace std;
#undef min
#undef max
static void *_memalign_malloc(size_t size, size_t align) {
void *ret = NULL;
if(posix_memalign(&ret, align, size) != 0) {
return NULL;
}
return ret;
}
static void _dsp_complexalloc(DSPDoubleSplitComplex *cpx, int count) {
cpx->realp = (double *)_memalign_malloc(count * sizeof(double), 16);
cpx->imagp = (double *)_memalign_malloc(count * sizeof(double), 16);
}
static void _dsp_complexfree(DSPDoubleSplitComplex *cpx) {
free(cpx->realp);
free(cpx->imagp);
}
// FreeSurround implementation
class decoder_impl {
public:
// instantiate the decoder with a given channel setup and processing block size (in samples)
decoder_impl(channel_setup setup, unsigned N)
: N(N),
wnd(N), inbuf(3 * N), setup(setup), C((unsigned)chn_alloc[setup].size()),
buffer_empty(true), lt(N), rt(N), dst(N), dstf(N),
dftsetupF(vDSP_DFT_zrop_CreateSetupD(0, N, vDSP_DFT_FORWARD)),
dftsetupB(vDSP_DFT_zrop_CreateSetupD(0, N, vDSP_DFT_INVERSE)) {
_dsp_complexalloc(&lf, N/2 + 1);
_dsp_complexalloc(&rf, N/2 + 1);
// allocate per-channel buffers
outbuf.resize((N + N / 2) * C);
signal.resize(C);
for(unsigned k = 0; k < C; k++)
_dsp_complexalloc(&signal[k], N/2 + 1);
// init the window function
for(unsigned k = 0; k < N; k++)
wnd[k] = sqrt(0.5 * (1 - cos(2 * pi * k / N)) / N);
// set default parameters
set_circular_wrap(90);
set_shift(0);
set_depth(1);
set_focus(0);
set_center_image(1);
set_front_separation(1);
set_rear_separation(1);
set_low_cutoff(40.0 / 22050);
set_high_cutoff(90.0 / 22050);
set_bass_redirection(false);
flush();
}
~decoder_impl() {
_dsp_complexfree(&lf);
_dsp_complexfree(&rf);
for(unsigned k = 0; k < C; k++)
_dsp_complexfree(&signal[k]);
vDSP_DFT_DestroySetupD(dftsetupF);
vDSP_DFT_DestroySetupD(dftsetupB);
}
// decode a stereo chunk, produces a multichannel chunk of the same size (lagged)
float *decode(const float *input) {
// append incoming data to the end of the input buffer
memcpy(&inbuf[N], &input[0], 8 * N);
// process first and second half, overlapped
buffered_decode(&inbuf[0]);
buffered_decode(&inbuf[N]);
// shift last half of the input to the beginning (for overlapping with a future block)
memcpy(&inbuf[0], &inbuf[2 * N], 4 * N);
buffer_empty = false;
return &outbuf[0];
}
// flush the internal buffers
void flush() {
memset(&outbuf[0], 0, outbuf.size() * 4);
memset(&inbuf[0], 0, inbuf.size() * 4);
buffer_empty = true;
}
// number of samples currently held in the buffer
unsigned buffered() {
return buffer_empty ? 0 : N / 2;
}
// set soundfield & rendering parameters
void set_circular_wrap(float v) {
circular_wrap = v;
}
void set_shift(float v) {
shift = v;
}
void set_depth(float v) {
depth = v;
}
void set_focus(float v) {
focus = v;
}
void set_center_image(float v) {
center_image = v;
}
void set_front_separation(float v) {
front_separation = v;
}
void set_rear_separation(float v) {
rear_separation = v;
}
void set_low_cutoff(float v) {
lo_cut = v * (N / 2);
}
void set_high_cutoff(float v) {
hi_cut = v * (N / 2);
}
void set_bass_redirection(bool v) {
use_lfe = v;
}
private:
// helper functions
static inline float sqr(double x) {
return x * x;
}
static inline double amplitude(const DSPDoubleSplitComplex &cpx, size_t index) {
return sqrt(sqr(cpx.realp[index]) + sqr(cpx.imagp[index]));
}
static inline double phase(const DSPDoubleSplitComplex &cpx, size_t index) {
return atan2(cpx.imagp[index], cpx.realp[index]);
}
static inline void polar(double a, double p, DSPDoubleSplitComplex &cpx, size_t index) {
cpx.realp[index] = a * cos(p);
cpx.imagp[index] = a * sin(p);
}
static inline float min(double a, double b) {
return a < b ? a : b;
}
static inline float max(double a, double b) {
return a > b ? a : b;
}
static inline float clamp(double x) {
return max(-1, min(1, x));
}
static inline float sign(double x) {
return x < 0 ? -1 : (x > 0 ? 1 : 0);
}
// get the distance of the soundfield edge, along a given angle
static inline double edgedistance(double a) {
return min(sqrt(1 + sqr(tan(a))), sqrt(1 + sqr(1 / tan(a))));
}
// get the index (and fractional offset!) in a piecewise-linear channel allocation grid
int map_to_grid(double &x) {
double gp = ((x + 1) * 0.5) * (grid_res - 1), i = min(grid_res - 2, floor(gp));
x = gp - i;
return i;
}
// decode a block of data and overlap-add it into outbuf
void buffered_decode(const float *input) {
// demultiplex and apply window function
vDSP_vspdp(input, 2, &lt[0], 1, N);
vDSP_vspdp(input + 1, 2, &rt[0], 1, N);
vDSP_vmulD(&lt[0], 1, &wnd[0], 1, &lt[0], 1, N);
vDSP_vmulD(&rt[0], 1, &wnd[0], 1, &rt[0], 1, N);
// map into spectral domain
vDSP_ctozD((DSPDoubleComplex *)(&lt[0]), 2, &lf, 1, N / 2);
vDSP_ctozD((DSPDoubleComplex *)(&rt[0]), 2, &rf, 1, N / 2);
vDSP_DFT_ExecuteD(dftsetupF, lf.realp, lf.imagp, lf.realp, lf.imagp);
vDSP_DFT_ExecuteD(dftsetupF, rf.realp, rf.imagp, rf.realp, rf.imagp);
for(unsigned c = 0; c < C; c++) {
signal[c].realp[0] = 0;
signal[c].imagp[0] = 0;
signal[c].realp[N/2] = 0;
signal[c].imagp[N/2] = 0;
}
bzero(signal[C - 1].realp, sizeof(double) * (N / 2 + 1));
bzero(signal[C - 1].imagp, sizeof(double) * (N / 2 + 1));
// compute multichannel output signal in the spectral domain
for(unsigned f = 1; f < N / 2; f++) {
// get Lt/Rt amplitudes & phases
double ampL = amplitude(lf, f), ampR = amplitude(rf, f);
double phaseL = phase(lf, f), phaseR = phase(rf, f);
// calculate the amplitude & phase differences
double ampDiff = clamp((ampL + ampR < epsilon) ? 0 : (ampR - ampL) / (ampR + ampL));
double phaseDiff = abs(phaseL - phaseR);
if(phaseDiff > pi) phaseDiff = 2 * pi - phaseDiff;
// decode into x/y soundfield position
double x, y;
transform_decode(ampDiff, phaseDiff, x, y);
// add wrap control
transform_circular_wrap(x, y, circular_wrap);
// add shift control
y = clamp(y - shift);
// add depth control
y = clamp(1 - (1 - y) * depth);
// add focus control
transform_focus(x, y, focus);
// add crossfeed control
x = clamp(x * (front_separation * (1 + y) / 2 + rear_separation * (1 - y) / 2));
// get total signal amplitude
double amp_total = sqrt(ampL * ampL + ampR * ampR);
// and total L/C/R signal phases
double phase_of[] = { phaseL, atan2(lf.imagp[f] + rf.imagp[f], lf.realp[f] + rf.realp[f]), phaseR };
// compute 2d channel map indexes p/q and update x/y to fractional offsets in the map grid
int p = map_to_grid(x), q = map_to_grid(y);
// map position to channel volumes
for(unsigned c = 0; c < C - 1; c++) {
// look up channel map at respective position (with bilinear interpolation) and build the signal
const vector<float *> &a = chn_alloc[setup][c];
polar(amp_total * ((1 - x) * (1 - y) * a[q][p] + x * (1 - y) * a[q][p + 1] + (1 - x) * y * a[q + 1][p] + x * y * a[q + 1][p + 1]),
phase_of[1 + (int)sign(chn_xsf[setup][c])], signal[c], f);
}
// optionally redirect bass
if(use_lfe && f < hi_cut) {
// level of LFE channel according to normalized frequency
double lfe_level = f < lo_cut ? 1 : 0.5 * (1 + cos(pi * (f - lo_cut) / (hi_cut - lo_cut)));
// assign LFE channel
polar(amp_total, phase_of[1], signal[C - 1], f);
signal[C - 1].realp[f] *= lfe_level;
signal[C - 1].imagp[f] *= lfe_level;
// subtract the signal from the other channels
for(unsigned c = 0; c < C - 1; c++) {
signal[c].realp[f] *= (1 - lfe_level);
signal[c].imagp[f] *= (1 - lfe_level);
}
}
}
// shift the last 2/3 to the first 2/3 of the output buffer
memmove(&outbuf[0], &outbuf[C * N / 2], N * C * 4);
// and clear the rest
memset(&outbuf[C * N], 0, C * 4 * N / 2);
// backtransform each channel and overlap-add
for(unsigned c = 0; c < C; c++) {
// back-transform into time domain
vDSP_DFT_ExecuteD(dftsetupB, signal[c].realp, signal[c].imagp, signal[c].realp, signal[c].imagp);
vDSP_ztocD(&signal[c], 1, (DSPDoubleComplex *)(&dst[0]), 2, N / 2);
// add the result to the last 2/3 of the output buffer, windowed (and remultiplex)
vDSP_vmulD(&dst[0], 1, &wnd[0], 1, &dst[0], 1, N);
vDSP_vdpsp(&dst[0], 1, &dstf[0], 1, N);
vDSP_vadd(&outbuf[C * N / 2 + c], C, &dstf[0], 1, &outbuf[C * N / 2 + c], C, N);
}
}
// transform amp/phase difference space into x/y soundfield space
void transform_decode(double a, double p, double &x, double &y) {
x = clamp(1.0047 * a + 0.46804 * a * p * p * p - 0.2042 * a * p * p * p * p + 0.0080586 * a * p * p * p * p * p * p * p - 0.0001526 * a * p * p * p * p * p * p * p * p * p * p - 0.073512 * a * a * a * p - 0.2499 * a * a * a * p * p * p * p + 0.016932 * a * a * a * p * p * p * p * p * p * p - 0.00027707 * a * a * a * p * p * p * p * p * p * p * p * p * p + 0.048105 * a * a * a * a * a * p * p * p * p * p * p * p - 0.0065947 * a * a * a * a * a * p * p * p * p * p * p * p * p * p * p + 0.0016006 * a * a * a * a * a * p * p * p * p * p * p * p * p * p * p * p - 0.0071132 * a * a * a * a * a * a * a * p * p * p * p * p * p * p * p * p + 0.0022336 * a * a * a * a * a * a * a * p * p * p * p * p * p * p * p * p * p * p - 0.0004804 * a * a * a * a * a * a * a * p * p * p * p * p * p * p * p * p * p * p * p);
y = clamp(0.98592 - 0.62237 * p + 0.077875 * p * p - 0.0026929 * p * p * p * p * p + 0.4971 * a * a * p - 0.00032124 * a * a * p * p * p * p * p * p + 9.2491e-006 * a * a * a * a * p * p * p * p * p * p * p * p * p * p + 0.051549 * a * a * a * a * a * a * a * a + 1.0727e-014 * a * a * a * a * a * a * a * a * a * a);
}
// apply a circular_wrap transformation to some position
void transform_circular_wrap(double &x, double &y, double refangle) {
if(refangle == 90)
return;
refangle = refangle * pi / 180;
double baseangle = 90 * pi / 180;
// translate into edge-normalized polar coordinates
double ang = atan2(x, y), len = sqrt(x * x + y * y);
len = len / edgedistance(ang);
// apply circular_wrap transform
if(abs(ang) < baseangle / 2)
// angle falls within the front region (to be enlarged)
ang *= refangle / baseangle;
else
// angle falls within the rear region (to be shrunken)
ang = pi - (-(((refangle - 2 * pi) * (pi - abs(ang)) * sign(ang)) / (2 * pi - baseangle)));
// translate back into soundfield position
len = len * edgedistance(ang);
x = clamp(sin(ang) * len);
y = clamp(cos(ang) * len);
}
// apply a focus transformation to some position
void transform_focus(double &x, double &y, double focus) {
if(focus == 0)
return;
// translate into edge-normalized polar coordinates
double ang = atan2(x, y), len = clamp(sqrt(x * x + y * y) / edgedistance(ang));
// apply focus
len = focus > 0 ? 1 - pow(1 - len, 1 + focus * 20) : pow(len, 1 - focus * 20);
// back-transform into euclidian soundfield position
len = len * edgedistance(ang);
x = clamp(sin(ang) * len);
y = clamp(cos(ang) * len);
}
// constants
unsigned N, C; // number of samples per input/output block, number of output channels
channel_setup setup; // the channel setup
// parameters
float circular_wrap; // angle of the front soundstage around the listener (90°=default)
float shift; // forward/backward offset of the soundstage
float depth; // backward extension of the soundstage
float focus; // localization of the sound events
float center_image; // presence of the center speaker
float front_separation; // front stereo separation
float rear_separation; // rear stereo separation
float lo_cut, hi_cut; // LFE cutoff frequencies
bool use_lfe; // whether to use the LFE channel
// FFT data structures
vector<double> lt, rt, dst; // left total, right total (source arrays), time-domain destination buffer array
vector<float> dstf; // float conversion destination array
DSPDoubleSplitComplex lf, rf; // left total / right total in frequency domain
vDSP_DFT_SetupD dftsetupF, dftsetupB; // FFT objects
// buffers
bool buffer_empty; // whether the buffer is currently empty or dirty
vector<float> inbuf; // stereo input buffer (multiplexed)
vector<float> outbuf; // multichannel output buffer (multiplexed)
vector<double> wnd; // the window function, precomputed
vector<DSPDoubleSplitComplex> signal; // the signal to be constructed in every channel, in the frequency domain
};
// implementation of the shell class
freesurround_decoder::freesurround_decoder(channel_setup setup, unsigned blocksize)
: impl(new decoder_impl(setup, blocksize)) {
}
freesurround_decoder::~freesurround_decoder() {
delete impl;
}
float *freesurround_decoder::decode(const float *input) {
return impl->decode(input);
}
void freesurround_decoder::flush() {
impl->flush();
}
void freesurround_decoder::circular_wrap(float v) {
impl->set_circular_wrap(v);
}
void freesurround_decoder::shift(float v) {
impl->set_shift(v);
}
void freesurround_decoder::depth(float v) {
impl->set_depth(v);
}
void freesurround_decoder::focus(float v) {
impl->set_focus(v);
}
void freesurround_decoder::center_image(float v) {
impl->set_center_image(v);
}
void freesurround_decoder::front_separation(float v) {
impl->set_front_separation(v);
}
void freesurround_decoder::rear_separation(float v) {
impl->set_rear_separation(v);
}
void freesurround_decoder::low_cutoff(float v) {
impl->set_low_cutoff(v);
}
void freesurround_decoder::high_cutoff(float v) {
impl->set_high_cutoff(v);
}
void freesurround_decoder::bass_redirection(bool v) {
impl->set_bass_redirection(v);
}
unsigned freesurround_decoder::buffered() {
return impl->buffered();
}
unsigned freesurround_decoder::num_channels(channel_setup s) {
return (unsigned)chn_id[s].size();
}
channel_id freesurround_decoder::channel_at(channel_setup s, unsigned i) {
return i < chn_id[s].size() ? chn_id[s][i] : ci_none;
}

View file

@ -1,210 +0,0 @@
/*
Copyright (C) 2007-2010 Christian Kothe
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
#ifndef FREESURROUND_DECODER_H
#define FREESURROUND_DECODER_H
/**
* Identifiers for the supported output channels (from front to back, left to right).
* The ordering here also determines the ordering of interleaved samples in the output signal.
*/
typedef enum channel_id {
ci_none = 0,
ci_front_left = 1 << 1,
ci_front_center_left = 1 << 2,
ci_front_center = 1 << 3,
ci_front_center_right = 1 << 4,
ci_front_right = 1 << 5,
ci_side_front_left = 1 << 6,
ci_side_front_right = 1 << 7,
ci_side_center_left = 1 << 8,
ci_side_center_right = 1 << 9,
ci_side_back_left = 1 << 10,
ci_side_back_right = 1 << 11,
ci_back_left = 1 << 12,
ci_back_center_left = 1 << 13,
ci_back_center = 1 << 14,
ci_back_center_right = 1 << 15,
ci_back_right = 1 << 16,
ci_lfe = 1 << 31
} channel_id;
/**
* The supported output channel setups.
* A channel setup is defined by the set of channels that are present. Here is a graphic
* of the cs_5point1 setup: http://en.wikipedia.org/wiki/File:5_1_channels_(surround_sound)_label.svg
*/
typedef enum channel_setup {
cs_stereo = ci_front_left | ci_front_right | ci_lfe,
cs_3stereo = ci_front_left | ci_front_center | ci_front_right | ci_lfe,
cs_5stereo = ci_front_left | ci_front_center_left | ci_front_center | ci_front_center_right | ci_front_right | ci_lfe,
cs_4point1 = ci_front_left | ci_front_right | ci_back_left | ci_back_right | ci_lfe,
cs_5point1 = ci_front_left | ci_front_center | ci_front_right | ci_back_left | ci_back_right | ci_lfe,
cs_6point1 = ci_front_left | ci_front_center | ci_front_right | ci_side_center_left | ci_side_center_right | ci_back_center | ci_lfe,
cs_7point1 = ci_front_left | ci_front_center | ci_front_right | ci_side_center_left | ci_side_center_right | ci_back_left | ci_back_right | ci_lfe,
cs_7point1_panorama = ci_front_left | ci_front_center_left | ci_front_center | ci_front_center_right | ci_front_right |
ci_side_center_left | ci_side_center_right | ci_lfe,
cs_7point1_tricenter = ci_front_left | ci_front_center_left | ci_front_center | ci_front_center_right | ci_front_right |
ci_back_left | ci_back_right | ci_lfe,
cs_8point1 = ci_front_left | ci_front_center | ci_front_right | ci_side_center_left | ci_side_center_right |
ci_back_left | ci_back_center | ci_back_right | ci_lfe,
cs_9point1_densepanorama = ci_front_left | ci_front_center_left | ci_front_center | ci_front_center_right | ci_front_right |
ci_side_front_left | ci_side_front_right | ci_side_center_left | ci_side_center_right | ci_lfe,
cs_9point1_wrap = ci_front_left | ci_front_center_left | ci_front_center | ci_front_center_right | ci_front_right |
ci_side_center_left | ci_side_center_right | ci_back_left | ci_back_right | ci_lfe,
cs_11point1_densewrap = ci_front_left | ci_front_center_left | ci_front_center | ci_front_center_right | ci_front_right |
ci_side_front_left | ci_side_front_right | ci_side_center_left | ci_side_center_right |
ci_side_back_left | ci_side_back_right | ci_lfe,
cs_13point1_totalwrap = ci_front_left | ci_front_center_left | ci_front_center | ci_front_center_right | ci_front_right |
ci_side_front_left | ci_side_front_right | ci_side_center_left | ci_side_center_right |
ci_side_back_left | ci_side_back_right | ci_back_left | ci_back_right | ci_lfe,
cs_16point1 = ci_front_left | ci_front_center_left | ci_front_center | ci_front_center_right | ci_front_right |
ci_side_front_left | ci_side_front_right | ci_side_center_left | ci_side_center_right | ci_side_back_left |
ci_side_back_right | ci_back_left | ci_back_center_left | ci_back_center | ci_back_center_right | ci_back_right | ci_lfe,
cs_legacy = 0 // same channels as cs_5point1 but different upmixing transform; does not support the focus control
} channel_setup;
/**
* The FreeSurround decoder.
*/
class freesurround_decoder {
public:
/**
* Create an instance of the decoder.
* @param setup The output channel setup -- determines the number of output channels
* and their place in the sound field.
* @param blocksize Granularity at which data is processed by the decode() function.
* Must be a power of two and should correspond to ca. 10ms worth of single-channel
* samples (default is 4096 for 44.1kHz data). Do not make it correspond to less than
* 5ms or more than 20ms of audio, since the granularity at which locations are decoded
* changes with this.
*/
freesurround_decoder(channel_setup setup = cs_5point1, unsigned blocksize = 4096);
~freesurround_decoder();
/**
* Decode a chunk of stereo sound. The output is delayed by half of the blocksize.
* This function is the only one needed for straightforward decoding.
* @param input Contains exactly blocksize (multiplexed) stereo samples, i.e. 2*blocksize numbers.
* @return A pointer to an internal buffer of exactly blocksize (multiplexed) multichannel samples.
* The actual number of values depends on the number of output channels in the chosen
* channel setup.
*/
float *decode(const float *input);
/**
* Flush the internal buffer.
*/
void flush();
// --- soundfield transformations
// These functions allow setting up geometric transformations of the sound field after it has been decoded.
// The sound field is best pictured as a 2-dimensional square with the listener in its
// center which can be shifted or stretched in various ways before it is sent to the
// speakers. The order in which these transformations are applied is as listed below.
/**
* Allows wrapping the soundfield around the listener in a circular manner.
* Determines the angle of the frontal sound stage relative to the listener, in degrees.
* A setting of 90° corresponds to standard surround decoding, 180° stretches the front stage from
* ear to ear, 270° wraps it around most of the head. The side and rear content of the sound
* field is compressed accordingly behind the listener. (default: 90, range: [0°..360°])
*/
void circular_wrap(float v);
/**
* Allows shifting the soundfield forward or backward.
* Value range: [-1.0..+1.0]. 0 is no offset, positive values move the sound
* forward, negative values move it backwards. (default: 0)
*/
void shift(float v);
/**
* Allows scaling the soundfield backwards.
* Value range: [0.0..+5.0] -- 0 is all compressed to the front, 1 is no change, 5 is scaled 5x backwards (default: 1)
*/
void depth(float v);
/**
* Allows controlling the localization (i.e., focality) of sources.
* Value range: [-1.0..+1.0] -- 0 means unchanged, positive means more localized, negative means more ambient (default: 0)
*/
void focus(float v);
// --- rendering parameters
// These parameters control how the sound field is mapped onto speakers.
/**
* Set the presence of the front center channel(s).
* Value range: [0.0..1.0] -- fully present at 1.0, fully replaced by left/right at 0.0 (default: 1).
* The default of 1.0 results in spec-conformant decoding ("movie mode") while a value of 0.7 is
* better suited for music reproduction (which is usually mixed without a center channel).
*/
void center_image(float v);
/**
* Set the front stereo separation.
* Value range: [0.0..inf] -- 1.0 is default, 0.0 is mono.
*/
void front_separation(float v);
/**
* Set the rear stereo separation.
* Value range: [0.0..inf] -- 1.0 is default, 0.0 is mono.
*/
void rear_separation(float v);
// --- bass redirection (to LFE)
/**
* Enable/disable LFE channel (default: false = disabled)
*/
void bass_redirection(bool v);
/**
* Set the lower end of the transition band, in Hz/Nyquist (default: 40/22050).
*/
void low_cutoff(float v);
/**
* Set the upper end of the transition band, in Hz/Nyquist (default: 90/22050).
*/
void high_cutoff(float v);
// --- info
/**
* Number of samples currently held in the buffer.
*/
unsigned buffered();
/**
* Number of channels in the given setup.
*/
static unsigned num_channels(channel_setup s);
/**
* Channel id of the i'th channel in the given setup.
*/
static channel_id channel_at(channel_setup s, unsigned i);
private:
class decoder_impl *impl; // private implementation
};
#endif
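
Not part of the original sources: a short decoding-loop sketch against the interface above, to make the call sequence concrete. The function name, fixed block size, and buffer layout are assumptions based only on the documentation comments in this header.

#include <algorithm>
#include <cstddef>
#include "freesurround_decoder.h"

// Hypothetical upmix loop: stereo_in holds blocks*N interleaved stereo frames,
// multich_out receives blocks*N interleaved frames with num_channels(cs_5point1) channels.
void upmix_example(const float *stereo_in, float *multich_out, unsigned blocks) {
	const unsigned N = 4096;    // samples per channel per block (the documented default)
	freesurround_decoder dec(cs_5point1, N);
	dec.center_image(0.7f);     // music-oriented center presence, per the comment above
	dec.bass_redirection(true); // route low frequencies to the LFE channel
	const unsigned C = freesurround_decoder::num_channels(cs_5point1);
	for(unsigned b = 0; b < blocks; ++b) {
		// each call consumes N stereo frames and yields N multichannel frames,
		// delayed by N/2 frames relative to the input
		const float *out = dec.decode(stereo_in + (size_t)b * N * 2);
		std::copy(out, out + (size_t)N * C, multich_out + (size_t)b * N * C);
	}
}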

View file

@ -1,5 +1,5 @@
/* /*
Copyright (C) 2010-2023, Christopher Snowhill, Copyright (C) 2010-2022, Christopher Snowhill,
All rights reserved. All rights reserved.
Optimizations by Gumboot Optimizations by Gumboot
Additional work by Burt P. Additional work by Burt P.
@ -1044,8 +1044,8 @@ static int hdcd_envelope(int32_t *samples, int count, int stride, int gain, int
/* hold a steady level */ /* hold a steady level */
if (gain == 0x800000) { if (gain == 0x800000) {
/*if (count > 0) if (count > 0)
samples += count * stride;*/ samples += count * stride;
} else { } else {
while (--count >= 0) { while (--count >= 0) {
APPLY_GAIN(*samples, gain); APPLY_GAIN(*samples, gain);

View file

@ -1,25 +0,0 @@
#pragma once
// The functions provide little endianness to native endianness conversion and back again
#if(defined(_MSC_VER) && defined(_WIN32)) || defined(__APPLE__)
template <typename T>
inline void from_little_endian_inplace(T& x) {
}
template <typename T>
inline T from_little_endian(T x) {
return x;
}
template <typename T>
inline void to_little_endian_inplace(T& x) {
}
template <typename T>
inline T to_little_endian(T x) {
return x;
}
#else
#error "Specify endianness conversion for your platform"
#endif
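
The header deliberately stops at the #error above for platforms that are not known to be little-endian. Purely as an illustration (not code from this tree), a big-endian port would fill that branch with byte-swapping conversions along these lines:

#include <cstddef>
#include <cstring>

// Generic byte swap; a real port might use compiler intrinsics instead.
template <typename T>
inline T byteswap_generic(T x) {
	unsigned char b[sizeof(T)];
	std::memcpy(b, &x, sizeof(T));
	for(size_t i = 0; i < sizeof(T) / 2; ++i) {
		unsigned char t = b[i];
		b[i] = b[sizeof(T) - 1 - i];
		b[sizeof(T) - 1 - i] = t;
	}
	std::memcpy(&x, b, sizeof(T));
	return x;
}

// On such a host, from_little_endian()/to_little_endian() would both reduce to the swap,
// and the *_inplace variants would assign the swapped value back.
template <typename T>
inline T from_little_endian(T x) { return byteswap_generic(x); }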

View file

@ -1,641 +0,0 @@
#include "HrtfData.h"
#include "Endianness.h"
#include <algorithm>
#include <cassert>
#include <cmath>
typedef struct {
uint8_t bytes[3];
} sample_int24_t;
const double pi = M_PI;
template <typename T>
void read_stream(std::istream& stream, T& value) {
stream.read(reinterpret_cast<std::istream::char_type*>(&value), sizeof(value));
from_little_endian_inplace(value);
}
HrtfData::HrtfData(std::istream& stream) {
const char required_magic00[] = { 'M', 'i', 'n', 'P', 'H', 'R', '0', '0' };
const char required_magic01[] = { 'M', 'i', 'n', 'P', 'H', 'R', '0', '1' };
const char required_magic02[] = { 'M', 'i', 'n', 'P', 'H', 'R', '0', '2' };
const char required_magic03[] = { 'M', 'i', 'n', 'P', 'H', 'R', '0', '3' };
char actual_magic[sizeof(required_magic03) / sizeof(required_magic03[0])];
stream.read(actual_magic, sizeof(actual_magic));
if(std::equal(std::begin(required_magic03), std::end(required_magic03), std::begin(actual_magic), std::end(actual_magic))) {
LoadHrtf03(stream);
} else if(std::equal(std::begin(required_magic02), std::end(required_magic02), std::begin(actual_magic), std::end(actual_magic))) {
LoadHrtf02(stream);
} else if(std::equal(std::begin(required_magic01), std::end(required_magic01), std::begin(actual_magic), std::end(actual_magic))) {
LoadHrtf01(stream);
} else if(std::equal(std::begin(required_magic00), std::end(required_magic00), std::begin(actual_magic), std::end(actual_magic))) {
LoadHrtf00(stream);
} else {
throw std::logic_error("Bad file format.");
}
}
void HrtfData::LoadHrtf03(std::istream& stream) {
// const uint8_t ChanType_LeftOnly{0};
const uint8_t ChanType_LeftRight{ 1 };
uint32_t sample_rate;
uint8_t channel_type;
uint8_t impulse_response_length;
uint8_t distances_count;
read_stream(stream, sample_rate);
read_stream(stream, channel_type);
read_stream(stream, impulse_response_length);
read_stream(stream, distances_count);
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
if(channel_type > ChanType_LeftRight) {
throw std::logic_error("Invalid channel format.");
}
int channel_count = channel_type == ChanType_LeftRight ? 2 : 1;
std::vector<DistanceData> distances(distances_count);
for(uint8_t i = 0; i < distances_count; i++) {
uint16_t distance;
read_stream(stream, distance);
distances[i].distance = float(distance) / 1000.0f;
uint8_t elevations_count;
read_stream(stream, elevations_count);
distances[i].elevations.resize(elevations_count);
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
for(uint8_t j = 0; j < elevations_count; j++) {
uint8_t azimuth_count;
read_stream(stream, azimuth_count);
distances[i].elevations[j].azimuths.resize(azimuth_count);
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
}
const float normalization_factor = 1.0f / 8388608.0f;
for(auto& distance : distances) {
for(auto& elevation : distance.elevations) {
for(auto& azimuth : elevation.azimuths) {
azimuth.impulse_response.resize(impulse_response_length * channel_count);
for(auto& sample : azimuth.impulse_response) {
union {
sample_int24_t sample;
int32_t sample_int;
} sample_union;
sample_union.sample_int = 0;
read_stream(stream, sample_union.sample);
sample_union.sample_int <<= 8;
sample_union.sample_int >>= 8;
sample = sample_union.sample_int * normalization_factor;
}
}
}
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
uint8_t longest_delay = 0;
for(auto& distance : distances) {
for(auto& elevation : distance.elevations) {
for(auto& azimuth : elevation.azimuths) {
uint8_t delay;
read_stream(stream, delay);
azimuth.delay = delay;
longest_delay = std::max(longest_delay, delay);
if(channel_type == ChanType_LeftRight) {
read_stream(stream, delay);
azimuth.delay_right = delay;
longest_delay = std::max(longest_delay, delay);
}
}
}
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
std::sort(distances.begin(), distances.end(),
[](const DistanceData& lhs, const DistanceData& rhs) noexcept { return lhs.distance > rhs.distance; });
m_distances = std::move(distances);
m_channel_count = channel_count;
m_response_length = impulse_response_length;
m_sample_rate = sample_rate;
m_longest_delay = longest_delay;
}
void HrtfData::LoadHrtf02(std::istream& stream) {
// const uint8_t SampleType_S16{0};
const uint8_t SampleType_S24{ 1 };
// const uint8_t ChanType_LeftOnly{0};
const uint8_t ChanType_LeftRight{ 1 };
uint32_t sample_rate;
uint8_t sample_type;
uint8_t channel_type;
uint8_t impulse_response_length;
uint8_t distances_count;
read_stream(stream, sample_rate);
read_stream(stream, sample_type);
read_stream(stream, channel_type);
read_stream(stream, impulse_response_length);
read_stream(stream, distances_count);
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
if(sample_type > SampleType_S24) {
throw std::logic_error("Invalid sample type.");
}
if(channel_type > ChanType_LeftRight) {
throw std::logic_error("Invalid channel format.");
}
int channel_count = channel_type == ChanType_LeftRight ? 2 : 1;
std::vector<DistanceData> distances(distances_count);
for(uint8_t i = 0; i < distances_count; i++) {
uint16_t distance;
read_stream(stream, distance);
distances[i].distance = float(distance) / 1000.0f;
uint8_t elevations_count;
read_stream(stream, elevations_count);
distances[i].elevations.resize(elevations_count);
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
for(uint8_t j = 0; j < elevations_count; j++) {
uint8_t azimuth_count;
read_stream(stream, azimuth_count);
distances[i].elevations[j].azimuths.resize(azimuth_count);
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
}
const float normalization_factor = (sample_type == SampleType_S24) ? 1.0f / 8388608.0f : 1.0f / 32768.0f;
for(auto& distance : distances) {
for(auto& elevation : distance.elevations) {
for(auto& azimuth : elevation.azimuths) {
azimuth.impulse_response.resize(impulse_response_length * channel_count);
if(sample_type == SampleType_S24) {
for(auto& sample : azimuth.impulse_response) {
union {
sample_int24_t sample;
int32_t sample_int;
} sample_union;
sample_union.sample_int = 0;
read_stream(stream, sample_union.sample);
sample_union.sample_int <<= 8;
sample_union.sample_int >>= 8;
sample = sample_union.sample_int * normalization_factor;
}
} else {
for(auto& sample : azimuth.impulse_response) {
int16_t sample_from_file;
read_stream(stream, sample_from_file);
sample = sample_from_file * normalization_factor;
}
}
}
}
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
uint8_t longest_delay = 0;
for(auto& distance : distances) {
for(auto& elevation : distance.elevations) {
for(auto& azimuth : elevation.azimuths) {
uint8_t delay;
read_stream(stream, delay);
azimuth.delay = delay;
longest_delay = std::max(longest_delay, delay);
if(channel_type == ChanType_LeftRight) {
read_stream(stream, delay);
azimuth.delay_right = delay;
longest_delay = std::max(longest_delay, delay);
}
}
}
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
std::sort(distances.begin(), distances.end(),
[](const DistanceData& lhs, const DistanceData& rhs) noexcept { return lhs.distance > rhs.distance; });
m_distances = std::move(distances);
m_channel_count = channel_count;
m_response_length = impulse_response_length;
m_sample_rate = sample_rate;
m_longest_delay = longest_delay;
}
void HrtfData::LoadHrtf01(std::istream& stream) {
uint32_t sample_rate;
uint8_t impulse_response_length;
read_stream(stream, sample_rate);
read_stream(stream, impulse_response_length);
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
std::vector<DistanceData> distances(1);
distances[0].distance = 1.0;
uint8_t elevations_count;
read_stream(stream, elevations_count);
distances[0].elevations.resize(elevations_count);
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
for(uint8_t i = 0; i < elevations_count; i++) {
uint8_t azimuth_count;
read_stream(stream, azimuth_count);
distances[0].elevations[i].azimuths.resize(azimuth_count);
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
const float normalization_factor = 1.0f / 32768.0f;
for(auto& elevation : distances[0].elevations) {
for(auto& azimuth : elevation.azimuths) {
azimuth.impulse_response.resize(impulse_response_length);
for(auto& sample : azimuth.impulse_response) {
int16_t sample_from_file;
read_stream(stream, sample_from_file);
sample = sample_from_file * normalization_factor;
}
}
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
uint8_t longest_delay = 0;
for(auto& elevation : distances[0].elevations) {
for(auto& azimuth : elevation.azimuths) {
uint8_t delay;
read_stream(stream, delay);
delay <<= 2;
azimuth.delay = delay;
longest_delay = std::max(longest_delay, delay);
}
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
m_distances = std::move(distances);
m_channel_count = 1;
m_response_length = impulse_response_length;
m_sample_rate = sample_rate;
m_longest_delay = longest_delay;
}
void HrtfData::LoadHrtf00(std::istream& stream) {
uint32_t sample_rate;
uint16_t impulse_response_count;
uint16_t impulse_response_length;
read_stream(stream, sample_rate);
read_stream(stream, impulse_response_count);
read_stream(stream, impulse_response_length);
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
std::vector<DistanceData> distances(1);
distances[0].distance = 1.0;
uint8_t elevations_count;
read_stream(stream, elevations_count);
distances[0].elevations.resize(elevations_count);
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
std::vector<uint16_t> irOffsets(elevations_count);
for(uint8_t i = 0; i < elevations_count; i++) {
read_stream(stream, irOffsets[i]);
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
for(size_t i = 1; i < elevations_count; i++) {
if(irOffsets[i] <= irOffsets[i - 1]) {
throw std::logic_error("Invalid elevation offset.");
}
}
if(impulse_response_count <= irOffsets[elevations_count - 1]) {
throw std::logic_error("Invalid elevation offset.");
}
for(size_t i = 1; i < elevations_count; i++) {
distances[0].elevations[i - 1].azimuths.resize(irOffsets[i] - irOffsets[i - 1]);
}
distances[0].elevations[elevations_count - 1].azimuths.resize(impulse_response_count - irOffsets[elevations_count - 1]);
const float normalization_factor = 1.0f / 32768.0f;
for(auto& elevation : distances[0].elevations) {
for(auto& azimuth : elevation.azimuths) {
azimuth.impulse_response.resize(impulse_response_length);
for(auto& sample : azimuth.impulse_response) {
int16_t sample_from_file;
read_stream(stream, sample_from_file);
sample = sample_from_file * normalization_factor;
}
}
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
uint8_t longest_delay = 0;
for(auto& elevation : distances[0].elevations) {
for(auto& azimuth : elevation.azimuths) {
uint8_t delay;
read_stream(stream, delay);
delay <<= 2;
azimuth.delay = delay;
longest_delay = std::max(longest_delay, delay);
}
}
if(!stream || stream.eof()) {
throw std::logic_error("Failed reading file.");
}
m_distances = std::move(distances);
m_channel_count = 1;
m_response_length = impulse_response_length;
m_sample_rate = sample_rate;
m_longest_delay = longest_delay;
}
void HrtfData::get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t channel, DirectionData& ref_data) const {
assert(elevation >= -angle_t(pi * 0.5));
assert(elevation <= angle_t(pi * 0.5));
assert(azimuth >= -angle_t(2.0 * pi));
assert(azimuth <= angle_t(2.0 * pi));
const float azimuth_mod = std::fmod(azimuth + angle_t(pi * 2.0), angle_t(pi * 2.0));
size_t distance_index0 = 0;
while(distance_index0 < m_distances.size() - 1 &&
m_distances[distance_index0].distance > distance) {
distance_index0++;
}
const size_t distance_index1 = std::min(distance_index0 + 1, m_distances.size() - 1);
const distance_t distance0 = m_distances[distance_index0].distance;
const distance_t distance1 = m_distances[distance_index1].distance;
const distance_t distance_delta = distance0 - distance1;
const float distance_fractional_part = distance_delta ? (distance - distance1) / distance_delta : 0;
const auto& elevations0 = m_distances[distance_index0].elevations;
const auto& elevations1 = m_distances[distance_index1].elevations;
const angle_t elevation_scaled0 = (elevation + angle_t(pi * 0.5)) * (elevations0.size() - 1) / angle_t(pi);
const angle_t elevation_scaled1 = (elevation + angle_t(pi * 0.5)) * (elevations1.size() - 1) / angle_t(pi);
const size_t elevation_index00 = static_cast<size_t>(elevation_scaled0);
const size_t elevation_index10 = static_cast<size_t>(elevation_scaled1);
const size_t elevation_index01 = std::min(elevation_index00 + 1, elevations0.size() - 1);
const size_t elevation_index11 = std::min(elevation_index10 + 1, elevations1.size() - 1);
const float elevation_fractional_part0 = std::fmod(elevation_scaled0, 1.0);
const float elevation_fractional_part1 = std::fmod(elevation_scaled1, 1.0);
const angle_t azimuth_scaled00 = azimuth_mod * elevations0[elevation_index00].azimuths.size() / angle_t(2 * pi);
const size_t azimuth_index000 = static_cast<size_t>(azimuth_scaled00) % elevations0[elevation_index00].azimuths.size();
const size_t azimuth_index001 = static_cast<size_t>(azimuth_scaled00 + 1) % elevations0[elevation_index00].azimuths.size();
const float azimuth_fractional_part00 = std::fmod(azimuth_scaled00, 1.0);
const angle_t azimuth_scaled10 = azimuth_mod * elevations1[elevation_index10].azimuths.size() / angle_t(2 * pi);
const size_t azimuth_index100 = static_cast<size_t>(azimuth_scaled10) % elevations1[elevation_index10].azimuths.size();
const size_t azimuth_index101 = static_cast<size_t>(azimuth_scaled10 + 1) % elevations1[elevation_index10].azimuths.size();
const float azimuth_fractional_part10 = std::fmod(azimuth_scaled10, 1.0);
const angle_t azimuth_scaled01 = azimuth_mod * elevations0[elevation_index01].azimuths.size() / angle_t(2 * pi);
const size_t azimuth_index010 = static_cast<size_t>(azimuth_scaled01) % elevations0[elevation_index01].azimuths.size();
const size_t azimuth_index011 = static_cast<size_t>(azimuth_scaled01 + 1) % elevations0[elevation_index01].azimuths.size();
const float azimuth_fractional_part01 = std::fmod(azimuth_scaled01, 1.0);
const angle_t azimuth_scaled11 = azimuth_mod * elevations1[elevation_index11].azimuths.size() / angle_t(2 * pi);
const size_t azimuth_index110 = static_cast<size_t>(azimuth_scaled11) % elevations1[elevation_index11].azimuths.size();
const size_t azimuth_index111 = static_cast<size_t>(azimuth_scaled11 + 1) % elevations1[elevation_index11].azimuths.size();
const float azimuth_fractional_part11 = std::fmod(azimuth_scaled11, 1.0);
const float blend_factor_000 = (1.0f - elevation_fractional_part0) * (1.0f - azimuth_fractional_part00) * distance_fractional_part;
const float blend_factor_001 = (1.0f - elevation_fractional_part0) * azimuth_fractional_part00 * distance_fractional_part;
const float blend_factor_010 = elevation_fractional_part0 * (1.0f - azimuth_fractional_part01) * distance_fractional_part;
const float blend_factor_011 = elevation_fractional_part0 * azimuth_fractional_part01 * distance_fractional_part;
const float blend_factor_100 = (1.0f - elevation_fractional_part1) * (1.0f - azimuth_fractional_part10) * (1.0f - distance_fractional_part);
const float blend_factor_101 = (1.0f - elevation_fractional_part1) * azimuth_fractional_part10 * (1.0f - distance_fractional_part);
const float blend_factor_110 = elevation_fractional_part1 * (1.0f - azimuth_fractional_part11) * (1.0f - distance_fractional_part);
const float blend_factor_111 = elevation_fractional_part1 * azimuth_fractional_part11 * (1.0f - distance_fractional_part);
delay_t delay0;
delay_t delay1;
if(channel == 0) {
delay0 =
elevations0[elevation_index00].azimuths[azimuth_index000].delay * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].delay * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].delay * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].delay * blend_factor_011;
delay1 =
elevations1[elevation_index10].azimuths[azimuth_index100].delay * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].delay * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].delay * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].delay * blend_factor_111;
} else {
delay0 =
elevations0[elevation_index00].azimuths[azimuth_index000].delay_right * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].delay_right * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].delay_right * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].delay_right * blend_factor_011;
delay1 =
elevations1[elevation_index10].azimuths[azimuth_index100].delay_right * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].delay_right * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].delay_right * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].delay_right * blend_factor_111;
}
ref_data.delay = delay0 + delay1;
if(ref_data.impulse_response.size() < m_response_length)
ref_data.impulse_response.resize(m_response_length);
for(size_t i = 0, j = channel; i < m_response_length; i++, j += m_channel_count) {
float sample0 =
elevations0[elevation_index00].azimuths[azimuth_index000].impulse_response[j] * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].impulse_response[j] * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].impulse_response[j] * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].impulse_response[j] * blend_factor_011;
float sample1 =
elevations1[elevation_index10].azimuths[azimuth_index100].impulse_response[j] * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].impulse_response[j] * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].impulse_response[j] * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].impulse_response[j] * blend_factor_111;
ref_data.impulse_response[i] = sample0 + sample1;
}
}
void HrtfData::get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, DirectionData& ref_data_left, DirectionData& ref_data_right) const {
assert(elevation >= -angle_t(pi * 0.5));
assert(elevation <= angle_t(pi * 0.5));
assert(azimuth >= -angle_t(2.0 * pi));
assert(azimuth <= angle_t(2.0 * pi));
get_direction_data(elevation, azimuth, distance, 0, ref_data_left);
if(m_channel_count == 1) {
get_direction_data(elevation, -azimuth, distance, 0, ref_data_right);
} else {
get_direction_data(elevation, azimuth, distance, 1, ref_data_right);
}
}
void HrtfData::sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, uint32_t channel, float& value, float& delay) const {
assert(elevation >= -angle_t(pi * 0.5));
assert(elevation <= angle_t(pi * 0.5));
assert(azimuth >= -angle_t(2.0 * pi));
assert(azimuth <= angle_t(2.0 * pi));
size_t distance_index0 = 0;
while(distance_index0 < m_distances.size() - 1 &&
m_distances[distance_index0].distance > distance) {
distance_index0++;
}
const size_t distance_index1 = std::min(distance_index0 + 1, m_distances.size() - 1);
const distance_t distance0 = m_distances[distance_index0].distance;
const distance_t distance1 = m_distances[distance_index1].distance;
const distance_t distance_delta = distance0 - distance1;
const float distance_fractional_part = distance_delta ? (distance - distance1) / distance_delta : 0;
const auto& elevations0 = m_distances[distance_index0].elevations;
const auto& elevations1 = m_distances[distance_index1].elevations;
const float azimuth_mod = std::fmod(azimuth + angle_t(pi * 2.0), angle_t(pi * 2.0));
const angle_t elevation_scaled0 = (elevation + angle_t(pi * 0.5)) * (elevations0.size() - 1) / angle_t(pi);
const angle_t elevation_scaled1 = (elevation + angle_t(pi * 0.5)) * (elevations1.size() - 1) / angle_t(pi);
const size_t elevation_index00 = static_cast<size_t>(elevation_scaled0);
const size_t elevation_index10 = static_cast<size_t>(elevation_scaled1);
const size_t elevation_index01 = std::min(elevation_index00 + 1, elevations0.size() - 1);
const size_t elevation_index11 = std::min(elevation_index10 + 1, elevations1.size() - 1);
const float elevation_fractional_part0 = std::fmod(elevation_scaled0, 1.0);
const float elevation_fractional_part1 = std::fmod(elevation_scaled1, 1.0);
const angle_t azimuth_scaled00 = azimuth_mod * elevations0[elevation_index00].azimuths.size() / angle_t(2 * pi);
const size_t azimuth_index000 = static_cast<size_t>(azimuth_scaled00) % elevations0[elevation_index00].azimuths.size();
const size_t azimuth_index001 = static_cast<size_t>(azimuth_scaled00 + 1) % elevations0[elevation_index00].azimuths.size();
const float azimuth_fractional_part00 = std::fmod(azimuth_scaled00, 1.0);
const angle_t azimuth_scaled10 = azimuth_mod * elevations1[elevation_index10].azimuths.size() / angle_t(2 * pi);
const size_t azimuth_index100 = static_cast<size_t>(azimuth_scaled10) % elevations1[elevation_index10].azimuths.size();
const size_t azimuth_index101 = static_cast<size_t>(azimuth_scaled10 + 1) % elevations1[elevation_index10].azimuths.size();
const float azimuth_fractional_part10 = std::fmod(azimuth_scaled10, 1.0);
const angle_t azimuth_scaled01 = azimuth_mod * elevations0[elevation_index01].azimuths.size() / angle_t(2 * pi);
const size_t azimuth_index010 = static_cast<size_t>(azimuth_scaled01) % elevations0[elevation_index01].azimuths.size();
const size_t azimuth_index011 = static_cast<size_t>(azimuth_scaled01 + 1) % elevations0[elevation_index01].azimuths.size();
const float azimuth_fractional_part01 = std::fmod(azimuth_scaled01, 1.0);
const angle_t azimuth_scaled11 = azimuth_mod * elevations1[elevation_index11].azimuths.size() / angle_t(2 * pi);
const size_t azimuth_index110 = static_cast<size_t>(azimuth_scaled11) % elevations1[elevation_index11].azimuths.size();
const size_t azimuth_index111 = static_cast<size_t>(azimuth_scaled11 + 1) % elevations1[elevation_index11].azimuths.size();
const float azimuth_fractional_part11 = std::fmod(azimuth_scaled11, 1.0);
const float blend_factor_000 = (1.0f - elevation_fractional_part0) * (1.0f - azimuth_fractional_part00) * distance_fractional_part;
const float blend_factor_001 = (1.0f - elevation_fractional_part0) * azimuth_fractional_part00 * distance_fractional_part;
const float blend_factor_010 = elevation_fractional_part0 * (1.0f - azimuth_fractional_part01) * distance_fractional_part;
const float blend_factor_011 = elevation_fractional_part0 * azimuth_fractional_part01 * distance_fractional_part;
const float blend_factor_100 = (1.0f - elevation_fractional_part1) * (1.0f - azimuth_fractional_part10) * (1.0f - distance_fractional_part);
const float blend_factor_101 = (1.0f - elevation_fractional_part1) * azimuth_fractional_part10 * (1.0f - distance_fractional_part);
const float blend_factor_110 = elevation_fractional_part1 * (1.0f - azimuth_fractional_part11) * (1.0f - distance_fractional_part);
const float blend_factor_111 = elevation_fractional_part1 * azimuth_fractional_part11 * (1.0f - distance_fractional_part);
float delay0;
float delay1;
if(channel == 0) {
delay0 =
elevations0[elevation_index00].azimuths[azimuth_index000].delay * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].delay * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].delay * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].delay * blend_factor_011;
delay1 =
elevations1[elevation_index10].azimuths[azimuth_index100].delay * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].delay * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].delay * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].delay * blend_factor_111;
} else {
delay0 =
elevations0[elevation_index00].azimuths[azimuth_index000].delay_right * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].delay_right * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].delay_right * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].delay_right * blend_factor_011;
delay1 =
elevations1[elevation_index10].azimuths[azimuth_index100].delay_right * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].delay_right * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].delay_right * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].delay_right * blend_factor_111;
}
delay = delay0 + delay1;
sample = sample * m_channel_count + channel;
float value0 =
elevations0[elevation_index00].azimuths[azimuth_index000].impulse_response[sample] * blend_factor_000 + elevations0[elevation_index00].azimuths[azimuth_index001].impulse_response[sample] * blend_factor_001 + elevations0[elevation_index01].azimuths[azimuth_index010].impulse_response[sample] * blend_factor_010 + elevations0[elevation_index01].azimuths[azimuth_index011].impulse_response[sample] * blend_factor_011;
float value1 =
elevations1[elevation_index10].azimuths[azimuth_index100].impulse_response[sample] * blend_factor_100 + elevations1[elevation_index10].azimuths[azimuth_index101].impulse_response[sample] * blend_factor_101 + elevations1[elevation_index11].azimuths[azimuth_index110].impulse_response[sample] * blend_factor_110 + elevations1[elevation_index11].azimuths[azimuth_index111].impulse_response[sample] * blend_factor_111;
value = value0 + value1;
}
void HrtfData::sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, float& value_left, float& delay_left, float& value_right, float& delay_right) const {
assert(elevation >= -angle_t(pi * 0.5));
assert(elevation <= angle_t(pi * 0.5));
assert(azimuth >= -angle_t(2.0 * pi));
assert(azimuth <= angle_t(2.0 * pi));
sample_direction(elevation, azimuth, distance, sample, 0, value_left, delay_left);
if(m_channel_count == 1) {
sample_direction(elevation, -azimuth, distance, sample, 0, value_right, delay_right);
} else {
sample_direction(elevation, azimuth, distance, sample, 1, value_right, delay_right);
}
}
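Both interpolators above blend eight stored measurements (two distances, two elevations, two azimuths), so the result is a trilinear interpolation whose eight blend factors always sum to one. A minimal standalone sketch of that property, not taken from the sources in this diff:
// Minimal sketch: the eight blend factors used by get_direction_data() and
// sample_direction() always sum to one, so the blended delay and samples stay
// within the range of the neighbouring measurements.
#include <cassert>
#include <cmath>
static void check_blend_factors(float dist, float elev0, float elev1,
                                float az00, float az01, float az10, float az11) {
    // All arguments are fractional parts in [0, 1), as computed above.
    const float b000 = (1.0f - elev0) * (1.0f - az00) * dist;
    const float b001 = (1.0f - elev0) * az00 * dist;
    const float b010 = elev0 * (1.0f - az01) * dist;
    const float b011 = elev0 * az01 * dist;
    const float b100 = (1.0f - elev1) * (1.0f - az10) * (1.0f - dist);
    const float b101 = (1.0f - elev1) * az10 * (1.0f - dist);
    const float b110 = elev1 * (1.0f - az11) * (1.0f - dist);
    const float b111 = elev1 * az11 * (1.0f - dist);
    const float sum = b000 + b001 + b010 + b011 + b100 + b101 + b110 + b111;
    assert(std::fabs(sum - 1.0f) < 1e-5f);
}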

View file

@ -1,48 +0,0 @@
#pragma once
#include "HrtfTypes.h"
#include "IHrtfData.h"
#include <cstdint>
#include <iostream>
#include <vector>
struct ElevationData {
std::vector<DirectionData> azimuths;
};
struct DistanceData {
distance_t distance;
std::vector<ElevationData> elevations;
};
class HrtfData : public IHrtfData {
void LoadHrtf00(std::istream& stream);
void LoadHrtf01(std::istream& stream);
void LoadHrtf02(std::istream& stream);
void LoadHrtf03(std::istream& stream);
public:
HrtfData(std::istream& stream);
void get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t channel, DirectionData& ref_data) const override;
void get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, DirectionData& ref_data_left, DirectionData& ref_data_right) const override;
void sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, uint32_t channel, float& value, float& delay) const override;
void sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, float& value_left, float& delay_left, float& value_right, float& delay_right) const override;
uint32_t get_sample_rate() const override {
return m_sample_rate;
}
uint32_t get_response_length() const override {
return m_response_length;
}
uint32_t get_longest_delay() const override {
return m_longest_delay;
}
private:
uint32_t m_sample_rate;
uint32_t m_response_length;
uint32_t m_longest_delay;
uint32_t m_channel_count;
std::vector<DistanceData> m_distances;
};

View file

@ -1,14 +0,0 @@
#pragma once
#include <cstdint>
#include <vector>
typedef float distance_t;
typedef float angle_t;
typedef int delay_t;
struct DirectionData {
std::vector<float> impulse_response;
delay_t delay;
delay_t delay_right;
};

View file

@ -1,19 +0,0 @@
#pragma once
#include "HrtfTypes.h"
class IHrtfData {
public:
virtual ~IHrtfData() = default;
virtual void get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t channel, DirectionData& ref_data) const = 0;
virtual void get_direction_data(angle_t elevation, angle_t azimuth, distance_t distance, DirectionData& ref_data_left, DirectionData& ref_data_right) const = 0;
// Get only one IR sample at a given direction. The delay returned is the delay of the IR's beginning, not the sample's!
virtual void sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, uint32_t channel, float& value, float& delay) const = 0;
// Get only one IR sample at a given direction for both channels. The delay returned is the delay of the IR's beginning, not the sample's!
virtual void sample_direction(angle_t elevation, angle_t azimuth, distance_t distance, uint32_t sample, float& value_left, float& delay_left, float& value_right, float& delay_right) const = 0;
virtual uint32_t get_sample_rate() const = 0;
virtual uint32_t get_response_length() const = 0;
virtual uint32_t get_longest_delay() const = 0;
};
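The interface above is queried either for a whole per-ear response (get_direction_data) or for one sample at a time (sample_direction). A hedged usage sketch; the angle, distance, and sample values are illustrative and assume the headers shown in this diff are available:
#include "IHrtfData.h" // also pulls in HrtfTypes.h for DirectionData
void example_query(const IHrtfData& hrtf) {
    DirectionData left, right;
    // Whole impulse response and onset delay for each ear at 0 elevation,
    // 30 degrees azimuth (radians), unit distance; the values are illustrative.
    hrtf.get_direction_data(0.0f, 0.5236f, 1.0f, left, right);
    // One IR sample per ear; the returned delay is still the delay of the
    // response's beginning, not of this particular sample.
    float value_l, delay_l, value_r, delay_r;
    hrtf.sample_direction(0.0f, 0.5236f, 1.0f, 0, value_l, delay_l, value_r, delay_r);
}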

View file

@ -139,10 +139,7 @@ static void vorbis_lpc_predict(float *coeff, float *prime, int m, float *data, l
} }
void lpc_extrapolate2(float *const data, const size_t data_len, const int nch, const int lpc_order, const size_t extra_bkwd, const size_t extra_fwd, void **extrapolate_buffer, size_t *extrapolate_buffer_size) { void lpc_extrapolate2(float *const data, const size_t data_len, const int nch, const int lpc_order, const size_t extra_bkwd, const size_t extra_fwd, void **extrapolate_buffer, size_t *extrapolate_buffer_size) {
//const size_t max_to_prime = (data_len < lpc_order) ? data_len : lpc_order; const size_t tdata_size = sizeof(float) * (extra_bkwd + data_len + extra_fwd);
const size_t min_data_len = (data_len < lpc_order) ? lpc_order : data_len;
const size_t tdata_size = sizeof(float) * (extra_bkwd + min_data_len + extra_fwd);
const size_t aut_size = sizeof(double) * (lpc_order + 1); const size_t aut_size = sizeof(double) * (lpc_order + 1);
const size_t lpc_size = sizeof(double) * lpc_order; const size_t lpc_size = sizeof(double) * lpc_order;
const size_t lpci_size = sizeof(float) * lpc_order; const size_t lpci_size = sizeof(float) * lpc_order;
@ -153,68 +150,44 @@ void lpc_extrapolate2(float *const data, const size_t data_len, const int nch, c
if(new_size > *extrapolate_buffer_size) { if(new_size > *extrapolate_buffer_size) {
*extrapolate_buffer = realloc(*extrapolate_buffer, new_size); *extrapolate_buffer = realloc(*extrapolate_buffer, new_size);
*extrapolate_buffer_size = new_size; *extrapolate_buffer_size = new_size;
if(!*extrapolate_buffer) return;
} }
double *aut = (double *)(*extrapolate_buffer); float *tdata = (float *)(*extrapolate_buffer); // for 1 channel only
double *lpc = (double *)(*extrapolate_buffer + aut_size);
float *tdata = (float *)(*extrapolate_buffer + aut_size + lpc_size); // for 1 channel only double *aut = (double *)(*extrapolate_buffer + tdata_size);
double *lpc = (double *)(*extrapolate_buffer + tdata_size + aut_size);
float *lpci = (float *)(*extrapolate_buffer + aut_size + lpc_size + tdata_size); float *lpci = (float *)(*extrapolate_buffer + tdata_size + aut_size + lpc_size);
float *work = (float *)(*extrapolate_buffer + aut_size + lpc_size + tdata_size + lpci_size); float *work = (float *)(*extrapolate_buffer + tdata_size + aut_size + lpc_size + lpci_size);
for(int c = 0; c < nch; c++) { for(int c = 0; c < nch; c++) {
if(extra_bkwd) { if(extra_bkwd) {
for(int i = 0; i < (int)data_len; i++) for(int i = 0; i < (int)data_len; i++)
tdata[min_data_len - 1 - i] = data[i * nch + c]; tdata[data_len - 1 - i] = data[i * nch + c];
if(data_len < min_data_len)
for(int i = (int)data_len; i < (int)min_data_len; i++)
tdata[min_data_len - 1 - i] = 0.0f;
} else { } else {
const ssize_t len_diff = min_data_len - data_len;
if(len_diff <= 0) {
for(int i = 0; i < (int)data_len; i++) for(int i = 0; i < (int)data_len; i++)
tdata[i] = data[i * nch + c]; tdata[i] = data[i * nch + c];
} else {
for(int i = 0; i < (int)len_diff; i++)
tdata[i] = 0.0f;
for(int i = 0; i < (int)data_len; i++)
tdata[len_diff + i] = data[i * nch + c];
}
} }
apply_window(tdata, min_data_len); apply_window(tdata, data_len);
vorbis_lpc_from_data(tdata, lpci, (int)min_data_len, lpc_order, aut, lpc); vorbis_lpc_from_data(tdata, lpci, (int)data_len, lpc_order, aut, lpc);
// restore after apply_window // restore after apply_window
if(extra_bkwd) { if(extra_bkwd) {
for(int i = 0; i < (int)data_len; i++) for(int i = 0; i < (int)data_len; i++)
tdata[min_data_len - 1 - i] = data[i * nch + c]; tdata[data_len - 1 - i] = data[i * nch + c];
if(data_len < min_data_len)
for(int i = (int)data_len; i < (int)min_data_len; i++)
tdata[min_data_len - 1 - i] = 0.0f;
} else { } else {
const ssize_t len_diff = min_data_len - data_len;
if(len_diff <= 0) {
for(int i = 0; i < (int)data_len; i++) for(int i = 0; i < (int)data_len; i++)
tdata[i] = data[i * nch + c]; tdata[i] = data[i * nch + c];
} else {
for(int i = 0; i < (int)len_diff; i++)
tdata[i] = 0.0f;
for(int i = 0; i < (int)data_len; i++)
tdata[len_diff + i] = data[i * nch + c];
}
} }
vorbis_lpc_predict(lpci, tdata + min_data_len - lpc_order, lpc_order, tdata + min_data_len, extra_fwd + extra_bkwd, work); vorbis_lpc_predict(lpci, tdata + data_len - lpc_order, lpc_order, tdata + data_len, extra_fwd + extra_bkwd, work);
if(extra_bkwd) { if(extra_bkwd) {
for(int i = 0; i < extra_bkwd; i++) for(int i = 0; i < extra_bkwd; i++)
data[(-i - 1) * nch + c] = tdata[min_data_len + i]; data[(-i - 1) * nch + c] = tdata[data_len + i];
} else { } else {
for(int i = 0; i < extra_fwd; i++) for(int i = 0; i < extra_fwd; i++)
data[(i + data_len) * nch + c] = tdata[min_data_len + i]; data[(i + data_len) * nch + c] = tdata[data_len + i];
} }
} }
} }

View file

@ -24,8 +24,16 @@
static const size_t LPC_ORDER = 32; static const size_t LPC_ORDER = 32;
#ifdef __cplusplus
extern "C" {
#endif
void lpc_extrapolate2(float * const data, const size_t data_len, const int nch, const int lpc_order, const size_t extra_bkwd, const size_t extra_fwd, void ** extrapolate_buffer, size_t * extrapolate_buffer_size); void lpc_extrapolate2(float * const data, const size_t data_len, const int nch, const int lpc_order, const size_t extra_bkwd, const size_t extra_fwd, void ** extrapolate_buffer, size_t * extrapolate_buffer_size);
#ifdef __cplusplus
}
#endif
static inline void lpc_extrapolate_bkwd(float * const data, const size_t data_len, const size_t prime_len, const int nch, const int lpc_order, const size_t extra_bkwd, void ** extrapolate_buffer, size_t * extrapolate_buffer_size) static inline void lpc_extrapolate_bkwd(float * const data, const size_t data_len, const size_t prime_len, const int nch, const int lpc_order, const size_t extra_bkwd, void ** extrapolate_buffer, size_t * extrapolate_buffer_size)
{ {
(void)data_len; (void)data_len;

View file

@ -44,7 +44,6 @@ static void samples_len(unsigned* r1, unsigned* r2, unsigned N, unsigned M)
{ {
if (r1 == 0 || r2 == 0) return; if (r1 == 0 || r2 == 0) return;
unsigned v = local_gcd(*r1, *r2); // v = 300 unsigned v = local_gcd(*r1, *r2); // v = 300
if (v == 0) return;
*r1 /= v; *r2 /= v; // r1 = 147; r2 = 160 == 1/300th of second *r1 /= v; *r2 /= v; // r1 = 147; r2 = 160 == 1/300th of second
unsigned n = (v + N-1) / N; // n = 300/20 = 15 times unsigned n = (v + N-1) / N; // n = 300/20 = 15 times
unsigned z = max(*r1, *r2); // z = 160 unsigned z = max(*r1, *r2); // z = 160

1
Audio/ThirdParty/r8brain-free-src vendored Submodule

@ -0,0 +1 @@
Subproject commit 33e8a35ef6e33cbebe1d0ccfbb12ecd7535e44c2

164
Audio/ThirdParty/r8bstate.h vendored Normal file
View file

@ -0,0 +1,164 @@
//
// r8bstate.h
// CogAudio Framework
//
// Created by Christopher Snowhill on 3/3/22.
//
#ifndef r8bstate_h
#define r8bstate_h
#include <Accelerate/Accelerate.h>
#include "r8bbase.h"
#include "CDSPResampler.h"
struct r8bstate {
int channelCount;
int bufferCapacity;
size_t remainder;
uint64_t inProcessed;
uint64_t outProcessed;
double sampleRatio;
r8b::CFixedBuffer<double> InBuf;
r8b::CFixedBuffer<double> *OutBufs;
r8b::CDSPResampler24 **Resamps;
r8bstate(int _channelCount, int _bufferCapacity, double srcRate, double dstRate)
: channelCount(_channelCount), bufferCapacity(_bufferCapacity), inProcessed(0), outProcessed(0), remainder(0) {
InBuf.alloc(bufferCapacity);
OutBufs = new r8b::CFixedBuffer<double>[channelCount];
Resamps = new r8b::CDSPResampler24 *[channelCount];
for(int i = 0; i < channelCount; ++i) {
Resamps[i] = new r8b::CDSPResampler24(srcRate, dstRate, bufferCapacity);
}
sampleRatio = dstRate / srcRate;
}
~r8bstate() {
delete[] OutBufs;
for(int i = 0; i < channelCount; ++i) {
delete Resamps[i];
}
delete[] Resamps;
}
double latency() {
return ((double)inProcessed * sampleRatio) - (double)outProcessed;
}
int resample(const float *input, size_t inCount, size_t *inDone, float *output, size_t outMax) {
int ret = 0;
int i;
if(inDone) *inDone = 0;
while(remainder > 0) {
size_t blockCount = remainder;
if(blockCount > outMax)
blockCount = outMax;
for(i = 0; i < channelCount; ++i) {
vDSP_vdpsp(&OutBufs[i][0], 1, output + i, channelCount, blockCount);
}
remainder -= blockCount;
if(remainder > 0) {
for(i = 0; i < channelCount; ++i) {
memmove(&OutBufs[i][0], &OutBufs[i][blockCount], remainder * sizeof(double));
}
}
output += channelCount * blockCount;
outProcessed += blockCount;
outMax -= blockCount;
ret += blockCount;
if(!outMax)
return ret;
}
while(inCount > 0) {
size_t blockCount = inCount;
if(blockCount > bufferCapacity)
blockCount = bufferCapacity;
int outputDone;
for(i = 0; i < channelCount; ++i) {
double *outputPointer;
vDSP_vspdp(input + i, channelCount, &InBuf[0], 1, blockCount);
outputDone = Resamps[i]->process(InBuf, (int)blockCount, outputPointer);
if(outputDone) {
if(outputDone > outMax) {
vDSP_vdpsp(outputPointer, 1, output + i, channelCount, outMax);
remainder = outputDone - outMax;
OutBufs[i].alloc((int)remainder);
memcpy(&OutBufs[i][0], outputPointer + outMax, remainder * sizeof(double));
} else {
vDSP_vdpsp(outputPointer, 1, output + i, channelCount, outputDone);
}
}
}
size_t outputActual = outputDone - remainder;
input += channelCount * blockCount;
output += channelCount * outputActual;
inCount -= blockCount;
if(inDone) *inDone += blockCount;
inProcessed += blockCount;
outProcessed += outputActual;
outMax -= outputActual;
ret += outputActual;
if(remainder)
break;
}
return ret;
}
int flush(float *output, size_t outMax) {
int ret = 0;
int i;
if(remainder > 0) {
size_t blockCount = remainder;
if(blockCount > outMax)
blockCount = outMax;
for(i = 0; i < channelCount; ++i) {
vDSP_vdpsp(&OutBufs[i][0], 1, output + i, channelCount, blockCount);
}
remainder -= blockCount;
if(remainder > 0) {
for(i = 0; i < channelCount; ++i) {
memmove(&OutBufs[i][0], &OutBufs[i][blockCount], remainder * sizeof(double));
}
}
output += channelCount * blockCount;
outProcessed += blockCount;
outMax -= blockCount;
ret += blockCount;
if(!outMax)
return ret;
}
uint64_t outputWanted = ceil(inProcessed * sampleRatio);
memset(&InBuf[0], 0, sizeof(double) * bufferCapacity);
while(outProcessed < outputWanted) {
int outputDone = 0;
for(int i = 0; i < channelCount; ++i) {
double *outputPointer;
outputDone = Resamps[i]->process(InBuf, bufferCapacity, outputPointer);
if(outputDone) {
if(outputDone > (outputWanted - outProcessed))
outputDone = (int)(outputWanted - outProcessed);
if(outputDone > outMax) {
vDSP_vdpsp(outputPointer, 1, output + i, channelCount, outMax);
remainder = outputDone - outMax;
OutBufs[i].alloc((int)remainder);
memcpy(&OutBufs[i][0], outputPointer + outMax, remainder * sizeof(double));
} else {
vDSP_vdpsp(outputPointer, 1, output + i, channelCount, outputDone);
}
}
}
size_t outputActual = outputDone - remainder;
outProcessed += outputActual;
output += channelCount * outputActual;
outMax -= outputActual;
ret += outputActual;
if(remainder)
break;
}
return ret;
}
};
#endif /* r8bstate_h */
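A hedged one-shot usage sketch for the r8bstate helper above; the channel count, block size, sample rates, and buffer sizing are illustrative, and a streaming caller would call resample() repeatedly instead:
#include <vector>
#include "r8bstate.h"
void example_r8b(const float *in, size_t inFrames) {
    const int channels = 2;
    // 1024-frame processing blocks, 44.1 kHz -> 48 kHz (illustrative values).
    r8bstate state(channels, 1024, 44100.0, 48000.0);
    // Room for the rate-converted signal plus the flushed tail.
    std::vector<float> out((size_t)(inFrames * 48000.0 / 44100.0 + 1024) * channels);
    size_t used = 0;
    int produced = state.resample(in, inFrames, &used, out.data(), out.size() / channels);
    // flush() pads the input with silence until every source frame has been output.
    produced += state.flush(out.data() + (size_t)produced * channels,
                            out.size() / channels - produced);
    (void)produced; // `produced` interleaved output frames are now in `out`
}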

View file

@ -1,30 +0,0 @@
//
// rsstate.cpp
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/4/23.
//
#include "rsstate.h"
#include "rsstate.hpp"
void *rsstate_new(int channelCount, double srcRate, double dstRate) {
return (void *)new rsstate(channelCount, srcRate, dstRate);
}
void rsstate_delete(void *state) {
delete(rsstate *)state;
}
double rsstate_latency(void *state) {
return ((rsstate *)state)->latency();
}
int rsstate_resample(void *state, const float *input, size_t inCount, size_t *inDone,
float *output, size_t outMax) {
return ((rsstate *)state)->resample(input, inCount, inDone, output, outMax);
}
int rsstate_flush(void *state, float *output, size_t outMax) {
return ((rsstate *)state)->flush(output, outMax);
}

View file

@ -1,32 +0,0 @@
//
// rsstate.h
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/4/23.
//
#include <stdint.h>
#include <stdlib.h>
#ifndef rsstate_h
#define rsstate_h
#ifdef __cplusplus
extern "C" {
#endif
void *rsstate_new(int channelCount, double srcRate, double dstRate);
void rsstate_delete(void *);
double rsstate_latency(void *);
int rsstate_resample(void *, const float *input, size_t inCount, size_t *inDone,
float *output, size_t outMax);
int rsstate_flush(void *, float *output, size_t outMax);
#ifdef __cplusplus
}
#endif
#endif /* rsstate_h */
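The same flow through the plain-C wrapper declared above, again as a sketch with illustrative rates and an oversized output buffer:
#include <vector>
#include "rsstate.h"
void example_rsstate(const float *input, size_t inputFrames) {
    void *rs = rsstate_new(2, 44100.0, 48000.0); // 2 channels, 44.1 kHz -> 48 kHz
    // Oversized on purpose so a single call plus flush always fits.
    std::vector<float> output((inputFrames * 2 + 8192) * 2);
    size_t inDone = 0;
    int outFrames = rsstate_resample(rs, input, inputFrames, &inDone,
                                     output.data(), output.size() / 2);
    double pending = rsstate_latency(rs); // output still owed, in seconds
    (void)pending;
    outFrames += rsstate_flush(rs, output.data() + (size_t)outFrames * 2,
                               output.size() / 2 - outFrames);
    rsstate_delete(rs);
}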

View file

@ -1,81 +0,0 @@
//
// rsstate.hpp
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/3/23.
//
#ifndef rsstate_hpp
#define rsstate_hpp
#include "soxr.h"
#include <cmath>
#include <vector>
struct rsstate {
int channelCount;
int bufferCapacity;
size_t remainder;
uint64_t inProcessed;
uint64_t outProcessed;
double sampleRatio;
double dstRate;
std::vector<float> SilenceBuf;
soxr_t Resampler;
rsstate(int _channelCount, double srcRate, double _dstRate)
: channelCount(_channelCount), inProcessed(0), outProcessed(0), remainder(0), dstRate(_dstRate) {
SilenceBuf.resize(1024 * channelCount);
memset(&SilenceBuf[0], 0, 1024 * channelCount * sizeof(float));
Resampler = soxr_create(srcRate, dstRate, channelCount, NULL, NULL, NULL, NULL);
sampleRatio = dstRate / srcRate;
}
~rsstate() {
soxr_delete(Resampler);
}
double latency() {
return (((double)inProcessed * sampleRatio) - (double)outProcessed) / dstRate;
}
int resample(const float *input, size_t inCount, size_t *inDone, float *output, size_t outMax) {
size_t outDone = 0;
soxr_error_t errmsg = soxr_process(Resampler, (soxr_in_t)input, inCount, inDone, (soxr_out_t)output, outMax, &outDone);
if(!errmsg) {
inProcessed += *inDone;
outProcessed += outDone;
return (int)outDone;
} else {
return 0;
}
}
int flush(float *output, size_t outMax) {
size_t outTotal = 0;
uint64_t outputWanted = std::ceil(inProcessed * sampleRatio);
while(outProcessed < outputWanted) {
size_t outWanted = outputWanted - outProcessed;
if(outWanted > outMax) {
outWanted = outMax;
}
size_t outDone = 0;
size_t inDone = 0;
soxr_error_t errmsg = soxr_process(Resampler, (soxr_in_t)(&SilenceBuf[0]), 1024, &inDone, (soxr_out_t)output, outWanted, &outDone);
if(!errmsg) {
outProcessed += outDone;
outTotal += outDone;
output += outDone * channelCount;
outMax -= outDone;
if(!outMax || outProcessed == outputWanted) {
return (int)outTotal;
}
} else {
return 0;
}
}
return (int)outTotal;
}
};
#endif /* rsstate_hpp */

View file

@ -1,5 +1,5 @@
// //
// CogSemaphore.h // Semaphore.h
// Cog // Cog
// //
// Created by Vincent Spader on 8/2/05. // Created by Vincent Spader on 8/2/05.

View file

@ -1,12 +1,12 @@
// //
// CogSemaphore.m // Semaphore.m
// Cog // Cog
// //
// Created by Vincent Spader on 8/2/05. // Created by Vincent Spader on 8/2/05.
// Copyright 2005 Vincent Spader. All rights reserved. // Copyright 2005 Vincent Spader. All rights reserved.
// //
#import <CogAudio/CogSemaphore.h> #import "Semaphore.h"
@implementation Semaphore @implementation Semaphore

View file

@ -1,258 +0,0 @@
static const float dsd2float[256][8] = {
{ -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f, +1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, -1.0f },
{ +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f, +1.0f }
};

View file

@ -1,19 +0,0 @@
#include <stdint.h>
#include <stdio.h>
int main(void) {
fprintf(stdout, "static const float dsdtofloat[256][8] = {\n");
for(size_t i = 0; i < 256; ++i) {
fprintf(stdout, "\t{ ");
for(size_t j = 0; j < 8; ++j) {
if(j) fprintf(stdout, ", ");
fprintf(stdout, "%s", ((i << j) & 128) ? "+1.0f" : "-1.0f");
}
fprintf(stdout, " }%s", (i < 255) ? ",\n" : "\n");
}
fprintf(stdout, "};\n");
return 0;
}
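The program above appears to regenerate the dsd2float table shown earlier: each byte expands, most significant bit first, into eight +/-1.0f samples. An illustrative decoder loop under that assumption; the function name and buffers are hypothetical:
#include <stddef.h>
#include <stdint.h>
// Expand packed 1-bit DSD bytes into float samples, one table lookup per byte.
static void dsd_expand(const uint8_t *dsd, size_t bytes, float *out) {
    for(size_t i = 0; i < bytes; ++i) {
        const float *block = dsd2float[dsd[i]];
        for(size_t j = 0; j < 8; ++j)
            out[i * 8 + j] = block[j];
    }
}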

View file

@ -10,20 +10,16 @@
NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_BEGIN
@interface VisualizationController : NSObject { @interface VisualizationController : NSObject {
double sampleRate;
float visAudio[4096];
} }
+ (VisualizationController *)sharedController; + (VisualizationController *)sharedController;
- (void)postLatency:(double)latency;
- (UInt64)samplesPosted;
- (void)postSampleRate:(double)sampleRate; - (void)postSampleRate:(double)sampleRate;
- (void)postVisPCM:(const float *)inPCM amount:(int)amount; - (void)postVisPCM:(const float *)inPCM amount:(int)amount;
- (double)readSampleRate; - (double)readSampleRate;
- (void)copyVisPCM:(float *_Nullable)outPCM visFFT:(float *_Nullable)outFFT latencyOffset:(double)latency; - (void)copyVisPCM:(float *)outPCM visCQT:(float *)outCQT;
- (void)reset;
@end @end

View file

@ -8,15 +8,9 @@
#import "VisualizationController.h" #import "VisualizationController.h"
#import <Accelerate/Accelerate.h> #import <Accelerate/Accelerate.h>
#import "fft.h" #import "cqt.h"
@implementation VisualizationController { @implementation VisualizationController
double sampleRate;
double latency;
float *visAudio;
int visAudioCursor, visAudioSize;
uint64_t visSamplesPosted;
}
static VisualizationController *_sharedController = nil; static VisualizationController *_sharedController = nil;
@ -32,74 +26,31 @@ static VisualizationController *_sharedController = nil;
- (id)init { - (id)init {
self = [super init]; self = [super init];
if(self) { if(self) {
visAudio = NULL; vDSP_vclr(visAudio, 1, 4096);
visAudioSize = 0;
latency = 0;
} }
return self; return self;
} }
- (void)dealloc { - (void)dealloc {
fft_free(); cqt_free();
}
- (void)reset {
@synchronized (self) {
latency = 0;
visAudioCursor = 0;
visSamplesPosted = 0;
if(visAudio && visAudioSize) {
bzero(visAudio, sizeof(float) * visAudioSize);
}
}
} }
- (void)postSampleRate:(double)sampleRate { - (void)postSampleRate:(double)sampleRate {
@synchronized(self) { @synchronized(self) {
if(self->sampleRate != sampleRate) {
self->sampleRate = sampleRate; self->sampleRate = sampleRate;
int visAudioSize = (int)(sampleRate * 45.0);
void *visAudio = realloc(self->visAudio, visAudioSize * sizeof(float));
if(visAudio && visAudioSize) {
if(visAudioSize > self->visAudioSize) {
bzero(((float *)visAudio) + self->visAudioSize, sizeof(float) * (visAudioSize - self->visAudioSize));
}
self->visAudio = visAudio;
self->visAudioSize = visAudioSize;
visAudioCursor %= visAudioSize;
} else {
if(self->visAudio) {
free(self->visAudio);
self->visAudio = NULL;
}
self->visAudioSize = 0;
}
}
} }
} }
- (void)postVisPCM:(const float *)inPCM amount:(int)amount { - (void)postVisPCM:(const float *)inPCM amount:(int)amount {
int skipAmount = 0;
if(amount > 4096) {
skipAmount = amount - 4096;
amount = 4096;
}
@synchronized(self) { @synchronized(self) {
if(!visAudioSize) { cblas_scopy(4096 - amount, visAudio + amount, 1, visAudio, 1);
return; cblas_scopy(amount, inPCM + skipAmount, 1, visAudio + 4096 - amount, 1);
} }
int samplesRead = 0;
while(amount > 0) {
int amountToCopy = (int)(visAudioSize - visAudioCursor);
if(amountToCopy > amount) amountToCopy = amount;
cblas_scopy(amountToCopy, inPCM + samplesRead, 1, visAudio + visAudioCursor, 1);
visAudioCursor = visAudioCursor + amountToCopy;
if(visAudioCursor >= visAudioSize) visAudioCursor -= visAudioSize;
amount -= amountToCopy;
samplesRead += amountToCopy;
visSamplesPosted += amountToCopy;
}
}
}
- (void)postLatency:(double)latency {
self->latency = latency;
assert(latency < 45.0);
} }
- (double)readSampleRate { - (double)readSampleRate {
@ -108,63 +59,11 @@ static VisualizationController *_sharedController = nil;
} }
} }
- (UInt64)samplesPosted { - (void)copyVisPCM:(float *)outPCM visCQT:(float *)outCQT {
return visSamplesPosted;
}
- (void)copyVisPCM:(float *_Nullable)outPCM visFFT:(float *_Nullable)outFFT latencyOffset:(double)latency {
if(!outPCM && !outFFT) return;
if(!visAudio || !visAudioSize) {
if(outPCM) bzero(outPCM, sizeof(float) * 4096);
if(outFFT) bzero(outFFT, sizeof(float) * 2048);
return;
}
void *visAudioTemp = calloc(sizeof(float), 4096);
if(!visAudioTemp) {
if(outPCM) bzero(outPCM, sizeof(float) * 4096);
if(outFFT) bzero(outFFT, sizeof(float) * 2048);
return;
}
@synchronized(self) { @synchronized(self) {
if(!sampleRate) { cblas_scopy(4096, visAudio, 1, outPCM, 1);
free(visAudioTemp); cqt_calculate(visAudio, sampleRate, outCQT, 4096);
if(outPCM) bzero(outPCM, 4096 * sizeof(float));
if(outFFT) bzero(outFFT, 2048 * sizeof(float));
return;
} }
int latencySamples = (int)(sampleRate * (self->latency + latency)) + 2048;
if(latencySamples < 4096) latencySamples = 4096;
int readCursor = visAudioCursor - latencySamples;
int samples = 4096;
int samplesRead = 0;
if(latencySamples + samples > visAudioSize) {
samples = (int)(visAudioSize - latencySamples);
}
while(readCursor < 0)
readCursor += visAudioSize;
while(readCursor >= visAudioSize)
readCursor -= visAudioSize;
while(samples > 0) {
int samplesToRead = (int)(visAudioSize - readCursor);
if(samplesToRead > samples) samplesToRead = samples;
cblas_scopy(samplesToRead, visAudio + readCursor, 1, visAudioTemp + samplesRead, 1);
samplesRead += samplesToRead;
readCursor += samplesToRead;
samples -= samplesToRead;
if(readCursor >= visAudioSize) readCursor -= visAudioSize;
}
}
if(outPCM) {
cblas_scopy(4096, visAudioTemp, 1, outPCM, 1);
}
if(outFFT) {
fft_calculate(visAudioTemp, outFFT, 2048);
}
free(visAudioTemp);
} }
@end @end

View file

@ -1,136 +0,0 @@
//
// VisualizationController.swift
// CogAudio Framework
//
// Created by Christopher Snowhill on 6/30/22.
//
import Foundation
@objc(VisualizationController)
class VisualizationController : NSObject {
var serialQueue = DispatchQueue(label: "Visualization Queue")
var sampleRate = 0.0
var latency = 0.0
var visAudio: [Float] = Array(repeating: 0.0, count: 44100 * 45)
var visAudioCursor = 0
var visAudioSize = 0
var visSamplesPosted: UInt64 = 0
private static var sharedVisualizationController: VisualizationController = {
let visualizationController = VisualizationController()
return visualizationController
}()
@objc
class func sharedController() -> VisualizationController {
return sharedVisualizationController
}
@objc
func reset() {
serialQueue.sync {
self.latency = 0;
let amount = self.visAudioSize
for i in 0..<amount {
self.visAudio[i] = 0
}
self.visSamplesPosted = 0;
}
}
@objc
func postLatency(_ latency: Double) {
self.latency = latency
}
@objc
func samplesPosted() -> UInt64 {
return self.visSamplesPosted
}
@objc
func postSampleRate(_ sampleRate: Double) {
serialQueue.sync {
if(self.sampleRate != sampleRate) {
self.sampleRate = sampleRate
visAudioSize = (Int)(sampleRate * 45.0)
visAudio = Array(repeating: 0.0, count: visAudioSize)
visAudioCursor = 0
}
}
}
@objc
func postVisPCM(_ inPCM: UnsafePointer<Float>?, amount: Int) {
serialQueue.sync {
let bufferPointer = UnsafeBufferPointer<Float>(start: inPCM, count: amount)
var j = self.visAudioCursor
let k = self.visAudioSize
if(j + amount <= k) {
let endIndex = j + amount;
self.visAudio.replaceSubrange(j..<endIndex, with: bufferPointer)
j += amount
if(j >= k) { j = 0 }
} else {
let inEndIndex = k - j
let remainder = amount - inEndIndex
self.visAudio.replaceSubrange(j..<k, with: bufferPointer.prefix(inEndIndex))
self.visAudio.replaceSubrange(0..<remainder, with: bufferPointer.suffix(remainder))
j = remainder
}
self.visAudioCursor = j
self.visSamplesPosted += UInt64(amount);
}
}
@objc
func readSampleRate() -> Double {
serialQueue.sync {
return self.sampleRate
}
}
@objc
func copyVisPCM(_ outPCM: UnsafeMutablePointer<Float>?, visFFT: UnsafeMutablePointer<Float>?, latencyOffset: Double) {
if(self.visAudioSize == 0) {
outPCM?.update(repeating: 0.0, count: 4096)
visFFT?.update(repeating: 0.0, count: 2048)
return
}
var outPCMCopy = Array<Float>(repeating: 0.0, count: 4096)
serialQueue.sync {
// Offset latency so the target sample is in the center of the window
let latencySamples = (Int)((self.latency + latencyOffset) * self.sampleRate) + 2048
var samplesToDo = 4096;
if(latencySamples < 0) {
return;
}
if(latencySamples < 4096) {
// Latency can sometimes dip below this threshold
samplesToDo = latencySamples;
}
var j = self.visAudioCursor - latencySamples
let k = self.visAudioSize
if j < 0 { j += k }
if(j + samplesToDo <= k) {
outPCMCopy.replaceSubrange(0..<samplesToDo, with: self.visAudio.suffix(from: j).prefix(samplesToDo))
} else {
let outEndIndex = k - j
let remainder = samplesToDo - outEndIndex
outPCMCopy.replaceSubrange(0..<outEndIndex, with: self.visAudio.suffix(from: j))
outPCMCopy.replaceSubrange(outEndIndex..<samplesToDo, with: self.visAudio.prefix(remainder))
}
}
outPCM?.update(from: outPCMCopy, count: 4096)
if(visFFT != nil) {
serialQueue.sync {
fft_calculate(outPCMCopy, visFFT, 2048)
}
}
}
}
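Both visualization controllers above keep a long PCM ring buffer and read a 4096-sample window that trails the write cursor by the reported latency plus 2048 samples, so the target sample sits in the middle of the window. A minimal sketch of the wrap-around read index, assuming cursor, latency, and buffer size are all counted in samples:
#include <cstddef>
// Start `latencySamples` behind the write cursor and wrap modulo the ring size,
// adding bufferSize first so the subtraction can never go negative.
static size_t read_start(size_t writeCursor, size_t latencySamples, size_t bufferSize) {
    return (writeCursor + bufferSize - (latencySamples % bufferSize)) % bufferSize;
}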

Some files were not shown because too many files have changed in this diff.