Compare commits

..

No commits in common. "main" and "r516" have entirely different histories.
main ... r516

5913 changed files with 499570 additions and 3605658 deletions

View file

@ -1,176 +0,0 @@
# The style used for all options not specifically set in the configuration.
BasedOnStyle: LLVM
# The extra indent or outdent of access modifiers, e.g. public:.
AccessModifierOffset: 0
# If true, aligns escaped newlines as far left as possible. Otherwise puts them into the right-most column.
AlignEscapedNewlinesLeft: false
# If true, aligns trailing comments.
AlignTrailingComments: false
# Allow putting all parameters of a function declaration onto the next line even if BinPackParameters is false.
AllowAllParametersOfDeclarationOnNextLine: false
# Allows contracting simple braced statements to a single line.
AllowShortBlocksOnASingleLine: true
# If true, short case labels will be contracted to a single line.
AllowShortCaseLabelsOnASingleLine: false
# Dependent on the value, int f() { return 0; } can be put on a single line. Possible values: None, Inline, All.
AllowShortFunctionsOnASingleLine: None
# If true, if (a) return; can be put on a single line.
AllowShortIfStatementsOnASingleLine: true
# If true, while (true) continue; can be put on a single line.
AllowShortLoopsOnASingleLine: true
# If true, always break after function definition return types. More truthfully called break before the identifier following the type in a function definition.
AlwaysBreakAfterDefinitionReturnType: false
# If true, always break before multiline string literals.
AlwaysBreakBeforeMultilineStrings: false
# If true, always break after the template<...> of a template declaration.
AlwaysBreakTemplateDeclarations: true
# If false, a function calls arguments will either be all on the same line or will have one line each.
#BinPackArguments: true
# If false, a function declarations or function definitions parameters will either all be on the same line or will have one line each.
BinPackParameters: true
# The way to wrap binary operators. Possible values: None, NonAssignment, All.
BreakBeforeBinaryOperators: None
# The brace breaking style to use. Possible values: Attach, Linux, Stroustrup, Allman, GNU.
BreakBeforeBraces: Attach
# If true, ternary operators will be placed after line breaks.
BreakBeforeTernaryOperators: false
# Always break constructor initializers before commas and align the commas with the colon.
BreakConstructorInitializersBeforeComma: false
# The column limit. A column limit of 0 means that there is no column limit.
ColumnLimit: 0
# A regular expression that describes comments with special meaning, which should not be split into lines or otherwise changed.
CommentPragmas: '^ IWYU pragma:'
# If the constructor initializers dont fit on a line, put each initializer on its own line.
ConstructorInitializerAllOnOneLineOrOnePerLine: false
# The number of characters to use for indentation of constructor initializer lists.
ConstructorInitializerIndentWidth: 0
# Indent width for line continuations.
ContinuationIndentWidth: 0
# If true, format braced lists as best suited for C++11 braced lists.
Cpp11BracedListStyle: false
# If true, analyze the formatted file for the most common alignment of & and *. PointerAlignment is then used only as fallback.
DerivePointerAlignment: true
# Disables formatting at all.
DisableFormat: false
# If true, clang-format detects whether function calls and definitions are formatted with one parameter per line.
ExperimentalAutoDetectBinPacking: false
# A vector of macros that should be interpreted as foreach loops instead of as function calls.
#ForEachMacros: ''
# Indent case labels one level from the switch statement. When false, use the same indentation level as for the switch statement. Switch statement body is always indented one level more than case labels.
IndentCaseLabels: true
# The number of columns to use for indentation.
IndentWidth: 4
# Indent if a function definition or declaration is wrapped after the type.
IndentWrappedFunctionNames: false
# If true, empty lines at the start of blocks are kept.
KeepEmptyLinesAtTheStartOfBlocks: false
# Language, this format style is targeted at. Possible values: None, Cpp, Java, JavaScript, Proto.
# Language: None
# The maximum number of consecutive empty lines to keep.
MaxEmptyLinesToKeep: 1
# The indentation used for namespaces. Possible values: None, Inner, All.
NamespaceIndentation: All
# The number of characters to use for indentation of ObjC blocks.
ObjCBlockIndentWidth: 4
# Add a space after @property in Objective-C, i.e. use \@property (readonly) instead of \@property(readonly).
ObjCSpaceAfterProperty: false
# Add a space in front of an Objective-C protocol list, i.e. use Foo <Protocol> instead of Foo<Protocol>.
ObjCSpaceBeforeProtocolList: true
# The penalty for breaking a function call after “call(”.
PenaltyBreakBeforeFirstCallParameter: 1000
# The penalty for each line break introduced inside a comment.
PenaltyBreakComment: 1000
# The penalty for breaking before the first <<.
PenaltyBreakFirstLessLess: 1000
# The penalty for each line break introduced inside a string literal.
PenaltyBreakString: 1000
# The penalty for each character outside of the column limit.
PenaltyExcessCharacter: 1000
# Penalty for putting the return type of a function onto its own line.
PenaltyReturnTypeOnItsOwnLine: 1000
# Pointer and reference alignment style. Possible values: Left, Right, Middle.
PointerAlignment: Left
# If true, a space may be inserted after C style casts.
SpaceAfterCStyleCast: false
# If false, spaces will be removed before assignment operators.
SpaceBeforeAssignmentOperators: true
# Defines in which cases to put a space before opening parentheses. Possible values: Never, ControlStatements, Always.
SpaceBeforeParens: Never
# If true, spaces may be inserted into ().
SpaceInEmptyParentheses: false
# The number of spaces before trailing line comments (// - comments).
SpacesBeforeTrailingComments: 1
# If true, spaces will be inserted after < and before > in template argument lists.
SpacesInAngles: false
# If true, spaces may be inserted into C style casts.
SpacesInCStyleCastParentheses: false
# If true, spaces are inserted inside container literals (e.g. ObjC and Javascript array and dict literals).
SpacesInContainerLiterals: false
# If true, spaces will be inserted after ( and before ).
SpacesInParentheses: false
# If true, spaces will be inserted after [ and before ].
SpacesInSquareBrackets: false
# Format compatible with this standard, e.g. use A<A<int> > instead of A<A<int>> for LS_Cpp03. Possible values: Cpp03, Cpp11, Auto.
Standard: Auto
# The number of columns used for tab stops.
TabWidth: 4
# The way to use tab characters in the resulting file. Possible values: Never, ForIndentation, Always.
UseTab: ForIndentation

View file

@ -1,12 +0,0 @@
#!/bin/sh
MATCHES=$(git grep -n -E "(DevelopmentTeam|DEVELOPMENT_TEAM) =" .)
COUNT=$(echo -n "$MATCHES\c" | grep -cvE '(Shared.xcconfig|= "")')
if [ $COUNT -ne 0 ]; then
ERRORS=$(echo -n "$MATCHES\c" | grep -vE '= ""')
echo $COUNT
echo "Remove Development Team specifications from project files:"
echo "$ERRORS";
exit 1;
fi

4
.github/FUNDING.yml vendored
View file

@ -1,4 +0,0 @@
github: kode54
ko_fi: kode54
patreon: kode54
liberapay: kode54

View file

@ -1,31 +0,0 @@
---
name: Feedback
about: Report bugs or suggest new features
title: ''
labels:
assignees: kode54
---
**Describe**
A clear and concise description of what the issue is.
**To Reproduce** (delete if not applicable)
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Version information:***
- macOS version: ...
- Cog version: ... (_cut and paste from title_)
**Additional context**
Add any other context about the issue here.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5 KiB

View file

@ -1,44 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 24.2.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 2809.9 600" style="enable-background:new 0 0 2809.9 600;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FF6336;}
.st1{fill:#FFC501;}
.st2{fill:#A4A14A;}
</style>
<g>
<polygon class="st0" points="393.5,468.8 524.7,468.8 524.7,376.1 355.1,376.1 262.3,468.8 169.6,376.1 0,376.1 0,468.8
131.2,468.8 262.3,600 "/>
<rect y="190.6" class="st1" width="524.7" height="92.8"/>
<rect y="5.1" class="st2" width="524.7" height="92.8"/>
</g>
<path d="M733.3,5.1h83.4v455.2h-83.4V5.1z"/>
<path d="M946.7,447.3c-26.3-14.5-47.2-34.5-62.6-59.7c-15.4-25.3-23.1-53.1-23.1-83.4s7.7-58.2,23.1-83.4
c15.4-25.3,36.2-45.2,62.6-59.7c26.3-14.5,54.9-21.8,85.7-21.8c30.8,0,59.3,7.3,85.7,21.8c26.3,14.5,47.2,34.5,62.6,59.7
c15.4,25.3,23.1,53.1,23.1,83.4s-7.7,58.2-23.1,83.4c-15.4,25.3-36.3,45.2-62.6,59.7c-26.3,14.5-54.9,21.8-85.7,21.8
C1001.5,469.1,973,461.8,946.7,447.3z M1076.2,380.9c13.3-7.4,23.9-17.8,31.9-31.3s12-28.7,12-45.5s-4-32-12-45.5
s-18.6-23.9-31.9-31.3c-13.3-7.4-27.9-11.1-43.9-11.1s-30.7,3.7-43.9,11.1c-13.3,7.4-23.9,17.8-31.9,31.3s-12,28.7-12,45.5
c0,16.9,4,32,12,45.5s18.6,23.9,31.9,31.3c13.3,7.4,27.9,11.1,43.9,11.1C1048.3,391.9,1063,388.3,1076.2,380.9z"/>
<path d="M1247.9,5.1h83.4v271.2L1440,147.9h99.2l-122.6,144.8l131.5,167.5h-106.8l-110-144.8v144.8h-83.4L1247.9,5.1L1247.9,5.1z"/>
<path d="M1626.9,448.8c-23.4-13.5-42.4-32.8-56.9-57.8c-14.5-25.1-21.8-54-21.8-86.9c0-29.9,7-57.5,20.9-82.8s32.9-45.3,56.9-60.1
c24-14.7,50.3-22.1,79-22.1c20.2,0,38.7,3.4,55.3,10.1c16.6,6.7,29.4,15.8,38.3,27.2V148h83.4v312.3h-83.4v-28.4
c-13.1,12.2-27,21.5-41.7,27.8c-14.8,6.3-33.7,9.5-56.9,9.5C1674.6,469.1,1650.2,462.3,1626.9,448.8z M1778.9,366.7
c15.6-16.9,23.4-37.7,23.4-62.6s-7.8-45.7-23.4-62.6c-15.6-16.8-36.2-25.3-61.9-25.3c-25.7,0-46.4,8.4-62,25.3s-23.4,37.7-23.4,62.6
s7.8,45.7,23.4,62.6s36.2,25.3,62,25.3C1742.6,391.9,1763.3,383.5,1778.9,366.7z"/>
<path d="M1942.6,5.1h83.4v455.2h-83.4V5.1z"/>
<path d="M2091.2,89.8C2081,79.7,2076,67.4,2076,53.1c0-14.7,5.1-27.3,15.2-37.6C2101.3,5.2,2113.5,0,2127.8,0
c14.7,0,27.3,5.2,37.6,15.5s15.5,22.9,15.5,37.6c0,14.3-5.2,26.5-15.5,36.7c-10.3,10.1-22.9,15.2-37.6,15.2
C2113.5,104.9,2101.3,99.9,2091.2,89.8z M2086.7,147.9h83.4v312.3h-83.4V147.9z"/>
<path d="M2227.1,438.7l19-78.4h3.8c27.4,21.1,55.4,31.6,84.1,31.6c11.8,0,21.4-2.2,28.8-6.6c7.4-4.4,11.1-10.8,11.1-19.3
c0-8.8-4.3-16-13-21.5c-8.6-5.5-24.8-12.2-48.4-20.2c-24-8-42.7-19.6-55.9-34.8c-13.3-15.2-19.9-33.1-19.9-53.7
c0-29.1,10.8-52.5,32.6-70.2c21.7-17.7,49.2-26.6,82.5-26.6c16.9,0,31.8,1.6,44.9,4.7c13.1,3.2,25.5,8.3,37.3,15.5l3.2,79.6h-4.4
c-15.2-9.7-28.7-17-40.5-21.8s-25.1-7.3-39.8-7.3c-10.5,0-19.2,2.1-25.9,6.3s-10.1,9.7-10.1,16.4c0,8.9,4.2,16.1,12.7,21.8
c8.4,5.7,24.2,12.5,47.4,20.5c26.5,8.9,46.7,19.6,60.4,32.2s20.5,32.2,20.5,58.8c0,21.9-5.5,40.7-16.4,56.3
c-11,15.6-25.4,27.3-43.3,35.1c-17.9,7.8-37.6,11.7-59.1,11.7C2294.9,469.1,2257.8,459,2227.1,438.7z"/>
<path d="M2574.8,446.9c-26.1-14.7-46.7-34.9-61.6-60.4c-15-25.5-22.4-53.8-22.4-85c0-30.3,7.1-57.8,21.2-82.5
c14.1-24.7,33.7-44.1,58.8-58.5c25.1-14.3,53.4-21.5,85-21.5c32,0,59.7,7.5,83.1,22.4c23.4,15,41.1,34.9,53.1,59.7
c12,24.9,18,51.6,18,80.3v24.7h-239.6c5.1,23.6,15.9,41.5,32.6,53.7c16.6,12.2,38.7,18.3,66.1,18.3c41.7,0,78.2-13.7,109.4-41.1h8.9
l-3.2,79c-19,11-39,19.2-60.1,24.7s-41.3,8.2-60.7,8.2C2630.4,469.1,2600.9,461.7,2574.8,446.9z M2726.5,269.3
c-2.1-19-10-33.8-23.7-44.6c-13.7-10.7-30.5-16.1-50.3-16.1c-19.4,0-36.4,5.2-50.9,15.5s-24.3,25.4-29.4,45.2H2726.5z"/>
</svg>

Before

Width:  |  Height:  |  Size: 3.6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 808 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 106 KiB

View file

@ -1,56 +0,0 @@
name: Check if Cog buildable
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
build:
name: Build Universal Cog.app
runs-on: macos-15
env:
XCODE_DERIVEDDATA_PATH: build
steps:
- name: Switch to Xcode 16
uses: maxim-lobanov/setup-xcode@v1
with:
xcode-version: 16
- name: Check out repository
uses: actions/checkout@v4
with:
submodules: recursive
- name: Unpack libraries
run: >
cd ThirdParty && tar xvf libraries.tar.xz
- name: Run xcodebuild
run: >
xcodebuild
-quiet
-project Cog.xcodeproj
-scheme Cog
-configuration Debug
-arch x86_64
-arch arm64
-derivedDataPath $XCODE_DERIVEDDATA_PATH
CODE_SIGNING_ALLOWED=NO
ONLY_ACTIVE_ARCH=NO
build
- name: Package archive
run: >
ditto
-c
-k
--sequesterRsrc
--keepParent
--zlibCompressionLevel 9
$XCODE_DERIVEDDATA_PATH/Build/Products/Debug/Cog.app
$XCODE_DERIVEDDATA_PATH/Cog.zip
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: Cog
path: ${{ env.XCODE_DERIVEDDATA_PATH }}/Cog.zip

56
.gitignore vendored
View file

@ -1,56 +0,0 @@
.DS_Store
xcuserdata
/build
# Special cog exceptions
!Frameworks/OpenMPT/OpenMPT/build
# User-specific xcconfig files
Xcode-config/DEVELOPMENT_TEAM.xcconfig
Xcode-config/SENTRY_SETTINGS.xcconfig
# Plist derived from template at build time
/Info.plist
# This indicates the libraries are up to date
/ThirdParty/libraries.updated
# Temporary file to indicate libraries are being extracted by one process
/ThirdParty/libraries.extracting
# The project will unpack these before building, if necessary
/ThirdParty/BASS/libbass.dylib
/ThirdParty/BASS/libbass_mpc.dylib
/ThirdParty/BASS/libbassflac.dylib
/ThirdParty/BASS/libbassmidi.dylib
/ThirdParty/BASS/libbassopus.dylib
/ThirdParty/BASS/libbasswv.dylib
/ThirdParty/avif/lib/libaom.a
/ThirdParty/avif/lib/libavif.a
/ThirdParty/fdk-aac/lib/libfdk-aac.2.dylib
/ThirdParty/fdk-aac/lib/libfdk-aac.a
/ThirdParty/fdk-aac/lib/libfdk-aac.dylib
/ThirdParty/fdk-aac/lib/libfdk-aac.la
/ThirdParty/fdk-aac/lib/pkgconfig/fdk-aac.pc
/ThirdParty/ffmpeg/lib/libavcodec.61.dylib
/ThirdParty/ffmpeg/lib/libavformat.61.dylib
/ThirdParty/ffmpeg/lib/libavutil.59.dylib
/ThirdParty/ffmpeg/lib/libswresample.5.dylib
/ThirdParty/flac/lib/libFLAC.12.dylib
/ThirdParty/libid3tag/lib/libid3tag.a
/ThirdParty/libmad/lib/libmad.a
/ThirdParty/libopenmpt/lib/libopenmpt.a
/ThirdParty/libopenmpt_old/lib/libopenmpt.old.a
/ThirdParty/libvgm/lib/libvgm-emu.a
/ThirdParty/libvgm/lib/libvgm-player.a
/ThirdParty/libvgm/lib/libvgm-utils.a
/ThirdParty/mpg123/lib/libmpg123.0.dylib
/ThirdParty/ogg/lib/libogg.0.dylib
/ThirdParty/opus/lib/libopus.0.dylib
/ThirdParty/opusfile/lib/libopusfile.0.dylib
/ThirdParty/rubberband/lib/librubberband.3.dylib
/ThirdParty/speex/libspeex.a
/ThirdParty/vorbis/lib/libvorbisfile.3.dylib
/ThirdParty/vorbis/lib/libvorbis.0.dylib
/ThirdParty/soxr/lib/libsoxr.0.dylib
/ThirdParty/WavPack/lib/libwavpack.a

21
.gitmodules vendored
View file

@ -1,21 +0,0 @@
[submodule "Frameworks/mGBA/mGBA/mgba"]
path = Frameworks/mGBA/mGBA/mgba
url = https://github.com/kode54/mgba.git
[submodule "Frameworks/AdPlug/AdPlug/adplug"]
path = Frameworks/AdPlug/AdPlug/adplug
url = https://github.com/kode54/adplug.git
[submodule "Frameworks/libbinio/libbinio/libbinio"]
path = Frameworks/libbinio/libbinio/libbinio
url = https://github.com/adplug/libbinio.git
[submodule "Frameworks/AdPlug/AdPlug/database"]
path = Frameworks/AdPlug/AdPlug/database
url = https://github.com/adplug/database.git
[submodule "Frameworks/libatrac9/libatrac9"]
path = Frameworks/libatrac9/libatrac9
url = https://github.com/Thealexbarney/LibAtrac9.git
[submodule "Frameworks/shpakovski/MASShortcut"]
path = Frameworks/shpakovski/MASShortcut
url = https://github.com/kode54/MASShortcut.git
[submodule "Frameworks/libsidplayfp/sidplayfp"]
path = Frameworks/libsidplayfp/sidplayfp
url = https://github.com/kode54/libsidplayfp.git

View file

@ -1,8 +0,0 @@
syntax: glob
*.mode1v3
*.mode2v3
*.pbxuser
xcuserdata
build
*/build
.DS_Store

View file

@ -1,2 +0,0 @@
4abdff3798f81313c3d98aa6ef3cf22c73777c88 r516
ddfc3989351f477a3431f749649e6d832ccfcdf2 k54

15
.vscode/launch.json vendored
View file

@ -1,15 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Debug Local File",
"type": "Ruby",
"request": "launch",
"program": "${workspaceRoot}/Scripts/update_feed.rb"
},
null
]
}

View file

@ -1,9 +0,0 @@
{
"editor.formatOnSave": false,
"files.associations": {
"*.h": "objective-c",
"*.m": "objective-c"
},
"clang-format.executable": "/usr/local/bin/clang-format",
"clang-format.language.objective-c.style": "{ BasedOnStyle: Google, IndentWidth: 4, ObjCBlockIndentWidth: 4 }"
}

View file

@ -2,117 +2,78 @@
#import <Cocoa/Cocoa.h>
@class FileTreeViewController;
#import "NDHotKeyEvent.h"
@class PlaybackController;
@class PlaylistController;
@class PlaylistView;
@class FileTreeController;
@class FileOutlineView;
@class AppleRemote;
@class PlaylistLoader;
@class PreferencesController;
@interface AppController : NSObject {
IBOutlet NSObjectController *currentEntryController;
@interface AppController : NSObject
{
IBOutlet PlaybackController *playbackController;
IBOutlet PlaylistController *playlistController;
IBOutlet PlaylistController *playlistController;
IBOutlet PlaylistLoader *playlistLoader;
IBOutlet NSWindow *mainWindow;
IBOutlet NSWindow *miniWindow;
IBOutlet NSSplitView *mainView;
IBOutlet NSSegmentedControl *playbackButtons;
IBOutlet NSPanel *mainWindow;
IBOutlet NSButton *playButton;
IBOutlet NSButton *prevButton;
IBOutlet NSButton *nextButton;
IBOutlet NSButton *infoButton;
IBOutlet NSButton *fileButton;
IBOutlet NSButton *shuffleButton;
IBOutlet NSButton *repeatButton;
IBOutlet NSButton *randomizeButton;
IBOutlet NSTextField *totalTimeField;
IBOutlet NSDrawer *infoDrawer;
IBOutlet NSDrawer *fileDrawer;
IBOutlet FileTreeController *fileTreeController;
IBOutlet FileOutlineView *fileOutlineView;
IBOutlet PlaylistView *playlistView;
IBOutlet NSMenuItem *showIndexColumn;
IBOutlet NSMenuItem *showTitleColumn;
IBOutlet NSMenuItem *showAlbumArtistColumn;
IBOutlet NSMenuItem *showArtistColumn;
IBOutlet NSMenuItem *showAlbumColumn;
IBOutlet NSMenuItem *showGenreColumn;
IBOutlet NSMenuItem *showPlayCountColumn;
IBOutlet NSMenuItem *showLengthColumn;
IBOutlet NSMenuItem *showTrackColumn;
IBOutlet NSMenuItem *showYearColumn;
IBOutlet NSMenu *dockMenu;
IBOutlet NSMenuItem *currentArtistItem;
IBOutlet NSWindowController *spotlightWindowController;
IBOutlet FileTreeViewController *fileTreeViewController;
IBOutlet PreferencesController *preferencesController;
NSOperationQueue *queue; // Since we are the app delegate, we take care of the op queue
NSMutableSet *expandedNodes;
BOOL miniMode;
NDHotKeyEvent *playHotKey;
NDHotKeyEvent *prevHotKey;
NDHotKeyEvent *nextHotKey;
AppleRemote *remote;
BOOL remoteButtonHeld; /* true as long as the user holds the left,right,plus or minus on the remote control */
}
@property(strong) IBOutlet NSButton *infoButton;
@property(strong) IBOutlet NSButton *infoButtonMini;
- (IBAction)openURL:(id)sender;
- (IBAction)openFiles:(id)sender;
- (IBAction)delEntries:(id)sender;
- (IBAction)savePlaylist:(id)sender;
- (IBAction)privacyPolicy:(id)sender;
- (IBAction)donate:(id)sender;
- (IBAction)feedback:(id)sender;
- (IBAction)toggleInfoDrawer:(id)sender;
- (IBAction)toggleFileDrawer:(id)sender;
- (void)drawerDidOpen:(NSNotification *)notification;
- (void)drawerDidClose:(NSNotification *)notification;
- (void)initDefaults;
// Fun stuff
//Fun stuff
- (BOOL)applicationShouldHandleReopen:(NSApplication *)theApplication hasVisibleWindows:(BOOL)flag;
- (BOOL)application:(NSApplication *)theApplication openFile:(NSString *)filename;
- (void)application:(NSApplication *)theApplication openFiles:(NSArray *)filenames;
- (void)registerHotKeys;
- (void)clickPlay;
- (void)clickPause;
- (void)clickStop;
- (void)clickPrev;
- (void)clickNext;
- (void)clickSpam;
- (void)clickSeek:(NSTimeInterval)position;
- (IBAction)increaseFontSize:(id)sender;
- (IBAction)decreaseFontSize:(id)sender;
- (void)changeFontSize:(float)size;
- (void)nodeExpanded:(NSNotification *)notification;
- (void)nodeCollapsed:(NSNotification *)notification;
- (IBAction)toggleMiniMode:(id)sender;
- (IBAction)toggleToolbarStyle:(id)sender;
- (BOOL)pathSuggesterEmpty;
+ (BOOL)globalPathSuggesterEmpty;
- (void)showPathSuggester;
+ (void)globalShowPathSuggester;
- (void)selectTrack:(id)sender;
- (IBAction)showRubberbandSettings:(id)sender;
+ (void)globalShowRubberbandSettings;
@property NSWindow *mainWindow;
@property NSWindow *miniWindow;
@property BOOL miniMode;
@property(nonatomic) BOOL floatingMiniWindow;
OSStatus handleHotKey(EventHandlerCallRef nextHandler,EventRef theEvent,void *userData);
@end

File diff suppressed because it is too large Load diff

View file

@ -1,33 +0,0 @@
//
// DockIconController.h
// Cog
//
// Created by Vincent Spader on 2/28/09.
// Copyright 2009 __MyCompanyName__. All rights reserved.
//
#import <Cocoa/Cocoa.h>
@class PlaybackController;
@interface DockIconController : NSObject {
NSImage *dockImage;
NSInteger lastDockCustom;
NSInteger lastDockCustomPlaque;
NSInteger dockCustomLoaded;
NSImage *dockCustomStop;
NSImage *dockCustomPlay;
NSImage *dockCustomPause;
IBOutlet PlaybackController *playbackController;
NSInteger lastPlaybackStatus;
NSInteger lastColorfulStatus;
NSNumber *lastProgressStatus;
NSImageView *imageView;
NSProgressIndicator *progressIndicator;
}
@end

View file

@ -1,281 +0,0 @@
//
// DockIconController.m
// Cog
//
// Created by Vincent Spader on 2/28/09.
// Copyright 2009 __MyCompanyName__. All rights reserved.
//
#import "DockIconController.h"
#import "PlaybackController.h"
#import <CogAudio/Status.h>
@implementation DockIconController
static NSString *DockIconPlaybackStatusObservationContext = @"DockIconPlaybackStatusObservationContext";
static NSString *CogCustomDockIconsReloadNotification = @"CogCustomDockIconsReloadNotification";
- (void)startObserving {
[playbackController addObserver:self forKeyPath:@"playbackStatus" options:(NSKeyValueObservingOptionNew | NSKeyValueObservingOptionInitial) context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[playbackController addObserver:self forKeyPath:@"progressOverall" options:(NSKeyValueObservingOptionNew | NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionOld) context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.colorfulDockIcons" options:0 context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.customDockIcons" options:0 context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.customDockIconsPlaque" options:0 context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(refreshDockIcons:) name:CogCustomDockIconsReloadNotification object:nil];
}
- (void)stopObserving {
[playbackController removeObserver:self forKeyPath:@"playbackStatus" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[playbackController removeObserver:self forKeyPath:@"progressOverall" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.colorfulDockIcons" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.customDockIcons" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.customDockIconsPlaque" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
[[NSNotificationCenter defaultCenter] removeObserver:self name:CogCustomDockIconsReloadNotification object:nil];
}
- (void)startObservingProgress:(NSProgress *)progress {
[progress addObserver:self forKeyPath:@"fractionCompleted" options:0 context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
}
- (void)stopObservingProgress:(NSProgress *)progress {
[progress removeObserver:self forKeyPath:@"fractionCompleted" context:(__bridge void *_Nullable)(DockIconPlaybackStatusObservationContext)];
}
static NSString *getBadgeName(NSString *baseName, BOOL colorfulIcons) {
if(colorfulIcons) {
return [baseName stringByAppendingString:@"Colorful"];
} else {
return [baseName stringByAppendingString:@"Normal"];
}
}
static NSString *getCustomIconName(NSString *baseName) {
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSApplicationSupportDirectory, NSUserDomainMask, YES);
NSString *basePath = [[paths firstObject] stringByAppendingPathComponent:@"Cog"];
basePath = [basePath stringByAppendingPathComponent:@"Icons"];
basePath = [basePath stringByAppendingPathComponent:baseName];
return [basePath stringByAppendingPathExtension:@"png"];
}
- (BOOL)loadCustomDockIcons {
NSError *error = nil;
NSData *dataStopIcon = [NSData dataWithContentsOfFile:getCustomIconName(@"Stop") options:(NSDataReadingMappedIfSafe) error:&error];
if(!dataStopIcon || error) {
return NO;
}
NSData *dataPlayIcon = [NSData dataWithContentsOfFile:getCustomIconName(@"Play") options:(NSDataReadingMappedIfSafe) error:&error];
if(!dataPlayIcon || error) {
return NO;
}
NSData *dataPauseIcon = [NSData dataWithContentsOfFile:getCustomIconName(@"Pause") options:(NSDataReadingMappedIfSafe) error:&error];
if(!dataPauseIcon || error) {
return NO;
}
dockCustomStop = [[NSImage alloc] initWithData:dataStopIcon];
dockCustomPlay = [[NSImage alloc] initWithData:dataPlayIcon];
dockCustomPause = [[NSImage alloc] initWithData:dataPauseIcon];
return (dockCustomStop && dockCustomPlay && dockCustomPause);
}
- (void)refreshDockIcon:(NSInteger)playbackStatus withProgress:(double)progressStatus {
// Really weird crash user experienced because the plaque image didn't load?
if(!dockImage || dockImage.size.width == 0 || dockImage.size.height == 0) return;
BOOL displayChanged = NO;
BOOL drawIcon = NO;
BOOL removeProgress = NO;
BOOL useCustomDockIcons = [[NSUserDefaults standardUserDefaults] boolForKey:@"customDockIcons"];
BOOL useCustomDockIconsPlaque = [[NSUserDefaults standardUserDefaults] boolForKey:@"customDockIconsPlaque"];
if(useCustomDockIcons && !dockCustomLoaded) {
dockCustomLoaded = [self loadCustomDockIcons];
if(!dockCustomLoaded) {
useCustomDockIcons = NO;
}
}
if(useCustomDockIcons != lastDockCustom ||
useCustomDockIconsPlaque != lastDockCustomPlaque) {
lastDockCustom = useCustomDockIcons;
lastDockCustomPlaque = useCustomDockIconsPlaque;
drawIcon = YES;
if(!useCustomDockIcons) {
dockCustomLoaded = NO;
dockCustomStop = nil;
dockCustomPlay = nil;
dockCustomPause = nil;
}
}
if(playbackStatus < 0)
playbackStatus = lastPlaybackStatus;
else {
lastPlaybackStatus = playbackStatus;
drawIcon = YES;
}
if(progressStatus < -2)
progressStatus = [lastProgressStatus doubleValue];
else {
if(progressStatus < 0 && [lastProgressStatus doubleValue] >= 0)
removeProgress = YES;
lastProgressStatus = @(progressStatus);
}
BOOL displayProgress = (progressStatus >= 0.0);
NSImage *badgeImage = nil;
BOOL colorfulIcons = [[NSUserDefaults standardUserDefaults] boolForKey:@"colorfulDockIcons"];
if((colorfulIcons && lastColorfulStatus < 1) ||
(!colorfulIcons && lastColorfulStatus != 0)) {
lastColorfulStatus = colorfulIcons ? 1 : 0;
drawIcon = YES;
}
NSDockTile *dockTile = [NSApp dockTile];
if(drawIcon) {
switch(playbackStatus) {
case CogStatusPlaying:
badgeImage = useCustomDockIcons ? dockCustomPlay : [NSImage imageNamed:getBadgeName(@"Play", colorfulIcons)];
break;
case CogStatusPaused:
badgeImage = useCustomDockIcons ? dockCustomPause : [NSImage imageNamed:getBadgeName(@"Pause", colorfulIcons)];
break;
default:
badgeImage = useCustomDockIcons ? dockCustomStop : [NSImage imageNamed:getBadgeName(@"Stop", colorfulIcons)];
break;
}
NSSize badgeSize = [badgeImage size];
NSImage *newDockImage = (useCustomDockIcons && !useCustomDockIconsPlaque) ? [[NSImage alloc] initWithSize:NSMakeSize(1024, 1024)] : [dockImage copy];
[newDockImage lockFocus];
[badgeImage drawInRect:NSMakeRect(0, 0, 1024, 1024)
fromRect:NSMakeRect(0, 0, badgeSize.width, badgeSize.height)
operation:NSCompositingOperationSourceOver
fraction:1.0];
[newDockImage unlockFocus];
imageView = [[NSImageView alloc] init];
[imageView setImage:newDockImage];
[dockTile setContentView:imageView];
progressIndicator = [[NSProgressIndicator alloc] initWithFrame:NSMakeRect(0.0, 0.0, dockTile.size.width, 10.0)];
[progressIndicator setStyle:NSProgressIndicatorStyleBar];
[progressIndicator setIndeterminate:NO];
[progressIndicator setBezeled:YES];
[progressIndicator setMinValue:0];
[progressIndicator setMaxValue:100];
[progressIndicator setHidden:YES];
[imageView addSubview:progressIndicator];
displayChanged = YES;
}
if(displayProgress) {
if(!imageView) {
imageView = [[NSImageView alloc] init];
[imageView setImage:[NSApp applicationIconImage]];
[dockTile setContentView:imageView];
}
if(!progressIndicator) {
progressIndicator = [[NSProgressIndicator alloc] initWithFrame:NSMakeRect(0.0, 0.0, dockTile.size.width, 10.0)];
[progressIndicator setIndeterminate:NO];
[progressIndicator setBezeled:YES];
[progressIndicator setMinValue:0];
[progressIndicator setMaxValue:100];
[imageView addSubview:progressIndicator];
}
[progressIndicator setDoubleValue:progressStatus];
[progressIndicator setHidden:NO];
displayChanged = YES;
}
if(removeProgress) {
if(progressIndicator)
[progressIndicator setHidden:YES];
displayChanged = YES;
}
if(displayChanged)
[dockTile display];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if([DockIconPlaybackStatusObservationContext isEqual:(__bridge id)(context)]) {
if([keyPath isEqualToString:@"playbackStatus"]) {
NSInteger playbackStatus = [[change objectForKey:NSKeyValueChangeNewKey] integerValue];
[self refreshDockIcon:playbackStatus withProgress:-10];
} else if([keyPath isEqualToString:@"progressOverall"]) {
double progressStatus = [lastProgressStatus doubleValue];
id objNew = [change objectForKey:NSKeyValueChangeNewKey];
id objOld = [change objectForKey:NSKeyValueChangeOldKey];
NSProgress *progressNew = nil, *progressOld = nil;
if(objNew && [objNew isKindOfClass:[NSProgress class]])
progressNew = (NSProgress *)objNew;
if(objOld && [objOld isKindOfClass:[NSProgress class]])
progressOld = (NSProgress *)objOld;
if(progressOld) {
[self stopObservingProgress:progressOld];
progressStatus = -1;
}
if(progressNew) {
[self startObservingProgress:progressNew];
progressStatus = progressNew.fractionCompleted * 100.0;
}
[self refreshDockIcon:-1 withProgress:progressStatus];
} else if([keyPath isEqualToString:@"values.colorfulDockIcons"] ||
[keyPath isEqualToString:@"values.customDockIcons"] ||
[keyPath isEqualToString:@"values.customDockIconsPlaque"]) {
[self refreshDockIcon:-1 withProgress:-10];
} else if([keyPath isEqualToString:@"fractionCompleted"]) {
double progressStatus = [(NSProgress *)object fractionCompleted];
[self refreshDockIcon:-1 withProgress:progressStatus * 100.0];
}
} else {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
}
- (void)refreshDockIcons:(NSNotification *)notification {
lastDockCustom = NO;
dockCustomLoaded = NO;
[self refreshDockIcon:-1 withProgress:-10];
}
- (void)awakeFromNib {
dockImage = [[NSImage imageNamed:@"Plaque"] copy];
lastColorfulStatus = -1;
lastProgressStatus = @(-1.0);
imageView = nil;
progressIndicator = nil;
[self startObserving];
}
- (void)dealloc {
[self stopObserving];
}
@end

View file

@ -1,13 +0,0 @@
//
// MediaKeysApplication.h
// Cog
//
// Created by Vincent Spader on 10/3/07.
// Copyright 2007 __MyCompanyName__. All rights reserved.
//
#import <Cocoa/Cocoa.h>
// NSApplication subclass that wires macOS media keys / remote commands
// (play, pause, next, previous, seek) to the application controller.
@interface MediaKeysApplication : NSApplication
@end

View file

@ -1,76 +0,0 @@
//
// MediaKeysApplication.m
// Cog
//
// Created by Vincent Spader on 10/3/07.
// Copyright 2007 __MyCompanyName__. All rights reserved.
//
#import "MediaKeysApplication.h"
#import "AppController.h"
#import "Logging.h"
#import <MediaPlayer/MPMediaItem.h>
#import <MediaPlayer/MPNowPlayingInfoCenter.h>
#import <MediaPlayer/MPRemoteCommand.h>
#import <MediaPlayer/MPRemoteCommandCenter.h>
#import <MediaPlayer/MPRemoteCommandEvent.h>
@implementation MediaKeysApplication {
	AppController *_appController;
}

// After normal launch finishes, grab the delegate (the AppController) and
// register for all the remote/media-key commands we support.
- (void)finishLaunching {
	[super finishLaunching];

	_appController = (AppController *)[self delegate];

	MPRemoteCommandCenter *center = [MPRemoteCommandCenter sharedCommandCenter];

	// Enable each command, then point it at the matching forwarder below.
	center.playCommand.enabled = YES;
	[center.playCommand addTarget:self action:@selector(clickPlay)];

	center.pauseCommand.enabled = YES;
	[center.pauseCommand addTarget:self action:@selector(clickPause)];

	// Toggle is mapped to clickPlay, same as the plain play command.
	center.togglePlayPauseCommand.enabled = YES;
	[center.togglePlayPauseCommand addTarget:self action:@selector(clickPlay)];

	center.stopCommand.enabled = YES;
	[center.stopCommand addTarget:self action:@selector(clickStop)];

	center.changePlaybackPositionCommand.enabled = YES;
	[center.changePlaybackPositionCommand addTarget:self action:@selector(clickSeek:)];

	center.nextTrackCommand.enabled = YES;
	[center.nextTrackCommand addTarget:self action:@selector(clickNext)];

	center.previousTrackCommand.enabled = YES;
	[center.previousTrackCommand addTarget:self action:@selector(clickPrev)];
}

// Each handler forwards to the app controller and reports success.

- (MPRemoteCommandHandlerStatus)clickPlay {
	[_appController clickPlay];
	return MPRemoteCommandHandlerStatusSuccess;
}

- (MPRemoteCommandHandlerStatus)clickPause {
	[_appController clickPause];
	return MPRemoteCommandHandlerStatusSuccess;
}

- (MPRemoteCommandHandlerStatus)clickStop {
	[_appController clickStop];
	return MPRemoteCommandHandlerStatusSuccess;
}

- (MPRemoteCommandHandlerStatus)clickNext {
	[_appController clickNext];
	return MPRemoteCommandHandlerStatusSuccess;
}

- (MPRemoteCommandHandlerStatus)clickPrev {
	[_appController clickPrev];
	return MPRemoteCommandHandlerStatusSuccess;
}

- (MPRemoteCommandHandlerStatus)clickSeek:(MPChangePlaybackPositionCommandEvent *)event {
	[_appController clickSeek:event.positionTime];
	return MPRemoteCommandHandlerStatusSuccess;
}

@end

View file

@ -2,94 +2,51 @@
#import <Cocoa/Cocoa.h>
#import "AppController.h"
#import <Growl/GrowlApplicationBridge.h>
#import "CogAudio/AudioPlayer.h"
#import "CogAudio/Status.h"
#import "TrackingSlider.h"
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <CoreAudio/CoreAudioTypes.h>
#import "EqualizerWindowController.h"
#import "PlaylistEntry.h"
#define DEFAULT_VOLUME_DOWN 5
#define DEFAULT_VOLUME_UP DEFAULT_VOLUME_DOWN
#define DEFAULT_PITCH_DOWN 0.2
#define DEFAULT_PITCH_UP DEFAULT_PITCH_DOWN
#define DEFAULT_TEMPO_DOWN 0.2
#define DEFAULT_TEMPO_UP DEFAULT_TEMPO_DOWN
extern NSString *CogPlaybackDidBeginNotificiation;
extern NSString *CogPlaybackDidPauseNotificiation;
extern NSString *CogPlaybackDidResumeNotificiation;
extern NSString *CogPlaybackDidStopNotificiation;
extern NSDictionary *makeRGInfo(PlaylistEntry *pe);
#import "AudioScrobbler.h"
@class PlaylistController;
@class PlaylistView;
@class PlaylistLoader;
@interface PlaybackController : NSObject {
IBOutlet AppController *appController;
IBOutlet PlaylistController *playlistController;
@interface PlaybackController : NSObject <GrowlApplicationBridgeDelegate>
{
IBOutlet PlaylistController *playlistController;
IBOutlet PlaylistView *playlistView;
IBOutlet PlaylistLoader *playlistLoader;
IBOutlet EqualizerWindowController *equalizerWindowController;
IBOutlet TrackingSlider *positionSlider;
IBOutlet NSSlider *volumeSlider;
IBOutlet NSSlider *pitchSlider;
IBOutlet NSSlider *tempoSlider;
IBOutlet NSButton *lockButton;
IBOutlet NSTextField *timeField;
IBOutlet NSTextField *lengthField;
IBOutlet NSTextField *bitrateField;
IBOutlet NSButton *playButton;
IBOutlet NSArrayController *outputDevices;
NSTimer *positionTimer;
AudioPlayer *audioPlayer;
int playbackStatus;
double currentVolume;
BOOL showTimeRemaining;
AudioScrobbler *scrobbler;
}
CogStatus playbackStatus;
double position;
double lastPosition;
BOOL seekable;
BOOL fading;
// progress bar display
NSProgress *progressOverall;
NSProgress *progressJob;
AudioUnit _eq;
}
@property CogStatus playbackStatus;
@property NSProgress *progressOverall;
@property NSProgress *progressJob;
- (IBAction)toggleShowTimeRemaining:(id)sender;
- (IBAction)changeVolume:(id)sender;
- (IBAction)volumeDown:(id)sender;
- (IBAction)volumeUp:(id)sender;
- (IBAction)changePitch:(id)sender;
- (IBAction)pitchDown:(id)sender;
- (IBAction)pitchUp:(id)sender;
- (IBAction)changeTempo:(id)sender;
- (IBAction)tempoDown:(id)sender;
- (IBAction)tempoUp:(id)sender;
- (IBAction)playPauseResume:(id)sender;
- (IBAction)pauseResume:(id)sender;
- (IBAction)skipToNextAlbum:(id)sender;
- (IBAction)skipToPreviousAlbum:(id)sender;
- (IBAction)play:(id)sender;
- (IBAction)pause:(id)sender;
@ -99,37 +56,12 @@ extern NSDictionary *makeRGInfo(PlaylistEntry *pe);
- (IBAction)next:(id)sender;
- (IBAction)prev:(id)sender;
- (IBAction)seek:(id)sender;
- (IBAction)seek:(id)sender toTime:(NSTimeInterval)time;
- (IBAction)eventSeekForward:(id)sender;
- (void)seekForward:(double)sender;
- (IBAction)eventSeekBackward:(id)sender;
- (void)seekBackward:(double)amount;
- (IBAction)fade:(id)sender;
- (IBAction)spam:(id)sender;
- (void)sendMetaData;
- (void)initDefaults;
- (void)audioFadeDown:(NSTimer *)audioTimer;
- (void)audioFadeUp:(NSTimer *)audioTimer;
- (void)playEntryAtIndex:(NSInteger)i;
- (void)playEntryAtIndex:(NSInteger)i startPaused:(BOOL)paused;
- (void)playEntryAtIndex:(NSInteger)i startPaused:(BOOL)paused andSeekTo:(id)offset;
- (void)updateTimeField:(double)pos;
- (void)playEntryAtIndex:(int)i;
- (void)playEntry:(PlaylistEntry *)pe;
- (void)playEntry:(PlaylistEntry *)pe startPaused:(BOOL)paused;
- (void)playEntry:(PlaylistEntry *)pe startPaused:(BOOL)paused andSeekTo:(id)offset;
// Playlist notifications
- (void)playlistDidChange:(PlaylistController *)p;
// For bindings
- (void)setPosition:(double)p;
- (double)position;
- (void)setSeekable:(BOOL)s;
- (BOOL)seekable;
@end

File diff suppressed because it is too large Load diff

View file

@ -1,22 +0,0 @@
//
// PlaybackEventController.h
// Cog
//
// Created by Vincent Spader on 3/5/09.
// Copyright 2009 __MyCompanyName__. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import <UserNotifications/UserNotifications.h>
#import "PlaybackController.h"
// Bridges playback lifecycle notifications to user-facing notifications,
// supporting both the legacy NSUserNotificationCenter API and the modern
// UNUserNotificationCenter API (macOS 10.14+).
@interface PlaybackEventController
: NSObject <NSUserNotificationCenterDelegate, UNUserNotificationCenterDelegate> {
	IBOutlet PlaybackController *playbackController; // target for "skip" actions
	IBOutlet NSWindow *mainWindow; // raised when a notification is clicked
	IBOutlet NSWindow *miniWindow; // raised instead when "miniMode" is on
}
@end

View file

@ -1,388 +0,0 @@
//
// PlaybackEventController.m
// Cog
//
// Created by Vincent Spader on 3/5/09.
// Copyright 2009 __MyCompanyName__. All rights reserved.
#import "PlaybackEventController.h"
#import "PlaylistEntry.h"
#if 0
NSString *TrackNotification = @"com.apple.iTunes.playerInfo";
NSString *TrackArtist = @"Artist";
NSString *TrackAlbum = @"Album";
NSString *TrackTitle = @"Name";
NSString *TrackGenre = @"Genre";
NSString *TrackNumber = @"Track Number";
NSString *TrackLength = @"Total Time";
NSString *TrackPath = @"Location";
NSString *TrackState = @"Player State";
typedef NS_ENUM(NSInteger, TrackStatus) { TrackPlaying,
TrackPaused,
TrackStopped };
#endif
@implementation PlaybackEventController {
NSOperationQueue *queue; // serial queue; all notification work runs here
PlaylistEntry *entry; // the track we last announced as playing
Boolean didGainUN API_AVAILABLE(macosx(10.14)); // YES once UN authorization granted
}
// Register default values for the notification preference keys.
- (void)initDefaults {
NSDictionary *defaultsDictionary = @{
@"notifications.enable": @YES,
@"notifications.itunes-style": @YES,
@"notifications.show-album-art": @YES
};
[[NSUserDefaults standardUserDefaults] registerDefaults:defaultsDictionary];
}
// On 10.14+ requests UserNotifications authorization and, if granted,
// installs a "play" category with a "Skip" action. Always also installs
// itself as delegate of the legacy NSUserNotificationCenter.
- (id)init {
self = [super init];
if(self) {
[self initDefaults];
didGainUN = NO;
if(@available(macOS 10.14, *)) {
UNUserNotificationCenter *center = [UNUserNotificationCenter currentNotificationCenter];
[center
requestAuthorizationWithOptions:UNAuthorizationOptionAlert
completionHandler:^(BOOL granted, NSError *_Nullable error) {
// Completion runs asynchronously; didGainUN gates UN usage later.
self->didGainUN = granted;
if(granted) {
UNNotificationAction *skipAction = [UNNotificationAction
actionWithIdentifier:@"skip"
title:@"Skip"
options:UNNotificationActionOptionNone];
UNNotificationCategory *playCategory = [UNNotificationCategory
categoryWithIdentifier:@"play"
actions:@[skipAction]
intentIdentifiers:@[]
options:UNNotificationCategoryOptionNone];
[center setNotificationCategories:
[NSSet setWithObject:playCategory]];
}
}];
[center setDelegate:self];
}
// Serial operation queue keeps notification posting ordered.
queue = [[NSOperationQueue alloc] init];
[queue setMaxConcurrentOperationCount:1];
[[NSUserNotificationCenter defaultUserNotificationCenter] setDelegate:self];
entry = nil;
}
return self;
}
// UN delegate: show alerts even while the app is frontmost.
- (void)userNotificationCenter:(UNUserNotificationCenter *)center
willPresentNotification:(UNNotification *)notification
withCompletionHandler:
(void (^)(UNNotificationPresentationOptions options))completionHandler
API_AVAILABLE(macos(10.14)) {
UNNotificationPresentationOptions presentationOptions = UNNotificationPresentationOptionAlert;
completionHandler(presentationOptions);
}
// UN delegate: the "skip" action advances to the next track.
// NOTE(review): completionHandler is not invoked here — the system expects it
// to be called; confirm whether that was intentional.
- (void)userNotificationCenter:(UNUserNotificationCenter *)center
didReceiveNotificationResponse:(UNNotificationResponse *)response
withCompletionHandler:(void (^)(void))completionHandler API_AVAILABLE(macos(10.14)) {
if([[response actionIdentifier] isEqualToString:@"skip"]) {
[playbackController next:self];
}
}
#if 0
- (NSDictionary *)fillNotificationDictionary:(PlaylistEntry *)pe status:(TrackStatus)status {
NSMutableDictionary *dict = [NSMutableDictionary dictionary];
if(pe == nil || pe.deLeted || pe.url == nil) return dict;
[dict setObject:[pe.url absoluteString] forKey:TrackPath];
if(pe.title) [dict setObject:pe.title forKey:TrackTitle];
if(pe.artist) [dict setObject:pe.artist forKey:TrackArtist];
if(pe.album) [dict setObject:pe.album forKey:TrackAlbum];
if(pe.genre) [dict setObject:pe.genre forKey:TrackGenre];
if(pe.track)
[dict setObject:pe.trackText forKey:TrackNumber];
if(pe.length)
[dict setObject:@((NSInteger)([pe.length doubleValue] * 1000.0))
forKey:TrackLength];
NSString *state = nil;
switch(status) {
case TrackPlaying:
state = @"Playing";
break;
case TrackPaused:
state = @"Paused";
break;
case TrackStopped:
state = @"Stopped";
break;
default:
break;
}
[dict setObject:state forKey:TrackState];
return dict;
}
#endif
// Posts the "Now Playing" notification for a newly started track, unless the
// entry errored or notifications are disabled. Uses UserNotifications when
// available and authorized, otherwise the legacy NSUserNotification path.
- (void)performPlaybackDidBeginActions:(PlaylistEntry *)pe {
if(NO == [pe error]) {
entry = pe;
#if 0
[[NSDistributedNotificationCenter defaultCenter]
postNotificationName:TrackNotification
object:nil
userInfo:[self fillNotificationDictionary:pe status:TrackPlaying]
deliverImmediately:YES];
#endif
NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
if([defaults boolForKey:@"notifications.enable"]) {
if(@available(macOS 10.14, *)) {
if(didGainUN) {
UNUserNotificationCenter *center =
[UNUserNotificationCenter currentNotificationCenter];
UNMutableNotificationContent *content =
[[UNMutableNotificationContent alloc] init];
content.title = @"Now Playing";
// Subtitle is "artist - album", or whichever is present, or empty.
NSString *subtitle;
NSString *artist = (pe.artist && [pe.artist length]) ? pe.artist : nil;
NSString *album = (pe.album && [pe.album length]) ? pe.album : nil;
if(artist && album) {
subtitle = [NSString stringWithFormat:@"%@ - %@", artist, album];
} else if(artist) {
subtitle = artist;
} else if(album) {
subtitle = album;
} else {
subtitle = @"";
}
NSString *body = [NSString stringWithFormat:@"%@\n%@", [pe title], subtitle];
content.body = body;
content.sound = nil;
content.categoryIdentifier = @"play";
if([defaults boolForKey:@"notifications.show-album-art"] &&
[pe albumArt]) {
// UN attachments must be files on disk, so the album art is
// re-encoded as a JPEG into a temp cache directory first.
NSError *error = nil;
NSFileManager *fileManager = [NSFileManager defaultManager];
NSURL *tmpSubFolderURL = [[NSURL fileURLWithPath:NSTemporaryDirectory()]
URLByAppendingPathComponent:@"cog-artworks-cache"
isDirectory:true];
if([fileManager createDirectoryAtPath:[tmpSubFolderURL path]
withIntermediateDirectories:true
attributes:nil
error:&error]) {
NSString *tmpFileName =
[[NSProcessInfo.processInfo globallyUniqueString]
stringByAppendingString:@".jpg"];
NSURL *fileURL =
[tmpSubFolderURL URLByAppendingPathComponent:tmpFileName];
NSImage *image = [pe albumArt];
CGImageRef cgRef = [image CGImageForProposedRect:NULL
context:nil
hints:nil];
if(cgRef) {
NSBitmapImageRep *newRep =
[[NSBitmapImageRep alloc] initWithCGImage:cgRef];
NSData *jpgData = [newRep
representationUsingType:NSBitmapImageFileTypeJPEG
properties:@{ NSImageCompressionFactor: @0.5f }];
[jpgData writeToURL:fileURL atomically:YES];
UNNotificationAttachment *icon =
[UNNotificationAttachment attachmentWithIdentifier:@"art"
URL:fileURL
options:nil
error:&error];
if(error) {
// We have size limit of 10MB per image attachment.
NSLog(@"%@", error.localizedDescription);
} else {
content.attachments = @[icon];
}
}
}
}
UNNotificationRequest *request =
[UNNotificationRequest requestWithIdentifier:@"PlayTrack"
content:content
trigger:nil];
// NOTE(review): this logs error.localizedDescription even when
// error is nil (prints "(null)" on success) — harmless but noisy.
[center addNotificationRequest:request
withCompletionHandler:^(NSError *_Nullable error) {
NSLog(@"%@", error.localizedDescription);
}];
}
} else {
// Legacy pre-10.14 path via NSUserNotification. The "_showsButtons"
// and "_identityImage" keys are private API used for iTunes-style
// notifications.
NSUserNotification *notif = [[NSUserNotification alloc] init];
notif.title = [pe title];
NSString *subtitle;
NSString *artist = (pe.artist && [pe.artist length]) ? pe.artist : nil;
NSString *album = (pe.album && [pe.album length]) ? pe.album : nil;
if(artist && album) {
subtitle = [NSString stringWithFormat:@"%@ - %@", artist, album];
} else if(artist) {
subtitle = artist;
} else if(album) {
subtitle = album;
} else {
subtitle = @"";
}
if([defaults boolForKey:@"notifications.itunes-style"]) {
notif.subtitle = subtitle;
[notif setValue:@YES forKey:@"_showsButtons"];
} else {
notif.informativeText = subtitle;
}
if([notif respondsToSelector:@selector(setContentImage:)]) {
if([defaults boolForKey:@"notifications.show-album-art"] &&
[pe albumArtInternal]) {
NSImage *image = [pe albumArt];
if([defaults boolForKey:@"notifications.itunes-style"]) {
[notif setValue:image forKey:@"_identityImage"];
} else {
notif.contentImage = image;
}
}
}
notif.actionButtonTitle = NSLocalizedString(@"SkipAction", @"");
[[NSUserNotificationCenter defaultUserNotificationCenter]
scheduleNotification:notif];
}
}
}
}
// Pause/resume/stop actions: currently no-ops (the distributed-notification
// broadcasts are compiled out), but stop clears the remembered entry.
- (void)performPlaybackDidPauseActions {
#if 0
[[NSDistributedNotificationCenter defaultCenter]
postNotificationName:TrackNotification
object:nil
userInfo:[self fillNotificationDictionary:entry status:TrackPaused]
deliverImmediately:YES];
#endif
}
- (void)performPlaybackDidResumeActions {
#if 0
[[NSDistributedNotificationCenter defaultCenter]
postNotificationName:TrackNotification
object:nil
userInfo:[self fillNotificationDictionary:entry status:TrackPlaying]
deliverImmediately:YES];
#endif
}
- (void)performPlaybackDidStopActions {
#if 0
[[NSDistributedNotificationCenter defaultCenter]
postNotificationName:TrackNotification
object:nil
userInfo:[self fillNotificationDictionary:entry status:TrackStopped]
deliverImmediately:YES];
#endif
entry = nil;
}
// Subscribe to the four playback lifecycle notifications.
- (void)awakeFromNib {
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playbackDidBegin:)
name:CogPlaybackDidBeginNotificiation
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playbackDidPause:)
name:CogPlaybackDidPauseNotificiation
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playbackDidResume:)
name:CogPlaybackDidResumeNotificiation
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playbackDidStop:)
name:CogPlaybackDidStopNotificiation
object:nil];
}
// Each notification handler enqueues its work on the serial queue so events
// are processed in arrival order off the posting thread.
- (void)playbackDidBegin:(NSNotification *)notification {
NSOperation *op = [NSBlockOperation blockOperationWithBlock:^{
[self performPlaybackDidBeginActions:(PlaylistEntry *)[notification object]];
}];
[queue addOperation:op];
}
- (void)playbackDidPause:(NSNotification *)notification {
NSOperation *op = [NSBlockOperation blockOperationWithBlock:^{
[self performPlaybackDidPauseActions];
}];
[queue addOperation:op];
}
- (void)playbackDidResume:(NSNotification *)notification {
NSOperation *op = [NSBlockOperation blockOperationWithBlock:^{
[self performPlaybackDidResumeActions];
}];
[queue addOperation:op];
}
- (void)playbackDidStop:(NSNotification *)notification {
NSOperation *op = [NSBlockOperation blockOperationWithBlock:^{
[self performPlaybackDidStopActions];
}];
[queue addOperation:op];
}
// Legacy-notification delegate: action button skips; clicking the body
// raises the appropriate window (mini or main, per the "miniMode" default).
- (void)userNotificationCenter:(NSUserNotificationCenter *)center
didActivateNotification:(NSUserNotification *)notification {
switch(notification.activationType) {
case NSUserNotificationActivationTypeActionButtonClicked:
[playbackController next:self];
break;
case NSUserNotificationActivationTypeContentsClicked: {
NSWindow *window = [[NSUserDefaults standardUserDefaults] boolForKey:@"miniMode"] ? miniWindow : mainWindow;
[NSApp activateIgnoringOtherApps:YES];
[window makeKeyAndOrderFront:self];
}; break;
default:
break;
}
}
@end

View file

@ -1,19 +0,0 @@
//
// ScriptAdditions.h
// Cog
//
// Created by Christopher Snowhill on 2/21/22.
//
#ifndef ScriptAdditions_h
#define ScriptAdditions_h
// AppleScript command handlers: each forwards a scripting command to the
// application delegate's corresponding playback action.
@interface NSApplication (APLApplicationExtensions)
- (id)playbackStart:(NSScriptCommand *)command;
- (id)playbackPause:(NSScriptCommand *)command;
- (id)playbackStop:(NSScriptCommand *)command;
- (id)playbackPrevious:(NSScriptCommand *)command;
- (id)playbackNext:(NSScriptCommand *)command;
@end
#endif /* ScriptAdditions_h */

View file

@ -1,33 +0,0 @@
//
// ScriptAdditions.m
// Cog
//
// Created by Christopher Snowhill on 2/21/22.
//
#import <Cocoa/Cocoa.h>
#import "AppController.h"
@implementation NSApplication (APLApplicationExtensions)

// Every scripting command below forwards to the app delegate and reports
// success (@YES) back to the scripting engine.

- (id)playbackStart:(NSScriptCommand *)command {
	AppController *controller = (AppController *)[NSApp delegate];
	[controller clickPlay];
	return @YES;
}

- (id)playbackPause:(NSScriptCommand *)command {
	AppController *controller = (AppController *)[NSApp delegate];
	[controller clickPause];
	return @YES;
}

- (id)playbackStop:(NSScriptCommand *)command {
	AppController *controller = (AppController *)[NSApp delegate];
	[controller clickStop];
	return @YES;
}

- (id)playbackPrevious:(NSScriptCommand *)command {
	AppController *controller = (AppController *)[NSApp delegate];
	[controller clickPrev];
	return @YES;
}

- (id)playbackNext:(NSScriptCommand *)command {
	AppController *controller = (AppController *)[NSApp delegate];
	[controller clickNext];
	return @YES;
}

@end

View file

@ -1,18 +0,0 @@
//
// AudioContainer.h
// CogAudio
//
// Created by Zaphod Beeblebrox on 10/8/07.
// Copyright 2007 __MyCompanyName__. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import <CogAudio/Plugin.h>
// Thin class-method facade over the shared PluginController for expanding
// container URLs (e.g. playlists/archives) into their member track URLs.
@interface AudioContainer : NSObject {
}
+ (NSArray *)urlsForContainerURL:(NSURL *)url;
+ (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url;
@end

View file

@ -1,27 +0,0 @@
//
// AudioContainer.m
// CogAudio
//
// Created by Zaphod Beeblebrox on 10/8/07.
// Copyright 2007 __MyCompanyName__. All rights reserved.
//
#import "AudioContainer.h"
#import "PluginController.h"
@implementation AudioContainer

// Both methods delegate straight to the shared plugin controller; the
// autorelease pool bounds any temporaries created while parsing containers.

+ (NSArray *)urlsForContainerURL:(NSURL *)url {
	@autoreleasepool {
		PluginController *plugins = [PluginController sharedPluginController];
		return [plugins urlsForContainerURL:url];
	}
}

+ (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url {
	@autoreleasepool {
		PluginController *plugins = [PluginController sharedPluginController];
		return [plugins dependencyUrlsForContainerURL:url];
	}
}

@end

View file

@ -8,12 +8,11 @@
#import <Cocoa/Cocoa.h>
#import <CogAudio/Plugin.h>
#import "Plugin.h"
@interface AudioDecoder : NSObject {
}
+ (id<CogDecoder>)audioDecoderForSource:(id<CogSource>)source;
+ (id<CogDecoder>)audioDecoderForSource:(id<CogSource>)source skipCue:(BOOL)skip;
+ (id<CogDecoder>)audioDecoderForURL:(NSURL *)url;
@end

View file

@ -12,12 +12,15 @@
@implementation AudioDecoder
+ (id<CogDecoder>)audioDecoderForSource:(id<CogSource>)source {
return [[PluginController sharedPluginController] audioDecoderForSource:source skipCue:NO];
}
+ (id<CogDecoder>)audioDecoderForSource:(id<CogSource>)source skipCue:(BOOL)skip {
return [[PluginController sharedPluginController] audioDecoderForSource:source skipCue:skip];
+ (id<CogDecoder>) audioDecoderForURL:(NSURL *)url
{
NSString *ext = [[url path] pathExtension];
NSDictionary *decoders = [[PluginController sharedPluginController] decoders];
Class decoder = NSClassFromString([decoders objectForKey:[ext lowercaseString]]);
return [[[decoder alloc] init] autorelease];
}
@end

View file

@ -8,10 +8,11 @@
#import <Cocoa/Cocoa.h>
@interface AudioMetadataReader : NSObject {
}
+ (NSDictionary *)metadataForURL:(NSURL *)url;
+ (NSDictionary *)metadataForURL:(NSURL *)url skipCue:(BOOL)skip;
@end

View file

@ -11,16 +11,16 @@
@implementation AudioMetadataReader
+ (NSDictionary *)metadataForURL:(NSURL *)url {
@autoreleasepool {
return [[PluginController sharedPluginController] metadataForURL:url skipCue:NO];
}
}
+ (NSDictionary *)metadataForURL:(NSURL *)url
{
NSString *ext = [[url path] pathExtension];
NSDictionary *metadataReaders = [[PluginController sharedPluginController] metadataReaders];
Class metadataReader = NSClassFromString([metadataReaders objectForKey:[ext lowercaseString]]);
return [[[[metadataReader alloc] init] autorelease] metadataForURL:url];
+ (NSDictionary *)metadataForURL:(NSURL *)url skipCue:(BOOL)skip {
@autoreleasepool {
return [[PluginController sharedPluginController] metadataForURL:url skipCue:skip];
}
}
@end

View file

@ -1,16 +0,0 @@
//
// AudioMetadataWriter.h
// CogAudio
//
// Created by Safari on 08/11/18.
// Copyright 2008 __MyCompanyName__. All rights reserved.
//
#import <Cocoa/Cocoa.h>
// Facade for writing metadata back to a file via the shared PluginController.
@interface AudioMetadataWriter : NSObject {
}
// Returns an int status code from the underlying plugin controller —
// presumably 0 on success; confirm against PluginController.
+ (int)putMetadataInURL:(NSURL *)url;
@end

View file

@ -1,16 +0,0 @@
//
// AudioMetadataWriter.m
// CogAudio
//
// Created by Safari on 08/11/18.
// Copyright 2008 __MyCompanyName__. All rights reserved.
//
#import "AudioMetadataWriter.h"
#import "PluginController.h"
@implementation AudioMetadataWriter

// Forward the metadata write to the shared plugin controller and return its
// status code unchanged.
+ (int)putMetadataInURL:(NSURL *)url {
	PluginController *plugins = [PluginController sharedPluginController];
	return [plugins putMetadataInURL:url];
}

@end

View file

@ -8,50 +8,22 @@
#import <Cocoa/Cocoa.h>
#import <CogAudio/CogSemaphore.h>
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <CoreAudio/CoreAudio.h>
#import <CoreAudio/CoreAudioTypes.h>
#import <stdatomic.h>
@class BufferChain;
@class OutputNode;
@interface AudioPlayer : NSObject {
@interface AudioPlayer : NSObject
{
BufferChain *bufferChain;
OutputNode *output;
double volume;
double pitch;
double tempo;
NSMutableArray *chainQueue;
NSURL *nextStream;
id nextStreamUserInfo;
NSDictionary *nextStreamRGInfo;
id previousUserInfo; // Track currently last heard track for play counts
id delegate;
BOOL outputLaunched;
BOOL endOfInputReached;
BOOL startedPaused;
BOOL initialBufferFilled;
Semaphore *semaphore;
atomic_bool resettingNow;
atomic_int refCount;
int currentPlaybackStatus;
BOOL shouldContinue;
}
- (id)init;
@ -60,84 +32,49 @@
- (id)delegate;
- (void)play:(NSURL *)url;
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi;
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused;
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused andSeekTo:(double)time;
- (void)playBG:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(NSNumber *)paused andSeekTo:(NSNumber *)time;
- (void)play:(NSURL *)url withUserInfo:(id)userInfo;
- (void)stop;
- (void)pause;
- (void)resume;
- (void)seekToTime:(double)time;
- (void)seekToTimeBG:(NSNumber *)time;
- (void)setVolume:(double)v;
- (double)volume;
- (double)volumeUp:(double)amount;
- (double)volumeDown:(double)amount;
- (double)amountPlayed;
- (double)amountPlayedInterval;
- (void)setNextStream:(NSURL *)url;
- (void)setNextStream:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi;
- (void)resetNextStreams;
- (void)restartPlaybackAtCurrentPosition;
- (void)pushInfo:(NSDictionary *)info toTrack:(id)userInfo;
- (void)setNextStream:(NSURL *)url withUserInfo:(id)userInfo;
+ (NSArray *)fileTypes;
+ (NSArray *)schemes;
+ (NSArray *)containerTypes;
@end
@interface AudioPlayer (Private) // Dont use this stuff!
@interface AudioPlayer (Private) //Dont use this stuff!
- (OutputNode *)output;
- (BufferChain *)bufferChain;
- (OutputNode *) output;
- (BufferChain *) bufferChain;
- (id)initWithDelegate:(id)d;
- (void)setPlaybackStatus:(int)status waitUntilDone:(BOOL)wait;
- (void)setPlaybackStatus:(int)s;
- (void)requestNextStream:(id)userInfo;
- (void)requestNextStreamMainThread:(id)userInfo;
- (void)notifyStreamChanged:(id)userInfo;
- (void)notifyStreamChangedMainThread:(id)userInfo;
- (void)beginEqualizer:(AudioUnit)eq;
- (void)refreshEqualizer:(AudioUnit)eq;
- (void)endEqualizer:(AudioUnit)eq;
- (BOOL)endOfInputReached:(BufferChain *)sender;
- (void)endOfInputReached:(BufferChain *)sender;
- (void)setShouldContinue:(BOOL)s;
//- (BufferChain *)bufferChain;
- (BufferChain *)bufferChain;
- (void)launchOutputThread;
- (BOOL)selectNextBuffer;
- (void)endOfInputPlayed;
- (void)reportPlayCount;
- (void)sendDelegateMethod:(SEL)selector withVoid:(void *)obj waitUntilDone:(BOOL)wait;
- (void)sendDelegateMethod:(SEL)selector withObject:(id)obj waitUntilDone:(BOOL)wait;
- (void)sendDelegateMethod:(SEL)selector withObject:(id)obj withObject:(id)obj2 waitUntilDone:(BOOL)wait;
- (BOOL)chainQueueHasTracks;
@end
@protocol AudioPlayerDelegate
- (void)audioPlayer:(AudioPlayer *)player willEndStream:(id)userInfo; // You must use setNextStream in this method
- (void)audioPlayer:(AudioPlayer *)player didBeginStream:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player didChangeStatus:(id)status userInfo:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player didStopNaturally:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player displayEqualizer:(AudioUnit)eq;
- (void)audioPlayer:(AudioPlayer *)player refreshEqualizer:(AudioUnit)eq;
- (void)audioPlayer:(AudioPlayer *)player removeEqualizer:(AudioUnit)eq;
- (void)audioPlayer:(AudioPlayer *)player sustainHDCD:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player restartPlaybackAtCurrentPosition:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player pushInfo:(NSDictionary *)info toTrack:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player reportPlayCountForTrack:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player updatePosition:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player setError:(NSNumber *)status toTrack:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player requestNextStream:(id)userInfo; //You must use setNextStream in this method
- (void)audioPlayer:(AudioPlayer *)player streamChanged:(id)userInfo;
- (void)audioPlayer:(AudioPlayer *)player changedStatus:(id)status;
@end

View file

@ -1,4 +1,4 @@
//
// AudioController.m
// Cog
//
@ -8,39 +8,30 @@
#import "AudioPlayer.h"
#import "BufferChain.h"
#import "Helper.h"
#import "OutputNode.h"
#import "PluginController.h"
#import "Status.h"
#import "PluginController.h"
#import "Logging.h"
@implementation AudioPlayer
- (id)init {
- (id)init
{
self = [super init];
if(self) {
if (self)
{
output = NULL;
bufferChain = nil;
bufferChain = NULL;
outputLaunched = NO;
endOfInputReached = NO;
// Safety
pitch = 1.0;
tempo = 1.0;
chainQueue = [[NSMutableArray alloc] init];
semaphore = [[Semaphore alloc] init];
atomic_init(&resettingNow, false);
atomic_init(&refCount, 0);
}
return self;
}
- (void)setDelegate:(id)d {
- (void)setDelegate:(id)d
{
delegate = d;
}
@ -48,656 +39,260 @@
return delegate;
}
- (void)play:(NSURL *)url {
[self play:url withUserInfo:nil withRGInfo:nil startPaused:NO andSeekTo:0.0];
- (void)play:(NSURL *)url
{
[self play:url withUserInfo:nil];
}
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi {
[self play:url withUserInfo:userInfo withRGInfo:rgi startPaused:NO andSeekTo:0.0];
}
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused {
[self play:url withUserInfo:userInfo withRGInfo:rgi startPaused:paused andSeekTo:0.0];
}
- (void)playBG:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(NSNumber *)paused andSeekTo:(NSNumber *)time {
@synchronized (self) {
[self play:url withUserInfo:userInfo withRGInfo:rgi startPaused:[paused boolValue] andSeekTo:[time doubleValue]];
- (void)play:(NSURL *)url withUserInfo:(id)userInfo
{
if (output)
{
[output release];
}
}
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused andSeekTo:(double)time {
[self play:url withUserInfo:userInfo withRGInfo:rgi startPaused:paused andSeekTo:time andResumeInterval:NO];
}
- (void)play:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi startPaused:(BOOL)paused andSeekTo:(double)time andResumeInterval:(BOOL)resumeInterval {
ALog(@"Opening file for playback: %@ at seek offset %f%@", url, time, (paused) ? @", starting paused" : @"");
[self waitUntilCallbacksExit];
if(output) {
[output fadeOutBackground];
output = [[OutputNode alloc] initWithController:self previous:nil];
[output setup];
NSEnumerator *enumerator = [chainQueue objectEnumerator];
id anObject;
while (anObject = [enumerator nextObject])
{
[anObject setShouldContinue:NO];
}
if(!output) {
output = [[OutputNode alloc] initWithController:self previous:nil];
[output setupWithInterval:resumeInterval];
}
[output setVolume:volume];
@synchronized(chainQueue) {
for(id anObject in chainQueue) {
[anObject setShouldContinue:NO];
}
[chainQueue removeAllObjects];
endOfInputReached = NO;
if(bufferChain) {
[bufferChain setShouldContinue:NO];
[chainQueue removeAllObjects];
if (bufferChain)
{
[bufferChain setShouldContinue:NO];
bufferChain = nil;
}
[bufferChain release];
}
bufferChain = [[BufferChain alloc] initWithController:self];
if(!resumeInterval) {
[self notifyStreamChanged:userInfo];
}
while(![bufferChain open:url withOutputFormat:[output format] withUserInfo:userInfo withRGInfo:rgi]) {
while (![bufferChain open:url withOutputFormat:[output format]])
{
[bufferChain release];
bufferChain = nil;
[self requestNextStream:userInfo];
if([nextStream isEqualTo:url]) {
return;
}
[self requestNextStream: userInfo];
url = nextStream;
if(url == nil) {
if (url == nil)
{
return;
}
userInfo = nextStreamUserInfo;
rgi = nextStreamRGInfo;
[self notifyStreamChanged:userInfo];
bufferChain = [[BufferChain alloc] initWithController:self];
}
if(time > 0.0) {
[output seek:time];
[bufferChain seek:time];
}
[bufferChain setUserInfo:userInfo];
[self setShouldContinue:YES];
if(!resumeInterval) {
outputLaunched = NO;
}
startedPaused = paused;
initialBufferFilled = NO;
previousUserInfo = userInfo;
outputLaunched = NO;
[bufferChain launchThreads];
if(paused) {
[self setPlaybackStatus:CogStatusPaused waitUntilDone:YES];
if(time > 0.0) {
[self updatePosition:userInfo];
}
} else if(resumeInterval) {
[output fadeIn];
}
}
- (void)stop {
// Set shouldoContinue to NO on all things
- (void)stop
{
//Set shouldoContinue to NO on allll things
[self setShouldContinue:NO];
[self setPlaybackStatus:CogStatusStopped waitUntilDone:YES];
@synchronized(chainQueue) {
for(id anObject in chainQueue) {
[anObject setShouldContinue:NO];
}
[chainQueue removeAllObjects];
endOfInputReached = NO;
if(bufferChain) {
bufferChain = nil;
}
}
if(output) {
[output setShouldContinue:NO];
[output close];
}
output = nil;
[self setPlaybackStatus:kCogStatusStopped];
}
- (void)pause {
[output fadeOut];
- (void)pause
{
[output pause];
[self setPlaybackStatus:CogStatusPaused waitUntilDone:YES];
[self setPlaybackStatus:kCogStatusPaused];
}
- (void)resume {
if(startedPaused) {
startedPaused = NO;
if(initialBufferFilled)
[self launchOutputThread];
}
[output fadeIn];
- (void)resume
{
[output resume];
[self setPlaybackStatus:CogStatusPlaying waitUntilDone:YES];
[self setPlaybackStatus:kCogStatusPlaying];
}
- (void)seekToTimeBG:(NSNumber *)time {
[self seekToTime:[time doubleValue]];
- (void)seekToTime:(double)time
{
//Need to reset everything's buffers, and then seek?
/*HACK TO TEST HOW WELL THIS WOULD WORK*/
[output seek:time];
[bufferChain seek:time];
/*END HACK*/
}
// Seeks by restarting playback of the current stream at the requested time.
// Captures the stream's identity (URL, playlist userInfo, replay-gain dict)
// under the chainQueue lock, then re-enters play:... with startPaused
// preserving the current paused state and andResumeInterval:YES requesting
// a fade-in resume rather than a cold start.
- (void)seekToTime:(double)time {
	// Snapshot outside the lock; only used to decide whether to restart paused.
	CogStatus status = (CogStatus)currentPlaybackStatus;
	NSURL *url;
	id userInfo;
	NSDictionary *rgi;
	@synchronized(chainQueue) {
		// bufferChain describes the currently playing stream.
		url = [bufferChain streamURL];
		userInfo = [bufferChain userInfo];
		rgi = [bufferChain rgInfo];
	}
	[self play:url withUserInfo:userInfo withRGInfo:rgi startPaused:(status == CogStatusPaused) andSeekTo:time andResumeInterval:YES];
}
- (void)setVolume:(double)v {
volume = v;
- (void)setVolume:(double)v
{
[output setVolume:v];
}
- (double)volume {
return volume;
- (void)setNextStream:(NSURL *)url
{
[self setNextStream:url withUserInfo:nil];
}
// This is called by the delegate DURING a requestNextStream request.
- (void)setNextStream:(NSURL *)url {
[self setNextStream:url withUserInfo:nil withRGInfo:nil];
}
- (void)setNextStream:(NSURL *)url withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi {
- (void)setNextStream:(NSURL *)url withUserInfo:(id)userInfo
{
[url retain];
[nextStream release];
nextStream = url;
[userInfo retain];
[nextStreamUserInfo release];
nextStreamUserInfo = userInfo;
nextStreamRGInfo = rgi;
}
// Called when the playlist changed before we actually started playing a requested stream. We will re-request.
- (void)resetNextStreams {
[self waitUntilCallbacksExit];
@synchronized(chainQueue) {
for(id anObject in chainQueue) {
[anObject setShouldContinue:NO];
}
[chainQueue removeAllObjects];
if(endOfInputReached) {
[self endOfInputReached:bufferChain];
}
}
// Propagates the continue flag to both ends of the pipeline: the decode
// chain and the output node. Passing NO asks their worker threads to wind
// down.
- (void)setShouldContinue:(BOOL)s
{
	[bufferChain setShouldContinue:s];
	[output setShouldContinue:s];
}
// Asks the delegate (asynchronously) to restart playback of the last-known
// track at its current position; previousUserInfo identifies that track.
- (void)restartPlaybackAtCurrentPosition {
	[self sendDelegateMethod:@selector(audioPlayer:restartPlaybackAtCurrentPosition:) withObject:previousUserInfo waitUntilDone:NO];
}
// Notifies the delegate (asynchronously) that the given track's playback
// position should be refreshed in the UI.
- (void)updatePosition:(id)userInfo {
	[self sendDelegateMethod:@selector(audioPlayer:updatePosition:) withObject:userInfo waitUntilDone:NO];
}
// Forwards updated metadata (asynchronously) for a specific track to the
// delegate via the two-object dispatch variant.
- (void)pushInfo:(NSDictionary *)info toTrack:(id)userInfo {
	[self sendDelegateMethod:@selector(audioPlayer:pushInfo:toTrack:) withObject:info withObject:userInfo waitUntilDone:NO];
}
// Asks the delegate (asynchronously) to bump the play count for the given
// track.
- (void)reportPlayCountForTrack:(id)userInfo {
	[self sendDelegateMethod:@selector(audioPlayer:reportPlayCountForTrack:) withObject:userInfo waitUntilDone:NO];
}
// Records the continue flag on the player itself, then propagates it to the
// active decode chain and output node when they exist.
- (void)setShouldContinue:(BOOL)shouldRun {
	shouldContinue = shouldRun;
	if(bufferChain) {
		[bufferChain setShouldContinue:shouldRun];
	}
	if(output) {
		[output setShouldContinue:shouldRun];
	}
}
- (double)amountPlayed {
- (double)amountPlayed
{
return [output amountPlayed];
}
// Returns the output node's interval play counter (seconds played since the
// interval marker); the player only forwards the query.
- (double)amountPlayedInterval {
	return [output amountPlayedInterval];
}
- (void)launchOutputThread {
initialBufferFilled = YES;
if(outputLaunched == NO && startedPaused == NO) {
[self setPlaybackStatus:CogStatusPlaying];
- (void)launchOutputThread
{
[self setPlaybackStatus:kCogStatusPlaying];
if (outputLaunched == NO) {
[output launchThread];
outputLaunched = YES;
}
else {
[self setShouldContinue:YES];
[output resume];
}
}
// Synchronously asks the delegate for the next stream; the delegate is
// expected to answer by calling setNextStream:... during this callback
// (hence waitUntilDone:YES).
- (void)requestNextStream:(id)userInfo {
	[self sendDelegateMethod:@selector(audioPlayer:willEndStream:) withObject:userInfo waitUntilDone:YES];
}
// Synchronously informs the delegate that playback moved to a new stream.
- (void)notifyStreamChanged:(id)userInfo {
	[self sendDelegateMethod:@selector(audioPlayer:didBeginStream:) withObject:userInfo waitUntilDone:YES];
}
// Asynchronously informs the delegate that playback stopped on its own
// (end of playlist), as opposed to an explicit stop request.
- (void)notifyPlaybackStopped:(id)userInfo {
	[self sendDelegateMethod:@selector(audioPlayer:didStopNaturally:) withObject:userInfo waitUntilDone:NO];
}
// Hands the equalizer AudioUnit to the delegate for display. AudioUnit is a
// raw pointer, so this goes through the withVoid: dispatch variant;
// synchronous because the delegate configures the unit before audio flows.
- (void)beginEqualizer:(AudioUnit)eq {
	[self sendDelegateMethod:@selector(audioPlayer:displayEqualizer:) withVoid:eq waitUntilDone:YES];
}
// Synchronously asks the delegate to re-apply its settings to the equalizer
// AudioUnit (raw pointer, so withVoid: dispatch).
- (void)refreshEqualizer:(AudioUnit)eq {
	[self sendDelegateMethod:@selector(audioPlayer:refreshEqualizer:) withVoid:eq waitUntilDone:YES];
}
// Synchronously tells the delegate to detach from the equalizer AudioUnit
// before it is torn down (raw pointer, so withVoid: dispatch).
- (void)endEqualizer:(AudioUnit)eq {
	[self sendDelegateMethod:@selector(audioPlayer:removeEqualizer:) withVoid:eq waitUntilDone:YES];
}
// Activates a freshly-built buffer chain and appends it to the pending-chain
// queue. Callers hold @synchronized(chainQueue) around this call (see the
// call sites in endOfInputReached:); this method does not lock on its own.
- (void)addChainToQueue:(BufferChain *)newChain {
	[newChain setShouldContinue:YES];
	[newChain launchThreads];
	// Append; equivalent to insertObject:atIndex:[chainQueue count] but
	// states the intent directly.
	[chainQueue addObject:newChain];
}
- (BOOL)endOfInputReached:(BufferChain *)sender // Sender is a BufferChain
- (void)requestNextStream:(id)userInfo
{
previousUserInfo = [sender userInfo];
[self sendDelegateMethod:@selector(audioPlayer:requestNextStream:) withObject:userInfo waitUntilDone:YES];
}
- (void)notifyStreamChanged:(id)userInfo
{
[self sendDelegateMethod:@selector(audioPlayer:streamChanged:) withObject:userInfo waitUntilDone:NO];
}
- (void)endOfInputReached:(BufferChain *)sender //Sender is a BufferChain
{
BufferChain *newChain = nil;
if(atomic_load_explicit(&resettingNow, memory_order_relaxed))
return YES;
atomic_fetch_add(&refCount, 1);
@synchronized(chainQueue) {
// No point in constructing new chain for the next playlist entry
// if there's already one at the head of chainQueue... r-r-right?
for(BufferChain *chain in chainQueue) {
if([chain isRunning]) {
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1);
return YES;
}
}
// We don't want to do this, it may happen with a lot of short files
// if ([chainQueue count] >= 5)
//{
// return YES;
//}
}
double duration = 0.0;
@synchronized(chainQueue) {
for(BufferChain *chain in chainQueue) {
duration += [chain secondsBuffered];
}
}
while(duration >= 30.0 && shouldContinue) {
[semaphore wait];
if(atomic_load_explicit(&resettingNow, memory_order_relaxed)) {
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1);
return YES;
}
@synchronized(chainQueue) {
duration = 0.0;
for(BufferChain *chain in chainQueue) {
duration += [chain secondsBuffered];
}
}
}
nextStreamUserInfo = [sender userInfo];
nextStreamRGInfo = [sender rgInfo];
// This call can sometimes lead to invoking a chainQueue block on another thread
[self requestNextStream:nextStreamUserInfo];
if(!nextStream) {
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1);
return YES;
}
BufferChain *lastChain;
@synchronized(chainQueue) {
newChain = [[BufferChain alloc] initWithController:self];
endOfInputReached = YES;
lastChain = [chainQueue lastObject];
if(lastChain == nil) {
lastChain = bufferChain;
}
}
BOOL pathsEqual = NO;
if([nextStream isFileURL] && [[lastChain streamURL] isFileURL]) {
NSString *unixPathNext = [nextStream path];
NSString *unixPathPrev = [[lastChain streamURL] path];
if([unixPathNext isEqualToString:unixPathPrev])
pathsEqual = YES;
} else if(![nextStream isFileURL] && ![[lastChain streamURL] isFileURL]) {
@try {
NSURL *lastURL = [lastChain streamURL];
NSString *nextScheme = [nextStream scheme];
NSString *lastScheme = [lastURL scheme];
NSString *nextHost = [nextStream host];
NSString *lastHost = [lastURL host];
NSString *nextPath = [nextStream path];
NSString *lastPath = [lastURL path];
if(nextScheme && lastScheme && [nextScheme isEqualToString:lastScheme]) {
if((!nextHost && !lastHost) ||
(nextHost && lastHost && [nextHost isEqualToString:lastHost])) {
if(nextPath && lastPath && [nextPath isEqualToString:lastPath]) {
pathsEqual = YES;
}
}
}
}
@catch(NSException *e) {
DLog(@"Exception thrown checking file match: %@", e);
}
}
if(pathsEqual) {
if([lastChain setTrack:nextStream] && [newChain openWithInput:[lastChain inputNode] withOutputFormat:[output format] withUserInfo:nextStreamUserInfo withRGInfo:nextStreamRGInfo]) {
[newChain setStreamURL:nextStream];
@synchronized(chainQueue) {
[self addChainToQueue:newChain];
}
DLog(@"TRACK SET!!! %@", newChain);
// Keep on-playin
newChain = nil;
atomic_fetch_sub(&refCount, 1);
return NO;
}
}
lastChain = nil;
NSURL *url = nextStream;
while(shouldContinue && ![newChain open:url withOutputFormat:[output format] withUserInfo:nextStreamUserInfo withRGInfo:nextStreamRGInfo]) {
if(nextStream == nil) {
newChain = nil;
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1);
return YES;
}
newChain = nil;
[self requestNextStream:nextStreamUserInfo];
if([nextStream isEqualTo:url]) {
newChain = nil;
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1);
return YES;
}
url = nextStream;
newChain = [[BufferChain alloc] initWithController:self];
}
@synchronized(chainQueue) {
[self addChainToQueue:newChain];
}
newChain = nil;
// I'm stupid and can't hold too much stuff in my head all at once, so writing it here.
//
// Once we get here:
// - buffer chain for previous stream finished reading
// - there are (probably) some bytes of the previous stream in the output buffer which haven't been played
// (by output node) yet
// - self.bufferChain == previous playlist entry's buffer chain
// - self.nextStream == next playlist entry's URL
// - self.nextStreamUserInfo == next playlist entry
// - head of chainQueue is the buffer chain for the next entry (which has launched its threads already)
if(output)
[output setShouldPlayOutBuffer:YES];
atomic_fetch_sub(&refCount, 1);
return YES;
}
// Convenience: report the play count for the most recently tracked entry.
- (void)reportPlayCount {
	[self reportPlayCountForTrack:previousUserInfo];
}
- (BOOL)selectNextBuffer {
BOOL signalStopped = NO;
[nextStreamUserInfo retain]; //Retained because when setNextStream is called, it will be released!!!
do {
@synchronized(chainQueue) {
endOfInputReached = NO;
[newChain release];
[self requestNextStream: nextStreamUserInfo];
if([chainQueue count] <= 0) {
// End of playlist
signalStopped = YES;
break;
}
[bufferChain setShouldContinue:NO];
bufferChain = nil;
bufferChain = [chainQueue objectAtIndex:0];
[chainQueue removeObjectAtIndex:0];
DLog(@"New!!! %@ %@", bufferChain, [[bufferChain inputNode] decoder]);
[semaphore signal];
if (nextStream == nil)
{
return;
}
} while(0);
if(signalStopped) {
double latency = 0;
if(output) latency = [output latency];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, latency * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
[self stop];
newChain = [[BufferChain alloc] initWithController:self];
} while (![newChain open:nextStream withOutputFormat:[output format]]);
[newChain setUserInfo: nextStreamUserInfo];
[newChain setShouldContinue:YES];
[newChain launchThreads];
[chainQueue insertObject:newChain atIndex:[chainQueue count]];
self->bufferChain = nil;
[newChain release];
}
[self notifyPlaybackStopped:nil];
});
return YES;
- (void)endOfInputPlayed
{
if ([chainQueue count] <= 0)
{
//End of playlist
[self stop];
return;
}
[bufferChain release];
bufferChain = [chainQueue objectAtIndex:0];
[bufferChain retain];
[chainQueue removeObjectAtIndex:0];
[self notifyStreamChanged:[bufferChain userInfo]];
[output setEndOfStream:NO];
return NO;
}
- (void)endOfInputPlayed {
// Once we get here:
// - the buffer chain for the next playlist entry (started in endOfInputReached) have been working for some time
// already, so that there is some decoded and converted data to play
// - the buffer chain for the next entry is the first item in chainQueue
previousUserInfo = [bufferChain userInfo];
[self notifyStreamChanged:previousUserInfo];
}
// Returns whether any pre-built chains are queued for upcoming playback.
- (BOOL)chainQueueHasTracks {
	@synchronized(chainQueue) {
		return [chainQueue count] > 0;
	}
	// Unreachable; satisfies the compiler's missing-return analysis for the
	// @synchronized form.
	return NO;
}
- (void)sendDelegateMethod:(SEL)selector withVoid:(void *)obj waitUntilDone:(BOOL)wait {
- (void)sendDelegateMethod:(SEL)selector withObject:(id)obj waitUntilDone:(BOOL)wait
{
NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:[delegate methodSignatureForSelector:selector]];
[invocation setTarget:delegate];
[invocation setSelector:selector];
[invocation setArgument:(void *)&self atIndex:2];
[invocation setArgument:&obj atIndex:3];
[invocation retainArguments];
[invocation setArgument:&self atIndex:2]; //Indexes start at 2, the first being self, the second being command.
[invocation setArgument:&obj atIndex:3];
[invocation performSelectorOnMainThread:@selector(invoke) withObject:nil waitUntilDone:wait];
[self performSelectorOnMainThread:@selector(sendDelegateMethodMainThread:) withObject:invocation waitUntilDone:wait];
}
- (void)sendDelegateMethod:(SEL)selector withObject:(id)obj waitUntilDone:(BOOL)wait {
NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:[delegate methodSignatureForSelector:selector]];
[invocation setTarget:delegate];
[invocation setSelector:selector];
[invocation setArgument:(void *)&self atIndex:2];
[invocation setArgument:&obj atIndex:3];
[invocation retainArguments];
[invocation performSelectorOnMainThread:@selector(invoke) withObject:nil waitUntilDone:wait];
- (void)sendDelegateMethodMainThread:(id)invocation
{
[invocation invokeWithTarget:delegate];
}
- (void)sendDelegateMethod:(SEL)selector withObject:(id)obj withObject:(id)obj2 waitUntilDone:(BOOL)wait {
NSInvocation *invocation = [NSInvocation invocationWithMethodSignature:[delegate methodSignatureForSelector:selector]];
[invocation setTarget:delegate];
[invocation setSelector:selector];
[invocation setArgument:(void *)&self atIndex:2];
[invocation setArgument:&obj atIndex:3];
[invocation setArgument:&obj2 atIndex:4];
[invocation retainArguments];
[invocation performSelectorOnMainThread:@selector(invoke) withObject:nil waitUntilDone:wait];
- (void)setPlaybackStatus:(int)status
{
[self sendDelegateMethod:@selector(audioPlayer:statusChanged:) withObject:[NSNumber numberWithInt:status] waitUntilDone:NO];
}
// Records the new playback status and notifies the delegate, tagging the
// change with the current chain's userInfo. `wait` controls whether the
// delegate dispatch blocks until the main thread has handled it.
- (void)setPlaybackStatus:(int)status waitUntilDone:(BOOL)wait {
	currentPlaybackStatus = status;
	[self sendDelegateMethod:@selector(audioPlayer:didChangeStatus:userInfo:) withObject:@(status) withObject:[bufferChain userInfo] waitUntilDone:wait];
}
// Asynchronously tells the delegate to keep HDCD mode engaged for the
// current track.
- (void)sustainHDCD {
	[self sendDelegateMethod:@selector(audioPlayer:sustainHDCD:) withObject:[bufferChain userInfo] waitUntilDone:NO];
}
// Asynchronously sets or clears the error flag on the current track via the
// delegate.
- (void)setError:(BOOL)status {
	[self sendDelegateMethod:@selector(audioPlayer:setError:toTrack:) withObject:@(status) withObject:[bufferChain userInfo] waitUntilDone:NO];
}
// Fire-and-forget variant of setPlaybackStatus:waitUntilDone:.
- (void)setPlaybackStatus:(int)status {
	[self setPlaybackStatus:status waitUntilDone:NO];
}
- (BufferChain *)bufferChain {
- (BufferChain *)bufferChain
{
return bufferChain;
}
- (OutputNode *)output {
- (OutputNode *) output
{
return output;
}
// Returns the file extensions handled by registered container plugins.
+ (NSArray *)containerTypes {
	return [[[PluginController sharedPluginController] containers] allKeys];
}
+ (NSArray *)fileTypes {
+ (NSArray *)fileTypes
{
PluginController *pluginController = [PluginController sharedPluginController];
NSArray *containerTypes = [[pluginController containers] allKeys];
NSArray *decoderTypes = [[pluginController decodersByExtension] allKeys];
NSArray *decoderTypes = [[pluginController decoders] allKeys];
NSArray *metdataReaderTypes = [[pluginController metadataReaders] allKeys];
NSArray *propertiesReaderTypes = [[pluginController propertiesReadersByExtension] allKeys];
NSArray *propertiesReaderTypes = [[pluginController propertiesReaders] allKeys];
NSMutableSet *types = [NSMutableSet set];
[types addObjectsFromArray:containerTypes];
[types addObjectsFromArray:decoderTypes];
[types addObjectsFromArray:metdataReaderTypes];
[types addObjectsFromArray:propertiesReaderTypes];
return [types allObjects];
}
+ (NSArray *)schemes {
+ (NSArray *)schemes
{
PluginController *pluginController = [PluginController sharedPluginController];
return [[pluginController sources] allKeys];
}
// Raises the playback volume by `amount`, clamped to the configured ceiling
// (100 when the "volumeLimit" preference is on, 800 otherwise). Returns the
// volume actually applied so the playbackController can update its
// volumeSlider accordingly.
- (double)volumeUp:(double)amount {
	BOOL limited = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"volumeLimit"];
	const double ceiling = limited ? 100.0 : 800.0;
	// Round-trip through linear space so the step size stays perceptually
	// uniform on the logarithmic scale.
	double target = linearToLogarithmic(logarithmicToLinear(volume + amount, ceiling), ceiling);
	if(target > ceiling) {
		target = ceiling;
	}
	[self setVolume:target];
	return target;
}
// Lowers the playback volume by `amount`, bottoming out at 0. Returns the
// volume actually applied so the UI can stay in sync.
- (double)volumeDown:(double)amount {
	BOOL limited = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"volumeLimit"];
	const double ceiling = limited ? 100.0 : 800.0;
	double target;
	if(amount > volume) {
		// Stepping below zero: pin to silence instead of going negative.
		target = 0.0;
	} else {
		target = linearToLogarithmic(logarithmicToLinear(volume - amount, ceiling), ceiling);
	}
	[self setVolume:target];
	return target;
}
// Spins until every in-flight callback (tracked by the atomic refCount) has
// returned. Sets resettingNow so callbacks bail out early, and pokes the
// semaphore so any callback blocked in its buffering wait loop wakes up.
// When called on the main thread it must pump the run loop, because the
// callbacks themselves may be dispatching events to the main thread.
- (void)waitUntilCallbacksExit {
	// This sucks! And since the thread that's inside the function can be calling
	// event dispatches, we have to pump the message queue if we're on the main
	// thread. Damn.
	if(atomic_load_explicit(&refCount, memory_order_relaxed) != 0) {
		BOOL mainThread = (dispatch_queue_get_label(dispatch_get_main_queue()) == dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL));
		atomic_store(&resettingNow, true);
		while(atomic_load_explicit(&refCount, memory_order_relaxed) != 0) {
			[semaphore signal]; // Gotta poke this periodically
			if(mainThread)
				[[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.001]];
			else
				usleep(500);
		}
		atomic_store(&resettingNow, false);
	}
}
@end

View file

@ -8,10 +8,11 @@
#import <Cocoa/Cocoa.h>
@interface AudioPropertiesReader : NSObject {
}
+ (NSDictionary *)propertiesForURL:(NSURL *)url;
+ (NSDictionary *)propertiesForURL:(NSURL *)url skipCue:(BOOL)skip;
@end

View file

@ -13,16 +13,24 @@
@implementation AudioPropertiesReader
// Reads audio properties for the URL via the shared plugin controller,
// including embedded cue sheet handling (skipCue:NO).
+ (NSDictionary *)propertiesForURL:(NSURL *)url {
	@autoreleasepool {
		return [[PluginController sharedPluginController] propertiesForURL:url skipCue:NO];
	}
}
+ (NSDictionary *)propertiesForURL:(NSURL *)url
{
NSString *ext = [[url path] pathExtension];
id<CogSource> source = [AudioSource audioSourceForURL:url];
if (![source open:url])
return nil;
NSDictionary *propertiesReaders = [[PluginController sharedPluginController] propertiesReaders];
Class propertiesReader = NSClassFromString([propertiesReaders objectForKey:[ext lowercaseString]]);
NSDictionary *properties = [propertiesReader propertiesForSource:source];
[source close];
return properties;
// Reads audio properties for the URL, optionally skipping embedded cue
// sheet processing; forwards to the shared plugin controller.
+ (NSDictionary *)propertiesForURL:(NSURL *)url skipCue:(BOOL)skip {
	@autoreleasepool {
		return [[PluginController sharedPluginController] propertiesForURL:url skipCue:skip];
	}
}
@end

View file

@ -13,6 +13,6 @@
@interface AudioSource : NSObject {
}
+ (id<CogSource>)audioSourceForURL:(NSURL *)url;
+ audioSourceForURL:(NSURL *)url;
@end

View file

@ -8,10 +8,18 @@
#import "AudioSource.h"
@implementation AudioSource
+ (id<CogSource>)audioSourceForURL:(NSURL *)url {
return [[PluginController sharedPluginController] audioSourceForURL:url];
+ audioSourceForURL:(NSURL *)url
{
NSString *scheme = [url scheme];
NSDictionary *sources = [[PluginController sharedPluginController] sources];
Class source = NSClassFromString([sources objectForKey:scheme]);
return [[[source alloc] init] autorelease];
}
@end

View file

@ -1,112 +0,0 @@
//
// AudioChunk.h
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/5/22.
//
#ifndef AudioChunk_h
#define AudioChunk_h
#import <CoreAudio/CoreAudio.h>
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
enum {
AudioChannelFrontLeft = 1 << 0,
AudioChannelFrontRight = 1 << 1,
AudioChannelFrontCenter = 1 << 2,
AudioChannelLFE = 1 << 3,
AudioChannelBackLeft = 1 << 4,
AudioChannelBackRight = 1 << 5,
AudioChannelFrontCenterLeft = 1 << 6,
AudioChannelFrontCenterRight = 1 << 7,
AudioChannelBackCenter = 1 << 8,
AudioChannelSideLeft = 1 << 9,
AudioChannelSideRight = 1 << 10,
AudioChannelTopCenter = 1 << 11,
AudioChannelTopFrontLeft = 1 << 12,
AudioChannelTopFrontCenter = 1 << 13,
AudioChannelTopFrontRight = 1 << 14,
AudioChannelTopBackLeft = 1 << 15,
AudioChannelTopBackCenter = 1 << 16,
AudioChannelTopBackRight = 1 << 17,
AudioConfigMono = AudioChannelFrontCenter,
AudioConfigStereo = AudioChannelFrontLeft | AudioChannelFrontRight,
AudioConfig3Point0 = AudioChannelFrontLeft | AudioChannelFrontRight |
AudioChannelFrontCenter,
AudioConfig4Point0 = AudioChannelFrontLeft | AudioChannelFrontRight |
AudioChannelBackLeft | AudioChannelBackRight,
AudioConfig5Point0 = AudioChannelFrontLeft | AudioChannelFrontRight |
AudioChannelFrontCenter | AudioChannelBackLeft |
AudioChannelBackRight,
AudioConfig5Point1 = AudioChannelFrontLeft | AudioChannelFrontRight |
AudioChannelFrontCenter | AudioChannelLFE |
AudioChannelBackLeft | AudioChannelBackRight,
AudioConfig5Point1Side = AudioChannelFrontLeft | AudioChannelFrontRight |
AudioChannelFrontCenter | AudioChannelLFE |
AudioChannelSideLeft | AudioChannelSideRight,
AudioConfig6Point1 = AudioChannelFrontLeft | AudioChannelFrontRight |
AudioChannelFrontCenter | AudioChannelLFE |
AudioChannelBackCenter | AudioChannelSideLeft |
AudioChannelSideRight,
AudioConfig7Point1 = AudioChannelFrontLeft | AudioChannelFrontRight |
AudioChannelFrontCenter | AudioChannelLFE |
AudioChannelBackLeft | AudioChannelBackRight |
AudioChannelSideLeft | AudioChannelSideRight,
AudioChannelsBackLeftRight = AudioChannelBackLeft | AudioChannelBackRight,
AudioChannelsSideLeftRight = AudioChannelSideLeft | AudioChannelSideRight,
};
@interface AudioChunk : NSObject {
AudioStreamBasicDescription format;
NSMutableData *chunkData;
uint32_t channelConfig;
double streamTimestamp;
double streamTimeRatio;
BOOL formatAssigned;
BOOL lossless;
BOOL hdcd;
}
@property AudioStreamBasicDescription format;
@property uint32_t channelConfig;
@property double streamTimestamp;
@property double streamTimeRatio;
@property BOOL lossless;
+ (uint32_t)guessChannelConfig:(uint32_t)channelCount;
+ (uint32_t)channelIndexFromConfig:(uint32_t)channelConfig forFlag:(uint32_t)flag;
+ (uint32_t)extractChannelFlag:(uint32_t)index fromConfig:(uint32_t)channelConfig;
+ (uint32_t)countChannels:(uint32_t)channelConfig;
+ (uint32_t)findChannelIndex:(uint32_t)flag;
- (id)init;
- (id)initWithProperties:(NSDictionary *)properties;
- (void)assignSamples:(const void *_Nonnull)data frameCount:(size_t)count;
- (void)assignData:(NSData *)data;
- (NSData *)removeSamples:(size_t)frameCount;
- (BOOL)isEmpty;
- (size_t)frameCount;
- (void)setFrameCount:(size_t)count; // For truncation only
- (double)duration;
- (double)durationRatioed;
- (BOOL)isHDCD;
- (void)setHDCD;
- (AudioChunk *)copy;
@end
NS_ASSUME_NONNULL_END
#endif /* AudioChunk_h */

View file

@ -1,233 +0,0 @@
//
// AudioChunk.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/5/22.
//
#import "AudioChunk.h"
#import "CoreAudioUtils.h"
@implementation AudioChunk
// Creates an empty chunk: no format assigned yet, zeroed stream timestamp,
// unity time ratio, and flags cleared.
- (id)init {
	self = [super init];
	if(self) {
		chunkData = [[NSMutableData alloc] init];
		formatAssigned = NO;
		lossless = NO;
		hdcd = NO;
		streamTimestamp = 0.0;
		streamTimeRatio = 1.0;
	}
	return self;
}
- (id)initWithProperties:(NSDictionary *)properties {
self = [super init];
if(self) {
chunkData = [[NSMutableData alloc] init];
[self setFormat:propertiesToASBD(properties)];
lossless = [[properties objectForKey:@"encoding"] isEqualToString:@"lossless"];
hdcd = NO;
streamTimestamp = 0.0;
streamTimeRatio = 1.0;
}
return self;
}
// Deep-copies the chunk: format, channel config, HDCD flag, stream timing,
// and the sample bytes themselves (assignData: appends a copy of the data).
// Note: the lossless flag is not carried over here.
- (AudioChunk *)copy {
	AudioChunk *outputChunk = [[AudioChunk alloc] init];
	[outputChunk setFormat:format];
	[outputChunk setChannelConfig:channelConfig];
	if(hdcd) [outputChunk setHDCD];
	[outputChunk setStreamTimestamp:streamTimestamp];
	[outputChunk setStreamTimeRatio:streamTimeRatio];
	[outputChunk assignData:chunkData];
	return outputChunk;
}
static const uint32_t AudioChannelConfigTable[] = {
0,
AudioConfigMono,
AudioConfigStereo,
AudioConfig3Point0,
AudioConfig4Point0,
AudioConfig5Point0,
AudioConfig5Point1,
AudioConfig6Point1,
AudioConfig7Point1,
0,
AudioConfig7Point1 | AudioChannelFrontCenterLeft | AudioChannelFrontCenterRight
};
// Maps a channel count to a default channel-config bitmask: standard layouts
// from AudioChannelConfigTable where defined, otherwise the low channelCount
// bits set. Returns 0 for 0 or more than 32 channels.
+ (uint32_t)guessChannelConfig:(uint32_t)channelCount {
	if(channelCount == 0) return 0;
	if(channelCount > 32) return 0;
	uint32_t ret = 0;
	if(channelCount < (sizeof(AudioChannelConfigTable) / sizeof(AudioChannelConfigTable[0])))
		ret = AudioChannelConfigTable[channelCount];
	if(!ret) {
		// Unsigned, shift-safe form of (1 << channelCount) - 1. The previous
		// signed-int expression was undefined behavior for channelCount >= 31
		// (left shift into/past the sign bit, and a shift by 32 at the max).
		ret = (channelCount == 32) ? UINT32_MAX : ((UINT32_C(1) << channelCount) - 1);
	}
	assert([AudioChunk countChannels:ret] == channelCount);
	return ret;
}
+ (uint32_t)channelIndexFromConfig:(uint32_t)channelConfig forFlag:(uint32_t)flag {
uint32_t index = 0;
for(uint32_t walk = 0; walk < 32; ++walk) {
uint32_t query = 1 << walk;
if(flag & query) return index;
if(channelConfig & query) ++index;
}
return ~0;
}
// Returns the single-bit channel flag of the index-th set channel in
// channelConfig, scanning from the lowest bit. If fewer than index+1
// channels are set, the flag shifts out of range and 0 is returned.
+ (uint32_t)extractChannelFlag:(uint32_t)index fromConfig:(uint32_t)channelConfig {
	uint32_t toskip = index;
	uint32_t flag = 1;
	while(flag) {
		if(channelConfig & flag) {
			if(toskip == 0) break;
			toskip--;
		}
		flag <<= 1;
	}
	return flag;
}
+ (uint32_t)countChannels:(uint32_t)channelConfig {
return __builtin_popcount(channelConfig);
}
// Returns the bit index of the least-significant set bit of flag, via a
// branching binary reduction (16/8/4/2/1). flag must be non-zero; the final
// assert fires otherwise.
+ (uint32_t)findChannelIndex:(uint32_t)flag {
	uint32_t rv = 0;
	if((flag & 0xFFFF) == 0) {
		rv += 16;
		flag >>= 16;
	}
	if((flag & 0xFF) == 0) {
		rv += 8;
		flag >>= 8;
	}
	if((flag & 0xF) == 0) {
		rv += 4;
		flag >>= 4;
	}
	if((flag & 0x3) == 0) {
		rv += 2;
		flag >>= 2;
	}
	if((flag & 0x1) == 0) {
		rv += 1;
		flag >>= 1;
	}
	assert(flag & 1);
	return rv;
}
@synthesize lossless;
@synthesize streamTimestamp;
@synthesize streamTimeRatio;
- (AudioStreamBasicDescription)format {
return format;
}
- (void)setFormat:(AudioStreamBasicDescription)informat {
formatAssigned = YES;
format = informat;
channelConfig = [AudioChunk guessChannelConfig:format.mChannelsPerFrame];
}
- (uint32_t)channelConfig {
return channelConfig;
}
- (void)setChannelConfig:(uint32_t)config {
if(formatAssigned) {
if(config == 0) {
config = [AudioChunk guessChannelConfig:format.mChannelsPerFrame];
}
}
channelConfig = config;
}
- (void)assignSamples:(const void *_Nonnull)data frameCount:(size_t)count {
if(formatAssigned) {
const size_t bytesPerPacket = format.mBytesPerPacket;
[chunkData appendBytes:data length:bytesPerPacket * count];
}
}
- (void)assignData:(NSData *)data {
[chunkData appendData:data];
}
// Removes up to frameCount frames from the head of the chunk and returns
// them as NSData, advancing the stream timestamp by the duration removed.
// Returns an empty NSData when no format has been assigned.
- (NSData *)removeSamples:(size_t)frameCount {
	if(formatAssigned) {
		@autoreleasepool {
			const size_t bytesPerPacket = format.mBytesPerPacket;
			// Clamp to what is actually buffered; the previous code threw an
			// NSRangeException if asked for more frames than it held.
			const size_t framesAvailable = [chunkData length] / bytesPerPacket;
			if(frameCount > framesAvailable) frameCount = framesAvailable;
			const double secondsDuration = (double)(frameCount) / format.mSampleRate;
			// mBitsPerChannel == 1 marks DSD; timestamps advance 8x relative
			// to the packet rate, matching the scaling in -duration.
			const double DSDrate = (format.mBitsPerChannel == 1) ? 8.0 : 1.0;
			const size_t byteCount = bytesPerPacket * frameCount;
			NSData *ret = [chunkData subdataWithRange:NSMakeRange(0, byteCount)];
			[chunkData replaceBytesInRange:NSMakeRange(0, byteCount) withBytes:NULL length:0];
			streamTimestamp += secondsDuration * streamTimeRatio * DSDrate;
			return ret;
		}
	}
	return [NSData data];
}
- (BOOL)isEmpty {
return [chunkData length] == 0;
}
- (size_t)frameCount {
if(formatAssigned) {
const size_t bytesPerPacket = format.mBytesPerPacket;
return [chunkData length] / bytesPerPacket;
}
return 0;
}
- (void)setFrameCount:(size_t)count {
if(formatAssigned) {
count *= format.mBytesPerPacket;
size_t currentLength = [chunkData length];
if(count < currentLength) {
[chunkData replaceBytesInRange:NSMakeRange(count, currentLength - count) withBytes:NULL length:0];
}
}
}
// Returns the buffered duration in seconds: frames / sample rate, scaled 8x
// for DSD content (mBitsPerChannel == 1). 0.0 when empty or no format set.
- (double)duration {
	if(formatAssigned && [chunkData length]) {
		const size_t bytesPerPacket = format.mBytesPerPacket;
		const double sampleRate = format.mSampleRate;
		const double DSDrate = (format.mBitsPerChannel == 1) ? 8.0 : 1.0;
		return ((double)([chunkData length] / bytesPerPacket) / sampleRate) * DSDrate;
	}
	return 0.0;
}
- (double)durationRatioed {
return [self duration] * streamTimeRatio;
}
- (BOOL)isHDCD {
return hdcd;
}
- (void)setHDCD {
hdcd = YES;
}
@end

View file

@ -8,77 +8,41 @@
#import <Cocoa/Cocoa.h>
#import <CogAudio/AudioPlayer.h>
#import <CogAudio/ConverterNode.h>
#import <CogAudio/InputNode.h>
#import "InputNode.h"
#import "AudioPlayer.h"
@interface BufferChain : NSObject {
InputNode *inputNode;
ConverterNode *converterNode;
NSURL *streamURL;
id userInfo;
NSDictionary *rgInfo;
id finalNode; // Final buffer in the chain.
id finalNode; //Final buffer in the chain.
id controller;
}
- (id)initWithController:(id)c;
- (BOOL)buildChain;
- (BOOL)open:(NSURL *)url withOutputFormat:(AudioStreamBasicDescription)outputFormat withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi;
// Used when changing tracks to reuse the same decoder
- (BOOL)openWithInput:(InputNode *)i withOutputFormat:(AudioStreamBasicDescription)outputFormat withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi;
// Used when resetting the decoder on seek
- (BOOL)openWithDecoder:(id<CogDecoder>)decoder
withOutputFormat:(AudioStreamBasicDescription)outputFormat
withUserInfo:(id)userInfo
withRGInfo:(NSDictionary *)rgi;
- (void)buildChain;
- (BOOL)open:(NSURL *)url withOutputFormat:(AudioStreamBasicDescription)outputFormat;
- (void)seek:(double)time;
- (void)launchThreads;
- (InputNode *)inputNode;
- (id)finalNode;
- (id)userInfo;
- (void)setUserInfo:(id)i;
- (NSDictionary *)rgInfo;
- (void)setRGInfo:(NSDictionary *)rgi;
- (NSURL *)streamURL;
- (void)setStreamURL:(NSURL *)url;
- (void)setShouldContinue:(BOOL)s;
- (void)initialBufferFilled:(id)sender;
- (void)initialBufferFilled;
- (BOOL)endOfInputReached;
- (BOOL)setTrack:(NSURL *)track;
- (void)endOfInputReached;
- (BOOL)isRunning;
- (id)controller;
- (ConverterNode *)converter;
- (AudioStreamBasicDescription)inputFormat;
- (uint32_t)inputConfig;
- (double)secondsBuffered;
- (void)sustainHDCD;
- (void)restartPlaybackAtCurrentPosition;
- (void)pushInfo:(NSDictionary *)info;
- (void)setError:(BOOL)status;
@end

View file

@ -7,278 +7,119 @@
//
#import "BufferChain.h"
#import "OutputNode.h"
#import "AudioSource.h"
#import "CoreAudioUtils.h"
#import "DSPDownmixNode.h"
#import "OutputNode.h"
#import "AudioPlayer.h"
#import "Logging.h"
@implementation BufferChain
- (id)initWithController:(id)c {
- (id)initWithController:(id)c
{
self = [super init];
if(self) {
if (self)
{
controller = c;
streamURL = nil;
userInfo = nil;
rgInfo = nil;
inputNode = nil;
converterNode = nil;
}
return self;
}
- (BOOL)buildChain {
// Cut off output source
finalNode = nil;
// Tear them down in reverse
converterNode = nil;
inputNode = nil;
inputNode = [[InputNode alloc] initWithController:self previous:nil];
if(!inputNode) return NO;
converterNode = [[ConverterNode alloc] initWithController:self previous:inputNode];
if(!converterNode) return NO;
finalNode = converterNode;
return YES;
}
// Opens a URL for playback: builds the node chain, resolves an audio
// source for the URL, opens the input, and configures the converter and
// downmixer. On source failure it substitutes ten seconds of silence
// (silence://10) rather than failing the whole chain outright.
// Returns NO only if the chain, the silence fallback, the input node,
// or the converter setup fails.
- (BOOL)open:(NSURL *)url withOutputFormat:(AudioStreamBasicDescription)outputFormat withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi {
[self setStreamURL:url];
[self setUserInfo:userInfo];
if (![self buildChain]) {
DLog(@"Couldn't build processing chain...");
return NO;
}
id<CogSource> source = [AudioSource audioSourceForURL:url];
DLog(@"Opening: %@", url);
if(!source || ![source open:url]) {
DLog(@"Couldn't open source...");
// Fall back to a silent stream so playback can continue to the next track.
url = [NSURL URLWithString:@"silence://10"];
source = [AudioSource audioSourceForURL:url];
if(![source open:url])
return NO;
}
if(![inputNode openWithSource:source])
return NO;
if(![self initConverter:outputFormat])
return NO;
[self initDownmixer];
// ReplayGain info is applied last, once the converter exists.
[self setRGInfo:rgi];
return YES;
}
// Builds a new chain around the decoder of an existing InputNode `i`
// (used for gapless track transitions where the decoder is reused).
// Mirrors -open:... but skips source resolution.
- (BOOL)openWithInput:(InputNode *)i withOutputFormat:(AudioStreamBasicDescription)outputFormat withUserInfo:(id)userInfo withRGInfo:(NSDictionary *)rgi {
DLog(@"New buffer chain!");
[self setUserInfo:userInfo];
if(![self buildChain]) {
DLog(@"Couldn't build processing chain...");
return NO;
}
// Reuse the previous chain's decoder instead of opening a new source.
if(![inputNode openWithDecoder:[i decoder]])
return NO;
if(![self initConverter:outputFormat])
return NO;
[self initDownmixer];
[self setRGInfo:rgi];
return YES;
}
- (BOOL)openWithDecoder:(id<CogDecoder>)decoder
withOutputFormat:(AudioStreamBasicDescription)outputFormat
withUserInfo:(id)userInfo
withRGInfo:(NSDictionary *)rgi;
- (void)buildChain
{
DLog(@"New buffer chain!");
[self setUserInfo:userInfo];
if(![self buildChain]) {
DLog(@"Couldn't build processing chain...");
[inputNode release];
inputNode = [[InputNode alloc] initWithController:self previous:nil];
finalNode = inputNode;
}
- (BOOL)open:(NSURL *)url withOutputFormat:(AudioStreamBasicDescription)outputFormat
{
[self setStreamURL:url];
[self buildChain];
id<CogSource> source = [AudioSource audioSourceForURL:url];
if (![source open:url])
{
NSLog(@"Couldn't open source...");
return NO;
}
if(![inputNode openWithDecoder:decoder])
if (![inputNode openURL:url withSource:source outputFormat:outputFormat])
return NO;
if(![self initConverter:outputFormat])
return NO;
[self initDownmixer];
[self setRGInfo:rgi];
return YES;
}
// Configures the converter node from the input node's decoded properties.
// The requested output format keeps its sample rate and bit depth but
// adopts the input's channel count; byte sizes are recomputed from that.
// Returns NO if the converter rejects the format pair.
- (BOOL)initConverter:(AudioStreamBasicDescription)outputFormat {
NSDictionary *properties = [inputNode properties];
DLog(@"Input Properties: %@", properties);
AudioStreamBasicDescription inputFormat = [inputNode nodeFormat];
uint32_t inputChannelConfig = 0;
// Channel layout bitmap is optional; 0 means "unspecified".
if([properties valueForKey:@"channelConfig"])
inputChannelConfig = [[properties valueForKey:@"channelConfig"] unsignedIntValue];
outputFormat.mChannelsPerFrame = inputFormat.mChannelsPerFrame;
// (bits + 7) / 8 rounds partial bytes up per channel.
outputFormat.mBytesPerFrame = ((outputFormat.mBitsPerChannel + 7) / 8) * outputFormat.mChannelsPerFrame;
outputFormat.mBytesPerPacket = outputFormat.mBytesPerFrame * outputFormat.mFramesPerPacket;
if(![converterNode setupWithInputFormat:inputFormat withInputConfig:inputChannelConfig outputFormat:outputFormat isLossless:[[properties valueForKey:@"encoding"] isEqualToString:@"lossless"]])
return NO;
return YES;
}
// Points the output stage's downmix node at the current device format.
// `controller` is the owning AudioPlayer (cast here to reach its output).
- (void)initDownmixer {
AudioPlayer * audioPlayer = controller;
OutputNode *outputNode = [audioPlayer output];
DSPDownmixNode *downmixNode = [outputNode downmix];
[downmixNode setOutputFormat:[outputNode deviceFormat] withChannelConfig:[outputNode deviceChannelConfig]];
}
- (void)launchThreads {
DLog(@"Properties: %@", [inputNode properties]);
- (void)launchThreads
{
[inputNode launchThread];
[converterNode launchThread];
}
- (void)setUserInfo:(id)i {
- (void)setUserInfo:(id)i
{
[i retain];
[userInfo release];
userInfo = i;
}
- (id)userInfo {
- (id)userInfo
{
return userInfo;
}
- (void)setRGInfo:(NSDictionary *)rgi {
rgInfo = rgi;
[converterNode setRGInfo:rgi];
- (void)dealloc
{
[userInfo release];
[inputNode release];
[super dealloc];
}
- (NSDictionary *)rgInfo {
return rgInfo;
- (void)seek:(double)time
{
[inputNode seek:time];
}
- (void)dealloc {
[inputNode setShouldContinue:NO];
[[inputNode exitAtTheEndOfTheStream] signal];
[[inputNode writeSemaphore] signal];
if(![inputNode threadExited])
[[inputNode exitAtTheEndOfTheStream] wait]; // wait for decoder to be closed (see InputNode's -(void)process )
DLog(@"Bufferchain dealloc");
- (void)endOfInputReached
{
[controller endOfInputReached:self];
}
// Seeks the input to an absolute position given in seconds, converting
// to a frame index using the track's decoded sample rate.
- (void)seek:(double)time {
long frame = (long)round(time * [[[inputNode properties] objectForKey:@"sampleRate"] floatValue]);
[inputNode seek:frame];
}
// Forwards end-of-input notification to the controller; the controller's
// return value indicates whether a follow-on chain was queued.
- (BOOL)endOfInputReached {
return [controller endOfInputReached:self];
}
// Retargets the input node to a new track URL (gapless follow-on).
- (BOOL)setTrack:(NSURL *)track {
return [inputNode setTrack:track];
}
- (void)initialBufferFilled:(id)sender {
DLog(@"INITIAL BUFFER FILLED");
- (void)initialBufferFilled
{
[controller launchOutputThread];
}
- (InputNode *)inputNode {
return inputNode;
}
- (id)finalNode {
- (id)finalNode
{
return finalNode;
}
- (NSURL *)streamURL {
- (NSURL *)streamURL
{
return streamURL;
}
- (void)setStreamURL:(NSURL *)url {
- (void)setStreamURL:(NSURL *)url
{
[url retain];
[streamURL release];
streamURL = url;
}
- (void)setShouldContinue:(BOOL)s {
- (void)setShouldContinue:(BOOL)s
{
[inputNode setShouldContinue:s];
[converterNode setShouldContinue:s];
}
// YES while the input node exists, has not been told to stop, and has
// not yet reached end of stream.
- (BOOL)isRunning {
InputNode *node = [self inputNode];
if(nil != node && [node shouldContinue] && ![node endOfStream]) {
return YES;
}
return NO;
}
// Owning controller (the AudioPlayer that created this chain).
- (id)controller {
return controller;
}
// The chain's converter node.
- (ConverterNode *)converter {
return converterNode;
}
// Decoded stream format as reported by the input node.
- (AudioStreamBasicDescription)inputFormat {
return [inputNode nodeFormat];
}
// Decoded channel-layout bitmap as reported by the input node.
- (uint32_t)inputConfig {
return [inputNode nodeChannelConfig];
}
// Total seconds buffered across the whole chain, summed by walking the
// node list backwards from the final node.
- (double)secondsBuffered {
double duration = 0.0;
Node *node = [self finalNode];
while(node) {
duration += [node secondsBuffered];
node = [node previousNode];
}
return duration;
}
// Propagates an HDCD-sustain event to both the output node and the
// controller. NOTE(review): the controller is messaged twice removed —
// verify both calls are intentional and not a merge duplicate.
- (void)sustainHDCD {
OutputNode *outputNode = (OutputNode *)[controller output];
[outputNode sustainHDCD];
[controller sustainHDCD];
}
// Asks the controller to rebuild playback from the current position
// (used after mid-stream format events).
- (void)restartPlaybackAtCurrentPosition {
[controller restartPlaybackAtCurrentPosition];
}
// Pushes updated stream metadata to the controller for this track.
- (void)pushInfo:(NSDictionary *)info {
[controller pushInfo:info toTrack:userInfo];
}
// Reports a decode error state up to the controller.
- (void)setError:(BOOL)status {
[controller setError:status];
}
@end

View file

@ -1,86 +0,0 @@
//
// ChunkList.h
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/5/22.
//
#import <CoreAudio/CoreAudio.h>
#import <Foundation/Foundation.h>
#import <CogAudio/AudioChunk.h>
#import <CogAudio/CogSemaphore.h>
NS_ASSUME_NONNULL_BEGIN
#define DSD_DECIMATE 1
// A bounded FIFO of AudioChunk objects with built-in conversion of any
// integer/DSD/float64 input to packed float32 on removal. Thread-safe
// via @synchronized(chunkList) plus busy flags that -dealloc spins on.
@interface ChunkList : NSObject {
NSMutableArray<AudioChunk *> *chunkList;
double listDuration; // seconds queued, at stream rate
double listDurationRatioed; // seconds queued, scaled by tempo ratio
double maxDuration; // capacity in seconds; see -isFull
// Busy flags: each entry point sets one so -dealloc can wait it out.
BOOL inAdder;
BOOL inRemover;
BOOL inPeeker;
BOOL inMerger;
BOOL inConverter;
BOOL stopping;
// For format converter
void *inputBuffer;
size_t inputBufferSize;
#if DSD_DECIMATE
// One dsd2pcm decimator state per channel.
void **dsd2pcm;
size_t dsd2pcmCount;
int dsd2pcmLatency;
#endif
BOOL observersRegistered; // NSUserDefaults KVO registered?
BOOL halveDSDVolume; // mirrors "halveDSDVolume" default
BOOL enableHDCD; // mirrors "enableHDCD" default
void *hdcd_decoder; // hdcd_state_stereo_t*, lazily allocated
// Cached view of the last input format seen by the converter.
BOOL formatRead;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription floatFormat;
uint32_t inputChannelConfig;
BOOL inputLossless;
// Scratch buffer reused by the conversion pipeline.
uint8_t *tempData;
size_t tempDataSize;
}
@property(readonly) double listDuration;
@property(readonly) double listDurationRatioed;
@property(readonly) double maxDuration;
- (id)initWithMaximumDuration:(double)duration;
- (void)reset;
- (BOOL)isEmpty;
- (BOOL)isFull;
- (void)addChunk:(AudioChunk *)chunk;
- (AudioChunk *)removeSamples:(size_t)maxFrameCount;
- (AudioChunk *)removeSamplesAsFloat32:(size_t)maxFrameCount;
- (BOOL)peekFormat:(nonnull AudioStreamBasicDescription *)format channelConfig:(nonnull uint32_t *)config;
- (BOOL)peekTimestamp:(nonnull double *)timestamp timeRatio:(nonnull double *)timeRatio;
// Helpers
- (AudioChunk *)removeAndMergeSamples:(size_t)maxFrameCount callBlock:(BOOL(NS_NOESCAPE ^ _Nonnull)(void))block;
- (AudioChunk *)removeAndMergeSamplesAsFloat32:(size_t)maxFrameCount callBlock:(BOOL(NS_NOESCAPE ^ _Nonnull)(void))block;
@end
NS_ASSUME_NONNULL_END

View file

@ -1,977 +0,0 @@
//
// ChunkList.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/5/22.
//
#import <Accelerate/Accelerate.h>
#import "ChunkList.h"
#import "hdcd_decode2.h"
#if !DSD_DECIMATE
#import "dsd2float.h"
#endif
#ifdef _DEBUG
#import "BadSampleCleaner.h"
#endif
static void *kChunkListContext = &kChunkListContext;
#if DSD_DECIMATE
/**
* DSD 2 PCM: Stage 1:
* Decimate by factor 8
* (one byte (8 samples) -> one float sample)
* The bits are processed from least signicifant to most signicicant.
* @author Sebastian Gesemann
*/
/**
* This is the 2nd half of an even order symmetric FIR
* lowpass filter (to be used on a signal sampled at 44100*64 Hz)
* Passband is 0-24 kHz (ripples +/- 0.025 dB)
* Stopband starts at 176.4 kHz (rejection: 170 dB)
* The overall gain is 2.0
*/
#define dsd2pcm_FILTER_COEFFS_COUNT 64
static const float dsd2pcm_FILTER_COEFFS[64] = {
0.09712411121659f, 0.09613438994044f, 0.09417884216316f, 0.09130441727307f,
0.08757947648990f, 0.08309142055179f, 0.07794369263673f, 0.07225228745463f,
0.06614191680338f, 0.05974199351302f, 0.05318259916599f, 0.04659059631228f,
0.04008603356890f, 0.03377897290478f, 0.02776684382775f, 0.02213240062966f,
0.01694232798846f, 0.01224650881275f, 0.00807793792573f, 0.00445323755944f,
0.00137370697215f, -0.00117318019994f, -0.00321193033831f, -0.00477694265140f,
-0.00591028841335f, -0.00665946056286f, -0.00707518873201f, -0.00720940203988f,
-0.00711340642819f, -0.00683632603227f, -0.00642384017266f, -0.00591723006715f,
-0.00535273320457f, -0.00476118922548f, -0.00416794965654f, -0.00359301524813f,
-0.00305135909510f, -0.00255339111833f, -0.00210551956895f, -0.00171076760278f,
-0.00136940723130f, -0.00107957856005f, -0.00083786862365f, -0.00063983084245f,
-0.00048043272086f, -0.00035442550015f, -0.00025663481039f, -0.00018217573430f,
-0.00012659899635f, -0.00008597726991f, -0.00005694188820f, -0.00003668060332f,
-0.00002290670286f, -0.00001380895679f, -0.00000799057558f, -0.00000440385083f,
-0.00000228567089f, -0.00000109760778f, -0.00000047286430f, -0.00000017129652f,
-0.00000004282776f, 0.00000000119422f, 0.00000000949179f, 0.00000000747450f
};
struct dsd2pcm_state {
/*
* This is the 2nd half of an even order symmetric FIR
* lowpass filter (to be used on a signal sampled at 44100*64 Hz)
* Passband is 0-24 kHz (ripples +/- 0.025 dB)
* Stopband starts at 176.4 kHz (rejection: 170 dB)
* The overall gain is 2.0
*/
/* These remain constant for the duration */
int FILT_LOOKUP_PARTS;
float *FILT_LOOKUP_TABLE;
uint8_t *REVERSE_BITS;
int FIFO_LENGTH;
int FIFO_OFS_MASK;
/* These are altered */
int *fifo;
int fpos;
};
static void dsd2pcm_free(void *);
static void dsd2pcm_reset(void *);
// Allocates and initializes a dsd2pcm decimator state: builds the
// per-byte FIR lookup table (sum of 8 taps per input byte value), the
// bit-reversal table, and a power-of-two FIFO sized to hold the filter
// history. Returns NULL on any allocation failure (partial state freed).
static void *dsd2pcm_alloc(void) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)calloc(1, sizeof(struct dsd2pcm_state));
float *FILT_LOOKUP_TABLE;
double *temp;
uint8_t *REVERSE_BITS;
if(!state)
return NULL;
// One table "part" covers 8 filter taps == one input byte.
state->FILT_LOOKUP_PARTS = (dsd2pcm_FILTER_COEFFS_COUNT + 7) / 8;
const int FILT_LOOKUP_PARTS = state->FILT_LOOKUP_PARTS;
// The current 128 tap FIR leads to an 8 KB lookup table
state->FILT_LOOKUP_TABLE = (float *)calloc(sizeof(float), FILT_LOOKUP_PARTS << 8);
if(!state->FILT_LOOKUP_TABLE)
goto fail;
FILT_LOOKUP_TABLE = state->FILT_LOOKUP_TABLE;
temp = (double *)calloc(sizeof(double), 0x100);
if(!temp)
goto fail;
// For each part, accumulate +/-coeff over all 256 byte values, one bit
// at a time (bit set -> +coeff, clear -> -coeff), in double precision.
for(int part = 0, sofs = 0, dofs = 0; part < FILT_LOOKUP_PARTS;) {
memset(temp, 0, 0x100 * sizeof(double));
for(int bit = 0, bitmask = 0x80; bit < 8 && sofs + bit < dsd2pcm_FILTER_COEFFS_COUNT;) {
double coeff = dsd2pcm_FILTER_COEFFS[sofs + bit];
for(int bite = 0; bite < 0x100; bite++) {
if((bite & bitmask) == 0) {
temp[bite] -= coeff;
} else {
temp[bite] += coeff;
}
}
bit++;
bitmask >>= 1;
}
for(int s = 0; s < 0x100;) {
FILT_LOOKUP_TABLE[dofs++] = (float)temp[s++];
}
part++;
sofs += 8;
}
free(temp);
{ // calculate FIFO stuff
// Round FIFO length up to a power of two so masking replaces modulo.
int k = 1;
while(k < FILT_LOOKUP_PARTS * 2) k <<= 1;
state->FIFO_LENGTH = k;
state->FIFO_OFS_MASK = k - 1;
}
state->REVERSE_BITS = (uint8_t *)calloc(1, 0x100);
if(!state->REVERSE_BITS)
goto fail;
REVERSE_BITS = state->REVERSE_BITS;
for(int i = 0, j = 0; i < 0x100; i++) {
REVERSE_BITS[i] = (uint8_t)j;
// "reverse-increment" of j
for(int bitmask = 0x80;;) {
if(((j ^= bitmask) & bitmask) != 0) break;
if(bitmask == 1) break;
bitmask >>= 1;
}
}
state->fifo = (int *)calloc(sizeof(int), state->FIFO_LENGTH);
if(!state->fifo)
goto fail;
dsd2pcm_reset(state);
return (void *)state;
fail:
// dsd2pcm_free tolerates partially-initialized state (NULL members).
dsd2pcm_free(state);
return NULL;
}
// Deep-copies a dsd2pcm state (tables, FIFO, position) so additional
// channels share identical filter history layout. Returns NULL if the
// source is NULL or any allocation fails; partial copies are freed.
static void *dsd2pcm_dup(void *_state) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)_state;
if(state) {
struct dsd2pcm_state *newstate = (struct dsd2pcm_state *)calloc(1, sizeof(struct dsd2pcm_state));
if(newstate) {
newstate->FILT_LOOKUP_PARTS = state->FILT_LOOKUP_PARTS;
newstate->FIFO_LENGTH = state->FIFO_LENGTH;
newstate->FIFO_OFS_MASK = state->FIFO_OFS_MASK;
newstate->fpos = state->fpos;
newstate->FILT_LOOKUP_TABLE = (float *)calloc(sizeof(float), state->FILT_LOOKUP_PARTS << 8);
if(!newstate->FILT_LOOKUP_TABLE)
goto fail;
memcpy(newstate->FILT_LOOKUP_TABLE, state->FILT_LOOKUP_TABLE, sizeof(float) * (state->FILT_LOOKUP_PARTS << 8));
newstate->REVERSE_BITS = (uint8_t *)calloc(1, 0x100);
if(!newstate->REVERSE_BITS)
goto fail;
memcpy(newstate->REVERSE_BITS, state->REVERSE_BITS, 0x100);
newstate->fifo = (int *)calloc(sizeof(int), state->FIFO_LENGTH);
if(!newstate->fifo)
goto fail;
memcpy(newstate->fifo, state->fifo, sizeof(int) * state->FIFO_LENGTH);
return (void *)newstate;
}
// Reached on goto, or by fallthrough when newstate is NULL
// (dsd2pcm_free(NULL) is a no-op).
fail:
dsd2pcm_free(newstate);
return NULL;
}
return NULL;
}
// Frees a dsd2pcm state and all owned tables. Safe on NULL and on
// partially-initialized states (free(NULL) is a no-op).
static void dsd2pcm_free(void *_state) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)_state;
if(state) {
free(state->fifo);
free(state->REVERSE_BITS);
free(state->FILT_LOOKUP_TABLE);
free(state);
}
}
// Resets the decimator FIFO to an idle-tone pattern (alternating
// 0x55/0xAA bytes) and rewinds the write position.
static void dsd2pcm_reset(void *_state) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)_state;
const int FILT_LOOKUP_PARTS = state->FILT_LOOKUP_PARTS;
int *fifo = state->fifo;
for(int i = 0; i < FILT_LOOKUP_PARTS; i++) {
fifo[i] = 0x55;
fifo[i + FILT_LOOKUP_PARTS] = 0xAA;
}
state->fpos = FILT_LOOKUP_PARTS;
}
// Filter latency in DSD bits (8 bits per lookup part); 0 for NULL state.
static int dsd2pcm_latency(void *_state) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)_state;
if(state)
return state->FILT_LOOKUP_PARTS * 8;
else
return 0;
}
// Runs the 8:1 DSD decimation filter: consumes one DSD byte per output
// float. src/dest are strided (sofs/dofs start offsets, sinc/dinc
// element strides) so interleaved multichannel buffers can be processed
// one channel at a time. `len` counts output samples.
static void dsd2pcm_process(void *_state, const uint8_t *src, size_t sofs, size_t sinc, float *dest, size_t dofs, size_t dinc, size_t len) {
struct dsd2pcm_state *state = (struct dsd2pcm_state *)_state;
int bite1, bite2, temp;
float sample;
int *fifo = state->fifo;
const uint8_t *REVERSE_BITS = state->REVERSE_BITS;
const float *FILT_LOOKUP_TABLE = state->FILT_LOOKUP_TABLE;
const int FILT_LOOKUP_PARTS = state->FILT_LOOKUP_PARTS;
const int FIFO_OFS_MASK = state->FIFO_OFS_MASK;
int fpos = state->fpos;
while(len > 0) {
// Oldest byte is bit-reversed in place so the symmetric FIR can read
// the history in mirrored order; the new byte enters half a FIFO ahead.
fifo[fpos] = REVERSE_BITS[fifo[fpos]] & 0xFF;
fifo[(fpos + FILT_LOOKUP_PARTS) & FIFO_OFS_MASK] = src[sofs] & 0xFF;
sofs += sinc;
temp = (fpos + 1) & FIFO_OFS_MASK;
sample = 0;
// Each table part contributes two mirrored bytes of filter history.
for(int k = 0, lofs = 0; k < FILT_LOOKUP_PARTS;) {
bite1 = fifo[(fpos - k) & FIFO_OFS_MASK];
bite2 = fifo[(temp + k) & FIFO_OFS_MASK];
sample += FILT_LOOKUP_TABLE[lofs + bite1] + FILT_LOOKUP_TABLE[lofs + bite2];
k++;
lofs += 0x100;
}
fpos = temp;
dest[dofs] = sample;
dofs += dinc;
len--;
}
state->fpos = fpos;
}
// Decimates interleaved DSD to interleaved float32, one dsd2pcm state
// per channel. `count` is output frames per channel.
static void convert_dsd_to_f32(float *output, const uint8_t *input, size_t count, size_t channels, void **dsd2pcm) {
for(size_t channel = 0; channel < channels; ++channel) {
dsd2pcm_process(dsd2pcm[channel], input, channel, channels, output, channel, channels, count);
}
}
#else
// Non-decimating variant: expands each DSD byte to 8 float samples via
// the dsd2float lookup table (output rate = 8x input byte rate).
static void convert_dsd_to_f32(float *output, const uint8_t *input, size_t count, size_t channels) {
const uint8_t *iptr = input;
float *optr = output;
for(size_t index = 0; index < count; ++index) {
for(size_t channel = 0; channel < channels; ++channel) {
uint8_t sample = *iptr++;
// Copy 8 precomputed floats for this byte, strided by channel count.
cblas_scopy(8, &dsd2float[sample][0], 1, optr++, (int)channels);
}
// Skip past the 7 remaining interleaved frames just written.
optr += channels * 7;
}
}
#endif
/*
 * Widen unsigned 8-bit PCM to signed 16-bit: replicate each byte into
 * both halves of the output word (so full scale maps to full scale),
 * then flip the high bit of each half to recenter around zero.
 */
static void convert_u8_to_s16(int16_t *output, const uint8_t *input, size_t count) {
	size_t i = 0;
	while(i < count) {
		const uint16_t widened = (uint16_t)((input[i] << 8) | input[i]);
		output[i] = (int16_t)(widened ^ 0x8080);
		++i;
	}
}
/*
 * Widen signed 8-bit PCM (stored as raw bytes) to signed 16-bit by
 * replicating each byte into the high and low half of the output word.
 */
static void convert_s8_to_s16(int16_t *output, const uint8_t *input, size_t count) {
	for(size_t i = 0; i < count; ++i) {
		const uint16_t widened = (uint16_t)(input[i] * 0x0101u);
		output[i] = (int16_t)widened;
	}
}
/* Convert unsigned 16-bit PCM to signed in place by flipping the sign bit. */
static void convert_u16_to_s16(int16_t *buffer, size_t count) {
	int16_t *p = buffer;
	const int16_t *const end = buffer + count;
	while(p < end) {
		*p = (int16_t)(*p ^ 0x8000);
		++p;
	}
}
/* Sign-extend 16-bit samples into a 32-bit buffer for the HDCD decoder. */
static void convert_s16_to_hdcd_input(int32_t *output, const int16_t *input, size_t count) {
	for(size_t i = 0; i < count; ++i)
		output[i] = (int32_t)input[i];
}
/*
 * Unpack little-endian signed 24-bit PCM into the top three bytes of
 * signed 32-bit samples (value scaled by 256, sign preserved).
 *
 * The shifts are done in uint32_t: the previous int-typed expression
 * made `byte << 24` undefined behavior whenever the top byte had its
 * high bit set (left shift into the sign bit of int, C11 6.5.7p4).
 */
static void convert_s24_to_s32(int32_t *output, const uint8_t *input, size_t count) {
	for(size_t i = 0; i < count; ++i) {
		const uint32_t sample = ((uint32_t)input[i * 3] << 8) |
		                        ((uint32_t)input[i * 3 + 1] << 16) |
		                        ((uint32_t)input[i * 3 + 2] << 24);
		output[i] = (int32_t)sample;
	}
}
/*
 * Unpack little-endian unsigned 24-bit PCM into signed 32-bit samples:
 * place the three bytes in the top of the word, then flip the sign bit
 * to recenter around zero.
 *
 * As with convert_s24_to_s32, the shifts are done in uint32_t to avoid
 * the undefined `byte << 24` on a promoted int when the top byte has
 * its high bit set (C11 6.5.7p4).
 */
static void convert_u24_to_s32(int32_t *output, const uint8_t *input, size_t count) {
	for(size_t i = 0; i < count; ++i) {
		const uint32_t sample = ((uint32_t)input[i * 3] << 8) |
		                        ((uint32_t)input[i * 3 + 1] << 16) |
		                        ((uint32_t)input[i * 3 + 2] << 24);
		output[i] = (int32_t)(sample ^ 0x80000000u);
	}
}
/* Convert unsigned 32-bit PCM to signed in place by flipping the sign bit. */
static void convert_u32_to_s32(int32_t *buffer, size_t count) {
	for(size_t i = 0; i < count; ++i) {
		buffer[i] = (int32_t)((uint32_t)buffer[i] ^ 0x80000000u);
	}
}
// Narrow double-precision samples to single precision via Accelerate.
static void convert_f64_to_f32(float *output, const double *input, size_t count) {
	vDSP_vdpsp(input, 1, output, 1, count);
}
// In-place big-endian -> little-endian byte swap for 16/24/32/64-bit
// sample buffers. `bitsPerSample` is rounded up to whole bytes; sizes
// other than 2/3/4/8 bytes are left untouched. `bytes` is total length.
// NOTE(review): the 2/4/8-byte cases cast the byte pointer to wider
// integer types — relies on the buffer being suitably aligned.
static void convert_be_to_le(uint8_t *buffer, size_t bitsPerSample, size_t bytes) {
size_t i;
bitsPerSample = (bitsPerSample + 7) / 8;
switch(bitsPerSample) {
case 2:
for(i = 0; i < bytes; i += 2) {
*(int16_t *)buffer = __builtin_bswap16(*(int16_t *)buffer);
buffer += 2;
}
break;
case 3: {
// 24-bit: widen into a 32-bit union, swap, shift back into 3 bytes.
union {
vDSP_int24 int24;
uint32_t int32;
} intval;
intval.int32 = 0;
for(i = 0; i < bytes; i += 3) {
intval.int24 = *(vDSP_int24 *)buffer;
intval.int32 = __builtin_bswap32(intval.int32 << 8);
*(vDSP_int24 *)buffer = intval.int24;
buffer += 3;
}
} break;
case 4:
for(i = 0; i < bytes; i += 4) {
*(uint32_t *)buffer = __builtin_bswap32(*(uint32_t *)buffer);
buffer += 4;
}
break;
case 8:
for(i = 0; i < bytes; i += 8) {
*(uint64_t *)buffer = __builtin_bswap64(*(uint64_t *)buffer);
buffer += 8;
}
break;
}
}
@implementation ChunkList
@synthesize listDuration;
@synthesize listDurationRatioed;
@synthesize maxDuration;
// Designated initializer: creates an empty chunk list that will hold at
// most `duration` seconds of audio (see -isFull). All busy flags and
// converter state start cleared; defaults observers register lazily.
- (id)initWithMaximumDuration:(double)duration {
self = [super init];
if(self) {
chunkList = [[NSMutableArray alloc] init];
listDuration = 0.0;
listDurationRatioed = 0.0;
maxDuration = duration;
inAdder = NO;
inRemover = NO;
inPeeker = NO;
inMerger = NO;
inConverter = NO;
stopping = NO;
formatRead = NO;
inputBuffer = NULL;
inputBufferSize = 0;
#if DSD_DECIMATE
dsd2pcm = NULL;
dsd2pcmCount = 0;
dsd2pcmLatency = 0;
#endif
observersRegistered = NO;
}
return self;
}
// Registers KVO on the shared user-defaults controller for the two
// playback tweaks this list honors (halveDSDVolume, enableHDCD).
// Idempotent: guarded by observersRegistered. Registration is deferred
// until DSD or HDCD content is actually seen (see callers).
- (void)addObservers {
if(!observersRegistered) {
halveDSDVolume = NO;
enableHDCD = NO;
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.halveDSDVolume" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kChunkListContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableHDCD" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kChunkListContext];
observersRegistered = YES;
}
}
// Teardown: raises `stopping` so all entry points bail out, spin-waits
// (500 us polls) until every busy flag clears, then unregisters KVO and
// frees the HDCD decoder, per-channel dsd2pcm states, and scratch buffer.
- (void)dealloc {
stopping = YES;
// Wait for any in-flight add/remove/peek/merge/convert to drain.
while(inAdder || inRemover || inPeeker || inMerger || inConverter) {
usleep(500);
}
[self removeObservers];
if(hdcd_decoder) {
free(hdcd_decoder);
hdcd_decoder = NULL;
}
#if DSD_DECIMATE
if(dsd2pcm && dsd2pcmCount) {
for(size_t i = 0; i < dsd2pcmCount; ++i) {
dsd2pcm_free(dsd2pcm[i]);
dsd2pcm[i] = NULL;
}
free(dsd2pcm);
dsd2pcm = NULL;
}
#endif
if(tempData) {
free(tempData);
}
}
// KVO callback: mirrors the two observed user defaults into ivars so
// the conversion path can read them without hitting NSUserDefaults.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
// Not ours? Pass up the responder chain.
if(context != kChunkListContext) {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
return;
}
if([keyPath isEqualToString:@"values.halveDSDVolume"]) {
halveDSDVolume = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"halveDSDVolume"];
} else if([keyPath isEqualToString:@"values.enableHDCD"]) {
enableHDCD = [[[NSUserDefaultsController sharedUserDefaultsController] defaults] boolForKey:@"enableHDCD"];
}
}
// Empties the list and zeroes both duration accumulators.
- (void)reset {
@synchronized(chunkList) {
[chunkList removeAllObjects];
listDuration = 0.0;
listDurationRatioed = 0.0;
}
}
// YES when no chunks are queued.
- (BOOL)isEmpty {
@synchronized(chunkList) {
return [chunkList count] == 0;
}
}
// YES when buffered duration is within 1 ms of capacity.
- (BOOL)isFull {
@synchronized (chunkList) {
return (maxDuration - listDuration) < 0.001;
}
}
// Appends a chunk and accounts its duration (plain and tempo-ratioed).
// No-op while tearing down. `inAdder` lets -dealloc wait us out.
- (void)addChunk:(AudioChunk *)chunk {
if(stopping) return;
inAdder = YES;
const double chunkDuration = [chunk duration];
const double chunkDurationRatioed = [chunk durationRatioed];
@synchronized(chunkList) {
[chunkList addObject:chunk];
listDuration += chunkDuration;
listDurationRatioed += chunkDurationRatioed;
}
inAdder = NO;
}
// Removes up to `maxFrameCount` frames from the head of the list in the
// chunk's native format. Returns the whole head chunk if it fits;
// otherwise splits it, returning a new chunk carrying the removed
// samples plus the head chunk's format/config/timestamp metadata.
// Returns an empty chunk when stopping or when the list is empty.
- (AudioChunk *)removeSamples:(size_t)maxFrameCount {
if(stopping) {
return [[AudioChunk alloc] init];
}
@synchronized(chunkList) {
inRemover = YES;
if(![chunkList count]) {
inRemover = NO;
return [[AudioChunk alloc] init];
}
AudioChunk *chunk = [chunkList objectAtIndex:0];
// Whole chunk fits under the cap: pop it as-is.
if([chunk frameCount] <= maxFrameCount) {
[chunkList removeObjectAtIndex:0];
listDuration -= [chunk duration];
listDurationRatioed -= [chunk durationRatioed];
inRemover = NO;
return chunk;
}
// Partial removal: capture the timestamp before the head advances.
double streamTimestamp = [chunk streamTimestamp];
NSData *removedData = [chunk removeSamples:maxFrameCount];
AudioChunk *ret = [[AudioChunk alloc] init];
[ret setFormat:[chunk format]];
[ret setChannelConfig:[chunk channelConfig]];
[ret setLossless:[chunk lossless]];
[ret setStreamTimestamp:streamTimestamp];
[ret setStreamTimeRatio:[chunk streamTimeRatio]];
[ret assignData:removedData];
listDuration -= [ret duration];
listDurationRatioed -= [ret durationRatioed];
inRemover = NO;
return ret;
}
}
// Same head-removal logic as -removeSamples:, but the result is passed
// through -convertChunk: so callers always receive packed float32.
// In the non-decimating DSD build, frame counts are in 8-sample bytes,
// so the cap is divided by 8 for 1-bit chunks.
- (AudioChunk *)removeSamplesAsFloat32:(size_t)maxFrameCount {
if(stopping) {
return [[AudioChunk alloc] init];
}
@synchronized (chunkList) {
inRemover = YES;
if(![chunkList count]) {
inRemover = NO;
return [[AudioChunk alloc] init];
}
AudioChunk *chunk = [chunkList objectAtIndex:0];
#if !DSD_DECIMATE
AudioStreamBasicDescription asbd = [chunk format];
if(asbd.mBitsPerChannel == 1) {
maxFrameCount /= 8;
}
#endif
if([chunk frameCount] <= maxFrameCount) {
[chunkList removeObjectAtIndex:0];
listDuration -= [chunk duration];
listDurationRatioed -= [chunk durationRatioed];
inRemover = NO;
return [self convertChunk:chunk];
}
double streamTimestamp = [chunk streamTimestamp];
NSData *removedData = [chunk removeSamples:maxFrameCount];
AudioChunk *ret = [[AudioChunk alloc] init];
[ret setFormat:[chunk format]];
[ret setChannelConfig:[chunk channelConfig]];
[ret setLossless:[chunk lossless]];
[ret setStreamTimestamp:streamTimestamp];
[ret setStreamTimeRatio:[chunk streamTimeRatio]];
[ret assignData:removedData];
listDuration -= [ret duration];
listDurationRatioed -= [ret durationRatioed];
inRemover = NO;
return [self convertChunk:ret];
}
}
// Accumulates up to `maxFrameCount` frames into one chunk, merging
// consecutive same-format chunks. `block` is invoked whenever data is
// not yet available; a YES return from it aborts the wait. The merge
// stops early at a format or channel-config change so a single output
// chunk never mixes formats. Returns an empty chunk on stop/abort or
// if nothing was gathered.
- (AudioChunk *)removeAndMergeSamples:(size_t)maxFrameCount callBlock:(BOOL(NS_NOESCAPE ^ _Nonnull)(void))block {
if(stopping) {
return [[AudioChunk alloc] init];
}
inMerger = YES;
BOOL formatSet = NO;
AudioStreamBasicDescription currentFormat;
uint32_t currentChannelConfig = 0;
double streamTimestamp = 0.0;
double streamTimeRatio = 1.0;
BOOL blocked = NO;
// Wait for a head chunk to peek the starting timestamp from.
while(![self peekTimestamp:&streamTimestamp timeRatio:&streamTimeRatio]) {
if((blocked = block())) {
break;
}
}
if(blocked) {
inMerger = NO;
return [[AudioChunk alloc] init];
}
AudioChunk *chunk;
size_t totalFrameCount = 0;
AudioChunk *outputChunk = [[AudioChunk alloc] init];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:streamTimeRatio];
while(!stopping && totalFrameCount < maxFrameCount) {
AudioStreamBasicDescription newFormat;
uint32_t newChannelConfig;
if(![self peekFormat:&newFormat channelConfig:&newChannelConfig]) {
if(block()) {
break;
}
continue;
}
// Format changed mid-merge: stop and leave the rest for next call.
if(formatSet &&
(memcmp(&newFormat, &currentFormat, sizeof(newFormat)) != 0 ||
newChannelConfig != currentChannelConfig)) {
break;
} else if(!formatSet) {
[outputChunk setFormat:newFormat];
[outputChunk setChannelConfig:newChannelConfig];
currentFormat = newFormat;
currentChannelConfig = newChannelConfig;
formatSet = YES;
}
chunk = [self removeSamples:maxFrameCount - totalFrameCount];
if(!chunk || ![chunk frameCount]) {
if(block()) {
break;
}
continue;
}
// Any HDCD-flagged input marks the merged output as HDCD.
if([chunk isHDCD]) {
[outputChunk setHDCD];
}
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
[outputChunk assignData:sampleData];
totalFrameCount += frameCount;
}
if(!totalFrameCount) {
inMerger = NO;
return [[AudioChunk alloc] init];
}
inMerger = NO;
return outputChunk;
}
// Merge variant that converts the merged result to packed float32.
- (AudioChunk *)removeAndMergeSamplesAsFloat32:(size_t)maxFrameCount callBlock:(BOOL(NS_NOESCAPE ^ _Nonnull)(void))block {
AudioChunk *ret = [self removeAndMergeSamples:maxFrameCount callBlock:block];
return [self convertChunk:ret];
}
- (AudioChunk *)convertChunk:(AudioChunk *)inChunk {
if(stopping) return [[AudioChunk alloc] init];
inConverter = YES;
AudioStreamBasicDescription chunkFormat = [inChunk format];
if(![inChunk frameCount] ||
(chunkFormat.mFormatFlags == kAudioFormatFlagsNativeFloatPacked &&
chunkFormat.mBitsPerChannel == 32)) {
inConverter = NO;
return inChunk;
}
uint32_t chunkConfig = [inChunk channelConfig];
BOOL chunkLossless = [inChunk lossless];
if(!formatRead || memcmp(&chunkFormat, &inputFormat, sizeof(chunkFormat)) != 0 ||
chunkConfig != inputChannelConfig || chunkLossless != inputLossless) {
formatRead = YES;
inputFormat = chunkFormat;
inputChannelConfig = chunkConfig;
inputLossless = chunkLossless;
BOOL isFloat = !!(inputFormat.mFormatFlags & kAudioFormatFlagIsFloat);
if((!isFloat && !(inputFormat.mBitsPerChannel >= 1 && inputFormat.mBitsPerChannel <= 32)) || (isFloat && !(inputFormat.mBitsPerChannel == 32 || inputFormat.mBitsPerChannel == 64))) {
inConverter = NO;
return [[AudioChunk alloc] init];
}
// These are really placeholders, as we're doing everything internally now
if(inputLossless &&
inputFormat.mBitsPerChannel == 16 &&
inputFormat.mChannelsPerFrame == 2 &&
inputFormat.mSampleRate == 44100) {
// possibly HDCD, run through decoder
[self addObservers];
if(hdcd_decoder) {
free(hdcd_decoder);
hdcd_decoder = NULL;
}
hdcd_decoder = calloc(1, sizeof(hdcd_state_stereo_t));
hdcd_reset_stereo((hdcd_state_stereo_t *)hdcd_decoder, 44100);
}
floatFormat = inputFormat;
floatFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
floatFormat.mBitsPerChannel = 32;
floatFormat.mBytesPerFrame = (32 / 8) * floatFormat.mChannelsPerFrame;
floatFormat.mBytesPerPacket = floatFormat.mBytesPerFrame * floatFormat.mFramesPerPacket;
#if DSD_DECIMATE
if(inputFormat.mBitsPerChannel == 1) {
// Decimate this for speed
floatFormat.mSampleRate *= 1.0 / 8.0;
if(dsd2pcm && dsd2pcmCount) {
for(size_t i = 0; i < dsd2pcmCount; ++i) {
dsd2pcm_free(dsd2pcm[i]);
dsd2pcm[i] = NULL;
}
free(dsd2pcm);
dsd2pcm = NULL;
}
dsd2pcmCount = floatFormat.mChannelsPerFrame;
dsd2pcm = (void **)calloc(dsd2pcmCount, sizeof(void *));
dsd2pcm[0] = dsd2pcm_alloc();
dsd2pcmLatency = dsd2pcm_latency(dsd2pcm[0]);
for(size_t i = 1; i < dsd2pcmCount; ++i) {
dsd2pcm[i] = dsd2pcm_dup(dsd2pcm[0]);
}
}
#endif
}
NSUInteger samplesRead = [inChunk frameCount];
if(!samplesRead) {
inConverter = NO;
return [[AudioChunk alloc] init];
}
BOOL isFloat = !!(inputFormat.mFormatFlags & kAudioFormatFlagIsFloat);
BOOL isUnsigned = !isFloat && !(inputFormat.mFormatFlags & kAudioFormatFlagIsSignedInteger);
size_t bitsPerSample = inputFormat.mBitsPerChannel;
BOOL isBigEndian = !!(inputFormat.mFormatFlags & kAudioFormatFlagIsBigEndian);
double streamTimestamp = [inChunk streamTimestamp];
NSData *inputData = [inChunk removeSamples:samplesRead];
#if DSD_DECIMATE
const size_t sizeFactor = 3;
#else
const size_t sizeFactor = (bitsPerSample == 1) ? 9 : 3;
#endif
size_t newSize = samplesRead * floatFormat.mBytesPerPacket * sizeFactor + 64;
if(!tempData || tempDataSize < newSize)
tempData = realloc(tempData, tempDataSize = newSize); // Either two buffers plus padding, and/or double precision in case of endian flip
// double buffer system, with alignment
const size_t buffer_adder_base = (samplesRead * floatFormat.mBytesPerPacket + 31) & ~31;
NSUInteger bytesReadFromInput = samplesRead * inputFormat.mBytesPerPacket;
uint8_t *inputBuffer = (uint8_t *)[inputData bytes];
BOOL inputChanged = NO;
BOOL hdcdSustained = NO;
if(bytesReadFromInput && isBigEndian) {
// Time for endian swap!
memcpy(&tempData[0], [inputData bytes], bytesReadFromInput);
convert_be_to_le((uint8_t *)(&tempData[0]), inputFormat.mBitsPerChannel, bytesReadFromInput);
inputBuffer = &tempData[0];
inputChanged = YES;
}
if(bytesReadFromInput && isFloat && bitsPerSample == 64) {
// Time for precision loss from weird inputs
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base * 2 : 0;
samplesRead = bytesReadFromInput / sizeof(double);
convert_f64_to_f32((float *)(&tempData[buffer_adder]), (const double *)inputBuffer, samplesRead);
bytesReadFromInput = samplesRead * sizeof(float);
inputBuffer = &tempData[buffer_adder];
inputChanged = YES;
bitsPerSample = 32;
}
if(bytesReadFromInput && !isFloat) {
float gain = 1.0;
if(bitsPerSample == 1) {
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
samplesRead = bytesReadFromInput / inputFormat.mBytesPerPacket;
convert_dsd_to_f32((float *)(&tempData[buffer_adder]), (const uint8_t *)inputBuffer, samplesRead, inputFormat.mChannelsPerFrame
#if DSD_DECIMATE
,
dsd2pcm
#endif
);
#if !DSD_DECIMATE
samplesRead *= 8;
#endif
bitsPerSample = 32;
bytesReadFromInput = samplesRead * floatFormat.mBytesPerPacket;
isFloat = YES;
inputBuffer = &tempData[buffer_adder];
inputChanged = YES;
[self addObservers];
#if DSD_DECIMATE
if(halveDSDVolume) {
float scaleFactor = 2.0f;
vDSP_vsdiv((float *)inputBuffer, 1, &scaleFactor, (float *)inputBuffer, 1, bytesReadFromInput / sizeof(float));
}
#else
if(!halveDSDVolume) {
float scaleFactor = 2.0f;
vDSP_vsmul((float *)inputBuffer, 1, &scaleFactor, (float *)inputBuffer, 1, bytesReadFromInput / sizeof(float));
}
#endif
} else if(bitsPerSample <= 8) {
samplesRead = bytesReadFromInput;
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
if(!isUnsigned)
convert_s8_to_s16((int16_t *)(&tempData[buffer_adder]), (const uint8_t *)inputBuffer, samplesRead);
else
convert_u8_to_s16((int16_t *)(&tempData[buffer_adder]), (const uint8_t *)inputBuffer, samplesRead);
bitsPerSample = 16;
bytesReadFromInput = samplesRead * 2;
isUnsigned = NO;
inputBuffer = &tempData[buffer_adder];
inputChanged = YES;
}
if(hdcd_decoder) { // implied bits per sample is 16, produces 32 bit int scale
samplesRead = bytesReadFromInput / 2;
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
if(isUnsigned) {
if(!inputChanged) {
memcpy(&tempData[buffer_adder], inputBuffer, samplesRead * 2);
inputBuffer = &tempData[buffer_adder];
inputChanged = YES;
}
convert_u16_to_s16((int16_t *)inputBuffer, samplesRead);
isUnsigned = NO;
}
const size_t buffer_adder2 = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
convert_s16_to_hdcd_input((int32_t *)(&tempData[buffer_adder2]), (int16_t *)inputBuffer, samplesRead);
hdcd_process_stereo((hdcd_state_stereo_t *)hdcd_decoder, (int32_t *)(&tempData[buffer_adder2]), (int)(samplesRead / 2));
if(((hdcd_state_stereo_t *)hdcd_decoder)->channel[0].sustain &&
((hdcd_state_stereo_t *)hdcd_decoder)->channel[1].sustain) {
hdcdSustained = YES;
}
if(enableHDCD) {
gain = 2.0;
bitsPerSample = 32;
bytesReadFromInput = samplesRead * 4;
isUnsigned = NO;
inputBuffer = &tempData[buffer_adder2];
inputChanged = YES;
} else {
// Discard the output of the decoder and process again
goto process16bit;
}
} else if(bitsPerSample <= 16) {
process16bit:
samplesRead = bytesReadFromInput / 2;
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
if(isUnsigned) {
if(!inputChanged) {
memcpy(&tempData[buffer_adder], inputBuffer, samplesRead * 2);
inputBuffer = &tempData[buffer_adder];
inputChanged = YES;
}
convert_u16_to_s16((int16_t *)inputBuffer, samplesRead);
}
const size_t buffer_adder2 = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
vDSP_vflt16((const short *)inputBuffer, 1, (float *)(&tempData[buffer_adder2]), 1, samplesRead);
float scale = 1ULL << 15;
vDSP_vsdiv((const float *)(&tempData[buffer_adder2]), 1, &scale, (float *)(&tempData[buffer_adder2]), 1, samplesRead);
bitsPerSample = 32;
bytesReadFromInput = samplesRead * sizeof(float);
isUnsigned = NO;
isFloat = YES;
inputBuffer = &tempData[buffer_adder2];
inputChanged = YES;
} else if(bitsPerSample <= 24) {
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0;
samplesRead = bytesReadFromInput / 3;
if(isUnsigned)
convert_u24_to_s32((int32_t *)(&tempData[buffer_adder]), (uint8_t *)inputBuffer, samplesRead);
else
convert_s24_to_s32((int32_t *)(&tempData[buffer_adder]), (uint8_t *)inputBuffer, samplesRead);
bitsPerSample = 32;
bytesReadFromInput = samplesRead * 4;
isUnsigned = NO;
inputBuffer = &tempData[buffer_adder];
inputChanged = YES;
}
if(!isFloat && bitsPerSample <= 32) {
samplesRead = bytesReadFromInput / 4;
if(isUnsigned) {
if(!inputChanged) {
memcpy(&tempData[0], inputBuffer, bytesReadFromInput);
inputBuffer = &tempData[0];
}
convert_u32_to_s32((int32_t *)inputBuffer, samplesRead);
}
const size_t buffer_adder = (inputBuffer == &tempData[0]) ? buffer_adder_base : 0; // vDSP functions expect aligned to four elements
vDSP_vflt32((const int *)inputBuffer, 1, (float *)(&tempData[buffer_adder]), 1, samplesRead);
float scale = (1ULL << 31) / gain;
vDSP_vsdiv((const float *)(&tempData[buffer_adder]), 1, &scale, (float *)(&tempData[buffer_adder]), 1, samplesRead);
bitsPerSample = 32;
bytesReadFromInput = samplesRead * sizeof(float);
isUnsigned = NO;
isFloat = YES;
inputBuffer = &tempData[buffer_adder];
}
#ifdef _DEBUG
[BadSampleCleaner cleanSamples:(float *)inputBuffer
amount:bytesReadFromInput / sizeof(float)
location:@"post int to float conversion"];
#endif
}
AudioChunk *outChunk = [[AudioChunk alloc] init];
[outChunk setFormat:floatFormat];
[outChunk setChannelConfig:inputChannelConfig];
[outChunk setLossless:inputLossless];
[outChunk setStreamTimestamp:streamTimestamp];
[outChunk setStreamTimeRatio:[inChunk streamTimeRatio]];
if(hdcdSustained) [outChunk setHDCD];
[outChunk assignSamples:inputBuffer frameCount:bytesReadFromInput / floatFormat.mBytesPerPacket];
inConverter = NO;
return outChunk;
}
// Non-destructively reports the sample format and channel layout of the next
// queued chunk. Returns NO when the node is stopping or the queue is empty;
// the out-parameters are only written on success.
- (BOOL)peekFormat:(AudioStreamBasicDescription *)format channelConfig:(uint32_t *)config {
	if(stopping) return NO;
	BOOL gotChunk = NO;
	inPeeker = YES;
	@synchronized(chunkList) {
		if([chunkList count] > 0) {
			AudioChunk *head = [chunkList objectAtIndex:0];
			*format = [head format];
			*config = [head channelConfig];
			gotChunk = YES;
		}
	}
	inPeeker = NO;
	return gotChunk;
}
// Non-destructively reports the stream timestamp and time ratio of the next
// queued chunk. On an empty queue (or while stopping after entry) the outputs
// are reset to 0.0 / 1.0 and NO is returned, matching the original contract.
- (BOOL)peekTimestamp:(double *)timestamp timeRatio:(double *)timeRatio {
	if(stopping) return NO;
	inPeeker = YES;
	BOOL gotChunk = NO;
	@synchronized(chunkList) {
		if([chunkList count] > 0) {
			AudioChunk *head = [chunkList objectAtIndex:0];
			*timestamp = [head streamTimestamp];
			*timeRatio = [head streamTimeRatio];
			gotChunk = YES;
		}
	}
	if(!gotChunk) {
		*timestamp = 0.0;
		*timeRatio = 1.0;
	}
	inPeeker = NO;
	return gotChunk;
}
@end

44
Audio/Chain/Converter.h Normal file
View file

@ -0,0 +1,44 @@
//
// ConverterNode.h
// Cog
//
// Created by Vincent Spader on 8/2/05.
// Copyright 2005 Vincent Spader. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import <CoreAudio/AudioHardware.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
// Wraps a Core Audio AudioConverterRef to convert PCM between an input and an
// output AudioStreamBasicDescription. Owns an internal output buffer which
// callers read back via -outputBuffer / -outputBufferSize after -convert:amount:.
@interface Converter : NSObject
{
AudioConverterRef converter; // Core Audio converter instance
void *outputBuffer; // converted data, owned by this object
int outputBufferSize; // valid bytes in outputBuffer after the last convert
//Temporary for callback use
void *inputBuffer; // read cursor into the caller-supplied input
int inputBufferSize; // input bytes still unconsumed by the callback
BOOL needsReset; // set by the input callback when input runs dry
//end
int outputSize; // allocated capacity of outputBuffer, in bytes
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription outputFormat;
}
// Pointer to the most recently converted data (valid until the next convert/cleanUp).
- (void *)outputBuffer;
// Number of valid bytes currently in the output buffer.
- (int)outputBufferSize;
// Creates the converter for the given formats; mono input is mapped to both channels.
- (void)setupWithInputFormat:(AudioStreamBasicDescription)inputFormat outputFormat:(AudioStreamBasicDescription)outputFormat;
// Frees the output buffer and disposes of the converter.
- (void)cleanUp;
// Flushes the converter's internal state (e.g. after a seek).
- (void)reset;
//Returns the amount actually read from input
- (int)convert:(void *)input amount:(int)inputSize;
@end

167
Audio/Chain/Converter.m Normal file
View file

@ -0,0 +1,167 @@
//
// ConverterNode.m
// Cog
//
// Created by Vincent Spader on 8/2/05.
// Copyright 2005 Vincent Spader. All rights reserved.
//
#import "Converter.h"
#import "Node.h"
// Debug helper: dump an AudioStreamBasicDescription to stdout.
// Fixes: the original passed the UInt32 fields to %ld/%lX, a format-specifier
// mismatch (undefined behavior on LP64 builds), and printed mFormatID through
// %s even though the 4-byte FourCC is not NUL-terminated. The integer fields
// are now cast explicitly and the FourCC is emitted character by character.
void PrintStreamDesc (AudioStreamBasicDescription *inDesc)
{
	if (!inDesc) {
		printf ("Can't print a NULL desc!\n");
		return;
	}
	// FourCC bytes in native memory order, same order the original printed them.
	const char *fcc = (const char *)&inDesc->mFormatID;
	printf ("- - - - - - - - - - - - - - - - - - - -\n");
	printf (" Sample Rate:%f\n", inDesc->mSampleRate);
	printf (" Format ID:%c%c%c%c\n", fcc[0], fcc[1], fcc[2], fcc[3]);
	printf (" Format Flags:%X\n", (unsigned int)inDesc->mFormatFlags);
	printf (" Bytes per Packet:%u\n", (unsigned int)inDesc->mBytesPerPacket);
	printf (" Frames per Packet:%u\n", (unsigned int)inDesc->mFramesPerPacket);
	printf (" Bytes per Frame:%u\n", (unsigned int)inDesc->mBytesPerFrame);
	printf (" Channels per Frame:%u\n", (unsigned int)inDesc->mChannelsPerFrame);
	printf (" Bits per Channel:%u\n", (unsigned int)inDesc->mBitsPerChannel);
	printf ("- - - - - - - - - - - - - - - - - - - -\n");
}
@implementation Converter
//called from the complexfill when the audio is converted...good clean fun
// AudioConverter input callback: hands the converter as much of the
// caller-supplied input buffer as it asks for, then advances the read cursor.
// When input is exhausted it reports zero packets (which makes
// AudioConverterFillComplexBuffer return) and flags that the converter's
// internal state should be reset by the caller.
static OSStatus ACInputProc(AudioConverterRef inAudioConverter, UInt32* ioNumberDataPackets, AudioBufferList* ioData, AudioStreamPacketDescription** outDataPacketDescription, void* inUserData)
{
Converter *converter = (Converter *)inUserData;
OSStatus err = noErr;
if (converter->inputBufferSize > 0) {
// Clamp the request to the input bytes we actually have left.
int amountConverted = *ioNumberDataPackets * converter->inputFormat.mBytesPerPacket;
if (amountConverted > converter->inputBufferSize) {
amountConverted = converter->inputBufferSize;
}
ioData->mBuffers[0].mData = converter->inputBuffer;
ioData->mBuffers[0].mDataByteSize = amountConverted;
ioData->mBuffers[0].mNumberChannels = (converter->inputFormat.mChannelsPerFrame);
ioData->mNumberBuffers = 1;
*ioNumberDataPackets = amountConverted / converter->inputFormat.mBytesPerPacket;
// Advance the cursor past the bytes just handed out.
converter->inputBufferSize -= amountConverted;
converter->inputBuffer = ((char *)converter->inputBuffer) + amountConverted;
}
else {
// Out of input: report zero packets so the fill call returns.
ioData->mBuffers[0].mData = NULL;
ioData->mBuffers[0].mDataByteSize = 0;
ioData->mNumberBuffers = 1;
*ioNumberDataPackets = 0;
//Reset the converter's internal buffers.
converter->needsReset = YES;
}
return err;
}
// Flush the AudioConverter's internal buffers (used after EOS or a seek).
- (void)reset
{
AudioConverterReset(converter);
}
// Convert up to inputSize bytes of input audio. Converted data is left in
// outputBuffer with its length in outputBufferSize. Returns the number of
// input bytes actually consumed (inputSize unchanged if it was <= 0).
- (int)convert:(void *)input amount:(int)inputSize
{
AudioBufferList ioData;
UInt32 ioNumberFrames;
if (inputSize <= 0) {
// Nothing to do; propagate the (possibly negative) size unchanged.
outputBufferSize = inputSize;
return inputSize;
}
OSStatus err;
needsReset = NO;
ioNumberFrames = inputSize/inputFormat.mBytesPerFrame;
ioData.mBuffers[0].mData = outputBuffer;
ioData.mBuffers[0].mDataByteSize = outputSize;
ioData.mBuffers[0].mNumberChannels = outputFormat.mChannelsPerFrame;
ioData.mNumberBuffers = 1;
// ACInputProc consumes these two as the converter pulls input.
inputBuffer = input;
inputBufferSize = inputSize;
err = AudioConverterFillComplexBuffer(converter, ACInputProc, self, &ioNumberFrames, &ioData, NULL);
if (err != noErr || needsReset) //It returns insz at EOS at times...so run it again to make sure all data is converted
{
[self reset];
}
// The fill call rewrote mDataByteSize with the bytes it produced.
outputBufferSize = ioData.mBuffers[0].mDataByteSize;
return inputSize - inputBufferSize;
}
// Create the AudioConverter for the given input/output formats and size the
// internal output buffer. Mono input is mapped onto both output channels.
- (void)setupWithInputFormat:(AudioStreamBasicDescription)inf outputFormat:(AudioStreamBasicDescription)outf
{
//Make the converter
OSStatus stat = noErr;
inputFormat = inf;
outputFormat = outf;
stat = AudioConverterNew ( &inputFormat, &outputFormat, &converter);
if (stat != noErr)
{
NSLog(@"Error creating converter %i", stat);
}
if (inputFormat.mChannelsPerFrame == 1)
{
// Duplicate the single source channel into both output channels.
SInt32 channelMap[2] = { 0, 0 };
stat = AudioConverterSetProperty(converter,kAudioConverterChannelMap,sizeof(channelMap),channelMap);
if (stat != noErr)
{
NSLog(@"Error mapping channels %i", stat);
}
}
// Start from CHUNK_SIZE, then let Core Audio compute the real requirement.
// NOTE(review): the GetProperty result is ignored; on failure outputSize
// keeps the CHUNK_SIZE default — confirm that is intentional.
outputSize = CHUNK_SIZE;
UInt32 dataSize = sizeof(outputSize);
AudioConverterGetProperty(converter,
kAudioConverterPropertyCalculateOutputBufferSize,
&dataSize,
(void*)&outputSize);
if (outputBuffer)
{
free(outputBuffer);
}
outputBuffer = malloc(outputSize);
//PrintStreamDesc(&inf);
//PrintStreamDesc(&outf);
}
// Accessor for the converted-data buffer; valid until the next convert or cleanUp.
- (void *)outputBuffer
{
return outputBuffer;
}
// Number of valid bytes currently held in the output buffer.
- (int)outputBufferSize
{
return outputBufferSize;
}
// Release the output buffer and dispose of the AudioConverter.
// Fixes: the converter reference is now cleared after disposal (and only
// disposed when non-NULL), making cleanUp safe to call more than once; the
// redundant NULL guard around free() was dropped (free(NULL) is a no-op).
- (void)cleanUp
{
	free(outputBuffer);
	outputBuffer = NULL;
	if (converter) {
		AudioConverterDispose(converter);
		converter = NULL;
	}
}
@end

View file

@ -1,91 +0,0 @@
//
// ConverterNode.h
// Cog
//
// Created by Zaphod Beeblebrox on 8/2/05.
// Copyright 2005 __MyCompanyName__. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <CoreAudio/AudioHardware.h>
#import <CogAudio/soxr.h>
#import <CogAudio/Node.h>
// Buffer-chain node that converts incoming PCM of any supported integer/float
// layout to float, applies ReplayGain/volume scaling, and resamples to the
// output sample rate with libsoxr (using LPC extrapolation to hide resampler
// edge latency).
@interface ConverterNode : Node {
NSDictionary *rgInfo; // ReplayGain tags driving volume scaling
soxr_t soxr; // resampler instance (NULL when bypassed)
void *inputBuffer; // accumulated float input at source rate
size_t inputBufferSize;
size_t inpSize, inpOffset; // valid bytes / consumed bytes in inputBuffer
double streamTimestamp, streamTimeRatio;
BOOL stopping;
BOOL convertEntered; // re-entrancy guard for convert vs cleanUp
BOOL paused;
BOOL skipResampler; // YES when input and output rates match
unsigned int PRIME_LEN_; // samples used to prime the LPC extrapolator
unsigned int N_samples_to_add_; // extrapolated samples prepended/appended
unsigned int N_samples_to_drop_; // resampled samples to discard as latency
BOOL is_preextrapolated_;
int is_postextrapolated_; // 0=no, 1=pending, 2=done, 3=skipped
int latencyEaten; // leading samples still to drop
int latencyEatenPost; // trailing flush samples still to drop
double sampleRatio; // output rate / input rate
BOOL observersAdded;
float volumeScale; // linear gain from ReplayGain settings
void *floatBuffer; // resampled float output buffer
size_t floatBufferSize;
void *extrapolateBuffer; // scratch space for the LPC extrapolator
size_t extrapolateBufferSize;
BOOL rememberedLossless;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription floatFormat; // inputFormat converted to float32
AudioStreamBasicDescription outputFormat;
uint32_t inputChannelConfig;
BOOL streamFormatChanged; // set when a new format is seen mid-stream
AudioStreamBasicDescription newInputFormat;
uint32_t newInputChannelConfig;
}
@property AudioStreamBasicDescription inputFormat;
- (id)initWithController:(id)c previous:(id)p;
// Configures conversion/resampling; returns NO for unsupported sample layouts.
- (BOOL)setupWithInputFormat:(AudioStreamBasicDescription)inputFormat withInputConfig:(uint32_t)inputConfig outputFormat:(AudioStreamBasicDescription)outputFormat isLossless:(BOOL)lossless;
- (void)cleanUp;
- (BOOL)paused;
// Node worker loop: pulls chunks, converts, and pushes downstream.
- (void)process;
// Produces one converted chunk, or nil when no data is available yet.
- (AudioChunk *)convert;
- (void)setRGInfo:(NSDictionary *)rgi;
- (void)setOutputFormat:(AudioStreamBasicDescription)outputFormat;
// Tears down and re-runs setup when the source format changes mid-stream.
- (void)inputFormatDidChange:(AudioStreamBasicDescription)format inputConfig:(uint32_t)inputConfig;
// Recomputes volumeScale from rgInfo and the user's scaling preference.
- (void)refreshVolumeScaling;
@end

View file

@ -1,564 +0,0 @@
//
// ConverterNode.m
// Cog
//
// Created by Zaphod Beeblebrox on 8/2/05.
// Copyright 2005 __MyCompanyName__. All rights reserved.
//
#import <Accelerate/Accelerate.h>
#import <Foundation/Foundation.h>
#import "ConverterNode.h"
#import "BufferChain.h"
#import "OutputNode.h"
#import "Logging.h"
#import "lpc.h"
#import "util.h"
#ifdef _DEBUG
#import "BadSampleCleaner.h"
#endif
// Log an AudioStreamBasicDescription via DLog; tolerates a NULL pointer.
void PrintStreamDesc(AudioStreamBasicDescription *inDesc) {
	if(!inDesc) {
		DLog(@"Can't print a NULL desc!\n");
		return;
	}
	AudioStreamBasicDescription desc = *inDesc;
	DLog(@"- - - - - - - - - - - - - - - - - - - -\n");
	DLog(@" Sample Rate:%f\n", desc.mSampleRate);
	DLog(@" Format ID:%s\n", (char *)&desc.mFormatID);
	DLog(@" Format Flags:%X\n", desc.mFormatFlags);
	DLog(@" Bytes per Packet:%d\n", desc.mBytesPerPacket);
	DLog(@" Frames per Packet:%d\n", desc.mFramesPerPacket);
	DLog(@" Bytes per Frame:%d\n", desc.mBytesPerFrame);
	DLog(@" Channels per Frame:%d\n", desc.mChannelsPerFrame);
	DLog(@" Bits per Channel:%d\n", desc.mBitsPerChannel);
	DLog(@"- - - - - - - - - - - - - - - - - - - -\n");
}
@implementation ConverterNode
static void *kConverterNodeContext = &kConverterNodeContext;
@synthesize inputFormat;
// Designated initializer: chains to Node's init and zeroes all conversion
// state; actual converter/resampler setup happens later in setup.
- (id)initWithController:(id)c previous:(id)p {
	self = [super initWithController:c previous:p];
	if(self) {
		// Flags first.
		stopping = NO;
		convertEntered = NO;
		paused = NO;
		skipResampler = YES;
		// No ReplayGain info and no resampler yet.
		rgInfo = nil;
		soxr = 0;
		// All scratch buffers start unallocated.
		inputBuffer = NULL;
		inputBufferSize = 0;
		floatBuffer = NULL;
		floatBufferSize = 0;
		extrapolateBuffer = NULL;
		extrapolateBufferSize = 0;
#ifdef LOG_CHAINS
		[self initLogFiles];
#endif
	}
	return self;
}
// Register (once) for changes to the user's volume scaling preference.
- (void)addObservers {
	if(observersAdded) return;
	[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.volumeScaling" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kConverterNodeContext];
	observersAdded = YES;
}
// Unregister the volume scaling observer if it was added.
- (void)removeObservers {
	if(!observersAdded) return;
	[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.volumeScaling" context:kConverterNodeContext];
	observersAdded = NO;
}
// Multiply `count` float samples in place by `volume`. A scalar prologue
// handles any leading samples before a 16-byte boundary, then the remainder
// is processed with vDSP. No-op when volume is exactly 1.0.
void scale_by_volume(float *buffer, size_t count, float volume) {
if(volume != 1.0) {
// Bytes by which the pointer is off a 16-byte boundary.
size_t unaligned = (uintptr_t)buffer & 15;
if(unaligned) {
// Scale samples one by one until aligned (or input exhausted).
size_t count_unaligned = (16 - unaligned) / sizeof(float);
while(count > 0 && count_unaligned > 0) {
*buffer++ *= volume;
count_unaligned--;
count--;
}
}
if(count) {
vDSP_vsmul(buffer, 1, &volume, buffer, 1, count);
}
}
}
// Whether the node's processing loop is currently paused.
- (BOOL)paused {
return paused;
}
// Worker loop: repeatedly converts input and writes chunks downstream until
// told to stop or the upstream node drains. Also handles mid-stream format
// changes by re-running setup with the newly observed format.
- (void)process {
// Removed endOfStream check from here, since we want to be able to flush the converter
// when the end of stream is reached. Convert function instead processes what it can,
// and returns 0 samples when it has nothing more to process at the end of stream.
while([self shouldContinue] == YES) {
while(paused) {
usleep(500);
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
// No output: either upstream is done, or we need to wait/retry.
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
endOfStream = YES;
break;
}
if(paused || !streamFormatChanged) {
continue;
}
usleep(500);
} else {
[self writeChunk:chunk];
chunk = nil;
}
if(streamFormatChanged) {
// Rebuild the conversion pipeline for the new input format.
[self cleanUp];
[self setupWithInputFormat:newInputFormat withInputConfig:newInputChannelConfig outputFormat:self->outputFormat isLossless:rememberedLossless];
}
}
}
endOfStream = YES;
}
// Produce one resampled float chunk. Phases: (1) refill the input buffer from
// upstream chunks (detecting format changes); (2) LPC-extrapolate at stream
// start/end to hide resampler latency; (3) run soxr (or a plain copy when
// rates match); (4) trim extrapolation latency and emit an AudioChunk.
// Returns nil when no data is available or the node is stopping.
- (AudioChunk *)convert {
// NOTE(review): ioNumberPackets alternates between byte counts and packet
// counts below — the variable name is historical; verify before refactoring.
UInt32 ioNumberPackets;
if(stopping)
return 0;
convertEntered = YES;
if(stopping || [self shouldContinue] == NO) {
convertEntered = NO;
return nil;
}
if(inpOffset == inpSize) {
// Input buffer fully consumed; grab the timestamp of the next chunk.
streamTimestamp = 0.0;
streamTimeRatio = 1.0;
if(![self peekTimestamp:&streamTimestamp timeRatio:&streamTimeRatio]) {
convertEntered = NO;
return nil;
}
}
while(inpOffset == inpSize) {
// Approximately the most we want on input
ioNumberPackets = 4096;
size_t newSize = ioNumberPackets * floatFormat.mBytesPerPacket;
if(!inputBuffer || inputBufferSize < newSize)
inputBuffer = realloc(inputBuffer, inputBufferSize = newSize);
ssize_t amountToWrite = ioNumberPackets * floatFormat.mBytesPerPacket;
ssize_t bytesReadFromInput = 0;
// Accumulate float32 input until the target size, stop, pause, format
// change, or upstream drain.
while(bytesReadFromInput < amountToWrite && !stopping && !paused && !streamFormatChanged && [self shouldContinue] == YES && !([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES)) {
AudioStreamBasicDescription inf;
uint32_t config;
if([self peekFormat:&inf channelConfig:&config]) {
if(config != inputChannelConfig || memcmp(&inf, &inputFormat, sizeof(inf)) != 0) {
// A late channel config on an identical format is adopted in
// place; anything else triggers a pipeline rebuild.
if(inputChannelConfig == 0 && memcmp(&inf, &inputFormat, sizeof(inf)) == 0) {
inputChannelConfig = config;
continue;
} else {
newInputFormat = inf;
newInputChannelConfig = config;
streamFormatChanged = YES;
break;
}
}
}
AudioChunk *chunk = [self readChunkAsFloat32:((amountToWrite - bytesReadFromInput) / floatFormat.mBytesPerPacket)];
inf = [chunk format];
size_t frameCount = [chunk frameCount];
config = [chunk channelConfig];
size_t bytesRead = frameCount * inf.mBytesPerPacket;
if(frameCount) {
NSData *samples = [chunk removeSamples:frameCount];
memcpy(((uint8_t *)inputBuffer) + bytesReadFromInput, [samples bytes], bytesRead);
if([chunk isHDCD]) {
[controller sustainHDCD];
}
}
bytesReadFromInput += bytesRead;
if(!frameCount) {
usleep(500);
}
}
if(!bytesReadFromInput) {
convertEntered = NO;
return nil;
}
// Stream is ending or being interrupted: schedule the tail extrapolation
// (or mark it skipped when the resampler is bypassed).
if(stopping || paused || streamFormatChanged || [self shouldContinue] == NO || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES)) {
if(!skipResampler) {
if(!is_postextrapolated_) {
is_postextrapolated_ = 1;
}
} else {
is_postextrapolated_ = 3;
}
}
// Extrapolate start
if(!skipResampler && !is_preextrapolated_) {
size_t inputSamples = bytesReadFromInput / floatFormat.mBytesPerPacket;
size_t prime = MIN(inputSamples, PRIME_LEN_);
size_t _N_samples_to_add_ = N_samples_to_add_;
size_t newSize = _N_samples_to_add_ * floatFormat.mBytesPerPacket;
newSize += bytesReadFromInput;
if(newSize > inputBufferSize) {
inputBuffer = realloc(inputBuffer, inputBufferSize = newSize * 3);
}
// Shift real data up and synthesize a backwards-predicted preamble.
memmove(inputBuffer + _N_samples_to_add_ * floatFormat.mBytesPerPacket, inputBuffer, bytesReadFromInput);
lpc_extrapolate_bkwd(inputBuffer + _N_samples_to_add_ * floatFormat.mBytesPerPacket, inputSamples, prime, floatFormat.mChannelsPerFrame, LPC_ORDER, _N_samples_to_add_, &extrapolateBuffer, &extrapolateBufferSize);
bytesReadFromInput += _N_samples_to_add_ * floatFormat.mBytesPerPacket;
latencyEaten = N_samples_to_drop_;
is_preextrapolated_ = YES;
}
if(is_postextrapolated_ == 1) {
// Append a forwards-predicted tail so the resampler can be flushed.
size_t inputSamples = bytesReadFromInput / floatFormat.mBytesPerPacket;
size_t prime = MIN(inputSamples, PRIME_LEN_);
size_t _N_samples_to_add_ = N_samples_to_add_;
size_t newSize = bytesReadFromInput;
newSize += _N_samples_to_add_ * floatFormat.mBytesPerPacket;
if(newSize > inputBufferSize) {
inputBuffer = realloc(inputBuffer, inputBufferSize = newSize * 3);
}
lpc_extrapolate_fwd(inputBuffer, inputSamples, prime, floatFormat.mChannelsPerFrame, LPC_ORDER, _N_samples_to_add_, &extrapolateBuffer, &extrapolateBufferSize);
bytesReadFromInput += _N_samples_to_add_ * floatFormat.mBytesPerPacket;
latencyEatenPost = N_samples_to_drop_;
is_postextrapolated_ = 2;
} else if(is_postextrapolated_ == 3) {
latencyEatenPost = 0;
}
// Input now contains bytesReadFromInput worth of floats, in the input sample rate
inpSize = bytesReadFromInput;
inpOffset = 0;
}
ioNumberPackets = (UInt32)(inpSize - inpOffset);
ioNumberPackets -= ioNumberPackets % floatFormat.mBytesPerPacket;
if(ioNumberPackets) {
size_t inputSamples = ioNumberPackets / floatFormat.mBytesPerPacket;
// Estimate the output capacity needed: rate-scaled input plus the
// resampler's internal delay, rounded up to a multiple of 256.
ioNumberPackets = (UInt32)inputSamples;
ioNumberPackets = (UInt32)ceil((float)ioNumberPackets * sampleRatio);
ioNumberPackets += soxr_delay(soxr);
ioNumberPackets = (ioNumberPackets + 255) & ~255;
size_t newSize = ioNumberPackets * floatFormat.mBytesPerPacket;
if(!floatBuffer || floatBufferSize < newSize) {
floatBuffer = realloc(floatBuffer, floatBufferSize = newSize * 3);
}
if(stopping) {
convertEntered = NO;
return nil;
}
size_t inputDone = 0;
size_t outputDone = 0;
if(!skipResampler) {
soxr_process(soxr, (float *)(((uint8_t *)inputBuffer) + inpOffset), inputSamples, &inputDone, floatBuffer, ioNumberPackets, &outputDone);
if(latencyEatenPost) {
// Post file or format change flush
size_t idone = 0, odone = 0;
do {
soxr_process(soxr, NULL, 0, &idone, floatBuffer + outputDone * floatFormat.mBytesPerPacket, ioNumberPackets - outputDone, &odone);
outputDone += odone;
} while(odone > 0);
}
} else {
memcpy(floatBuffer, (((uint8_t *)inputBuffer) + inpOffset), inputSamples * floatFormat.mBytesPerPacket);
inputDone = inputSamples;
outputDone = inputSamples;
}
inpOffset += inputDone * floatFormat.mBytesPerPacket;
// Drop the leading samples introduced by the start extrapolation.
if(latencyEaten) {
if(outputDone > latencyEaten) {
outputDone -= latencyEaten;
memmove(floatBuffer, floatBuffer + latencyEaten * floatFormat.mBytesPerPacket, outputDone * floatFormat.mBytesPerPacket);
latencyEaten = 0;
} else {
latencyEaten -= outputDone;
outputDone = 0;
}
}
// Drop the trailing samples introduced by the tail extrapolation.
if(latencyEatenPost) {
if(outputDone > latencyEatenPost) {
outputDone -= latencyEatenPost;
} else {
outputDone = 0;
}
latencyEatenPost = 0;
}
ioNumberPackets = (UInt32)outputDone * floatFormat.mBytesPerPacket;
}
if(ioNumberPackets) {
AudioChunk *chunk = [[AudioChunk alloc] init];
[chunk setFormat:nodeFormat];
if(nodeChannelConfig) {
[chunk setChannelConfig:nodeChannelConfig];
}
[self addObservers];
scale_by_volume(floatBuffer, ioNumberPackets / sizeof(float), volumeScale);
[chunk setStreamTimestamp:streamTimestamp];
[chunk setStreamTimeRatio:streamTimeRatio];
[chunk assignSamples:floatBuffer frameCount:ioNumberPackets / floatFormat.mBytesPerPacket];
streamTimestamp += [chunk durationRatioed];
convertEntered = NO;
return chunk;
}
convertEntered = NO;
return nil;
}
// KVO callback: react to volume-scaling preference changes; everything else
// is forwarded to the superclass.
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
	if(context != kConverterNodeContext) {
		[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
		return;
	}
	DLog(@"SOMETHING CHANGED!");
	if([keyPath isEqualToString:@"values.volumeScaling"]) {
		// User reset the volume scaling option
		[self refreshVolumeScaling];
	}
}
// Convert a decibel value to a linear amplitude factor: 10^(dB / 20).
static float db_to_scale(float decibels) {
	return (float)pow(10.0, decibels / 20);
}
// Recompute volumeScale from the ReplayGain tags and the user's scaling
// preference. Track gain is applied first and album gain overrides it when
// requested; peak limiting caps the result so scaled peaks stay <= 1.0.
- (void)refreshVolumeScaling {
if(rgInfo == nil) {
// No ReplayGain info at all: unity gain.
volumeScale = 1.0;
return;
}
NSString *scaling = [[NSUserDefaults standardUserDefaults] stringForKey:@"volumeScaling"];
BOOL useAlbum = [scaling hasPrefix:@"albumGain"];
BOOL useTrack = useAlbum || [scaling hasPrefix:@"trackGain"];
BOOL useVolume = useAlbum || useTrack || [scaling isEqualToString:@"volumeScale"];
BOOL usePeak = [scaling hasSuffix:@"WithPeak"];
float scale = 1.0;
float peak = 0.0;
if(useVolume) {
// Pre-computed linear volume from the tags, if present.
id pVolumeScale = [rgInfo objectForKey:@"volume"];
if(pVolumeScale != nil)
scale = [pVolumeScale floatValue];
}
if(useTrack) {
id trackGain = [rgInfo objectForKey:@"replayGainTrackGain"];
id trackPeak = [rgInfo objectForKey:@"replayGainTrackPeak"];
if(trackGain != nil)
scale = db_to_scale([trackGain floatValue]);
if(trackPeak != nil)
peak = [trackPeak floatValue];
}
if(useAlbum) {
// Album values take precedence over the track values set above.
id albumGain = [rgInfo objectForKey:@"replayGainAlbumGain"];
id albumPeak = [rgInfo objectForKey:@"replayGainAlbumPeak"];
if(albumGain != nil)
scale = db_to_scale([albumGain floatValue]);
if(albumPeak != nil)
peak = [albumPeak floatValue];
}
if(usePeak) {
// Clamp so the scaled peak cannot exceed full scale.
if(scale * peak > 1.0)
scale = 1.0 / peak;
}
volumeScale = scale;
}
// Configure the float conversion format and (if the rates differ) the soxr
// resampler plus LPC extrapolation parameters. Returns NO for unsupported
// sample layouts or resampler creation failure.
- (BOOL)setupWithInputFormat:(AudioStreamBasicDescription)inf withInputConfig:(uint32_t)inputConfig outputFormat:(AudioStreamBasicDescription)outf isLossless:(BOOL)lossless {
// Make the converter
inputFormat = inf;
outputFormat = outf;
inputChannelConfig = inputConfig;
rememberedLossless = lossless;
// These are the only sample formats we support translating
BOOL isFloat = !!(inputFormat.mFormatFlags & kAudioFormatFlagIsFloat);
if((!isFloat && !(inputFormat.mBitsPerChannel >= 1 && inputFormat.mBitsPerChannel <= 32)) || (isFloat && !(inputFormat.mBitsPerChannel == 32 || inputFormat.mBitsPerChannel == 64)))
return NO;
// Working format: native packed float32 at the input channel count/rate.
floatFormat = inputFormat;
floatFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
floatFormat.mBitsPerChannel = 32;
floatFormat.mBytesPerFrame = (32 / 8) * floatFormat.mChannelsPerFrame;
floatFormat.mBytesPerPacket = floatFormat.mBytesPerFrame * floatFormat.mFramesPerPacket;
#if DSD_DECIMATE
if(inputFormat.mBitsPerChannel == 1) {
// Decimate this for speed
floatFormat.mSampleRate *= 1.0 / 8.0;
}
#endif
inpOffset = 0;
inpSize = 0;
// This is a post resampler format
nodeFormat = floatFormat;
nodeFormat.mSampleRate = outputFormat.mSampleRate;
nodeChannelConfig = inputChannelConfig;
sampleRatio = (double)outputFormat.mSampleRate / (double)floatFormat.mSampleRate;
// Bypass soxr entirely when the rates already match.
skipResampler = fabs(sampleRatio - 1.0) < 1e-7;
if(!skipResampler) {
soxr_quality_spec_t q_spec = soxr_quality_spec(SOXR_HQ, 0);
soxr_io_spec_t io_spec = soxr_io_spec(SOXR_FLOAT32_I, SOXR_FLOAT32_I);
soxr_runtime_spec_t runtime_spec = soxr_runtime_spec(0);
soxr_error_t error;
soxr = soxr_create(floatFormat.mSampleRate, outputFormat.mSampleRate, floatFormat.mChannelsPerFrame, &error, &io_spec, &q_spec, &runtime_spec);
if(error)
return NO;
// LPC priming length: ~50 ms, clamped to [2*LPC_ORDER+1, 16384].
PRIME_LEN_ = MAX(floatFormat.mSampleRate / 20, 1024u);
PRIME_LEN_ = MIN(PRIME_LEN_, 16384u);
PRIME_LEN_ = MAX(PRIME_LEN_, (unsigned int)(2 * LPC_ORDER + 1));
N_samples_to_add_ = floatFormat.mSampleRate;
N_samples_to_drop_ = outputFormat.mSampleRate;
samples_len(&N_samples_to_add_, &N_samples_to_drop_, 20, 8192u);
is_preextrapolated_ = NO;
is_postextrapolated_ = 0;
}
latencyEaten = 0;
latencyEatenPost = 0;
PrintStreamDesc(&inf);
PrintStreamDesc(&nodeFormat);
[self refreshVolumeScaling];
// Move this here so process call isn't running the resampler until it's allocated
stopping = NO;
convertEntered = NO;
streamFormatChanged = NO;
paused = NO;
return YES;
}
// Tear down observers and buffers on deallocation.
// NOTE(review): calls [super cleanUp], not [super dealloc] — ARC synthesizes
// the super dealloc call; confirm Node exposes cleanUp for this purpose.
- (void)dealloc {
DLog(@"Converter dealloc");
[self removeObservers];
paused = NO;
[self cleanUp];
[super cleanUp];
}
// Record the desired output format; takes effect on the next setup.
- (void)setOutputFormat:(AudioStreamBasicDescription)format {
DLog(@"SETTING OUTPUT FORMAT!");
outputFormat = format;
}
// Rebuild the conversion pipeline for a new input format: pause, wait for any
// in-flight convert to exit, then tear down and re-run setup (which unpauses).
- (void)inputFormatDidChange:(AudioStreamBasicDescription)format inputConfig:(uint32_t)inputConfig {
DLog(@"FORMAT CHANGED");
paused = YES;
while(convertEntered) {
usleep(500);
}
[self cleanUp];
[self setupWithInputFormat:format withInputConfig:inputConfig outputFormat:self->outputFormat isLossless:rememberedLossless];
}
// Install new ReplayGain tags and immediately recompute the volume scale.
- (void)setRGInfo:(NSDictionary *)rgi {
DLog(@"Setting ReplayGain info");
rgInfo = rgi;
[self refreshVolumeScaling];
}
// Stop processing, wait for any in-flight convert to exit, then free the
// resampler and all scratch buffers. Safe to call repeatedly.
- (void)cleanUp {
stopping = YES;
// Spin until convert has observed the stop flag and returned.
while(convertEntered) {
usleep(500);
}
if(soxr) {
soxr_delete(soxr);
soxr = NULL;
}
if(extrapolateBuffer) {
free(extrapolateBuffer);
extrapolateBuffer = NULL;
extrapolateBufferSize = 0;
}
if(floatBuffer) {
free(floatBuffer);
floatBuffer = NULL;
floatBufferSize = 0;
}
if(inputBuffer) {
free(inputBuffer);
inputBuffer = NULL;
inputBufferSize = 0;
}
inpOffset = 0;
inpSize = 0;
}
// Duration (seconds) of audio currently queued in this node's buffer.
- (double)secondsBuffered {
return [buffer listDuration];
}
@end

View file

@ -1,34 +0,0 @@
//
// DSPDownmixNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/13/25.
//
#ifndef DSPDownmixNode_h
#define DSPDownmixNode_h
#import <AudioToolbox/AudioToolbox.h>
#import <CogAudio/DSPNode.h>
// DSP chain node that downmixes (or passes through) audio to a target output
// format/channel layout using DownmixProcessor.
@interface DSPDownmixNode : DSPNode {
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
// (Re)creates the downmix processor for the current formats.
- (BOOL)setup;
- (void)cleanUp;
- (void)resetBuffer;
- (BOOL)paused;
// Worker loop: converts and forwards chunks until stopped.
- (void)process;
// Produces one downmixed chunk, or nil/pass-through when not configured.
- (AudioChunk * _Nullable)convert;
// Sets the target format; resets state when it differs from the current one.
- (void)setOutputFormat:(AudioStreamBasicDescription)format withChannelConfig:(uint32_t)config;
@end
#endif /* DSPDownmixNode_h */

View file

@ -1,201 +0,0 @@
//
// DSPDownmixNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/13/25.
//
#import <Foundation/Foundation.h>
#import "Downmix.h"
#import "Logging.h"
#import "DSPDownmixNode.h"
@implementation DSPDownmixNode {
DownmixProcessor *downmix; // active processor; nil when passing through
BOOL stopping, paused;
BOOL processEntered; // re-entrancy guard for convert vs cleanUp
BOOL formatSet; // YES once an output format has been supplied
AudioStreamBasicDescription lastInputFormat;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription outputFormat;
uint32_t lastInputChannelConfig, inputChannelConfig;
uint32_t outputChannelConfig;
float outBuffer[4096 * 32]; // downmix output scratch (4096 frames, 32 ch max)
}
// Designated initializer; all state is configured later via setOutputFormat.
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super initWithController:c previous:p latency:latency];
return self;
}
// Stop the node and release the processor on deallocation.
- (void)dealloc {
DLog(@"Downmix dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[super cleanUp];
}
// Construct the downmix processor for the current input/output formats.
// When no output format has been supplied yet there is nothing to build and
// the call succeeds trivially.
- (BOOL)fullInit {
	if(!formatSet) {
		return YES;
	}
	downmix = [[DownmixProcessor alloc] initWithInputFormat:inputFormat inputConfig:inputChannelConfig andOutputFormat:outputFormat outputConfig:outputChannelConfig];
	return downmix != nil;
}
// Drop the processor; ARC releases it.
- (void)fullShutdown {
downmix = nil;
}
// Rebuild the processor from scratch; refuses while stopping.
- (BOOL)setup {
if(stopping)
return NO;
[self fullShutdown];
return [self fullInit];
}
// Stop the node, wait for any in-flight convert, then release the processor.
- (void)cleanUp {
stopping = YES;
while(processEntered) {
usleep(500);
}
[self fullShutdown];
formatSet = NO;
}
// Flush the node's output buffer, pausing long enough for convert to exit.
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
paused = NO;
}
// Set the downmix target format/config. If it differs from the current one,
// safely quiesce the node, flush buffered audio, and drop the old processor
// so the next convert rebuilds it.
- (void)setOutputFormat:(AudioStreamBasicDescription)format withChannelConfig:(uint32_t)config {
if(memcmp(&outputFormat, &format, sizeof(outputFormat)) != 0 ||
outputChannelConfig != config) {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
[self fullShutdown];
paused = NO;
}
outputFormat = format;
outputChannelConfig = config;
formatSet = YES;
}
// Whether the node's processing loop is currently paused.
- (BOOL)paused {
return paused;
}
// Worker loop: pull chunks through convert and push them downstream until
// told to stop; idles while paused or after end of stream.
- (void)process {
while([self shouldContinue] == YES) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
// No output: mark EOS if upstream finished, otherwise back off.
if([previousNode endOfStream] == YES) {
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
}
usleep(500);
} else {
[self writeChunk:chunk];
chunk = nil;
}
}
}
}
// Produce one downmixed chunk. Tracks the incoming format and rebuilds the
// processor when it changes; passes chunks through untouched until an output
// format has been configured. Returns nil when no input is available.
- (AudioChunk *)convert {
if(stopping)
return nil;
processEntered = YES;
if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
processEntered = NO;
return nil;
}
if(![self peekFormat:&inputFormat channelConfig:&inputChannelConfig]) {
processEntered = NO;
return nil;
}
// Reject obviously invalid/uninitialized stream descriptions.
if(!inputFormat.mSampleRate ||
!inputFormat.mBitsPerChannel ||
!inputFormat.mChannelsPerFrame ||
!inputFormat.mBytesPerFrame ||
!inputFormat.mFramesPerPacket ||
!inputFormat.mBytesPerPacket) {
processEntered = NO;
return nil;
}
// Input format changed (or processor missing): rebuild before converting.
if((formatSet && !downmix) ||
memcmp(&inputFormat, &lastInputFormat, sizeof(inputFormat)) != 0 ||
inputChannelConfig != lastInputChannelConfig) {
lastInputFormat = inputFormat;
lastInputChannelConfig = inputChannelConfig;
[self fullShutdown];
if(formatSet && ![self setup]) {
processEntered = NO;
return nil;
}
}
if(!downmix) {
// Not configured yet: forward the input unmodified.
processEntered = NO;
return [self readChunk:4096];
}
AudioChunk *chunk = [self readChunkAsFloat32:4096];
if(!chunk || ![chunk frameCount]) {
processEntered = NO;
return nil;
}
double streamTimestamp = [chunk streamTimestamp];
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
[downmix process:[sampleData bytes] frameCount:frameCount output:&outBuffer[0]];
// Wrap the downmixed samples in a fresh chunk, carrying over metadata.
AudioChunk *outputChunk = [[AudioChunk alloc] init];
[outputChunk setFormat:outputFormat];
if(outputChannelConfig) {
[outputChunk setChannelConfig:outputChannelConfig];
}
if([chunk isHDCD]) [outputChunk setHDCD];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:[chunk streamTimeRatio]];
[outputChunk assignSamples:&outBuffer[0] frameCount:frameCount];
processEntered = NO;
return outputChunk;
}
@end

View file

@ -1,31 +0,0 @@
//
// DSPEqualizerNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/11/25.
//
#ifndef DSPEqualizerNode_h
#define DSPEqualizerNode_h
#import <CogAudio/DSPNode.h>
// DSP chain node that runs audio through an AudioUnit graphic equalizer,
// driven by the GraphicEQenable/eqPreamp user defaults.
@interface DSPEqualizerNode : DSPNode {
float *samplePtr; // current input samples, read by the EQ render callback
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
// (Re)creates the equalizer AudioUnit.
- (BOOL)setup;
- (void)cleanUp;
- (void)resetBuffer;
- (BOOL)paused;
// Worker loop: converts and forwards chunks until stopped.
- (void)process;
// Produces one equalized chunk, or nil when no input is available.
- (AudioChunk * _Nullable)convert;
@end
#endif /* DSPEqualizerNode_h */

View file

@ -1,401 +0,0 @@
//
// DSPEqualizerNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/11/25.
//
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <Accelerate/Accelerate.h>
#import "DSPEqualizerNode.h"
#import "OutputNode.h"
#import "Logging.h"
#import "AudioPlayer.h"
extern void scale_by_volume(float *buffer, size_t count, float volume);
static void * kDSPEqualizerNodeContext = &kDSPEqualizerNodeContext;
@implementation DSPEqualizerNode {
BOOL enableEqualizer; // mirrors the GraphicEQenable user default
BOOL equalizerInitialized;
double equalizerPreamp; // linear preamp derived from the eqPreamp default (dB)
__weak AudioPlayer *audioPlayer;
AudioUnit _eq; // the AudioUnit doing the actual equalization
AudioTimeStamp timeStamp;
BOOL stopping, paused;
BOOL processEntered; // re-entrancy guard for convert vs cleanUp
BOOL observersapplied;
AudioStreamBasicDescription lastInputFormat;
AudioStreamBasicDescription inputFormat;
uint32_t lastInputChannelConfig, inputChannelConfig;
uint32_t outputChannelConfig;
float inBuffer[4096 * 32]; // scratch buffers: 4096 frames, up to 32 channels
float eqBuffer[4096 * 32];
float outBuffer[4096 * 32];
}
// De-interleave `count` frames from `inbuffer` into the per-channel buffers of
// `ioData`, writing from `offset` frames in. Each destination buffer is
// clamped to its own capacity and marked as mono.
static void fillBuffers(AudioBufferList *ioData, const float *inbuffer, size_t count, size_t offset) {
const size_t channels = ioData->mNumberBuffers;
for(int i = 0; i < channels; ++i) {
const size_t maxCount = (ioData->mBuffers[i].mDataByteSize / sizeof(float)) - offset;
float *output = ((float *)ioData->mBuffers[i].mData) + offset;
const float *input = inbuffer + i;
// Strided copy: pick every `channels`-th sample for this channel.
cblas_scopy((int)((count > maxCount) ? maxCount : count), input, (int)channels, output, 1);
ioData->mBuffers[i].mNumberChannels = 1;
}
}
// Zero `count` float samples starting `offset` frames into every buffer of
// the list, and mark each buffer as mono.
static void clearBuffers(AudioBufferList *ioData, size_t count, size_t offset) {
	const UInt32 bufferCount = ioData->mNumberBuffers;
	for(UInt32 idx = 0; idx < bufferCount; ++idx) {
		float *dst = (float *)ioData->mBuffers[idx].mData + offset;
		memset(dst, 0, count * sizeof(float));
		ioData->mBuffers[idx].mNumberChannels = 1;
	}
}
static OSStatus eqRenderCallback(void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData) {
if(inNumberFrames > 4096 || !inRefCon) {
clearBuffers(ioData, inNumberFrames, 0);
return 0;
}
DSPEqualizerNode *_self = (__bridge DSPEqualizerNode *)inRefCon;
fillBuffers(ioData, _self->samplePtr, inNumberFrames, 0);
return 0;
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super initWithController:c previous:p latency:latency];
if(self) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableEqualizer = [defaults boolForKey:@"GraphicEQenable"];
float preamp = [defaults floatForKey:@"eqPreamp"];
equalizerPreamp = pow(10.0, preamp / 20.0);
OutputNode *outputNode = c;
audioPlayer = [outputNode controller];
[self addObservers];
}
return self;
}
- (void)dealloc {
DLog(@"Equalizer dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
[super cleanUp];
}
- (void)addObservers {
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.GraphicEQenable" options:0 context:kDSPEqualizerNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.eqPreamp" options:0 context:kDSPEqualizerNodeContext];
observersapplied = YES;
}
- (void)removeObservers {
if(observersapplied) {
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.GraphicEQenable" context:kDSPEqualizerNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.eqPreamp" context:kDSPEqualizerNodeContext];
observersapplied = NO;
}
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if(context != kDSPEqualizerNodeContext) {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
return;
}
if([keyPath isEqualToString:@"values.GraphicEQenable"]) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableEqualizer = [defaults boolForKey:@"GraphicEQenable"];
} else if([keyPath isEqualToString:@"values.eqPreamp"]) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
float preamp = [defaults floatForKey:@"eqPreamp"];
equalizerPreamp = pow(10.0, preamp / 20.0);
}
}
- (AudioPlayer *)audioPlayer {
return audioPlayer;
}
- (BOOL)fullInit {
if(enableEqualizer) {
AudioComponentDescription desc;
NSError *err;
desc.componentType = kAudioUnitType_Effect;
desc.componentSubType = kAudioUnitSubType_GraphicEQ;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
AudioComponent comp = NULL;
comp = AudioComponentFindNext(comp, &desc);
if(!comp) {
return NO;
}
OSStatus _err = AudioComponentInstanceNew(comp, &_eq);
if(err) {
return NO;
}
UInt32 value;
UInt32 size = sizeof(value);
value = 4096;
AudioUnitSetProperty(_eq, kAudioUnitProperty_MaximumFramesPerSlice,
kAudioUnitScope_Global, 0, &value, size);
value = 127;
AudioUnitSetProperty(_eq, kAudioUnitProperty_RenderQuality,
kAudioUnitScope_Global, 0, &value, size);
AURenderCallbackStruct callbackStruct;
callbackStruct.inputProcRefCon = (__bridge void *)self;
callbackStruct.inputProc = eqRenderCallback;
AudioUnitSetProperty(_eq, kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Input, 0, &callbackStruct, sizeof(callbackStruct));
AudioUnitReset(_eq, kAudioUnitScope_Input, 0);
AudioUnitReset(_eq, kAudioUnitScope_Output, 0);
AudioUnitReset(_eq, kAudioUnitScope_Global, 0);
AudioStreamBasicDescription asbd = inputFormat;
// Of course, non-interleaved has only one sample per frame/packet, per buffer
asbd.mFormatFlags |= kAudioFormatFlagIsNonInterleaved;
asbd.mBytesPerFrame = sizeof(float);
asbd.mBytesPerPacket = sizeof(float);
asbd.mFramesPerPacket = 1;
UInt32 maximumFrames = 4096;
AudioUnitSetProperty(_eq, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maximumFrames, sizeof(maximumFrames));
AudioUnitSetProperty(_eq, kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input, 0, &asbd, sizeof(asbd));
AudioUnitSetProperty(_eq, kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output, 0, &asbd, sizeof(asbd));
AudioUnitReset(_eq, kAudioUnitScope_Input, 0);
AudioUnitReset(_eq, kAudioUnitScope_Output, 0);
AudioUnitReset(_eq, kAudioUnitScope_Global, 0);
_err = AudioUnitInitialize(_eq);
if(_err != noErr) {
return NO;
}
bzero(&timeStamp, sizeof(timeStamp));
timeStamp.mFlags = kAudioTimeStampSampleTimeValid;
equalizerInitialized = YES;
[[self audioPlayer] beginEqualizer:_eq];
}
return YES;
}
- (void)fullShutdown {
if(_eq) {
if(equalizerInitialized) {
[[self audioPlayer] endEqualizer:_eq];
AudioUnitUninitialize(_eq);
equalizerInitialized = NO;
}
AudioComponentInstanceDispose(_eq);
_eq = NULL;
}
}
- (BOOL)setup {
if(stopping)
return NO;
[self fullShutdown];
return [self fullInit];
}
- (void)cleanUp {
stopping = YES;
while(processEntered) {
usleep(500);
}
[self fullShutdown];
}
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
[self fullShutdown];
paused = NO;
}
- (BOOL)paused {
return paused;
}
- (void)process {
while([self shouldContinue] == YES) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
}
usleep(500);
} else {
[self writeChunk:chunk];
chunk = nil;
}
if(!enableEqualizer && equalizerInitialized) {
[self fullShutdown];
}
}
}
}
- (AudioChunk *)convert {
if(stopping)
return nil;
processEntered = YES;
if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
processEntered = NO;
return nil;
}
if(![self peekFormat:&inputFormat channelConfig:&inputChannelConfig]) {
processEntered = NO;
return nil;
}
if(!inputFormat.mSampleRate ||
!inputFormat.mBitsPerChannel ||
!inputFormat.mChannelsPerFrame ||
!inputFormat.mBytesPerFrame ||
!inputFormat.mFramesPerPacket ||
!inputFormat.mBytesPerPacket) {
processEntered = NO;
return nil;
}
if((enableEqualizer && !equalizerInitialized) ||
memcmp(&inputFormat, &lastInputFormat, sizeof(inputFormat)) != 0 ||
inputChannelConfig != lastInputChannelConfig) {
lastInputFormat = inputFormat;
lastInputChannelConfig = inputChannelConfig;
[self fullShutdown];
if(enableEqualizer && ![self setup]) {
processEntered = NO;
return nil;
}
}
if(!equalizerInitialized) {
processEntered = NO;
return [self readChunk:4096];
}
AudioChunk *chunk = [self readChunkAsFloat32:4096];
if(!chunk || ![chunk frameCount]) {
processEntered = NO;
return nil;
}
double streamTimestamp = [chunk streamTimestamp];
samplePtr = &inBuffer[0];
size_t channels = inputFormat.mChannelsPerFrame;
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
cblas_scopy((int)(frameCount * channels), [sampleData bytes], 1, &inBuffer[0], 1);
const size_t channelsminusone = channels - 1;
uint8_t tempBuffer[sizeof(AudioBufferList) + sizeof(AudioBuffer) * channelsminusone];
AudioBufferList *ioData = (AudioBufferList *)&tempBuffer[0];
ioData->mNumberBuffers = (UInt32)channels;
for(size_t i = 0; i < channels; ++i) {
ioData->mBuffers[i].mData = &eqBuffer[4096 * i];
ioData->mBuffers[i].mDataByteSize = (UInt32)(frameCount * sizeof(float));
ioData->mBuffers[i].mNumberChannels = 1;
}
OSStatus status = AudioUnitRender(_eq, NULL, &timeStamp, 0, (UInt32)frameCount, ioData);
if(status != noErr) {
processEntered = NO;
return nil;
}
timeStamp.mSampleTime += ((double)frameCount) / inputFormat.mSampleRate;
for(int i = 0; i < channels; ++i) {
cblas_scopy((int)frameCount, &eqBuffer[4096 * i], 1, &outBuffer[i], (int)channels);
}
AudioChunk *outputChunk = nil;
if(frameCount) {
scale_by_volume(&outBuffer[0], frameCount * channels, equalizerPreamp);
outputChunk = [[AudioChunk alloc] init];
[outputChunk setFormat:inputFormat];
if(outputChannelConfig) {
[outputChunk setChannelConfig:inputChannelConfig];
}
if([chunk isHDCD]) [outputChunk setHDCD];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:[chunk streamTimeRatio]];
[outputChunk assignSamples:&outBuffer[0] frameCount:frameCount];
}
processEntered = NO;
return outputChunk;
}
@end

View file

@ -1,30 +0,0 @@
//
// DSPFSurroundNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/11/25.
//
#ifndef DSPFSurroundNode_h
#define DSPFSurroundNode_h
#import <CogAudio/DSPNode.h>
// DSP chain node that optionally applies the FreeSurround upmix filter,
// expanding a stereo stream to a multichannel layout.
@interface DSPFSurroundNode : DSPNode {
}
// Designated initializer; controller and previous node are passed through to DSPNode.
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
// Rebuild the filter for the current input format; NO on failure.
- (BOOL)setup;
// Stop processing and release the filter; blocks until in-flight work finishes.
- (void)cleanUp;
// Flush buffered audio and reset the filter state.
- (void)resetBuffer;
// Whether the node is currently paused.
- (BOOL)paused;
// Worker loop; runs until shouldContinue is cleared.
- (void)process;
// Pull one chunk from the previous node and process it; nil when idle/stopping.
- (AudioChunk * _Nullable)convert;
@end
#endif /* DSPFSurroundNode_h */

View file

@ -1,275 +0,0 @@
//
// DSPFSurroundNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/11/25.
//
#import <Foundation/Foundation.h>
#import <Accelerate/Accelerate.h>
#import "DSPFSurroundNode.h"
#import "Logging.h"
#import "FSurroundFilter.h"
#define OCTAVES 5
static void * kDSPFSurroundNodeContext = &kDSPFSurroundNodeContext;
@implementation DSPFSurroundNode {
BOOL enableFSurround;
// Tracks whether the filter's initial latency (2048 frames) has been trimmed yet.
BOOL FSurroundDelayRemoved;
FSurroundFilter *fsurround;
BOOL stopping, paused;
// Set while -convert runs; teardown paths spin until it clears.
BOOL processEntered;
BOOL observersapplied;
AudioStreamBasicDescription lastInputFormat;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription outputFormat;
uint32_t lastInputChannelConfig, inputChannelConfig;
uint32_t outputChannelConfig;
// Stereo input staging (4096 frames) and upmixed output staging.
float inBuffer[4096 * 2];
float outBuffer[8192 * 6];
}
// Designated initializer: read the enable preference and start observing it.
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super initWithController:c previous:p latency:latency];
if(self) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableFSurround = [defaults boolForKey:@"enableFSurround"];
[self addObservers];
}
return self;
}
- (void)dealloc {
DLog(@"FreeSurround dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
// Explicit base-class teardown; matches the pattern of the sibling DSP nodes.
[super cleanUp];
}
// Observe the enable preference, tagged with our context.
- (void)addObservers {
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableFSurround" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPFSurroundNodeContext];
observersapplied = YES;
}
- (void)removeObservers {
if(observersapplied) {
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableFSurround" context:kDSPFSurroundNodeContext];
observersapplied = NO;
}
}
// KVO: refresh the cached enable flag; -process / -convert react lazily.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if(context != kDSPFSurroundNodeContext) {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
return;
}
if([keyPath isEqualToString:@"values.enableFSurround"]) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableFSurround = [defaults boolForKey:@"enableFSurround"];
}
}
// Build the FreeSurround filter; only applies to stereo input. Derives the
// output format/channel config from the filter. YES when disabled or successful.
- (BOOL)fullInit {
if(enableFSurround && inputFormat.mChannelsPerFrame == 2) {
fsurround = [[FSurroundFilter alloc] initWithSampleRate:inputFormat.mSampleRate];
if(!fsurround) {
return NO;
}
outputFormat = inputFormat;
outputFormat.mChannelsPerFrame = [fsurround channelCount];
outputFormat.mBytesPerFrame = sizeof(float) * outputFormat.mChannelsPerFrame;
outputFormat.mBytesPerPacket = outputFormat.mBytesPerFrame * outputFormat.mFramesPerPacket;
outputChannelConfig = [fsurround channelConfig];
FSurroundDelayRemoved = NO;
} else {
fsurround = nil;
}
return YES;
}
- (void)fullShutdown {
fsurround = nil;
}
// Rebuild from scratch; NO if stopping or init fails.
- (BOOL)setup {
if(stopping)
return NO;
[self fullShutdown];
return [self fullInit];
}
- (void)cleanUp {
stopping = YES;
// Wait for any in-flight -convert before tearing down.
while(processEntered) {
usleep(500);
}
[self fullShutdown];
}
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
[self fullShutdown];
paused = NO;
}
- (BOOL)paused {
return paused;
}
// Worker loop: pull chunks through -convert and push them downstream; lazily
// tears the filter down when the preference is switched off.
- (void)process {
while([self shouldContinue] == YES) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
}
usleep(500);
} else {
[self writeChunk:chunk];
chunk = nil;
}
if(!enableFSurround && fsurround) {
[self fullShutdown];
}
}
}
}
// Pull up to 4096 stereo frames, run them through FreeSurround, and return the
// upmixed chunk. Passes chunks through untouched when the filter is inactive.
- (AudioChunk *)convert {
if(stopping)
return nil;
processEntered = YES;
if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
processEntered = NO;
return nil;
}
if(![self peekFormat:&inputFormat channelConfig:&inputChannelConfig]) {
processEntered = NO;
return nil;
}
// Reject degenerate stream descriptions outright.
if(!inputFormat.mSampleRate ||
!inputFormat.mBitsPerChannel ||
!inputFormat.mChannelsPerFrame ||
!inputFormat.mBytesPerFrame ||
!inputFormat.mFramesPerPacket ||
!inputFormat.mBytesPerPacket) {
processEntered = NO;
return nil;
}
// (Re)build the filter when newly enabled or the input format changed.
if((enableFSurround && !fsurround) ||
memcmp(&inputFormat, &lastInputFormat, sizeof(inputFormat)) != 0 ||
inputChannelConfig != lastInputChannelConfig) {
lastInputFormat = inputFormat;
lastInputChannelConfig = inputChannelConfig;
[self fullShutdown];
if(enableFSurround && ![self setup]) {
processEntered = NO;
return nil;
}
}
// Bypass: pass input straight through when the filter is inactive.
if(!fsurround) {
processEntered = NO;
return [self readChunk:4096];
}
size_t totalRequestedSamples = 4096;
size_t totalFrameCount = 0;
AudioChunk *chunk = [self readAndMergeChunksAsFloat32:totalRequestedSamples];
if(!chunk || ![chunk frameCount]) {
processEntered = NO;
return nil;
}
double streamTimestamp = [chunk streamTimestamp];
float *samplePtr = &inBuffer[0];
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
cblas_scopy((int)frameCount * 2, [sampleData bytes], 1, &samplePtr[0], 1);
totalFrameCount = frameCount;
size_t countToProcess = totalFrameCount;
size_t samplesRendered;
// The filter is fed fixed 4096-frame blocks; zero-pad a short final block.
if(countToProcess < 4096) {
bzero(&inBuffer[countToProcess * 2], (4096 - countToProcess) * 2 * sizeof(float));
countToProcess = 4096;
}
[fsurround process:&inBuffer[0] output:&outBuffer[0] count:(int)countToProcess];
samplePtr = &outBuffer[0];
samplesRendered = totalFrameCount;
// Short (end-of-stream) block: run one extra silent block to flush the
// filter's tail, then account for the extra 2048 frames of latency output.
if(totalFrameCount < 4096) {
bzero(&outBuffer[4096 * 6], 4096 * 2 * sizeof(float));
[fsurround process:&outBuffer[4096 * 6] output:&outBuffer[4096 * 6] count:4096];
samplesRendered += 2048;
}
// Drop the filter's initial 2048-frame latency once, at stream start.
if(!FSurroundDelayRemoved) {
FSurroundDelayRemoved = YES;
if(samplesRendered > 2048) {
samplePtr += 2048 * 6;
samplesRendered -= 2048;
}
}
AudioChunk *outputChunk = nil;
if(samplesRendered) {
outputChunk = [[AudioChunk alloc] init];
[outputChunk setFormat:outputFormat];
if(outputChannelConfig) {
[outputChunk setChannelConfig:outputChannelConfig];
}
if([chunk isHDCD]) [outputChunk setHDCD];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:[chunk streamTimeRatio]];
[outputChunk assignSamples:samplePtr frameCount:samplesRendered];
}
processEntered = NO;
return outputChunk;
}
@end

View file

@ -1,35 +0,0 @@
//
// DSPHRTFNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/11/25.
//
#ifndef DSPHRTFNode_h
#define DSPHRTFNode_h
#import <simd/types.h>
#import <CogAudio/DSPNode.h>
// DSP chain node that optionally applies HRTF binaural filtering, downmixing
// multichannel audio to headphone stereo, with optional head tracking.
@interface DSPHRTFNode : DSPNode {
}
// Designated initializer; controller and previous node are passed through to DSPNode.
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
// Rebuild the filter for the current input format; NO on failure.
- (BOOL)setup;
// Stop processing and release the filter; blocks until in-flight work finishes.
- (void)cleanUp;
// Flush buffered audio and reset the filter state.
- (void)resetBuffer;
// Whether the node is currently paused.
- (BOOL)paused;
// Worker loop; runs until shouldContinue is cleared.
- (void)process;
// Pull one chunk from the previous node and process it; nil when idle/stopping.
- (AudioChunk * _Nullable)convert;
// Head-tracking callback: latest device rotation matrix.
- (void)reportMotion:(simd_float4x4)matrix;
// Notification handler: re-capture the head-tracking reference orientation.
- (void)resetReferencePosition:(NSNotification *_Nullable)notification;
@end
#endif /* DSPHRTFNode_h */

View file

@ -1,434 +0,0 @@
//
// DSPHRTFNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/11/25.
//
#import <Foundation/Foundation.h>
#import <CoreMotion/CoreMotion.h>
#import "Logging.h"
#import "DSPHRTFNode.h"
#import "lpc.h"
#import "HeadphoneFilter.h"
#include <AvailabilityMacros.h>
#if defined(MAC_OS_X_VERSION_14_0) && MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_14_0
#define MOTION_MANAGER 1
#endif
static void * kDSPHRTFNodeContext = &kDSPHRTFNodeContext;
static NSString *CogPlaybackDidResetHeadTracking = @"CogPlaybackDigResetHeadTracking";
// Convert a CoreMotion rotation matrix into the simd_float4x4 layout used by
// the HRTF filter, permuting and sign-flipping components to remap the device
// axes into the filter's coordinate frame.
static simd_float4x4 convertMatrix(CMRotationMatrix r) {
	const simd_float4 col0 = simd_make_float4(r.m33, -r.m31, r.m32, 0.0f);
	const simd_float4 col1 = simd_make_float4(r.m13, -r.m11, r.m12, 0.0f);
	const simd_float4 col2 = simd_make_float4(r.m23, -r.m21, r.m22, 0.0f);
	const simd_float4 col3 = simd_make_float4(0.0f, 0.0f, 0.0f, 1.0f);
	return (simd_float4x4){ col0, col1, col2, col3 };
}
#ifdef MOTION_MANAGER
static NSLock *motionManagerLock = nil;
API_AVAILABLE(macos(14.0)) static CMHeadphoneMotionManager *motionManager = nil;
static DSPHRTFNode *registeredMotionListener = nil;
#endif
// Start headphone motion updates (macOS 14+) and route them to `listener`.
// Any previously active update stream is stopped first. All state changes
// happen under motionManagerLock; the delivery block re-takes the lock so the
// registered listener cannot be swapped mid-callback.
static void registerMotionListener(DSPHRTFNode *listener) {
#ifdef MOTION_MANAGER
if(@available(macOS 14, *)) {
[motionManagerLock lock];
if([motionManager isDeviceMotionActive]) {
[motionManager stopDeviceMotionUpdates];
}
if([motionManager isDeviceMotionAvailable]) {
registeredMotionListener = listener;
[motionManager startDeviceMotionUpdatesToQueue:[NSOperationQueue mainQueue] withHandler:^(CMDeviceMotion * _Nullable motion, NSError * _Nullable error) {
if(motion) {
[motionManagerLock lock];
[registeredMotionListener reportMotion:convertMatrix(motion.attitude.rotationMatrix)];
[motionManagerLock unlock];
}
}];
}
[motionManagerLock unlock];
}
#endif
}
// Stop headphone motion updates and clear the registered listener, under the
// same lock used by registerMotionListener and the delivery block.
static void unregisterMotionListener(void) {
#ifdef MOTION_MANAGER
if(@available(macOS 14, *)) {
[motionManagerLock lock];
if([motionManager isDeviceMotionActive]) {
[motionManager stopDeviceMotionUpdates];
}
registeredMotionListener = nil;
[motionManagerLock unlock];
}
#endif
}
@implementation DSPHRTFNode {
BOOL enableHrtf;
BOOL enableHeadTracking;
// Mirrors whether we currently hold a motion-listener registration.
BOOL lastEnableHeadTracking;
HeadphoneFilter *hrtf;
BOOL stopping, paused;
// Set while -convert runs; teardown paths spin until it clears.
BOOL processEntered;
// Request to rebuild the filter on the next -process iteration.
BOOL resetFilter;
// Frames of history the filter wants extrapolated before the first real block.
size_t needPrefill;
BOOL observersapplied;
AudioStreamBasicDescription lastInputFormat;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription outputFormat;
uint32_t lastInputChannelConfig, inputChannelConfig;
uint32_t outputChannelConfig;
BOOL referenceMatrixSet;
BOOL rotationMatrixUpdated;
simd_float4x4 rotationMatrix;
simd_float4x4 referenceMatrix;
float prefillBuffer[4096 * 32];
float outBuffer[4096 * 2];
// Scratch storage reused by lpc_extrapolate_bkwd; grown on demand, freed in dealloc.
void *extrapolate_buffer;
size_t extrapolate_buffer_size;
}
// One-time class setup: create the shared motion-manager lock and, on
// macOS 14+, the headphone motion manager (logging the authorization state).
+ (void)initialize {
#ifdef MOTION_MANAGER
motionManagerLock = [[NSLock alloc] init];
if(@available(macOS 14, *)) {
CMAuthorizationStatus status = [CMHeadphoneMotionManager authorizationStatus];
if(status == CMAuthorizationStatusDenied) {
ALog(@"Headphone motion not authorized");
return;
} else if(status == CMAuthorizationStatusAuthorized) {
ALog(@"Headphone motion authorized");
} else if(status == CMAuthorizationStatusRestricted) {
ALog(@"Headphone motion restricted");
} else if(status == CMAuthorizationStatusNotDetermined) {
ALog(@"Headphone motion status not determined; will prompt for access");
}
motionManager = [[CMHeadphoneMotionManager alloc] init];
}
#endif
}
// Designated initializer: read preferences and start observing them.
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super initWithController:c previous:p latency:latency];
if(self) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableHrtf = [defaults boolForKey:@"enableHrtf"];
enableHeadTracking = [defaults boolForKey:@"enableHeadTracking"];
rotationMatrix = matrix_identity_float4x4;
[self addObservers];
}
return self;
}
- (void)dealloc {
DLog(@"HRTF dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
// Explicit base-class teardown; matches the pattern of the sibling DSP nodes.
[super cleanUp];
// Release the LPC extrapolation scratch buffer last.
if(extrapolate_buffer) {
free(extrapolate_buffer);
extrapolate_buffer = NULL;
extrapolate_buffer_size = 0;
}
}
// Observe both preferences plus the "reset head tracking" notification.
// NOTE(review): the notification name string contains a typo ("Dig" for
// "Did") — it must match whatever posts it elsewhere; do not change it here alone.
- (void)addObservers {
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableHrtf" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPHRTFNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.enableHeadTracking" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPHRTFNodeContext];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(resetReferencePosition:) name:CogPlaybackDidResetHeadTracking object:nil];
observersapplied = YES;
}
- (void)removeObservers {
if(observersapplied) {
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableHrtf" context:kDSPHRTFNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.enableHeadTracking" context:kDSPHRTFNodeContext];
[[NSNotificationCenter defaultCenter] removeObserver:self name:CogPlaybackDidResetHeadTracking object:nil];
observersapplied = NO;
}
}
// KVO: refresh cached preferences and schedule a filter rebuild.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if(context != kDSPHRTFNodeContext) {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
return;
}
if([keyPath isEqualToString:@"values.enableHrtf"] ||
[keyPath isEqualToString:@"values.enableHeadTracking"]) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableHrtf = [defaults boolForKey:@"enableHrtf"];
enableHeadTracking = [defaults boolForKey:@"enableHeadTracking"];
resetFilter = YES;
}
}
// Build the HeadphoneFilter from the bundled impulse preset for the current
// input format, seeding it with either the identity orientation or the
// mirrored head-tracking orientation relative to the reference. Also manages
// the motion-listener registration to match the head-tracking preference.
- (BOOL)fullInit {
if(enableHrtf) {
NSURL *presetUrl = [[NSBundle mainBundle] URLForResource:@"SADIE_D02-96000" withExtension:@"mhr"];
rotationMatrixUpdated = NO;
simd_float4x4 matrix;
if(!referenceMatrixSet || !enableHeadTracking) {
referenceMatrixSet = NO;
matrix = matrix_identity_float4x4;
self->referenceMatrix = matrix;
if(enableHeadTracking) {
lastEnableHeadTracking = YES;
registerMotionListener(self);
} else if(lastEnableHeadTracking) {
lastEnableHeadTracking = NO;
unregisterMotionListener();
}
} else {
// Mirror the X axis, then express the rotation relative to the
// captured reference orientation.
simd_float4x4 mirrorTransform = {
simd_make_float4(-1.0, 0.0, 0.0, 0.0),
simd_make_float4(0.0, 1.0, 0.0, 0.0),
simd_make_float4(0.0, 0.0, 1.0, 0.0),
simd_make_float4(0.0, 0.0, 0.0, 1.0)
};
matrix = simd_mul(mirrorTransform, rotationMatrix);
matrix = simd_mul(matrix, referenceMatrix);
}
hrtf = [[HeadphoneFilter alloc] initWithImpulseFile:presetUrl forSampleRate:inputFormat.mSampleRate withInputChannels:inputFormat.mChannelsPerFrame withConfig:inputChannelConfig withMatrix:matrix];
if(!hrtf) {
return NO;
}
// Output is always headphone stereo (side left/right).
outputFormat = inputFormat;
outputFormat.mChannelsPerFrame = 2;
outputFormat.mBytesPerFrame = sizeof(float) * outputFormat.mChannelsPerFrame;
outputFormat.mBytesPerPacket = outputFormat.mBytesPerFrame * outputFormat.mFramesPerPacket;
outputChannelConfig = AudioChannelSideLeft | AudioChannelSideRight;
resetFilter = NO;
needPrefill = [hrtf needPrefill];
} else {
if(lastEnableHeadTracking) {
lastEnableHeadTracking = NO;
unregisterMotionListener();
}
referenceMatrixSet = NO;
hrtf = nil;
}
return YES;
}
- (void)fullShutdown {
hrtf = nil;
if(lastEnableHeadTracking) {
lastEnableHeadTracking = NO;
unregisterMotionListener();
}
resetFilter = NO;
}
// Rebuild from scratch; NO if stopping or init fails.
- (BOOL)setup {
if(stopping)
return NO;
[self fullShutdown];
return [self fullInit];
}
- (void)cleanUp {
stopping = YES;
// Wait for any in-flight -convert before tearing down.
while(processEntered) {
usleep(500);
}
[self fullShutdown];
}
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
[self fullShutdown];
paused = NO;
}
- (BOOL)paused {
return paused;
}
// Worker loop: pull chunks through -convert and push them downstream; tears
// the filter down when disabled or when a rebuild was requested via KVO.
- (void)process {
while([self shouldContinue] == YES) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self convert];
if(!chunk || ![chunk frameCount]) {
if([previousNode endOfStream] == YES) {
usleep(500);
endOfStream = YES;
continue;
}
if(paused) {
continue;
}
usleep(500);
} else {
[self writeChunk:chunk];
chunk = nil;
}
if(resetFilter || (!enableHrtf && hrtf)) {
[self fullShutdown];
}
}
}
}
// Pull up to 4096 frames, apply the HRTF filter (with optional head-tracking
// matrix refresh and one-time LPC prefill), and return a stereo chunk.
// Passes chunks through untouched when the filter is inactive.
- (AudioChunk *)convert {
if(stopping)
return nil;
processEntered = YES;
if(stopping || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
processEntered = NO;
return nil;
}
if(![self peekFormat:&inputFormat channelConfig:&inputChannelConfig]) {
processEntered = NO;
return nil;
}
// Reject degenerate stream descriptions outright.
if(!inputFormat.mSampleRate ||
!inputFormat.mBitsPerChannel ||
!inputFormat.mChannelsPerFrame ||
!inputFormat.mBytesPerFrame ||
!inputFormat.mFramesPerPacket ||
!inputFormat.mBytesPerPacket) {
processEntered = NO;
return nil;
}
// (Re)build the filter when newly enabled or the input format changed.
if((enableHrtf && !hrtf) ||
memcmp(&inputFormat, &lastInputFormat, sizeof(inputFormat)) != 0 ||
inputChannelConfig != lastInputChannelConfig) {
lastInputFormat = inputFormat;
lastInputChannelConfig = inputChannelConfig;
[self fullShutdown];
if(enableHrtf && ![self setup]) {
processEntered = NO;
return nil;
}
}
// Bypass: pass input straight through when the filter is inactive.
if(!hrtf) {
processEntered = NO;
return [self readChunk:4096];
}
AudioChunk *chunk = [self readChunkAsFloat32:4096];
if(!chunk || ![chunk frameCount]) {
processEntered = NO;
return nil;
}
// Apply the latest head-tracking orientation before filtering this block.
if(rotationMatrixUpdated) {
rotationMatrixUpdated = NO;
simd_float4x4 mirrorTransform = {
simd_make_float4(-1.0, 0.0, 0.0, 0.0),
simd_make_float4(0.0, 1.0, 0.0, 0.0),
simd_make_float4(0.0, 0.0, 1.0, 0.0),
simd_make_float4(0.0, 0.0, 0.0, 1.0)
};
simd_float4x4 matrix = simd_mul(mirrorTransform, rotationMatrix);
matrix = simd_mul(matrix, referenceMatrix);
[hrtf reloadWithMatrix:matrix];
}
double streamTimestamp = [chunk streamTimestamp];
size_t frameCount = [chunk frameCount];
NSData *sampleData = [chunk removeSamples:frameCount];
// One-time prefill: backwards-extrapolate history via LPC so the filter's
// convolution tail starts from plausible samples instead of silence, then
// run the synthetic history through the filter (output discarded into outBuffer).
if(needPrefill) {
size_t maxToUse = 4096 - needPrefill;
if(maxToUse > frameCount) {
maxToUse = frameCount;
}
size_t channels = inputFormat.mChannelsPerFrame;
memcpy(&prefillBuffer[needPrefill * channels], [sampleData bytes], maxToUse * sizeof(float) * channels);
lpc_extrapolate_bkwd(&prefillBuffer[needPrefill * channels], maxToUse, maxToUse, (int)channels, LPC_ORDER, needPrefill, &extrapolate_buffer, &extrapolate_buffer_size);
[hrtf process:&prefillBuffer[0] sampleCount:(int)needPrefill toBuffer:&outBuffer[0]];
needPrefill = 0;
}
[hrtf process:(const float *)[sampleData bytes] sampleCount:(int)frameCount toBuffer:&outBuffer[0]];
AudioChunk *outputChunk = [[AudioChunk alloc] init];
[outputChunk setFormat:outputFormat];
if(outputChannelConfig) {
[outputChunk setChannelConfig:outputChannelConfig];
}
if([chunk isHDCD]) [outputChunk setHDCD];
[outputChunk setStreamTimestamp:streamTimestamp];
[outputChunk setStreamTimeRatio:[chunk streamTimeRatio]];
[outputChunk assignSamples:&outBuffer[0] frameCount:frameCount];
processEntered = NO;
return outputChunk;
}
// Motion callback (under motionManagerLock; see registerMotionListener).
// The first report after a reset becomes the reference orientation.
- (void)reportMotion:(simd_float4x4)matrix {
rotationMatrix = matrix;
if(!referenceMatrixSet) {
referenceMatrix = simd_inverse(matrix);
referenceMatrixSet = YES;
}
rotationMatrixUpdated = YES;
}
// Notification handler: forget the reference so the next motion report re-centers.
- (void)resetReferencePosition:(NSNotification *)notification {
referenceMatrixSet = NO;
}
@end

View file

@ -1,32 +0,0 @@
//
// DSPRubberbandNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/10/25.
//
#ifndef DSPRubberbandNode_h
#define DSPRubberbandNode_h
#import <CogAudio/DSPNode.h>
// DSP chain node that applies Rubber Band time-stretching and pitch-shifting.
@interface DSPRubberbandNode : DSPNode {
}
// Designated initializer; controller and previous node are passed through to DSPNode.
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
// Rebuild the stretcher for the current input format; NO on failure.
- (BOOL)setup;
// Stop processing and release the stretcher; blocks until in-flight work finishes.
- (void)cleanUp;
// Flush buffered audio and reset the stretcher state.
- (void)resetBuffer;
// Whether the node is currently paused.
- (BOOL)paused;
// Worker loop; runs until shouldContinue is cleared.
- (void)process;
// Pull one chunk from the previous node and process it; nil when idle/stopping.
- (AudioChunk * _Nullable)convert;
// Seconds of audio currently buffered inside the stretcher.
- (double)secondsBuffered;
@end
#endif /* DSPRubberbandNode_h */

View file

@ -1,560 +0,0 @@
//
// DSPRubberbandNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/10/25.
//
#import <Foundation/Foundation.h>
#import <Accelerate/Accelerate.h>
#import "DSPRubberbandNode.h"
#import "Logging.h"
#import <rubberband/rubberband-c.h>
static void * kDSPRubberbandNodeContext = &kDSPRubberbandNodeContext;
@implementation DSPRubberbandNode {
BOOL enableRubberband;
RubberBandState ts;
RubberBandOptions tslastoptions, tsnewoptions;
size_t tschannels;
ssize_t blockSize, toDrop, samplesBuffered;
BOOL tsapplynewoptions;
BOOL tsrestartengine;
double tempo, pitch;
double lastTempo, lastPitch;
double countIn;
uint64_t countOut;
double streamTimestamp;
double streamTimeRatio;
BOOL isHDCD;
BOOL stopping, paused;
BOOL processEntered;
BOOL flushed;
BOOL observersapplied;
AudioStreamBasicDescription lastInputFormat;
AudioStreamBasicDescription inputFormat;
uint32_t lastInputChannelConfig, inputChannelConfig;
float *rsPtrs[32];
float rsInBuffer[4096 * 32];
float rsOutBuffer[65536 * 32];
}
// Designated initializer: cache the engine enable flag ("disabled" engine means
// bypass) and the current pitch/tempo, then register preference observers.
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super initWithController:c previous:p latency:latency];
if(self) {
NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
enableRubberband = ![[defaults stringForKey:@"rubberbandEngine"] isEqualToString:@"disabled"];
pitch = [defaults doubleForKey:@"pitch"];
tempo = [defaults doubleForKey:@"tempo"];
// Track last-applied values so later changes can be detected.
lastPitch = pitch;
lastTempo = tempo;
[self addObservers];
}
return self;
}
- (void)dealloc {
DLog(@"Rubber Band dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[self removeObservers];
// Explicit base-class teardown; matches the pattern of the sibling DSP nodes.
[super cleanUp];
}
// Observe pitch, tempo, and every Rubber Band engine option, all tagged with
// our context so -observeValueForKeyPath: can distinguish them.
- (void)addObservers {
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.pitch" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.tempo" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandEngine" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandTransients" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandDetector" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandPhase" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandWindow" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandSmoothing" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandFormant" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandPitch" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] addObserver:self forKeyPath:@"values.rubberbandChannels" options:(NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew) context:kDSPRubberbandNodeContext];
observersapplied = YES;
}
// Unregister every observer added by -addObservers; safe to call repeatedly.
- (void)removeObservers {
if(observersapplied) {
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.pitch" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.tempo" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandEngine" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandTransients" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandDetector" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandPhase" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandWindow" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandSmoothing" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandFormant" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandPitch" context:kDSPRubberbandNodeContext];
[[NSUserDefaultsController sharedUserDefaultsController] removeObserver:self forKeyPath:@"values.rubberbandChannels" context:kDSPRubberbandNodeContext];
observersapplied = NO;
}
}
// KVO entry point. Pitch/tempo changes just flag the stretcher to apply new
// ratios; engine-option changes are diffed against the last-applied option
// mask, and either applied live or flagged as requiring a full engine restart.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
	// Only handle notifications registered with our context; forward the rest.
	if(context != kDSPRubberbandNodeContext) {
		[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
		return;
	}

	if([keyPath isEqualToString:@"values.pitch"] ||
	   [keyPath isEqualToString:@"values.tempo"]) {
		NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
		pitch = [defaults doubleForKey:@"pitch"];
		tempo = [defaults doubleForKey:@"tempo"];
		tsapplynewoptions = YES;
	} else if([keyPath hasPrefix:@"values.rubberband"]) {
		// BUGFIX: was `[[keyPath substringToIndex:17] isEqualToString:...]`,
		// which raises NSRangeException for any key path shorter than 17
		// characters. -hasPrefix: is the safe, equivalent prefix test.
		NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];
		enableRubberband = ![[defaults stringForKey:@"rubberbandEngine"] isEqualToString:@"disabled"];
		if(enableRubberband && ts) {
			RubberBandOptions options = [self getRubberbandOptions];
			RubberBandOptions changed = options ^ tslastoptions;
			if(changed) {
				BOOL engineR3 = !!(options & RubberBandOptionEngineFiner);
				// Options which require a restart of the engine
				const RubberBandOptions mustRestart = RubberBandOptionEngineFaster | RubberBandOptionEngineFiner | RubberBandOptionWindowStandard | RubberBandOptionWindowShort | RubberBandOptionWindowLong | RubberBandOptionSmoothingOff | RubberBandOptionSmoothingOn | (engineR3 ? RubberBandOptionPitchHighSpeed | RubberBandOptionPitchHighQuality | RubberBandOptionPitchHighConsistency : 0) | RubberBandOptionChannelsApart | RubberBandOptionChannelsTogether;
				if(changed & mustRestart) {
					tsrestartengine = YES;
				} else {
					tsnewoptions = options;
					tsapplynewoptions = YES;
				}
			}
		}
	}
}
// Translate the "rubberband*" user defaults into a RubberBandOptions mask for
// a realtime stretcher. Options that only exist in the R2 ("faster") engine
// (transients, detector, phase, smoothing, and the live pitch mode) are
// skipped when the R3 ("finer") engine is selected.
- (RubberBandOptions)getRubberbandOptions {
	RubberBandOptions options = RubberBandOptionProcessRealTime;

	NSUserDefaults *defaults = [[NSUserDefaultsController sharedUserDefaultsController] defaults];

	NSString *value = [defaults stringForKey:@"rubberbandEngine"];

	BOOL engineR3 = NO;

	if([value isEqualToString:@"faster"]) {
		options |= RubberBandOptionEngineFaster;
	} else if([value isEqualToString:@"finer"]) {
		options |= RubberBandOptionEngineFiner;
		engineR3 = YES;
	}

	// Transients, detector and phase settings exist only on the R2 engine.
	if(!engineR3) {
		value = [defaults stringForKey:@"rubberbandTransients"];
		if([value isEqualToString:@"crisp"]) {
			options |= RubberBandOptionTransientsCrisp;
		} else if([value isEqualToString:@"mixed"]) {
			options |= RubberBandOptionTransientsMixed;
		} else if([value isEqualToString:@"smooth"]) {
			options |= RubberBandOptionTransientsSmooth;
		}

		value = [defaults stringForKey:@"rubberbandDetector"];
		if([value isEqualToString:@"compound"]) {
			options |= RubberBandOptionDetectorCompound;
		} else if([value isEqualToString:@"percussive"]) {
			options |= RubberBandOptionDetectorPercussive;
		} else if([value isEqualToString:@"soft"]) {
			options |= RubberBandOptionDetectorSoft;
		}

		value = [defaults stringForKey:@"rubberbandPhase"];
		if([value isEqualToString:@"laminar"]) {
			options |= RubberBandOptionPhaseLaminar;
		} else if([value isEqualToString:@"independent"]) {
			options |= RubberBandOptionPhaseIndependent;
		}
	}

	value = [defaults stringForKey:@"rubberbandWindow"];
	if([value isEqualToString:@"standard"]) {
		options |= RubberBandOptionWindowStandard;
	} else if([value isEqualToString:@"short"]) {
		options |= RubberBandOptionWindowShort;
	} else if([value isEqualToString:@"long"]) {
		// The R3 engine has no "long" window; map it to standard there.
		if(engineR3) {
			options |= RubberBandOptionWindowStandard;
		} else {
			options |= RubberBandOptionWindowLong;
		}
	}

	// Smoothing is R2-only.
	if(!engineR3) {
		value = [defaults stringForKey:@"rubberbandSmoothing"];
		if([value isEqualToString:@"off"]) {
			options |= RubberBandOptionSmoothingOff;
		} else if([value isEqualToString:@"on"]) {
			options |= RubberBandOptionSmoothingOn;
		}
	}

	value = [defaults stringForKey:@"rubberbandFormant"];
	if([value isEqualToString:@"shifted"]) {
		options |= RubberBandOptionFormantShifted;
	} else if([value isEqualToString:@"preserved"]) {
		options |= RubberBandOptionFormantPreserved;
	}

	// Note: the pitch mode flags are still OR'd in under R3; the KVO handler
	// treats them as restart-class options in that case.
	value = [defaults stringForKey:@"rubberbandPitch"];
	if([value isEqualToString:@"highspeed"]) {
		options |= RubberBandOptionPitchHighSpeed;
	} else if([value isEqualToString:@"highquality"]) {
		options |= RubberBandOptionPitchHighQuality;
	} else if([value isEqualToString:@"highconsistency"]) {
		options |= RubberBandOptionPitchHighConsistency;
	}

	value = [defaults stringForKey:@"rubberbandChannels"];
	if([value isEqualToString:@"apart"]) {
		options |= RubberBandOptionChannelsApart;
	} else if([value isEqualToString:@"together"]) {
		options |= RubberBandOptionChannelsTogether;
	}

	return options;
}
// Create and prime a Rubber Band realtime stretcher for the current input
// format, tempo and pitch. Returns NO if construction fails.
- (BOOL)fullInit {
	RubberBandOptions options = [self getRubberbandOptions];
	tslastoptions = options;
	tschannels = inputFormat.mChannelsPerFrame;
	// Time ratio is the inverse of tempo: tempo 2.0 -> half-length output.
	ts = rubberband_new(inputFormat.mSampleRate, (int)tschannels, options, 1.0 / tempo, pitch);
	if(!ts)
		return NO;
	blockSize = rubberband_get_process_size_limit(ts);
	// Initial output samples to discard: the engine's start delay.
	toDrop = rubberband_get_start_delay(ts);
	samplesBuffered = 0;
	// Cap the feed block at the size of the per-channel staging buffers.
	if(blockSize > 4096)
		blockSize = 4096;
	rubberband_set_max_process_size(ts, (unsigned int)blockSize);
	// rsPtrs: one pointer per channel slot into the deinterleaved scratch pool.
	for(size_t i = 0; i < 32; ++i) {
		rsPtrs[i] = &rsInBuffer[4096 * i];
	}
	// Feed the engine's preferred amount of leading silence so the first real
	// samples are not consumed by its startup transient.
	ssize_t toPad = rubberband_get_preferred_start_pad(ts);
	if(toPad > 0) {
		for(size_t i = 0; i < tschannels; ++i) {
			memset(rsPtrs[i], 0, 4096 * sizeof(float));
		}
		while(toPad > 0) {
			ssize_t p = toPad;
			if(p > blockSize) p = blockSize;
			rubberband_process(ts, (const float * const *)rsPtrs, (int)p, false);
			toPad -= p;
		}
	}
	// Reset all per-stream bookkeeping.
	tsapplynewoptions = NO;
	tsrestartengine = NO;
	flushed = NO;
	countIn = 0.0;
	countOut = 0;
	return YES;
}
// Apply option / pitch / tempo changes that do not require rebuilding the
// stretcher. Called on the processing thread between chunks.
- (void)partialInit {
	if(stopping || paused || !ts) return;
	processEntered = YES;
	// NOTE(review): the pitch/tempo KVO path sets tsapplynewoptions without
	// refreshing tsnewoptions, so this XOR assumes tsnewoptions already equals
	// tslastoptions in that case — verify it is initialized accordingly.
	RubberBandOptions changed = tslastoptions ^ tsnewoptions;
	if(changed) {
		tslastoptions = tsnewoptions;
		BOOL engineR3 = !!(tsnewoptions & RubberBandOptionEngineFiner);
		// Masks for the option groups adjustable on a live engine.
		const RubberBandOptions transientsmask = RubberBandOptionTransientsCrisp | RubberBandOptionTransientsMixed | RubberBandOptionTransientsSmooth;
		const RubberBandOptions detectormask = RubberBandOptionDetectorCompound | RubberBandOptionDetectorPercussive | RubberBandOptionDetectorSoft;
		const RubberBandOptions phasemask = RubberBandOptionPhaseLaminar | RubberBandOptionPhaseIndependent;
		const RubberBandOptions formantmask = RubberBandOptionFormantShifted | RubberBandOptionFormantPreserved;
		const RubberBandOptions pitchmask = RubberBandOptionPitchHighSpeed | RubberBandOptionPitchHighQuality | RubberBandOptionPitchHighConsistency;
		if(changed & transientsmask)
			rubberband_set_transients_option(ts, tsnewoptions & transientsmask);
		// Detector and phase options exist only on the R2 engine.
		if(!engineR3) {
			if(changed & detectormask)
				rubberband_set_detector_option(ts, tsnewoptions & detectormask);
			if(changed & phasemask)
				rubberband_set_phase_option(ts, tsnewoptions & phasemask);
		}
		if(changed & formantmask)
			rubberband_set_formant_option(ts, tsnewoptions & formantmask);
		// Live pitch-mode changes are likewise skipped on R3.
		if(!engineR3 && (changed & pitchmask))
			rubberband_set_pitch_option(ts, tsnewoptions & pitchmask);
	}
	// Push pitch/tempo when they differ beyond float noise.
	if(fabs(pitch - lastPitch) > 1e-5 ||
	   fabs(tempo - lastTempo) > 1e-5) {
		lastPitch = pitch;
		lastTempo = tempo;
		rubberband_set_pitch_scale(ts, pitch);
		rubberband_set_time_ratio(ts, 1.0 / tempo);
	}
	tsapplynewoptions = NO;
	processEntered = NO;
}
// Dispose of the Rubber Band stretcher, if one exists. Safe to call
// repeatedly; leaves ts NULL so convert knows to rebuild on demand.
- (void)fullShutdown {
	if(!ts) return;
	rubberband_delete(ts);
	ts = NULL;
}
// Tear down any existing stretcher state, then construct a fresh one.
// Returns NO when stopping or when initialization fails.
- (BOOL)setup {
	if(stopping) {
		return NO;
	}
	[self fullShutdown];
	return [self fullInit];
}
// Final shutdown: flag the node as stopping, wait for any in-flight
// convert call to drain, then release the engine.
- (void)cleanUp {
	stopping = YES;
	for(;;) {
		if(!processEntered) break;
		usleep(500);
	}
	[self fullShutdown];
}
// Flush on seek or similar: pause processing, wait for the worker to leave
// convert, drop buffered audio, and discard the stretcher state.
- (void)resetBuffer {
	paused = YES;
	for(;;) {
		if(!processEntered) break;
		usleep(500);
	}
	[buffer reset];
	[self fullShutdown];
	paused = NO;
}
// Accessor: whether the node is currently paused.
- (BOOL)paused {
	return paused;
}
// Swap the upstream node. Pauses processing and waits for any in-flight
// convert call to drain before changing the pointer. The wait now sleeps
// 500µs per poll, matching -cleanUp and -resetBuffer; the original spun in
// a tight `while(processEntered);` loop, burning a full core while waiting.
- (void)setPreviousNode:(id)p {
	if(previousNode != p) {
		paused = YES;
		while(processEntered) {
			usleep(500);
		}
		previousNode = p;
		paused = NO;
	}
}
// End-of-stream propagation. Clearing EOS (YES -> NO) means a new stream is
// about to start, so the stretcher is torn down for a clean rebuild.
- (void)setEndOfStream:(BOOL)e {
	if(endOfStream && !e) {
		[self fullShutdown];
	}
	[super setEndOfStream:e];
	// Mirror the flag locally; convert checks `flushed` to stop pulling input.
	flushed = e;
}
// Worker loop: repeatedly pull converted chunks and push them downstream,
// then service any pending configuration changes between chunks.
- (void)process {
	while([self shouldContinue] == YES) {
		if(paused || endOfStream) {
			usleep(500);
			continue;
		}
		@autoreleasepool {
			AudioChunk *chunk = nil;
			chunk = [self convert];
			if(!chunk || ![chunk frameCount]) {
				// No output. With the stretcher bypassed (!ts), derive
				// end-of-stream directly from the upstream node's state.
				if(!ts) {
					flushed = previousNode && [[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES;
				}
				if(flushed) {
					usleep(500);
					endOfStream = YES;
					continue;
				}
				if(paused) {
					continue;
				}
				usleep(500);
			} else {
				[self writeChunk:chunk];
				chunk = nil;
			}
			// Handle configuration changes: disabling the effect or a
			// restart-class option change tears the engine down (convert will
			// rebuild it on the next pass); lighter changes apply in place.
			if(!enableRubberband && ts) {
				[self fullShutdown];
			} else if(tsrestartengine) {
				[self fullShutdown];
			} else if(tsapplynewoptions) {
				[self partialInit];
			}
		}
	}
}
// Pull audio from the previous node, run it through Rubber Band, and return
// one processed chunk. Returns nil when stopping, starved, or fully flushed;
// passes input straight through when the stretcher is disabled.
- (AudioChunk *)convert {
	if(stopping)
		return nil;
	// Re-entrancy guard, observed by cleanUp/resetBuffer/setPreviousNode.
	processEntered = YES;
	if(stopping || flushed || !previousNode || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) || [self shouldContinue] == NO) {
		processEntered = NO;
		return nil;
	}
	// Peek the upstream format without consuming any samples.
	if(![self peekFormat:&inputFormat channelConfig:&inputChannelConfig]) {
		processEntered = NO;
		return nil;
	}
	// Reject malformed stream descriptions outright.
	if(!inputFormat.mSampleRate ||
	   !inputFormat.mBitsPerChannel ||
	   !inputFormat.mChannelsPerFrame ||
	   !inputFormat.mBytesPerFrame ||
	   !inputFormat.mFramesPerPacket ||
	   !inputFormat.mBytesPerPacket) {
		processEntered = NO;
		return nil;
	}
	// (Re)build the stretcher when first enabled or when the input format or
	// channel layout changes mid-stream.
	if((enableRubberband && !ts) ||
	   memcmp(&inputFormat, &lastInputFormat, sizeof(inputFormat)) != 0 ||
	   inputChannelConfig != lastInputChannelConfig) {
		lastInputFormat = inputFormat;
		lastInputChannelConfig = inputChannelConfig;
		[self fullShutdown];
		if(enableRubberband && ![self setup]) {
			processEntered = NO;
			return nil;
		}
	}
	// Effect disabled: bypass and hand upstream chunks through unmodified.
	if(!ts) {
		processEntered = NO;
		return [self readChunk:4096];
	}
	// Feed the engine however many samples it asks for, capped at blockSize.
	ssize_t samplesToProcess = rubberband_get_samples_required(ts);
	if(samplesToProcess > blockSize)
		samplesToProcess = blockSize;
	int channels = (int)(inputFormat.mChannelsPerFrame);
	if(samplesToProcess > 0) {
		AudioChunk *chunk = [self readAndMergeChunksAsFloat32:samplesToProcess];
		if(!chunk || ![chunk frameCount]) {
			processEntered = NO;
			return nil;
		}
		streamTimestamp = [chunk streamTimestamp];
		streamTimeRatio = [chunk streamTimeRatio];
		isHDCD = [chunk isHDCD];
		size_t frameCount = [chunk frameCount];
		// Track expected output length: input frames scaled by 1/tempo.
		countIn += ((double)frameCount) / tempo;
		NSData *sampleData = [chunk removeSamples:frameCount];
		// Deinterleave into the per-channel scratch buffers.
		for (size_t i = 0; i < channels; ++i) {
			cblas_scopy((int)frameCount, ((const float *)[sampleData bytes]) + i, channels, rsPtrs[i], 1);
		}
		// If upstream is drained, mark this as the final block so the engine
		// flushes its internal state.
		flushed = [[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES;
		int len = (int)frameCount;
		rubberband_process(ts, (const float * const *)rsPtrs, len, flushed);
	}
	// Drain the engine into the interleaved output buffer.
	ssize_t samplesAvailable;
	while(!stopping && (samplesAvailable = rubberband_available(ts)) > 0) {
		// First discard the engine's start delay.
		if(toDrop > 0) {
			ssize_t blockDrop = toDrop;
			if(blockDrop > samplesAvailable) blockDrop = samplesAvailable;
			if(blockDrop > blockSize) blockDrop = blockSize;
			rubberband_retrieve(ts, (float * const *)rsPtrs, (int)blockDrop);
			toDrop -= blockDrop;
			continue;
		}
		// Output accumulator holds at most 65536 frames.
		ssize_t maxAvailable = 65536 - samplesBuffered;
		ssize_t samplesOut = samplesAvailable;
		if(samplesOut > maxAvailable) {
			samplesOut = maxAvailable;
			if(samplesOut <= 0) {
				break;
			}
		}
		if(samplesOut > blockSize) samplesOut = blockSize;
		rubberband_retrieve(ts, (float * const *)rsPtrs, (int)samplesOut);
		// Re-interleave into the output staging buffer.
		for(size_t i = 0; i < channels; ++i) {
			cblas_scopy((int)samplesOut, rsPtrs[i], 1, &rsOutBuffer[samplesBuffered * channels + i], channels);
		}
		samplesBuffered += samplesOut;
	}
	if(flushed) {
		if(samplesBuffered > 0) {
			// Trim the final output to the ideal length implied by the input.
			ssize_t ideal = (ssize_t)floor(countIn + 0.5);
			if(countOut + samplesBuffered > ideal) {
				// Rubber Band does not account for flushing duration in real time mode
				samplesBuffered = ideal - countOut;
			}
		}
	}
	// Package whatever is buffered into a chunk, preserving stream metadata
	// and scaling the time ratio by tempo.
	AudioChunk *outputChunk = nil;
	if(samplesBuffered > 0) {
		outputChunk = [[AudioChunk alloc] init];
		[outputChunk setFormat:inputFormat];
		if(inputChannelConfig) {
			[outputChunk setChannelConfig:inputChannelConfig];
		}
		if(isHDCD) [outputChunk setHDCD];
		[outputChunk setStreamTimestamp:streamTimestamp];
		[outputChunk setStreamTimeRatio:streamTimeRatio * tempo];
		[outputChunk assignSamples:&rsOutBuffer[0] frameCount:samplesBuffered];
		countOut += samplesBuffered;
		samplesBuffered = 0;
		double chunkDuration = [outputChunk duration];
		streamTimestamp += chunkDuration * [outputChunk streamTimeRatio];
	}
	processEntered = NO;
	return outputChunk;
}
// Estimated seconds of audio currently held inside this node: the output
// ring buffer plus whatever is still inside the stretcher.
- (double)secondsBuffered {
	double rbBuffered = 0.0;
	if(ts) {
		// We deliberately avoid Rubber Band's own latency query: by the time
		// this is called it tends to report close to zero, and it does not
		// account for the audio that will be lopped off at the end of
		// processing. Instead, estimate from frames fed in (already converted
		// to the output timebase via countIn) minus frames emitted.
		rbBuffered = countIn - (double)(countOut);
		if(rbBuffered < 0) {
			rbBuffered = 0.0;
		} else {
			rbBuffered /= inputFormat.mSampleRate;
		}
	}
	return [buffer listDuration] + rbBuffered;
}
@end

View file

@ -1,24 +0,0 @@
//
// Downmix.h
// Cog
//
// Created by Christopher Snowhill on 2/05/22.
// Copyright 2022 __LoSnoCo__. All rights reserved.
//
#import <CoreAudio/CoreAudio.h>
#import <Foundation/Foundation.h>
// Channel-layout converter for packed native 32-bit float PCM: folds
// multichannel audio down to stereo/mono, or spreads it up to larger layouts.
@interface DownmixProcessor : NSObject {
	AudioStreamBasicDescription inputFormat;  // validated float32 input format
	AudioStreamBasicDescription outputFormat; // validated float32 output format
	uint32_t inConfig;  // input channel bitmask (AudioChannel* flags)
	uint32_t outConfig; // output channel bitmask
}

// Returns nil unless both formats are packed native 32-bit float PCM.
- (id)initWithInputFormat:(AudioStreamBasicDescription)inf inputConfig:(uint32_t)iConfig andOutputFormat:(AudioStreamBasicDescription)outf outputConfig:(uint32_t)oConfig;

// Convert `frames` frames from the input layout to the output layout.
- (void)process:(const void *)inBuffer frameCount:(size_t)frames output:(void *)outBuffer;

@end

View file

@ -1,301 +0,0 @@
//
// Downmix.m
// Cog
//
// Created by Christopher Snowhill on 2/05/22.
// Copyright 2022 __LoSnoCo__. All rights reserved.
//
#import "Downmix.h"
#import "Logging.h"
#import "AudioChunk.h"
#import <Accelerate/Accelerate.h>
// Fold an arbitrary channel layout down to interleaved stereo. Each mix
// ratio pair is [same-side, opposite-side]; the ratios start from full-scale
// front left/right and are attenuated as more channel groups are present,
// presumably to keep the summed level within headroom (the constants look
// derived, e.g. 0.5858 ≈ 2−√2 and 0.4142 ≈ √2−1 — TODO confirm provenance).
static void downmix_to_stereo(const float *inBuffer, int channels, uint32_t config, float *outBuffer, size_t count) {
	float FrontRatios[2] = { 0.0F, 0.0F };
	float FrontCenterRatio = 0.0F;
	float LFERatio = 0.0F;
	float BackRatios[2] = { 0.0F, 0.0F };
	float BackCenterRatio = 0.0F;
	float SideRatios[2] = { 0.0F, 0.0F };
	if(config & (AudioChannelFrontLeft | AudioChannelFrontRight)) {
		FrontRatios[0] = 1.0F;
	}
	if(config & AudioChannelFrontCenter) {
		FrontRatios[0] = 0.5858F;
		FrontCenterRatio = 0.4142F;
	}
	if(config & (AudioChannelBackLeft | AudioChannelBackRight)) {
		if(config & AudioChannelFrontCenter) {
			FrontRatios[0] = 0.651F;
			FrontCenterRatio = 0.46F;
			BackRatios[0] = 0.5636F;
			BackRatios[1] = 0.3254F;
		} else {
			FrontRatios[0] = 0.4226F;
			BackRatios[0] = 0.366F;
			BackRatios[1] = 0.2114F;
		}
	}
	// Each additional channel group scales every prior ratio down.
	if(config & AudioChannelLFE) {
		FrontRatios[0] *= 0.8F;
		FrontCenterRatio *= 0.8F;
		LFERatio = FrontCenterRatio;
		BackRatios[0] *= 0.8F;
		BackRatios[1] *= 0.8F;
	}
	if(config & AudioChannelBackCenter) {
		FrontRatios[0] *= 0.86F;
		FrontCenterRatio *= 0.86F;
		LFERatio *= 0.86F;
		BackRatios[0] *= 0.86F;
		BackRatios[1] *= 0.86F;
		BackCenterRatio = FrontCenterRatio * 0.86F;
	}
	if(config & (AudioChannelSideLeft | AudioChannelSideRight)) {
		float ratio = 0.73F;
		if(config & AudioChannelBackCenter) ratio = 0.85F;
		FrontRatios[0] *= ratio;
		FrontCenterRatio *= ratio;
		LFERatio *= ratio;
		BackRatios[0] *= ratio;
		BackRatios[1] *= ratio;
		BackCenterRatio *= ratio;
		SideRatios[0] = 0.463882352941176 * ratio;
		SideRatios[1] = 0.267882352941176 * ratio;
	}
	// Map each interleaved input slot to its canonical channel index.
	// (VLA on the stack: channels is small, bounded by the layout mask.)
	int32_t channelIndexes[channels];
	for(int i = 0; i < channels; ++i) {
		channelIndexes[i] = [AudioChunk findChannelIndex:[AudioChunk extractChannelFlag:i fromConfig:config]];
	}
	vDSP_vclr(outBuffer, 1, count * 2);
	// Scratch for one channel's left/right contributions (stack VLA).
	float tempBuffer[count * 2];
	for(uint32_t i = 0; i < channels; ++i) {
		float leftRatio = 0.0F;
		float rightRatio = 0.0F;
		switch(channelIndexes[i]) {
			case 0:
				leftRatio = FrontRatios[0];
				rightRatio = FrontRatios[1];
				break;
			case 1:
				leftRatio = FrontRatios[1];
				rightRatio = FrontRatios[0];
				break;
			case 2:
				leftRatio = FrontCenterRatio;
				rightRatio = FrontCenterRatio;
				break;
			case 3:
				leftRatio = LFERatio;
				rightRatio = LFERatio;
				break;
			case 4:
				leftRatio = BackRatios[0];
				rightRatio = BackRatios[1];
				break;
			case 5:
				leftRatio = BackRatios[1];
				rightRatio = BackRatios[0];
				break;
			// Indexes 6/7 contribute nothing (zero ratios), as do 11 and up.
			case 6:
			case 7:
				break;
			case 8:
				leftRatio = BackCenterRatio;
				rightRatio = BackCenterRatio;
				break;
			case 9:
				leftRatio = SideRatios[0];
				rightRatio = SideRatios[1];
				break;
			case 10:
				leftRatio = SideRatios[1];
				rightRatio = SideRatios[0];
				break;
			case 11:
			case 12:
			case 13:
			case 14:
			case 15:
			case 16:
			case 17:
			default:
				break;
		}
		// Scale this channel into L and R lanes, then accumulate.
		vDSP_vsmul(inBuffer + i, channels, &leftRatio, tempBuffer, 1, count);
		vDSP_vsmul(inBuffer + i, channels, &rightRatio, tempBuffer + count, 1, count);
		vDSP_vadd(outBuffer, 2, tempBuffer, 1, outBuffer, 2, count);
		vDSP_vadd(outBuffer + 1, 2, tempBuffer + count, 1, outBuffer + 1, 2, count);
	}
}
// Fold any input layout down to one channel: first reduce to stereo when the
// input is not already plain stereo, then sum left and right. No attenuation
// is applied to the sum, matching the stereo downmix's level handling.
static void downmix_to_mono(const float *inBuffer, int channels, uint32_t config, float *outBuffer, size_t count) {
	float stereoScratch[count * 2];
	const float *src = inBuffer;
	if(!(channels <= 2 && config == AudioConfigStereo)) {
		downmix_to_stereo(src, channels, config, stereoScratch, count);
		src = stereoScratch;
	}
	// out = left, then out += right.
	cblas_scopy((int)count, src, 2, outBuffer, 1);
	vDSP_vadd(outBuffer, 1, src + 1, 2, outBuffer, 1, count);
}
// Spread a smaller layout into a larger one. Known pairings get explicit
// handling; everything else falls through to a generic channel-flag copy.
// channelIndexFromConfig presumably returns ~0 when the flag is absent from
// the output config — TODO confirm against AudioChunk.
static void upmix(const float *inBuffer, int inchannels, uint32_t inconfig, float *outBuffer, int outchannels, uint32_t outconfig, size_t count) {
	if(inconfig == AudioConfigMono && outconfig == AudioConfigStereo) {
		// Mono -> stereo: duplicate into both channels.
		cblas_scopy((int)count, inBuffer, 1, outBuffer, 2);
		cblas_scopy((int)count, inBuffer, 1, outBuffer + 1, 2);
	} else if(inconfig == AudioConfigMono && outconfig == AudioConfig4Point0) {
		// Mono -> quad: front pair duplicated, back pair silent.
		cblas_scopy((int)count, inBuffer, 1, outBuffer, 4);
		cblas_scopy((int)count, inBuffer, 1, outBuffer + 1, 4);
		vDSP_vclr(outBuffer + 2, 4, count);
		vDSP_vclr(outBuffer + 3, 4, count);
	} else if(inconfig == AudioConfigMono && (outconfig & AudioChannelFrontCenter)) {
		// Mono -> any layout with a center channel: center carries the signal,
		// every other channel is cleared.
		uint32_t cIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelFrontCenter];
		cblas_scopy((int)count, inBuffer, 1, outBuffer + cIndex, outchannels);
		for(size_t i = 0; i < cIndex; ++i) {
			vDSP_vclr(outBuffer + i, outchannels, (int)count);
		}
		for(size_t i = cIndex + 1; i < outchannels; ++i) {
			vDSP_vclr(outBuffer + i, outchannels, (int)count);
		}
	} else if(inconfig == AudioConfig4Point0 && outchannels >= 5) {
		// Quad -> 5.0 or larger: map the four corners, silence the rest.
		uint32_t flIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelFrontLeft];
		uint32_t frIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelFrontRight];
		uint32_t blIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelBackLeft];
		uint32_t brIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelBackRight];
		vDSP_vclr(outBuffer, 1, count * outchannels);
		if(flIndex != ~0)
			cblas_scopy((int)count, inBuffer + 0, 4, outBuffer + flIndex, outchannels);
		if(frIndex != ~0)
			cblas_scopy((int)count, inBuffer + 1, 4, outBuffer + frIndex, outchannels);
		if(blIndex != ~0)
			cblas_scopy((int)count, inBuffer + 2, 4, outBuffer + blIndex, outchannels);
		if(brIndex != ~0)
			cblas_scopy((int)count, inBuffer + 3, 4, outBuffer + brIndex, outchannels);
	} else if(inconfig == AudioConfig5Point0 && outchannels >= 6) {
		// 5.0 -> 5.1 or larger.
		uint32_t flIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelFrontLeft];
		uint32_t frIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelFrontRight];
		uint32_t cIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelFrontCenter];
		uint32_t blIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelBackLeft];
		uint32_t brIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelBackRight];
		vDSP_vclr(outBuffer, 1, count * outchannels);
		if(flIndex != ~0)
			cblas_scopy((int)count, inBuffer + 0, 5, outBuffer + flIndex, outchannels);
		if(frIndex != ~0)
			cblas_scopy((int)count, inBuffer + 1, 5, outBuffer + frIndex, outchannels);
		if(cIndex != ~0)
			cblas_scopy((int)count, inBuffer + 2, 5, outBuffer + cIndex, outchannels);
		if(blIndex != ~0)
			cblas_scopy((int)count, inBuffer + 3, 5, outBuffer + blIndex, outchannels);
		if(brIndex != ~0)
			cblas_scopy((int)count, inBuffer + 4, 5, outBuffer + brIndex, outchannels);
	} else if(inconfig == AudioConfig6Point1 && outchannels >= 8) {
		// 6.1 -> 7.1 or larger; the lone back-center feeds back center when
		// present, otherwise it is duplicated into both back channels.
		uint32_t flIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelFrontLeft];
		uint32_t frIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelFrontRight];
		uint32_t cIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelFrontCenter];
		uint32_t lfeIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelLFE];
		uint32_t blIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelBackLeft];
		uint32_t brIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelBackRight];
		uint32_t bcIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelBackCenter];
		uint32_t slIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelSideLeft];
		uint32_t srIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:AudioChannelSideRight];
		vDSP_vclr(outBuffer, 1, count * outchannels);
		if(flIndex != ~0)
			cblas_scopy((int)count, inBuffer + 0, 7, outBuffer + flIndex, outchannels);
		if(frIndex != ~0)
			cblas_scopy((int)count, inBuffer + 1, 7, outBuffer + frIndex, outchannels);
		if(cIndex != ~0)
			cblas_scopy((int)count, inBuffer + 2, 7, outBuffer + cIndex, outchannels);
		if(lfeIndex != ~0)
			cblas_scopy((int)count, inBuffer + 3, 7, outBuffer + lfeIndex, outchannels);
		if(slIndex != ~0)
			cblas_scopy((int)count, inBuffer + 4, 7, outBuffer + slIndex, outchannels);
		if(srIndex != ~0)
			cblas_scopy((int)count, inBuffer + 5, 7, outBuffer + srIndex, outchannels);
		if(bcIndex != ~0)
			cblas_scopy((int)count, inBuffer + 6, 7, outBuffer + bcIndex, outchannels);
		else {
			if(blIndex != ~0)
				cblas_scopy((int)count, inBuffer + 6, 7, outBuffer + blIndex, outchannels);
			if(brIndex != ~0)
				cblas_scopy((int)count, inBuffer + 6, 7, outBuffer + brIndex, outchannels);
		}
	} else {
		// Generic fallback: copy each input channel to its matching output
		// slot by flag; unmatched input channels are dropped.
		vDSP_vclr(outBuffer, 1, count * outchannels);
		for(int i = 0; i < inchannels; ++i) {
			uint32_t channelFlag = [AudioChunk extractChannelFlag:i fromConfig:inconfig];
			uint32_t outIndex = [AudioChunk channelIndexFromConfig:outconfig forFlag:channelFlag];
			if(outIndex != ~0)
				cblas_scopy((int)count, inBuffer + i, inchannels, outBuffer + outIndex, outchannels);
		}
	}
}
@implementation DownmixProcessor

// KVO context token. NOTE(review): appears unused within this file —
// candidate for removal; verify no category references it.
static void *kDownmixProcessorContext = &kDownmixProcessorContext;

// Validates both formats: packed, native-endian 32-bit float PCM with
// consistent frame/packet sizing. Returns nil on any mismatch so callers
// fail fast on unsupported formats.
- (id)initWithInputFormat:(AudioStreamBasicDescription)inf inputConfig:(uint32_t)iConfig andOutputFormat:(AudioStreamBasicDescription)outf outputConfig:(uint32_t)oConfig {
	self = [super init];

	if(self) {
		if(inf.mFormatID != kAudioFormatLinearPCM ||
		   (inf.mFormatFlags & kAudioFormatFlagsNativeFloatPacked) != kAudioFormatFlagsNativeFloatPacked ||
		   inf.mBitsPerChannel != 32 ||
		   inf.mBytesPerFrame != (4 * inf.mChannelsPerFrame) ||
		   inf.mBytesPerPacket != inf.mFramesPerPacket * inf.mBytesPerFrame)
			return nil;

		if(outf.mFormatID != kAudioFormatLinearPCM ||
		   (outf.mFormatFlags & kAudioFormatFlagsNativeFloatPacked) != kAudioFormatFlagsNativeFloatPacked ||
		   outf.mBitsPerChannel != 32 ||
		   outf.mBytesPerFrame != (4 * outf.mChannelsPerFrame) ||
		   outf.mBytesPerPacket != outf.mFramesPerPacket * outf.mBytesPerFrame)
			return nil;

		inputFormat = inf;
		outputFormat = outf;

		inConfig = iConfig;
		outConfig = oConfig;
	}

	return self;
}

// Dispatch to the appropriate conversion. NOTE(review): when none of the
// branches match (e.g. same channel count but differing configs), outBuffer
// is left untouched — verify callers pre-clear or never hit that case.
- (void)process:(const void *)inBuffer frameCount:(size_t)frames output:(void *)outBuffer {
	if(inputFormat.mChannelsPerFrame == 2 && outConfig == AudioConfigStereo &&
	   inConfig == (AudioChannelSideLeft | AudioChannelSideRight)) {
		// Workaround for HRTF output
		memcpy(outBuffer, inBuffer, frames * outputFormat.mBytesPerPacket);
	} else if(inputFormat.mChannelsPerFrame > 2 && outConfig == AudioConfigStereo) {
		downmix_to_stereo((const float *)inBuffer, inputFormat.mChannelsPerFrame, inConfig, (float *)outBuffer, frames);
	} else if(inputFormat.mChannelsPerFrame > 1 && outConfig == AudioConfigMono) {
		downmix_to_mono((const float *)inBuffer, inputFormat.mChannelsPerFrame, inConfig, (float *)outBuffer, frames);
	} else if(inputFormat.mChannelsPerFrame < outputFormat.mChannelsPerFrame) {
		upmix((const float *)inBuffer, inputFormat.mChannelsPerFrame, inConfig, (float *)outBuffer, outputFormat.mChannelsPerFrame, outConfig, frames);
	} else if(inConfig == outConfig) {
		memcpy(outBuffer, inBuffer, frames * outputFormat.mBytesPerPacket);
	}
}

@end

View file

@ -1,36 +0,0 @@
//
// FSurroundFilter.h
// CogAudio
//
// Created by Christopher Snowhill on 7/9/22.
//
#ifndef FSurroundFilter_h
#define FSurroundFilter_h

#import <Cocoa/Cocoa.h>

#import <stdint.h>

// Fixed processing granularity of the FreeSurround decoder.
#define FSurroundChunkSize 4096

// Wrapper around the FreeSurround matrix decoder: expands stereo input into
// a multichannel layout (e.g. 5.1) in fixed 4096-frame blocks.
@interface FSurroundFilter : NSObject {
	void *decoder; // freesurround_decoder* (opaque to keep this header ObjC)
	void *params;  // freesurround_params* (opaque)
	double srate;  // sample rate the decoder was configured for
	uint32_t channelCount;  // number of output channels
	uint32_t channelConfig; // output channel bitmask (AudioChannel* flags)
	float tempBuffer[4096 * 2]; // stereo staging for short input blocks
}

- (id)initWithSampleRate:(double)srate;

- (uint32_t)channelCount;
- (uint32_t)channelConfig;
- (double)srate;

// Decode `count` stereo frames into the multichannel output layout.
- (void)process:(const float *)samplesIn output:(float *)samplesOut count:(uint32_t)count;

@end

#endif /* FSurroundFilter_h */

View file

@ -1,156 +0,0 @@
//
// FSurroundFilter.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 7/9/22.
//
#import "FSurroundFilter.h"
#import "freesurround_decoder.h"
#import "AudioChunk.h"
#import <Accelerate/Accelerate.h>
#import <map>
#import <vector>
struct freesurround_params {
// the user-configurable parameters
float center_image, shift, depth, circular_wrap, focus, front_sep, rear_sep, bass_lo, bass_hi;
bool use_lfe;
channel_setup channels_fs; // FreeSurround channel setup
std::vector<unsigned> chanmap; // FreeSurround -> WFX channel index translation (derived data for faster lookup)
// construct with defaults
freesurround_params()
: center_image(0.7), shift(0), depth(1), circular_wrap(90), focus(0), front_sep(1), rear_sep(1),
bass_lo(40), bass_hi(90), use_lfe(false) {
set_channels_fs(cs_5point1);
}
// compute the WFX version of the channel setup code
unsigned channel_count() {
return (unsigned)chanmap.size();
}
unsigned channels_wfx() {
unsigned res = 0;
for(unsigned i = 0; i < chanmap.size(); res |= chanmap[i++]) {};
return res;
}
// assign a channel setup & recompute derived data
void set_channels_fs(channel_setup setup) {
channels_fs = setup;
chanmap.clear();
// Note: Because WFX does not define a few of the more exotic channels (side front left&right, side rear left&right, back center left&right),
// the side front/back channel pairs (both left and right sides, resp.) are mapped here onto foobar's top front/back channel pairs and the
// back (off-)center left/right channels are mapped onto foobar's top front center and top back center, respectively.
// Therefore, these speakers should be connected to those outputs instead.
std::map<channel_id, uint32_t> fs2wfx;
fs2wfx[ci_front_left] = AudioChannelFrontLeft;
fs2wfx[ci_front_center_left] = AudioChannelFrontCenterLeft;
fs2wfx[ci_front_center] = AudioChannelFrontCenter;
fs2wfx[ci_front_center_right] = AudioChannelFrontCenterRight;
fs2wfx[ci_front_right] = AudioChannelFrontRight;
fs2wfx[ci_side_front_left] = AudioChannelFrontLeft;
fs2wfx[ci_side_front_right] = AudioChannelTopFrontRight;
fs2wfx[ci_side_center_left] = AudioChannelSideLeft;
fs2wfx[ci_side_center_right] = AudioChannelSideRight;
fs2wfx[ci_side_back_left] = AudioChannelTopBackLeft;
fs2wfx[ci_side_back_right] = AudioChannelTopBackRight;
fs2wfx[ci_back_left] = AudioChannelBackLeft;
fs2wfx[ci_back_center_left] = AudioChannelTopFrontCenter;
fs2wfx[ci_back_center] = AudioChannelBackCenter;
fs2wfx[ci_back_center_right] = AudioChannelTopBackCenter;
fs2wfx[ci_back_right] = AudioChannelBackRight;
fs2wfx[ci_lfe] = AudioChannelLFE;
for(unsigned i = 0; i < freesurround_decoder::num_channels(channels_fs); i++)
chanmap.push_back(fs2wfx[freesurround_decoder::channel_at(channels_fs, i)]);
}
};
@implementation FSurroundFilter

// Build a 5.1 FreeSurround decoder at the given sample rate, push the default
// tuning parameters into it, and cache the derived channel count/config.
- (id)initWithSampleRate:(double)srate {
	self = [super init];
	if(!self) return nil;

	self->srate = srate;

	freesurround_params *_params = new freesurround_params;
	params = (void *)_params;

	// Decoder operates on fixed 4096-frame blocks.
	freesurround_decoder *_decoder = new freesurround_decoder(cs_5point1, 4096);
	decoder = (void *)_decoder;

	_decoder->circular_wrap(_params->circular_wrap);
	_decoder->shift(_params->shift);
	_decoder->depth(_params->depth);
	_decoder->focus(_params->focus);
	_decoder->center_image(_params->center_image);
	_decoder->front_separation(_params->front_sep);
	_decoder->rear_separation(_params->rear_sep);
	_decoder->bass_redirection(_params->use_lfe);
	// Cutoffs are specified in Hz; the decoder wants fractions of Nyquist.
	_decoder->low_cutoff(_params->bass_lo / (srate / 2.0));
	_decoder->high_cutoff(_params->bass_hi / (srate / 2.0));

	channelCount = _params->channel_count();
	channelConfig = _params->channels_wfx();

	return self;
}

// Both members are C++ heap objects hidden behind void*; delete them here.
- (void)dealloc {
	if(decoder) {
		freesurround_decoder *_decoder = (freesurround_decoder *)decoder;
		delete _decoder;
	}
	if(params) {
		freesurround_params *_params = (freesurround_params *)params;
		delete _params;
	}
}

- (uint32_t)channelCount {
	return channelCount;
}

- (uint32_t)channelConfig {
	return channelConfig;
}

- (double)srate {
	return srate;
}

// Decode `count` interleaved stereo frames into the multichannel layout.
// Inputs longer than 4096 frames are truncated, with the overflow region of
// the output zero-filled; shorter inputs are zero-padded into tempBuffer
// because the decoder always consumes exactly 4096 frames.
- (void)process:(const float *)samplesIn output:(float *)samplesOut count:(uint32_t)count {
	freesurround_params *_params = (freesurround_params *)params;
	freesurround_decoder *_decoder = (freesurround_decoder *)decoder;
	uint32_t zeroCount = 0;

	if(count > 4096) {
		zeroCount = count - 4096;
		count = 4096;
	}

	if(count < 4096) {
		cblas_scopy(count * 2, samplesIn, 1, &tempBuffer[0], 1);
		vDSP_vclr(&tempBuffer[count * 2], 1, (4096 - count) * 2);
		samplesIn = &tempBuffer[0];
	}

	float *src = _decoder->decode(samplesIn);

	// Scatter each decoded channel into its slot in the interleaved output.
	for(unsigned c = 0, num = channelCount; c < num; c++) {
		unsigned idx = [AudioChunk channelIndexFromConfig:channelConfig forFlag:_params->chanmap[c]];
		cblas_scopy(count, src + c, num, samplesOut + idx, num);
		if(zeroCount) {
			vDSP_vclr(samplesOut + idx + count, num, zeroCount);
		}
	}
}

@end

View file

@ -1,46 +0,0 @@
//
// HeadphoneFilter.h
// CogAudio Framework
//
// Created by Christopher Snowhill on 1/24/22.
//
#ifndef HeadphoneFilter_h
#define HeadphoneFilter_h

#import <Accelerate/Accelerate.h>
#import <Cocoa/Cocoa.h>

#import <simd/simd.h>

// Convolution-based HRTF virtualizer: renders a multichannel stream to
// binaural stereo using per-speaker impulse responses loaded from a file.
@interface HeadphoneFilter : NSObject {
	NSURL *URL;            // impulse file the filter was built from
	int bufferSize;        // processing block length in frames
	int paddedBufferSize;  // block length padded for convolution
	double sampleRate;     // target sample rate the impulses are resampled to
	int channelCount;      // number of input channels
	uint32_t config;       // input channel bitmask (AudioChannel* flags)

	float **mirroredImpulseResponses; // per-channel L/R impulse pairs
	float **prevInputs;               // per-channel history for overlap
	float *paddedSignal[2];           // L/R output scratch
}

// Quick sanity check that the file parses as a usable impulse set.
+ (BOOL)validateImpulseFile:(NSURL *)url;

// matrix applies a listener head rotation to every speaker position.
- (id)initWithImpulseFile:(NSURL *)url forSampleRate:(double)sampleRate withInputChannels:(int)channels withConfig:(uint32_t)config withMatrix:(simd_float4x4)matrix;

- (void)reloadWithMatrix:(simd_float4x4)matrix;

- (void)process:(const float *)inBuffer sampleCount:(int)count toBuffer:(float *)outBuffer;

- (void)reset;

// Frames of silence the caller should feed before real audio.
- (size_t)needPrefill;

@end

#endif /* HeadphoneFilter_h */

View file

@ -1,386 +0,0 @@
//
// HeadphoneFilter.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 1/24/22.
//
#import "HeadphoneFilter.h"
#import "AudioChunk.h"
#import "AudioDecoder.h"
#import "AudioSource.h"
#import <stdlib.h>
#import <fstream>
#import <soxr.h>
#import "HrtfData.h"
#import "Logging.h"
// Virtual speaker placement: spherical coordinates relative to the listener.
typedef struct speakerPosition {
	float elevation; // radians; positive is above the listener
	float azimuth;   // radians; negative values are to the listener's left
	float distance;  // normalized; all entries currently use 1.0
} speakerPosition;

#define DEGREES(x) ((x)*M_PI / 180.0)

// One entry per canonical channel index (front L/R, center, LFE, back L/R,
// front-center L/R, back center, side L/R, then the top-layer channels).
// NOTE(review): assumed to parallel AudioChunk's channel index order — the
// same indexing used by the downmix switch — verify against AudioChunk.h.
static const speakerPosition speakerPositions[18] = {
	{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-30.0), .distance = 1.0 },
	{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+30.0), .distance = 1.0 },
	{ .elevation = DEGREES(0.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
	{ .elevation = DEGREES(0.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
	{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-135.0), .distance = 1.0 },
	{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+135.0), .distance = 1.0 },
	{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-15.0), .distance = 1.0 },
	{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+15.0), .distance = 1.0 },
	{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-180.0), .distance = 1.0 },
	{ .elevation = DEGREES(0.0), .azimuth = DEGREES(-90.0), .distance = 1.0 },
	{ .elevation = DEGREES(0.0), .azimuth = DEGREES(+90.0), .distance = 1.0 },
	{ .elevation = DEGREES(+90.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
	{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(-30.0), .distance = 1.0 },
	{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
	{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(+30.0), .distance = 1.0 },
	{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(-135.0), .distance = 1.0 },
	{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(0.0), .distance = 1.0 },
	{ .elevation = DEGREES(+45.0), .azimuth = DEGREES(+135.0), .distance = 1.0 }
};
// 4x4 rotation by theta radians about the X axis (same column layout as the
// original literal; sin/cos computed once each).
static simd_float4x4 matX(float theta) {
	const float s = sinf(theta);
	const float c = cosf(theta);
	return (simd_float4x4){
		simd_make_float4(1.0f, 0.0f, 0.0f, 0.0f),
		simd_make_float4(0.0f, c, -s, 0.0f),
		simd_make_float4(0.0f, s, c, 0.0f),
		simd_make_float4(0.0f, 0.0f, 0.0f, 1.0f)
	};
}
// 4x4 rotation by theta radians about the Y axis.
static simd_float4x4 matY(float theta) {
	const float s = sinf(theta);
	const float c = cosf(theta);
	return (simd_float4x4){
		simd_make_float4(c, 0.0f, s, 0.0f),
		simd_make_float4(0.0f, 1.0f, 0.0f, 0.0f),
		simd_make_float4(-s, 0.0f, c, 0.0f),
		simd_make_float4(0.0f, 0.0f, 0.0f, 1.0f)
	};
}
// Z-axis rotation — compiled out; kept for reference alongside matX/matY.
#if 0
static simd_float4x4 matZ(float theta) {
	simd_float4x4 mat = {
		simd_make_float4(cosf(theta), -sinf(theta), 0.0f, 0.0f),
		simd_make_float4(sinf(theta), cosf(theta), 0.0f, 0.0f),
		simd_make_float4(0.0f, 0.0f, 1.0f, 0.0f),
		simd_make_float4(0.0f, 0.0f, 0.0f, 1.0f)
	};
	return mat;
};
#endif
// Rotates a speaker direction (elevation/azimuth, radians, modified in place)
// by the listener-orientation `matrix`, then decomposes the combined rotation
// back into Euler angles and normalizes them.
// NOTE(review): azimuth drives the X-axis rotation and elevation the Y-axis
// rotation here — confirm this axis mapping is intended.
static void transformPosition(float &elevation, float &azimuth, const simd_float4x4 &matrix) {
	simd_float4x4 mat_x = matX(azimuth);
	simd_float4x4 mat_y = matY(elevation);
	//simd_float4x4 mat_z = matrix_identity_float4x4;
	simd_float4x4 offsetMatrix = simd_mul(mat_x, mat_y);
	//offsetMatrix = simd_mul(offsetMatrix, mat_z);
	offsetMatrix = simd_mul(offsetMatrix, matrix);
	// Rotation-matrix -> Euler extraction: sy ~ magnitude of the middle-angle
	// cosine; when it is ~0 the matrix is near gimbal lock.
	double sy = sqrt(offsetMatrix.columns[0].x * offsetMatrix.columns[0].x + offsetMatrix.columns[1].x * offsetMatrix.columns[1].x);
	bool singular = sy < 1e-6; // gimbal-lock (singular) case
	float x, y/*, z*/;
	if(!singular) {
		x = atan2(offsetMatrix.columns[2].y, offsetMatrix.columns[2].z);
		y = atan2(-offsetMatrix.columns[2].x, sy);
		//z = atan2(offsetMatrix.columns[1].x, offsetMatrix.columns[0].x);
	} else {
		// Degenerate case: recover x from a different pair of elements.
		x = atan2(-offsetMatrix.columns[1].z, offsetMatrix.columns[1].y);
		y = atan2(-offsetMatrix.columns[2].x, sy);
		//z = 0;
	}
	elevation = y;
	azimuth = x;
	// Clamp elevation to [-pi/2, +pi/2].
	if(elevation < (M_PI * (-0.5))) {
		elevation = (M_PI * (-0.5));
	} else if(elevation > M_PI * 0.5) {
		elevation = M_PI * 0.5;
	}
	// Wrap azimuth into [-2*pi, +2*pi].
	while(azimuth < (M_PI * (-2.0))) {
		azimuth += M_PI * 2.0;
	}
	while(azimuth > M_PI * 2.0) {
		azimuth -= M_PI * 2.0;
	}
}
// Process-wide cache of the most recently loaded HRTF data set, keyed by its
// file URL, so multiple HeadphoneFilter instances (and matrix reloads) reuse
// the parsed HrtfData instead of re-reading the file.
@interface impulseSetCache : NSObject {
	NSURL *URL; // URL of the currently cached data set
	HrtfData *data; // parsed data set; NULL until a successful load
}
+ (impulseSetCache *)sharedController;
- (void)getImpulse:(NSURL *)url outImpulse:(float **)outImpulse outSampleCount:(int *)outSampleCount sampleRate:(double)sampleRate channelCount:(int)channelCount channelConfig:(uint32_t)channelConfig withMatrix:(simd_float4x4)matrix;
@end
@implementation impulseSetCache
// Lazily-created singleton backing +sharedController.
static impulseSetCache *_sharedController = nil;
+ (impulseSetCache *)sharedController {
	@synchronized(self) {
		if(!_sharedController) {
			_sharedController = [[impulseSetCache alloc] init];
		}
	}
	return _sharedController;
}
- (id)init {
	self = [super init];
	if(self) {
		data = NULL;
	}
	return self;
}
- (void)dealloc {
	// delete on NULL is a no-op, so no guard is required.
	delete data;
}
// Loads (or reuses the cached) HRTF data set from `url`, rotates each
// speaker position by `matrix`, and renders interleaved-by-channel left/right
// impulse responses, resampling to `sampleRate` when the source rate differs.
// On success *outImpulse receives a calloc'd buffer of
// alignedSampleCount * channelCount * 2 floats and *outSampleCount receives
// the aligned per-response length. On failure *outImpulse is left untouched;
// callers pass a NULL-initialized pointer and treat NULL as failure.
- (void)getImpulse:(NSURL *)url outImpulse:(float **)outImpulse outSampleCount:(int *)outSampleCount sampleRate:(double)sampleRate channelCount:(int)channelCount channelConfig:(uint32_t)channelConfig withMatrix:(simd_float4x4)matrix {
	double sampleRateOfSource = 0;
	int sampleCount = 0;
	if(!data || ![url isEqualTo:URL]) {
		// First use, or a different file than the cached one: (re)load it.
		delete data;
		data = NULL;
		URL = url;
		NSString *filePath = [url path];
		try {
			std::ifstream file([filePath UTF8String], std::fstream::binary);
			if(!file.is_open()) {
				throw std::logic_error("Cannot open file.");
			}
			data = new HrtfData(file);
			file.close();
		} catch(std::exception &e) {
			ALog(@"Exception caught: %s", e.what());
		}
	}
	// BUGFIX: if loading failed above, `data` is still NULL. Bail out here
	// instead of dereferencing it below — a NULL dereference is undefined
	// behavior and is NOT caught by the C++ catch block.
	if(!data) {
		return;
	}
	try {
		soxr_quality_spec_t q_spec = soxr_quality_spec(SOXR_HQ, 0);
		soxr_io_spec_t io_spec = soxr_io_spec(SOXR_FLOAT32_I, SOXR_FLOAT32_I);
		soxr_runtime_spec_t runtime_spec = soxr_runtime_spec(0);
		bool resampling;
		sampleRateOfSource = data->get_sample_rate();
		resampling = !!(fabs(sampleRateOfSource - sampleRate) > 1e-6);
		uint32_t sampleCountResampled;
		uint32_t sampleCountExact = data->get_response_length();
		// Leave room for the longest per-direction delay (delays appear to be
		// stored in quarter-sample units — hence the >> 2; TODO confirm).
		sampleCount = sampleCountExact + ((data->get_longest_delay() + 2) >> 2);
		uint32_t actualSampleCount = sampleCount;
		if(resampling) {
			sampleCountResampled = (uint32_t)(((double)sampleCountExact) * sampleRate / sampleRateOfSource);
			actualSampleCount = (uint32_t)(((double)actualSampleCount) * sampleRate / sampleRateOfSource);
			io_spec.scale = sampleRateOfSource / sampleRate;
		}
		// Round the response length up to a multiple of 16 samples.
		actualSampleCount = (actualSampleCount + 15) & ~15;
		*outImpulse = (float *)calloc(sizeof(float), actualSampleCount * channelCount * 2);
		if(!*outImpulse) {
			throw std::bad_alloc();
		}
		float *hrtfData = *outImpulse;
		for(uint32_t i = 0; i < channelCount; ++i) {
			uint32_t channelFlag = [AudioChunk extractChannelFlag:i fromConfig:channelConfig];
			uint32_t channelNumber = [AudioChunk findChannelIndex:channelFlag];
			if(channelNumber < 18) {
				const speakerPosition &speaker = speakerPositions[channelNumber];
				DirectionData hrtfLeft;
				DirectionData hrtfRight;
				float azimuth = speaker.azimuth;
				float elevation = speaker.elevation;
				// Rotate the speaker by the current head orientation.
				transformPosition(elevation, azimuth, matrix);
				data->get_direction_data(elevation, azimuth, speaker.distance, hrtfLeft, hrtfRight);
				if(resampling) {
					// Scale the quarter-sample delays to the target rate, then
					// resample each response directly into its slot.
					ssize_t leftDelay = (ssize_t)((double)(hrtfLeft.delay) * 0.25 * sampleRate / sampleRateOfSource);
					ssize_t rightDelay = (ssize_t)((double)(hrtfRight.delay) * 0.25 * sampleRate / sampleRateOfSource);
					soxr_oneshot(sampleRateOfSource, sampleRate, 1, &hrtfLeft.impulse_response[0], sampleCountExact, NULL, &hrtfData[leftDelay + actualSampleCount * i * 2], sampleCountResampled, NULL, &io_spec, &q_spec, &runtime_spec);
					soxr_oneshot(sampleRateOfSource, sampleRate, 1, &hrtfRight.impulse_response[0], sampleCountExact, NULL, &hrtfData[rightDelay + actualSampleCount * (i * 2 + 1)], sampleCountResampled, NULL, &io_spec, &q_spec, &runtime_spec);
				} else {
					// No resampling: copy each response at its rounded delay.
					cblas_scopy(sampleCountExact, &hrtfLeft.impulse_response[0], 1, &hrtfData[((hrtfLeft.delay + 2) >> 2) + actualSampleCount * i * 2], 1);
					cblas_scopy(sampleCountExact, &hrtfRight.impulse_response[0], 1, &hrtfData[((hrtfRight.delay + 2) >> 2) + actualSampleCount * (i * 2 + 1)], 1);
				}
			}
		}
		*outSampleCount = actualSampleCount;
	} catch(std::exception &e) {
		ALog(@"Exception caught: %s", e.what());
	}
}
@end
@implementation HeadphoneFilter
// Returns YES if `url` parses as a valid HRTF data set, NO otherwise.
+ (BOOL)validateImpulseFile:(NSURL *)url {
	NSString *filePath = [url path];
	try {
		std::ifstream file([filePath UTF8String], std::fstream::binary);
		if(!file.is_open()) {
			throw std::logic_error("Cannot open file.");
		}
		HrtfData data(file);
		file.close();
		return YES;
	} catch(std::exception &e) {
		ALog(@"Exception thrown: %s", e.what());
		return NO;
	}
}
// Builds the filter for the given impulse file, target sample rate, input
// channel layout, and head-orientation matrix. Returns nil on any load or
// allocation failure; under ARC that triggers -dealloc, which releases
// whatever was allocated up to the failure point.
- (id)initWithImpulseFile:(NSURL *)url forSampleRate:(double)sampleRate withInputChannels:(int)channels withConfig:(uint32_t)config withMatrix:(simd_float4x4)matrix {
	self = [super init];
	if(self) {
		URL = url;
		self->sampleRate = sampleRate;
		channelCount = channels;
		self->config = config;
		float *impulseBuffer = NULL;
		int sampleCount = 0;
		[[impulseSetCache sharedController] getImpulse:url outImpulse:&impulseBuffer outSampleCount:&sampleCount sampleRate:sampleRate channelCount:channels channelConfig:config withMatrix:matrix];
		if(!impulseBuffer) {
			return nil;
		}
		// One pointer per (channel, ear); all point into the single
		// impulseBuffer allocation, so only index 0 is ever freed.
		mirroredImpulseResponses = (float **)calloc(sizeof(float *), channelCount * 2);
		if(!mirroredImpulseResponses) {
			free(impulseBuffer);
			return nil;
		}
		for(int i = 0; i < channelCount * 2; ++i) {
			mirroredImpulseResponses[i] = &impulseBuffer[sampleCount * i];
			// Reverse each response so -process: can use a forward dot product.
			vDSP_vrvrs(mirroredImpulseResponses[i], 1, sampleCount);
		}
		paddedBufferSize = sampleCount;
		paddedSignal[0] = (float *)calloc(sizeof(float), paddedBufferSize * 2);
		if(!paddedSignal[0]) {
			return nil;
		}
		paddedSignal[1] = paddedSignal[0] + paddedBufferSize;
		// History of the last `sampleCount` input samples per channel.
		prevInputs = (float **)calloc(channels, sizeof(float *));
		if(!prevInputs)
			return nil;
		prevInputs[0] = (float *)calloc(sizeof(float), sampleCount * channelCount);
		if(!prevInputs[0])
			return nil;
		for(int i = 1; i < channels; ++i) {
			prevInputs[i] = prevInputs[i - 1] + sampleCount;
		}
	}
	return self;
}
- (void)dealloc {
	if(paddedSignal[0]) {
		free(paddedSignal[0]);
	}
	if(prevInputs) {
		if(prevInputs[0]) {
			free(prevInputs[0]);
		}
		free(prevInputs);
	}
	if(mirroredImpulseResponses) {
		if(mirroredImpulseResponses[0]) {
			free(mirroredImpulseResponses[0]);
		}
		free(mirroredImpulseResponses);
	}
}
// Re-renders the impulse responses for a new head-orientation matrix.
- (void)reloadWithMatrix:(simd_float4x4)matrix {
	@synchronized (self) {
		if(!mirroredImpulseResponses[0]) {
			return;
		}
		free(mirroredImpulseResponses[0]);
		float *impulseBuffer = NULL;
		int sampleCount = 0;
		[[impulseSetCache sharedController] getImpulse:URL outImpulse:&impulseBuffer outSampleCount:&sampleCount sampleRate:sampleRate channelCount:channelCount channelConfig:config withMatrix:matrix];
		if(!impulseBuffer) {
			// BUGFIX: the reload failed and the old buffer is already freed.
			// Clear the now-stale pointers instead of rebuilding them from a
			// NULL base and reversing through a NULL pointer.
			for(int i = 0; i < channelCount * 2; ++i) {
				mirroredImpulseResponses[i] = NULL;
			}
			return;
		}
		for(int i = 0; i < channelCount * 2; ++i) {
			mirroredImpulseResponses[i] = &impulseBuffer[sampleCount * i];
			vDSP_vrvrs(mirroredImpulseResponses[i], 1, sampleCount);
		}
	}
}
// Convolves `count` interleaved input frames against the per-channel HRTF
// responses, producing `count` stereo frames in outBuffer.
- (void)process:(const float *)inBuffer sampleCount:(int)count toBuffer:(float *)outBuffer {
	@synchronized (self) {
		if(!mirroredImpulseResponses[0]) {
			// BUGFIX: a failed -reloadWithMatrix: left us without responses;
			// emit silence rather than dereferencing NULL pointers.
			vDSP_vclr(outBuffer, 1, count * 2);
			return;
		}
		int sampleCount = paddedBufferSize;
		while(count > 0) {
			float left = 0, right = 0;
			for(int i = 0; i < channelCount; ++i) {
				float thisleft, thisright;
				// Dot product of the input history with each (reversed)
				// response yields one output sample per ear.
				vDSP_vmul(prevInputs[i], 1, mirroredImpulseResponses[i * 2], 1, paddedSignal[0], 1, sampleCount);
				vDSP_vmul(prevInputs[i], 1, mirroredImpulseResponses[i * 2 + 1], 1, paddedSignal[1], 1, sampleCount);
				vDSP_sve(paddedSignal[0], 1, &thisleft, sampleCount);
				vDSP_sve(paddedSignal[1], 1, &thisright, sampleCount);
				left += thisleft;
				right += thisright;
				// Slide the history window forward by one sample.
				memmove(prevInputs[i], prevInputs[i] + 1, sizeof(float) * (sampleCount - 1));
				prevInputs[i][sampleCount - 1] = *inBuffer++;
			}
			outBuffer[0] = left;
			outBuffer[1] = right;
			outBuffer += 2;
			--count;
		}
	}
}
// Clears the per-channel input history.
- (void)reset {
	for(int i = 0; i < channelCount; ++i) {
		vDSP_vclr(prevInputs[i], 1, paddedBufferSize);
	}
}
// Number of zero frames the caller should feed before real audio so the
// convolution history is primed.
- (size_t)needPrefill {
	return paddedBufferSize;
}
@end

View file

@ -1,26 +0,0 @@
//
// DSPNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/10/25.
//
#ifndef DSPNode_h
#define DSPNode_h
#import <CogAudio/Node.h>
// A Node subclass used for DSP stages in the audio chain; adds restartable
// thread handling and a configurable buffer latency (see -initWithController:
// previous:latency:).
@interface DSPNode : Node {
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
- (void)threadEntry:(id _Nullable)arg;
- (void)setShouldContinue:(BOOL)s;
- (double)secondsBuffered;
@end
#endif /* DSPNode_h */

View file

@ -1,76 +0,0 @@
//
// DSPNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/10/25.
//
#import <Foundation/Foundation.h>
#import "DSPNode.h"
@implementation DSPNode {
	BOOL threadTerminated; // set when the worker thread exits -process
}
// Designated initializer. Mirrors Node's init but sizes the chunk buffer to
// `latency` seconds instead of the superclass default, and prebuffers a
// quarter of that before signaling readiness.
// NOTE(review): the ivars assigned here (buffer, writeSemaphore, controller,
// etc.) are declared in Node.h, outside this view — verify against that header.
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
	self = [super init];
	if(self) {
		buffer = [[ChunkList alloc] initWithMaximumDuration:latency];
		writeSemaphore = [[Semaphore alloc] init];
		readSemaphore = [[Semaphore alloc] init];
		accessLock = [[NSLock alloc] init];
		initialBufferFilled = NO;
		controller = c;
		endOfStream = NO;
		shouldContinue = YES;
		nodeChannelConfig = 0;
		nodeLossless = NO;
		durationPrebuffer = latency * 0.25;
		inWrite = NO;
		inPeek = NO;
		inRead = NO;
		inMerge = NO;
		[self setPreviousNode:p];
#ifdef LOG_CHAINS
		[self initLogFiles];
#endif
	}
	return self;
}
// DSP threads buffer for low latency, and therefore should have high priority
- (void)threadEntry:(id _Nullable)arg {
	@autoreleasepool {
		NSThread *currentThread = [NSThread currentThread];
		[currentThread setThreadPriority:0.75];
		[currentThread setQualityOfService:NSQualityOfServiceUserInitiated];
		threadTerminated = NO;
		[self process];
		threadTerminated = YES;
	}
}
// Re-launches the worker thread when the node is re-enabled after its
// previous thread has already terminated.
- (void)setShouldContinue:(BOOL)s {
	BOOL currentShouldContinue = shouldContinue;
	shouldContinue = s;
	if(!currentShouldContinue && s && threadTerminated) {
		[self launchThread];
	}
}
// Seconds of audio currently queued in this node's buffer.
- (double)secondsBuffered {
	return [buffer listDuration];
}
@end

View file

@ -8,47 +8,30 @@
#import <Cocoa/Cocoa.h>
#import <CoreAudio/AudioHardware.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <CoreAudio/AudioHardware.h>
#import <CogAudio/AudioDecoder.h>
#import <CogAudio/Node.h>
#import <CogAudio/Plugin.h>
#define INPUT_NODE_SEEK
#import "AudioDecoder.h"
#import "Converter.h"
#import "Node.h"
#import "Plugin.h"
@interface InputNode : Node {
id<CogDecoder> decoder;
int bytesPerSample;
int bytesPerFrame;
BOOL floatingPoint;
BOOL swapEndian;
Converter *converter;
AudioStreamBasicDescription outputFormat;
BOOL shouldSeek;
long seekFrame;
BOOL observersAdded;
Semaphore *exitAtTheEndOfTheStream;
double seekTime;
}
@property(readonly) Semaphore * _Nonnull exitAtTheEndOfTheStream;
@property(readonly) BOOL threadExited;
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p;
- (BOOL)openWithSource:(id<CogSource>_Nonnull)source;
- (BOOL)openWithDecoder:(id<CogDecoder>_Nonnull)d;
- (BOOL)openURL:(NSURL *)url withSource:(id<CogSource>)source outputFormat:(AudioStreamBasicDescription)of;
- (void)process;
- (NSDictionary *_Nonnull)properties;
- (void)seek:(long)frame;
- (NSDictionary *) properties;
- (void)seek:(double)time;
- (void)registerObservers;
- (BOOL)setTrack:(NSURL *_Nonnull)track;
- (id<CogDecoder>_Nonnull)decoder;
@end

View file

@ -7,284 +7,137 @@
//
#import "InputNode.h"
#import "AudioPlayer.h"
#import "BufferChain.h"
#import "CoreAudioUtils.h"
#import "OutputNode.h"
#import "Plugin.h"
#import "NSDictionary+Merge.h"
#import "Logging.h"
#import "CoreAudioUtils.h"
@implementation InputNode
static void *kInputNodeContext = &kInputNodeContext;
- (BOOL)openURL:(NSURL *)url withSource:(id<CogSource>)source outputFormat:(AudioStreamBasicDescription)of
{
outputFormat = of;
decoder = [AudioDecoder audioDecoderForURL:url];
[decoder retain];
@synthesize threadExited;
@synthesize exitAtTheEndOfTheStream;
- (id)initWithController:(id)c previous:(id)p {
self = [super initWithController:c previous:p];
if(self) {
exitAtTheEndOfTheStream = [[Semaphore alloc] init];
threadExited = NO;
}
return self;
}
- (BOOL)openWithSource:(id<CogSource>)source {
[self removeObservers];
decoder = [AudioDecoder audioDecoderForSource:source];
if(decoder == nil)
converter = [[Converter alloc] init];
if (converter == nil)
return NO;
[self registerObservers];
if(![decoder open:source]) {
ALog(@"Couldn't open decoder...");
if (decoder == nil)
return NO;
if (![decoder open:source])
{
NSLog(@"Couldn't open decoder...");
return NO;
}
NSDictionary *properties = [decoder properties];
int bitsPerSample = [[properties objectForKey:@"bitsPerSample"] intValue];
int channels = [[properties objectForKey:@"channels"] intValue];
bytesPerFrame = ((bitsPerSample + 7) / 8) * channels;
nodeFormat = propertiesToASBD(properties);
if([properties valueForKey:@"channelConfig"])
nodeChannelConfig = [[properties valueForKey:@"channelConfig"] unsignedIntValue];
nodeLossless = [[properties valueForKey:@"encoding"] isEqualToString:@"lossless"];
shouldContinue = YES;
shouldSeek = NO;
return YES;
}
- (BOOL)openWithDecoder:(id<CogDecoder>)d {
[self removeObservers];
- (void)registerObservers
{
[decoder addObserver:self
forKeyPath:@"properties"
options:(NSKeyValueObservingOptionNew)
context:NULL];
DLog(@"Opening with old decoder: %@", d);
decoder = d;
NSDictionary *properties = [decoder properties];
int bitsPerSample = [[properties objectForKey:@"bitsPerSample"] intValue];
int channels = [[properties objectForKey:@"channels"] intValue];
bytesPerFrame = ((bitsPerSample + 7) / 8) * channels;
nodeFormat = propertiesToASBD(properties);
if([properties valueForKey:@"channelConfig"])
nodeChannelConfig = [[properties valueForKey:@"channelConfig"] unsignedIntValue];
nodeLossless = [[properties valueForKey:@"encoding"] isEqualToString:@"lossless"];
[self registerObservers];
shouldContinue = YES;
shouldSeek = NO;
DLog(@"DONES: %@", decoder);
return YES;
}
- (void)registerObservers {
if(!observersAdded) {
DLog(@"REGISTERING OBSERVERS");
[decoder addObserver:self
forKeyPath:@"properties"
options:(NSKeyValueObservingOptionNew)
context:kInputNodeContext];
[decoder addObserver:self
forKeyPath:@"metadata"
options:(NSKeyValueObservingOptionNew)
context:kInputNodeContext];
observersAdded = YES;
}
[decoder addObserver:self
forKeyPath:@"metadata"
options:(NSKeyValueObservingOptionNew)
context:NULL];
}
- (void)observeValueForKeyPath:(NSString *)keyPath
ofObject:(id)object
ofObject:(id)object
change:(NSDictionary *)change
context:(void *)context {
if(context == kInputNodeContext) {
DLog(@"SOMETHING CHANGED!");
if([keyPath isEqual:@"properties"]) {
DLog(@"Input format changed");
// Converter may need resetting, it'll do that when it reaches the new chunks
NSDictionary *properties = [decoder properties];
int bitsPerSample = [[properties objectForKey:@"bitsPerSample"] intValue];
int channels = [[properties objectForKey:@"channels"] intValue];
bytesPerFrame = ((bitsPerSample + 7) / 8) * channels;
nodeFormat = propertiesToASBD(properties);
nodeChannelConfig = [[properties valueForKey:@"channelConfig"] unsignedIntValue];
nodeLossless = [[properties valueForKey:@"encoding"] isEqualToString:@"lossless"];
} else if([keyPath isEqual:@"metadata"]) {
// Inform something of metadata change
NSDictionary *entryProperties = [decoder properties];
if(entryProperties == nil)
return;
NSDictionary *entryInfo = [NSDictionary dictionaryByMerging:entryProperties with:[decoder metadata]];
[controller pushInfo:entryInfo];
}
} else {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
context:(void *)context
{
if ([keyPath isEqual:@"properties"]) {
//Setup converter!
[converter cleanUp];
[converter setupWithInputFormat:propertiesToASBD([decoder properties]) outputFormat:outputFormat];
//Inform something of properties change
}
else if ([keyPath isEqual:@"metadata"]) {
//Inform something of metadata change
}
}
- (void)process {
BOOL shouldClose = YES;
BOOL seekError = NO;
BOOL isError = NO;
if([decoder respondsToSelector:@selector(isSilence)]) {
if([decoder isSilence]) {
isError = YES;
}
}
[controller setError:isError];
while([self shouldContinue] == YES && [self endOfStream] == NO) {
if(shouldSeek == YES) {
BufferChain *bufferChain = controller;
AudioPlayer *audioPlayer = [bufferChain controller];
OutputNode *outputNode = [audioPlayer output];
DLog(@"SEEKING! Resetting Buffer");
[outputNode resetBackwards];
DLog(@"Reset buffer!");
DLog(@"SEEKING!");
@autoreleasepool {
seekError = [decoder seek:seekFrame] < 0;
}
- (void)process
{
int amountRead = 0, amountConverted = 0, amountInBuffer = 0;
void *inputBuffer = malloc(CHUNK_SIZE);
while ([self shouldContinue] == YES && [self endOfStream] == NO)
{
if (shouldSeek == YES)
{
[decoder seekToTime:seekTime];
shouldSeek = NO;
DLog(@"Seeked! Resetting Buffer");
[self resetBuffer];
initialBufferFilled = NO;
if(seekError) {
[controller setError:YES];
}
}
AudioChunk *chunk;
@autoreleasepool {
chunk = [decoder readAudio];
if (amountInBuffer < CHUNK_SIZE) {
amountRead = [decoder fillBuffer:((char *)inputBuffer) + amountInBuffer ofSize:CHUNK_SIZE - amountInBuffer];
amountInBuffer += amountRead;
}
if(chunk && [chunk frameCount]) {
@autoreleasepool {
[self writeChunk:chunk];
chunk = nil;
amountConverted = [converter convert:inputBuffer amount:amountInBuffer]; //Convert fills in converter buffer, til the next call
if (amountInBuffer - amountConverted > 0) {
memmove(inputBuffer,((char *)inputBuffer) + amountConverted, amountInBuffer - amountConverted);
}
amountInBuffer -= amountConverted;
if ([converter outputBufferSize] <= 0)
{
if (initialBufferFilled == NO) {
[controller initialBufferFilled];
}
} else {
if(chunk) {
@autoreleasepool {
chunk = nil;
}
}
DLog(@"End of stream? %@", [self properties]);
endOfStream = YES;
shouldClose = [controller endOfInputReached]; // Lets us know if we should keep going or not (occassionally, for track changes within a file)
DLog(@"closing? is %i", shouldClose);
// Move this here, so that the above endOfInputReached has a chance to queue another track before starting output
// Technically, the output should still play out its buffer first before checking if it should stop
if(initialBufferFilled == NO) {
[controller initialBufferFilled:self];
}
// wait before exiting, as we might still get seeking request
DLog("InputNode: Before wait")
[exitAtTheEndOfTheStream waitIndefinitely];
DLog("InputNode: After wait, should seek = %d", shouldSeek);
if(shouldSeek) {
endOfStream = NO;
shouldClose = NO;
continue;
} else {
break;
}
[controller endOfInputReached];
break; //eof
}
[self writeData:[converter outputBuffer] amount:[converter outputBufferSize]];
}
if(shouldClose)
[decoder close];
[exitAtTheEndOfTheStream signal];
threadExited = YES;
DLog("Input node thread stopping");
[decoder close];
[converter cleanUp];
free(inputBuffer);
}
- (void)seek:(long)frame {
seekFrame = frame;
- (void)seek:(double)time
{
seekTime = time;
shouldSeek = YES;
DLog(@"Should seek!");
[self resetBuffer];
[writeSemaphore signal];
if(endOfStream) {
[exitAtTheEndOfTheStream signal];
}
[semaphore signal];
}
- (BOOL)setTrack:(NSURL *)track {
if([decoder respondsToSelector:@selector(setTrack:)] && [decoder setTrack:track]) {
DLog(@"SET TRACK!");
- (void)dealloc
{
[decoder removeObserver:self forKeyPath:@"properties"];
[decoder removeObserver:self forKeyPath:@"metadata"];
return YES;
}
return NO;
[decoder release];
[super dealloc];
}
- (void)removeObservers {
if(observersAdded) {
[decoder removeObserver:self forKeyPath:@"properties" context:kInputNodeContext];
[decoder removeObserver:self forKeyPath:@"metadata" context:kInputNodeContext];
observersAdded = NO;
}
}
- (void)setShouldContinue:(BOOL)s {
[super setShouldContinue:s];
if(!s)
[self removeObservers];
}
- (void)dealloc {
DLog(@"Input Node dealloc");
[self removeObservers];
[super cleanUp];
}
- (NSDictionary *)properties {
- (NSDictionary *) properties
{
return [decoder properties];
}
- (id<CogDecoder>)decoder {
return decoder;
}
- (double)secondsBuffered {
return [buffer listDuration];
}
@end

View file

@ -6,100 +6,50 @@
// Copyright 2006 Vincent Spader. All rights reserved.
//
#import <CogAudio/ChunkList.h>
#import <CogAudio/CogSemaphore.h>
#import <Cocoa/Cocoa.h>
#import <os/workgroup.h>
#import "VirtualRingBuffer.h"
#import "Semaphore.h"
#define BUFFER_SIZE 1024 * 1024
#define CHUNK_SIZE 16 * 1024
//#define LOG_CHAINS 1
@interface Node : NSObject {
ChunkList *buffer;
Semaphore *writeSemaphore;
Semaphore *readSemaphore;
NSLock *accessLock;
id __weak previousNode;
id __weak controller;
BOOL shouldReset;
BOOL inWrite;
BOOL inPeek;
BOOL inRead;
BOOL inMerge;
BOOL shouldContinue;
BOOL endOfStream; // All data is now in buffer
VirtualRingBuffer *buffer;
Semaphore *semaphore;
NSLock *readLock;
NSLock *writeLock;
id previousNode;
id controller;
BOOL shouldContinue;
BOOL endOfStream; //All data is now in buffer
BOOL initialBufferFilled;
AudioStreamBasicDescription nodeFormat;
uint32_t nodeChannelConfig;
BOOL nodeLossless;
double durationPrebuffer;
#ifdef LOG_CHAINS
NSFileHandle *logFileOut;
NSFileHandle *logFileIn;
#endif
}
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p;
- (id)initWithController:(id)c previous:(id)p;
#ifdef LOG_CHAINS
- (void)initLogFiles;
#endif
- (int)writeData:(void *)ptr amount:(int)a;
- (int)readData:(void *)ptr amount:(int)a;
- (void)cleanUp;
- (BOOL)paused;
- (void)writeData:(const void *_Nonnull)ptr amount:(size_t)a;
- (void)writeChunk:(AudioChunk *_Nonnull)chunk;
- (AudioChunk *_Nonnull)readChunk:(size_t)maxFrames;
- (AudioChunk *_Nonnull)readChunkAsFloat32:(size_t)maxFrames;
- (AudioChunk *_Nonnull)readAndMergeChunks:(size_t)maxFrames;
- (AudioChunk *_Nonnull)readAndMergeChunksAsFloat32:(size_t)maxFrames;
- (BOOL)peekFormat:(AudioStreamBasicDescription *_Nonnull)format channelConfig:(uint32_t *_Nonnull)config;
- (BOOL)peekTimestamp:(double *_Nonnull)timestamp timeRatio:(double *_Nonnull)timeRatio;
- (void)process; // Should be overwriten by subclass
- (void)threadEntry:(id _Nullable)arg;
- (void)process; //Should be overwriten by subclass
- (void)threadEntry:(id)arg;
- (void)launchThread;
- (void)setShouldReset:(BOOL)s;
- (BOOL)shouldReset;
- (void)resetBackwards;
- (NSLock *)readLock;
- (NSLock *)writeLock;
- (void)setPreviousNode:(id _Nullable)p;
- (id _Nullable)previousNode;
- (id)previousNode;
- (BOOL)shouldContinue;
- (void)setShouldContinue:(BOOL)s;
- (ChunkList *_Nonnull)buffer;
- (void)resetBuffer; // WARNING! DANGER WILL ROBINSON!
- (VirtualRingBuffer *)buffer;
- (void)resetBuffer; //WARNING! DANGER WILL ROBINSON!
- (AudioStreamBasicDescription)nodeFormat;
- (uint32_t)nodeChannelConfig;
- (BOOL)nodeLossless;
- (Semaphore *_Nonnull)writeSemaphore;
- (Semaphore *_Nonnull)readSemaphore;
//-(void)resetBuffer;
- (Semaphore *)semaphore;
- (BOOL)endOfStream;
- (void)setEndOfStream:(BOOL)e;
- (double)secondsBuffered;
@end

View file

@ -8,658 +8,191 @@
#import "Node.h"
#import "BufferChain.h"
#import "Logging.h"
#import "OutputCoreAudio.h"
#import <pthread.h>
#import <mach/mach_time.h>
#ifdef LOG_CHAINS
#import "NSFileHandle+CreateFile.h"
static NSLock * _Node_lock = nil;
static uint64_t _Node_serial;
#endif
@implementation Node
#ifdef LOG_CHAINS
+ (void)initialize {
@synchronized (_Node_lock) {
if(!_Node_lock) {
_Node_lock = [[NSLock alloc] init];
_Node_serial = 0;
}
}
}
- (void)initLogFiles {
[_Node_lock lock];
logFileOut = [NSFileHandle fileHandleForWritingAtPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%@_output_%08lld.raw", [self className], _Node_serial++]] createFile:YES];
logFileIn = [NSFileHandle fileHandleForWritingAtPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%@_input_%08lld.raw", [self className], _Node_serial++]] createFile:YES];
[_Node_lock unlock];
}
#endif
- (id)initWithController:(id)c previous:(id)p {
- (id)initWithController:(id)c previous:(id)p
{
self = [super init];
if(self) {
buffer = [[ChunkList alloc] initWithMaximumDuration:10.0];
writeSemaphore = [[Semaphore alloc] init];
readSemaphore = [[Semaphore alloc] init];
accessLock = [[NSLock alloc] init];
if (self)
{
buffer = [[VirtualRingBuffer alloc] initWithLength:BUFFER_SIZE];
semaphore = [[Semaphore alloc] init];
readLock = [[NSLock alloc] init];
writeLock = [[NSLock alloc] init];
initialBufferFilled = NO;
controller = c;
previousNode = p;
endOfStream = NO;
shouldContinue = YES;
nodeChannelConfig = 0;
nodeLossless = NO;
durationPrebuffer = 2.0;
inWrite = NO;
inPeek = NO;
inRead = NO;
inMerge = NO;
[self setPreviousNode:p];
#ifdef LOG_CHAINS
[self initLogFiles];
#endif
}
return self;
}
- (void)dealloc {
[self cleanUp];
}
- (void)cleanUp {
[self setShouldContinue:NO];
while(inWrite || inPeek || inRead || inMerge) {
[writeSemaphore signal];
if(previousNode) {
[[previousNode readSemaphore] signal];
}
usleep(500);
}
}
- (AudioStreamBasicDescription)nodeFormat {
return nodeFormat;
}
- (uint32_t)nodeChannelConfig {
return nodeChannelConfig;
}
- (BOOL)nodeLossless {
return nodeLossless;
}
- (void)writeData:(const void *)ptr amount:(size_t)amount {
inWrite = YES;
if(!shouldContinue || [self paused]) {
inWrite = NO;
return;
}
[accessLock lock];
AudioChunk *chunk = [[AudioChunk alloc] init];
[chunk setFormat:nodeFormat];
if(nodeChannelConfig) {
[chunk setChannelConfig:nodeChannelConfig];
}
[chunk setLossless:nodeLossless];
[chunk assignSamples:ptr frameCount:amount / nodeFormat.mBytesPerPacket];
#ifdef LOG_CHAINS
if(logFileOut) {
[logFileOut writeData:[NSData dataWithBytes:ptr length:amount]];
}
#endif
double durationList = [buffer listDuration];
double durationLeft = [buffer maxDuration] - durationList;
if(shouldContinue == YES && durationList >= durationPrebuffer) {
if(initialBufferFilled == NO) {
initialBufferFilled = YES;
if([controller respondsToSelector:@selector(initialBufferFilled:)])
[controller performSelector:@selector(initialBufferFilled:) withObject:self];
}
}
while(shouldContinue == YES && ![self paused] && durationLeft < 0.0) {
if(durationLeft < 0.0 || shouldReset) {
[accessLock unlock];
[writeSemaphore timedWait:2000];
[accessLock lock];
}
durationLeft = [buffer maxDuration] - [buffer listDuration];
}
BOOL doSignal = NO;
if([chunk frameCount]) {
[buffer addChunk:chunk];
doSignal = YES;
}
[accessLock unlock];
if(doSignal) {
[readSemaphore signal];
}
inWrite = NO;
}
- (void)writeChunk:(AudioChunk *)chunk {
inWrite = YES;
if(!shouldContinue || [self paused]) {
inWrite = NO;
return;
}
[accessLock lock];
double durationList = [buffer listDuration];
double durationLeft = [buffer maxDuration] - durationList;
if(shouldContinue == YES && durationList >= durationPrebuffer) {
if(initialBufferFilled == NO) {
initialBufferFilled = YES;
if([controller respondsToSelector:@selector(initialBufferFilled:)])
[controller performSelector:@selector(initialBufferFilled:) withObject:self];
}
}
while(shouldContinue == YES && ![self paused] && durationLeft < 0.0) {
if(previousNode && [previousNode shouldContinue] == NO) {
shouldContinue = NO;
break;
}
if(durationLeft < 0.0 || shouldReset) {
[accessLock unlock];
[writeSemaphore timedWait:2000];
[accessLock lock];
}
durationLeft = [buffer maxDuration] - [buffer listDuration];
}
BOOL doSignal = NO;
if([chunk frameCount]) {
#ifdef LOG_CHAINS
if(logFileOut) {
AudioChunk *chunkCopy = [chunk copy];
size_t frameCount = [chunkCopy frameCount];
NSData *chunkData = [chunkCopy removeSamples:frameCount];
[logFileOut writeData:chunkData];
}
#endif
[buffer addChunk:chunk];
doSignal = YES;
}
[accessLock unlock];
if(doSignal) {
[readSemaphore signal];
}
inWrite = NO;
}
// Should be overwriten by subclass.
- (void)process {
}
- (void)threadEntry:(id)arg {
@autoreleasepool {
[self process];
}
}
- (BOOL)peekFormat:(nonnull AudioStreamBasicDescription *)format channelConfig:(nonnull uint32_t *)config {
inPeek = YES;
if(!shouldContinue || [self paused]) {
inPeek = NO;
return NO;
}
[accessLock lock];
while(shouldContinue && ![self paused] &&
[[previousNode buffer] isEmpty] && [previousNode endOfStream] == NO) {
[accessLock unlock];
[writeSemaphore signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
}
if(!shouldContinue || [self paused]) {
[accessLock unlock];
inPeek = NO;
return NO;
}
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
[accessLock unlock];
inPeek = NO;
return NO;
}
BOOL ret = [[previousNode buffer] peekFormat:format channelConfig:config];
[accessLock unlock];
inPeek = NO;
return ret;
}
- (BOOL)peekTimestamp:(double *_Nonnull)timestamp timeRatio:(double *_Nonnull)timeRatio {
inPeek = YES;
if(!shouldContinue || [self paused]) {
inPeek = NO;
return NO;
}
[accessLock lock];
while(shouldContinue && ![self paused] &&
[[previousNode buffer] isEmpty] && [previousNode endOfStream] == NO) {
[accessLock unlock];
[writeSemaphore signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
}
if(!shouldContinue || [self paused]) {
[accessLock unlock];
inPeek = NO;
return NO;
}
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
[accessLock unlock];
inPeek = NO;
return NO;
}
BOOL ret = [[previousNode buffer] peekTimestamp:timestamp timeRatio:timeRatio];
[accessLock unlock];
inPeek = NO;
return ret;
}
- (AudioChunk *)readChunk:(size_t)maxFrames {
inRead = YES;
if(!shouldContinue || [self paused]) {
inRead = NO;
return [[AudioChunk alloc] init];
}
[accessLock lock];
while(shouldContinue && ![self paused] &&
[[previousNode buffer] isEmpty] && [previousNode endOfStream] == NO) {
[accessLock unlock];
[writeSemaphore signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
if([previousNode shouldReset] == YES) {
break;
}
}
if(!shouldContinue || [self paused]) {
[accessLock unlock];
inRead = NO;
return [[AudioChunk alloc] init];
}
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
[accessLock unlock];
inRead = NO;
return [[AudioChunk alloc] init];
}
if([previousNode shouldReset] == YES) {
@autoreleasepool {
[buffer reset];
}
shouldReset = YES;
[previousNode setShouldReset:NO];
[[previousNode writeSemaphore] signal];
}
AudioChunk *ret;
@autoreleasepool {
ret = [[previousNode buffer] removeSamples:maxFrames];
}
[accessLock unlock];
if([ret frameCount]) {
[[previousNode writeSemaphore] signal];
}
#ifdef LOG_CHAINS
if(logFileIn) {
AudioChunk *chunkCopy = [ret copy];
size_t frameCount = [chunkCopy frameCount];
NSData *chunkData = [chunkCopy removeSamples:frameCount];
[logFileIn writeData:chunkData];
}
#endif
inRead = NO;
return ret;
}
// Pops up to maxFrames of audio from the previous node's buffer, converted to
// float32. Returns an empty AudioChunk when stopping, paused, or when upstream
// has ended and drained. Blocks in bounded (2000 ms) waits while the upstream
// buffer is empty and the stream has not ended.
- (AudioChunk *)readChunkAsFloat32:(size_t)maxFrames {
inRead = YES;
// Fast-path exit before taking the lock if we are shutting down or paused.
if(!shouldContinue || [self paused]) {
inRead = NO;
return [[AudioChunk alloc] init];
}
[accessLock lock];
// Wait for upstream data: release the lock while sleeping so the writer can
// make progress, and nudge it via writeSemaphore before each wait.
while(shouldContinue && ![self paused] &&
[[previousNode buffer] isEmpty] && [previousNode endOfStream] == NO) {
[accessLock unlock];
[writeSemaphore signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
// A pending upstream reset aborts the wait so it can be handled below.
if([previousNode shouldReset] == YES) {
break;
}
}
// Re-check stop/pause; state may have changed while we were waiting.
if(!shouldContinue || [self paused]) {
[accessLock unlock];
inRead = NO;
return [[AudioChunk alloc] init];
}
// Upstream finished and its buffer drained: nothing left to return.
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
[accessLock unlock];
inRead = NO;
return [[AudioChunk alloc] init];
}
// Propagate a reset request: clear our own buffer, flag ourselves as reset,
// acknowledge upstream, and wake its writer.
if([previousNode shouldReset] == YES) {
@autoreleasepool {
[buffer reset];
}
shouldReset = YES;
[previousNode setShouldReset:NO];
[[previousNode writeSemaphore] signal];
}
AudioChunk *ret;
@autoreleasepool {
ret = [[previousNode buffer] removeSamplesAsFloat32:maxFrames];
}
[accessLock unlock];
// Samples were removed, so signal the upstream writer that space is free.
if([ret frameCount]) {
[[previousNode writeSemaphore] signal];
}
#ifdef LOG_CHAINS
// Debug tap: append a copy of the returned samples to the input log file.
if(logFileIn) {
AudioChunk *chunkCopy = [ret copy];
size_t frameCount = [chunkCopy frameCount];
NSData *chunkData = [chunkCopy removeSamples:frameCount];
[logFileIn writeData:chunkData];
}
#endif
inRead = NO;
return ret;
}
- (AudioChunk *)readAndMergeChunks:(size_t)maxFrames {
inMerge = YES;
if(!shouldContinue || [self paused]) {
inMerge = NO;
return [[AudioChunk alloc] init];
}
[accessLock lock];
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
[accessLock unlock];
inMerge = NO;
return [[AudioChunk alloc] init];
}
AudioChunk *ret;
@autoreleasepool {
ret = [[previousNode buffer] removeAndMergeSamples:maxFrames callBlock:^BOOL{
if([previousNode shouldReset] == YES) {
@autoreleasepool {
[buffer reset];
}
shouldReset = YES;
[previousNode setShouldReset:NO];
- (int)writeData:(void *)ptr amount:(int)amount
{
void *writePtr;
int amountToCopy, availOutput;
int amountLeft = amount;
[writeLock lock];
while (shouldContinue == YES && amountLeft > 0)
{
availOutput = [buffer lengthAvailableToWriteReturningPointer:&writePtr];
if (availOutput == 0)
{
[writeLock unlock];
if (initialBufferFilled == NO) {
initialBufferFilled = YES;\
if ([controller respondsToSelector:@selector(initialBufferFilled)])
[controller performSelector:@selector(initialBufferFilled)];
}
[accessLock unlock];
[[previousNode writeSemaphore] signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
return !shouldContinue || [self paused] || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES);
}];
}
[accessLock unlock];
if([ret frameCount]) {
[[previousNode writeSemaphore] signal];
#ifdef LOG_CHAINS
if(logFileIn) {
AudioChunk *chunkCopy = [ret copy];
size_t frameCount = [chunkCopy frameCount];
NSData *chunkData = [chunkCopy removeSamples:frameCount];
[logFileIn writeData:chunkData];
[semaphore wait];
[writeLock lock];
}
#endif
}
inMerge = NO;
return ret;
}
- (AudioChunk *)readAndMergeChunksAsFloat32:(size_t)maxFrames {
inMerge = YES;
if(!shouldContinue || [self paused]) {
inMerge = NO;
return [[AudioChunk alloc] init];
}
[accessLock lock];
if([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES) {
[accessLock unlock];
inMerge = NO;
return [[AudioChunk alloc] init];
}
AudioChunk *ret;
@autoreleasepool {
ret = [[previousNode buffer] removeAndMergeSamplesAsFloat32:maxFrames callBlock:^BOOL{
if([previousNode shouldReset] == YES) {
@autoreleasepool {
[buffer reset];
}
shouldReset = YES;
[previousNode setShouldReset:NO];
else
{
amountToCopy = availOutput;
if (amountToCopy > amountLeft)
amountToCopy = amountLeft;
memcpy(writePtr, &((char *)ptr)[amount - amountLeft], amountToCopy);
if (amountToCopy > 0)
{
[buffer didWriteLength:amountToCopy];
}
[accessLock unlock];
[[previousNode writeSemaphore] signal];
[[previousNode readSemaphore] timedWait:2000];
[accessLock lock];
return !shouldContinue || [self paused] || ([[previousNode buffer] isEmpty] && [previousNode endOfStream] == YES);
}];
}
[accessLock unlock];
if([ret frameCount]) {
[[previousNode writeSemaphore] signal];
#ifdef LOG_CHAINS
if(logFileIn) {
AudioChunk *chunkCopy = [ret copy];
size_t frameCount = [chunkCopy frameCount];
NSData *chunkData = [chunkCopy removeSamples:frameCount];
[logFileIn writeData:chunkData];
amountLeft -= amountToCopy;
}
#endif
}
inMerge = NO;
return ret;
[writeLock unlock];
return (amount - amountLeft);
}
- (void)launchThread {
//Should be overwriten by subclass.
- (void)process
{
}
- (void)threadEntry:(id)arg
{
[self retain];
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
[self process];
[pool release];
[self release];
}
// Copies up to `amount` bytes from the previous node's ring buffer into `ptr`
// and returns the number of bytes actually copied. Marks this node's
// endOfStream once the upstream node has ended and its remaining data fits
// within this request.
- (int)readData:(void *)ptr amount:(int)amount
{
	[readLock lock];

	void *src = NULL;
	int avail = [[previousNode buffer] lengthAvailableToReadReturningPointer:&src];

	// Upstream is done and everything it has left fits in this read, so this
	// node reaches end of stream as well.
	if(avail <= amount && [previousNode endOfStream] == YES) {
		endOfStream = YES;
	}

	// Never copy more than the caller asked for.
	int copied = (avail > amount) ? amount : avail;
	memcpy(ptr, src, copied);

	if(copied > 0) {
		[[previousNode buffer] didReadLength:copied];
		// Wake the upstream writer now that buffer space has been freed.
		[[previousNode semaphore] signal];
	}

	[readLock unlock];
	return copied;
}
// Spawns this node's worker thread, which runs -threadEntry: (and thereby
// -process) until told to stop.
- (void)launchThread
{
[NSThread detachNewThreadSelector:@selector(threadEntry:) toTarget:self withObject:nil];
}
// Connects this node's input to `p` (the node it reads from).
// NOTE(review): plain assignment — under ARC this ivar retains; under MRC it
// would not. Confirm the file's memory-management mode.
- (void)setPreviousNode:(id)p {
previousNode = p;
}
- (id)previousNode {
- (id)previousNode
{
return previousNode;
}
- (BOOL)shouldContinue {
- (BOOL)shouldContinue
{
return shouldContinue;
}
- (void)setShouldContinue:(BOOL)s {
- (void)setShouldContinue:(BOOL)s
{
shouldContinue = s;
}
- (ChunkList *)buffer {
- (VirtualRingBuffer *)buffer
{
return buffer;
}
- (void)resetBuffer {
shouldReset = YES; // Will reset on next write.
if(previousNode == nil) {
@autoreleasepool {
[accessLock lock];
[buffer reset];
[accessLock unlock];
}
}
- (void)resetBuffer
{
[readLock lock];
[writeLock lock];
[buffer empty];
[writeLock unlock];
[readLock unlock];
}
- (void)lockedResetBuffer {
@autoreleasepool {
[buffer reset];
}
- (NSLock *)readLock
{
return readLock;
}
- (void)unlockedResetBuffer {
@autoreleasepool {
[accessLock lock];
[buffer reset];
[accessLock unlock];
}
- (NSLock *)writeLock
{
return writeLock;
}
// Implementations should override
- (BOOL)paused {
return NO;
- (Semaphore *)semaphore
{
return semaphore;
}
// Semaphore signaled when buffer space is freed; the upstream writer waits on it.
- (Semaphore *)writeSemaphore {
return writeSemaphore;
}
// Semaphore signaled when data becomes available; the downstream reader waits on it.
- (Semaphore *)readSemaphore {
return readSemaphore;
}
- (BOOL)endOfStream {
- (BOOL)endOfStream
{
return endOfStream;
}
- (void)setEndOfStream:(BOOL)e {
- (void)setEndOfStream:(BOOL)e
{
endOfStream = e;
}
// Sets the pending-reset flag; readers check this to flush their buffers.
- (void)setShouldReset:(BOOL)s {
shouldReset = s;
}
// Returns whether a reset is pending for this node.
- (BOOL)shouldReset {
return shouldReset;
}
// Seconds of audio currently buffered by this node. Base implementation
// reports none; buffering node subclasses should override this.
- (double)secondsBuffered {
return 0.0;
}
// Resets this node's buffer and every node upstream of it, signaling each
// node's semaphores so any blocked reader/writer threads wake and observe
// the reset.
- (void)resetBackwards {
	[accessLock lock];
	if(buffer) {
		[self lockedResetBuffer];
		[writeSemaphore signal];
		[readSemaphore signal];
	}
	// Walk the chain toward the input, flushing and flagging each node.
	for(Node *cur = previousNode; cur; cur = [cur previousNode]) {
		[cur unlockedResetBuffer];
		[cur setShouldReset:YES];
		[[cur writeSemaphore] signal];
		[[cur readSemaphore] signal];
	}
	[accessLock unlock];
}
@end

View file

@ -8,84 +8,37 @@
#import <Cocoa/Cocoa.h>
#import <CoreAudio/AudioHardware.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <CoreAudio/AudioHardware.h>
#import <CogAudio/Node.h>
#import <CogAudio/OutputCoreAudio.h>
#import "Node.h"
#import "OutputCoreAudio.h"
@interface OutputNode : Node {
AudioStreamBasicDescription format;
uint32_t config;
double amountPlayed;
double amountPlayedInterval;
unsigned long amountPlayed;
OutputCoreAudio *output;
BOOL paused;
BOOL started;
BOOL intervalReported;
}
- (double)amountPlayed;
- (double)amountPlayedInterval;
- (void)incrementAmountPlayed:(double)seconds;
- (void)setAmountPlayed:(double)seconds;
- (void)resetAmountPlayed;
- (void)resetAmountPlayedInterval;
- (BOOL)selectNextBuffer;
- (void)endOfInputPlayed;
- (BOOL)endOfStream;
- (BOOL)chainQueueHasTracks;
- (double)secondsBuffered;
- (void)setup;
- (void)setupWithInterval:(BOOL)resumeInterval;
- (void)process;
- (void)close;
- (void)seek:(double)time;
- (void)fadeOut;
- (void)fadeOutBackground;
- (void)fadeIn;
- (int)readData:(void *)ptr amount:(int)amount;
- (AudioChunk *)readChunk:(size_t)amount;
- (void)setFormat:(AudioStreamBasicDescription *)f;
- (AudioStreamBasicDescription) format;
- (void)setFormat:(AudioStreamBasicDescription *)f channelConfig:(uint32_t)channelConfig;
- (AudioStreamBasicDescription)format;
- (uint32_t)config;
- (AudioStreamBasicDescription)deviceFormat;
- (uint32_t)deviceChannelConfig;
- (double)volume;
- (void)setVolume:(double)v;
- (void)setVolume:(double) v;
- (void)setShouldContinue:(BOOL)s;
- (void)setShouldPlayOutBuffer:(BOOL)s;
- (void)pause;
- (void)resume;
- (BOOL)isPaused;
- (void)sustainHDCD;
- (void)restartPlaybackAtCurrentPosition;
- (double)latency;
- (double)getVisLatency;
- (double)getTotalLatency;
- (id)controller;
- (id)downmix;
@end

View file

@ -7,384 +7,106 @@
//
#import "OutputNode.h"
#import "OutputCoreAudio.h"
#import "AudioPlayer.h"
#import "BufferChain.h"
#import "OutputCoreAudio.h"
#import "DSPRubberbandNode.h"
#import "DSPFSurroundNode.h"
#import "DSPHRTFNode.h"
#import "DSPEqualizerNode.h"
#import "VisualizationNode.h"
#import "DSPDownmixNode.h"
@implementation OutputNode
#import "Logging.h"
@implementation OutputNode {
BOOL DSPsLaunched;
Node *previousInput;
DSPRubberbandNode *rubberbandNode;
DSPFSurroundNode *fsurroundNode;
DSPHRTFNode *hrtfNode;
DSPEqualizerNode *equalizerNode;
DSPDownmixNode *downmixNode;
VisualizationNode *visualizationNode;
}
- (void)setup {
[self setupWithInterval:NO];
}
- (void)setupWithInterval:(BOOL)resumeInterval {
if(!resumeInterval) {
amountPlayed = 0.0;
amountPlayedInterval = 0.0;
intervalReported = NO;
}
paused = YES;
started = NO;
- (void)setup
{
amountPlayed = 0;
output = [[OutputCoreAudio alloc] initWithController:self];
[output setup];
if(!DSPsLaunched) {
rubberbandNode = [[DSPRubberbandNode alloc] initWithController:self previous:nil latency:0.1];
if(!rubberbandNode) return;
fsurroundNode = [[DSPFSurroundNode alloc] initWithController:self previous:rubberbandNode latency:0.03];
if(!fsurroundNode) return;
equalizerNode = [[DSPEqualizerNode alloc] initWithController:self previous:fsurroundNode latency:0.03];
if(!equalizerNode) return;
hrtfNode = [[DSPHRTFNode alloc] initWithController:self previous:equalizerNode latency:0.03];
if(!hrtfNode) return;
downmixNode = [[DSPDownmixNode alloc] initWithController:self previous:hrtfNode latency:0.03];
if(!downmixNode) return;
// Approximately double the chunk size for Vis at 44100Hz
visualizationNode = [[VisualizationNode alloc] initWithController:self previous:downmixNode latency:8192.0 / 44100.0];
if(!visualizationNode) return;
[self setPreviousNode:visualizationNode];
DSPsLaunched = YES;
[self launchDSPs];
previousInput = nil;
}
}
- (void)seek:(double)time {
// [output pause];
[self resetBuffer];
- (void)seek:(double)time
{
[output pause];
amountPlayed = time;
amountPlayed = time*format.mBytesPerFrame*(format.mSampleRate/1000.0);
}
- (void)process {
paused = NO;
- (void)process
{
[output start];
}
- (void)pause {
paused = YES;
- (void)pause
{
[output pause];
}
- (void)resume {
paused = NO;
- (void)resume
{
[output resume];
}
- (void)fadeOut {
[output fadeOut];
}
- (int)readData:(void *)ptr amount:(int)amount
{
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
- (void)fadeOutBackground {
[output fadeOutBackground];
}
- (void)fadeIn {
[self reconnectInputAndReplumb];
[output fadeIn];
}
// Advances both the total and the per-track-interval play counters by
// `seconds`. Once 60 seconds accumulate in the interval, reports a play
// count to the controller exactly once per interval.
- (void)incrementAmountPlayed:(double)seconds {
amountPlayed += seconds;
amountPlayedInterval += seconds;
if(!intervalReported && amountPlayedInterval >= 60.0) {
intervalReported = YES;
[controller reportPlayCount];
}
}
// Sets the playback position. A small forward jump (under 5 s) is treated as
// normal playback progress and credited to the play-count interval; any other
// change (backward, or a large jump, presumably a seek) just snaps the
// counter without interval credit.
- (void)setAmountPlayed:(double)seconds {
double delta = seconds - amountPlayed;
if(delta > 0.0 && delta < 5.0) {
[self incrementAmountPlayed:delta];
} else if(delta) {
amountPlayed = seconds;
}
}
// Zeroes the total play-position counter.
- (void)resetAmountPlayed {
amountPlayed = 0;
}
// Starts a fresh play-count interval: zero the interval counter and re-arm
// the one-shot report flag.
- (void)resetAmountPlayedInterval {
amountPlayedInterval = 0;
intervalReported = NO;
}
// Asks the controller to switch to the next buffer chain. If there is no
// next chain, re-attaches the DSP chain to the current input instead.
- (BOOL)selectNextBuffer {
BOOL ret = [controller selectNextBuffer];
if(!ret) {
[self reconnectInputAndReplumb];
}
return ret;
}
// Called when the current input has fully played out. Ensures the play count
// is reported even if the 60 s threshold was never reached, notifies the
// controller, and resets the interval for the next track.
- (void)endOfInputPlayed {
if(!intervalReported) {
intervalReported = YES;
[controller reportPlayCount];
}
[controller endOfInputPlayed];
[self resetAmountPlayedInterval];
}
// Forwards to the controller: are more tracks queued after this one?
- (BOOL)chainQueueHasTracks {
return [controller chainQueueHasTracks];
}
// Seconds of audio currently held in this node's own chunk buffer.
- (double)secondsBuffered {
return [buffer listDuration];
}
// The DSP node chain in processing order (input side first), or an empty
// array when the DSPs have not been launched.
- (NSArray *)DSPs {
if(DSPsLaunched) {
return @[rubberbandNode, fsurroundNode, equalizerNode, hrtfNode, downmixNode, visualizationNode];
} else {
return @[];
}
}
- (BOOL)reconnectInput {
Node *finalNode = nil;
if(rubberbandNode) {
finalNode = [[controller bufferChain] finalNode];
[rubberbandNode setPreviousNode:finalNode];
int n;
previousNode = [[controller bufferChain] finalNode];
n = [super readData:ptr amount:amount];
if (endOfStream == YES)
{
amountPlayed = 0;
[controller endOfInputPlayed]; //Updates shouldContinue appropriately?
}
return !!finalNode;
}
- (void)reconnectInputAndReplumb {
Node *finalNode = nil;
if(rubberbandNode) {
finalNode = [[controller bufferChain] finalNode];
[rubberbandNode setPreviousNode:finalNode];
/* if (n == 0) {
NSLog(@"Output Buffer dry!");
}
*/
amountPlayed += n;
NSArray *DSPs = [self DSPs];
for (Node *node in DSPs) {
[node setEndOfStream:NO];
[node setShouldContinue:YES];
}
[pool release];
return n;
}
- (void)launchDSPs {
NSArray *DSPs = [self DSPs];
for (Node *node in DSPs) {
[node launchThread];
}
- (double)amountPlayed
{
return (amountPlayed/format.mBytesPerFrame)/(format.mSampleRate/1000.0);
}
// Reads a chunk from the DSP chain after re-pointing the chain's head at the
// buffer chain's current final node. Marks endOfStream when nothing was
// returned and upstream has ended; returns an empty chunk when no input can
// be connected.
- (AudioChunk *)readChunk:(size_t)amount {
@autoreleasepool {
if([self reconnectInput]) {
AudioChunk *ret = [super readChunk:amount];
if((!ret || ![ret frameCount]) && [previousNode endOfStream]) {
endOfStream = YES;
}
return ret;
} else {
return [[AudioChunk alloc] init];
}
}
}
// Peeks the next chunk's stream format and channel config without consuming
// it, reconnecting the chain input first. Marks endOfStream when the peek
// fails and upstream has ended; returns NO when no input can be connected.
- (BOOL)peekFormat:(nonnull AudioStreamBasicDescription *)format channelConfig:(nonnull uint32_t *)config {
@autoreleasepool {
if([self reconnectInput]) {
BOOL ret = [super peekFormat:format channelConfig:config];
if(!ret && [previousNode endOfStream]) {
endOfStream = YES;
}
return ret;
} else {
return NO;
}
}
}
// Total seconds played for the current track.
- (double)amountPlayed {
return amountPlayed;
}
// Seconds played in the current play-count reporting interval.
- (double)amountPlayedInterval {
return amountPlayedInterval;
}
- (AudioStreamBasicDescription)format {
- (AudioStreamBasicDescription) format
{
return format;
}
// Channel configuration bitmask of the current stream format.
- (uint32_t)config {
return config;
}
// The output device's stream format, as reported by the CoreAudio output.
- (AudioStreamBasicDescription)deviceFormat {
return [output deviceFormat];
}
// The output device's channel configuration, as reported by the CoreAudio output.
- (uint32_t)deviceChannelConfig {
return [output deviceChannelConfig];
}
- (void)setFormat:(AudioStreamBasicDescription *)f channelConfig:(uint32_t)channelConfig {
if(!shouldContinue) return;
- (void)setFormat:(AudioStreamBasicDescription *)f
{
format = *f;
config = channelConfig;
// Calculate a ratio and add to double(seconds) instead, as format may change
// double oldSampleRatio = sampleRatio;
AudioPlayer *audioPlayer = controller;
BufferChain *bufferChain = [audioPlayer bufferChain];
if(bufferChain) {
ConverterNode *converter = [bufferChain converter];
AudioStreamBasicDescription outputFormat;
uint32_t outputChannelConfig;
BOOL formatChanged = NO;
if(converter) {
AudioStreamBasicDescription converterFormat = [converter nodeFormat];
if(memcmp(&converterFormat, &format, sizeof(converterFormat)) != 0) {
formatChanged = YES;
}
}
if(downmixNode && output && !formatChanged) {
outputFormat = [output deviceFormat];
outputChannelConfig = [output deviceChannelConfig];
AudioStreamBasicDescription currentOutputFormat = [downmixNode nodeFormat];
uint32_t currentOutputChannelConfig = [downmixNode nodeChannelConfig];
if(memcmp(&currentOutputFormat, &outputFormat, sizeof(currentOutputFormat)) != 0 ||
currentOutputChannelConfig != outputChannelConfig) {
formatChanged = YES;
}
}
if(formatChanged) {
InputNode *inputNode = [bufferChain inputNode];
if(converter) {
[converter setOutputFormat:format];
}
if(downmixNode && output) {
[downmixNode setOutputFormat:[output deviceFormat] withChannelConfig:[output deviceChannelConfig]];
}
if(inputNode) {
AudioStreamBasicDescription inputFormat = [inputNode nodeFormat];
if(converter) {
[converter inputFormatDidChange:inputFormat inputConfig:[inputNode nodeChannelConfig]];
}
[inputNode seek:(long)(amountPlayed * inputFormat.mSampleRate)];
}
}
}
}
- (void)close {
- (void)close
{
[output stop];
output = nil;
if(DSPsLaunched) {
NSArray *DSPs = [self DSPs];
for(Node *node in DSPs) {
[node setShouldContinue:NO];
}
previousNode = nil;
visualizationNode = nil;
downmixNode = nil;
hrtfNode = nil;
fsurroundNode = nil;
rubberbandNode = nil;
previousInput = nil;
DSPsLaunched = NO;
}
}
- (double)volume {
return [output volume];
- (void)dealloc
{
[output release];
[super dealloc];
}
- (void)setVolume:(double)v {
- (void)setVolume:(double) v
{
[output setVolume:v];
}
- (void)setShouldContinue:(BOOL)s {
- (void)setShouldContinue:(BOOL)s
{
[super setShouldContinue:s];
NSArray *DSPs = [self DSPs];
for(Node *node in DSPs) {
[node setShouldContinue:s];
}
// if (s == NO)
// [output stop];
// if (s == NO)
// [output stop];
}
// Forwards to the output: whether buffered audio should play out on stop.
- (void)setShouldPlayOutBuffer:(BOOL)s {
[output setShouldPlayOutBuffer:s];
}
// Whether this output node is currently paused.
- (BOOL)isPaused {
return paused;
}
// Forwards an HDCD sustain request to the output.
- (void)sustainHDCD {
[output sustainHDCD];
}
// Asks the controller to restart playback from the current position.
- (void)restartPlaybackAtCurrentPosition {
[controller restartPlaybackAtCurrentPosition];
}
// Total output-path latency in seconds: audio queued in every DSP node's
// buffer plus the latency reported by the audio output itself.
- (double)latency {
	double total = 0.0;
	for(Node *dsp in [self DSPs]) {
		total += [dsp secondsBuffered];
	}
	return [output latency] + total;
}
// Latency seen by the visualization: output latency plus the vis node's buffer.
- (double)getVisLatency {
return [output latency] + [visualizationNode secondsBuffered];
}
// End-to-end latency: everything buffered in the input chain plus the output path.
- (double)getTotalLatency {
return [[controller bufferChain] secondsBuffered] + [self latency];
}
// The owning controller (the audio player driving this node).
- (id)controller {
return controller;
}
// The downmix DSP node, exposed for external configuration.
- (id)downmix {
return downmixNode;
}
@end

View file

@ -1,35 +0,0 @@
//
// VisualizationNode.h
// CogAudio
//
// Created by Christopher Snowhill on 2/12/25.
//
#ifndef VisualizationNode_h
#define VisualizationNode_h
#import <CogAudio/Node.h>
/// Chain node that taps the audio stream for the visualization UI: it
/// downmixes incoming audio to mono, resamples to 44100 Hz when the input
/// rate differs, posts the PCM to the shared VisualizationController, and
/// passes the original chunks through to the next node.
@interface VisualizationNode : Node {
}
/// Designated initializer: c is the owning controller, p the upstream node,
/// latency the buffer duration in seconds.
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency;
/// Worker-thread entry point; runs -process at elevated priority.
- (void)threadEntry:(id _Nullable)arg;
/// Builds the resampler (if needed) and the mono downmixer; NO on failure.
- (BOOL)setup;
/// Stops processing and tears down the resampler and downmixer.
- (void)cleanUp;
/// Whether the node is temporarily paused (e.g. during a buffer reset).
- (BOOL)paused;
/// Flushes the node's buffer and rebuilds processing state.
- (void)resetBuffer;
/// Starts/stops the worker thread as the flag transitions.
- (void)setShouldContinue:(BOOL)s;
/// Main loop: reads merged chunks, feeds the visualization, forwards audio.
- (void)process;
/// Seconds of audio currently buffered by this node.
- (double)secondsBuffered;
@end
#endif /* VisualizationNode_h */

View file

@ -1,273 +0,0 @@
//
// VisualizationNode.m
// CogAudio Framework
//
// Created by Christopher Snowhill on 2/12/25.
//
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <Accelerate/Accelerate.h>
#import "Downmix.h"
#import <CogAudio/VisualizationController.h>
#import "BufferChain.h"
#import "Logging.h"
#import "rsstate.h"
#import "VisualizationNode.h"
@implementation VisualizationNode {
void *rs;
double lastVisRate;
BOOL processEntered;
BOOL stopping;
BOOL paused;
BOOL threadTerminated;
AudioStreamBasicDescription inputFormat;
AudioStreamBasicDescription visFormat; // Mono format for vis
uint32_t inputChannelConfig;
uint32_t visChannelConfig;
size_t resamplerRemain;
DownmixProcessor *downmixer;
VisualizationController *visController;
float visAudio[512];
float resamplerInput[8192];
float visTemp[8192];
}
// Designated initializer. Sets up the chunk buffer sized to `latency`
// seconds, the read/write semaphores and access lock, grabs the shared
// visualization controller, and links this node after `p`.
- (id _Nullable)initWithController:(id _Nonnull)c previous:(id _Nullable)p latency:(double)latency {
self = [super init];
if(self) {
buffer = [[ChunkList alloc] initWithMaximumDuration:latency];
writeSemaphore = [[Semaphore alloc] init];
readSemaphore = [[Semaphore alloc] init];
accessLock = [[NSLock alloc] init];
initialBufferFilled = NO;
controller = c;
endOfStream = NO;
shouldContinue = YES;
nodeChannelConfig = 0;
nodeLossless = NO;
// Start draining once a quarter of the target latency is buffered.
durationPrebuffer = latency * 0.25;
visController = [VisualizationController sharedController];
inWrite = NO;
inPeek = NO;
inRead = NO;
inMerge = NO;
[self setPreviousNode:p];
}
return self;
}
// Stops the worker thread and releases the resampler/downmixer before
// destruction.
// NOTE(review): calls both [self cleanUp] and [super cleanUp] — confirm the
// superclass cleanUp is idempotent with respect to this class's override.
- (void)dealloc {
DLog(@"Visualization node dealloc");
[self setShouldContinue:NO];
[self cleanUp];
[super cleanUp];
}
// Visualization thread should be fairly high priority, too
// Worker-thread entry point: raises the thread's priority and QoS, then runs
// the processing loop. threadTerminated brackets the run so
// setShouldContinue: can tell whether a relaunch is needed.
- (void)threadEntry:(id _Nullable)arg {
@autoreleasepool {
NSThread *currentThread = [NSThread currentThread];
[currentThread setThreadPriority:0.75];
[currentThread setQualityOfService:NSQualityOfServiceUserInitiated];
threadTerminated = NO;
[self process];
threadTerminated = YES;
}
}
// Flushes this node's buffer and rebuilds processing state. Pauses the
// processing loop and spin-waits (500 µs polls) for any in-flight
// processVis: call to leave before touching shared state.
- (void)resetBuffer {
paused = YES;
while(processEntered) {
usleep(500);
}
[buffer reset];
[self fullShutdown];
paused = NO;
}
// Seconds of audio currently held in this node's chunk buffer.
- (double)secondsBuffered {
return [buffer listDuration];
}
// Starts or stops the node. Relaunches the worker thread when turning the
// flag back on after the previous thread has already exited.
- (void)setShouldContinue:(BOOL)s {
BOOL currentShouldContinue = shouldContinue;
shouldContinue = s;
if(!currentShouldContinue && s && threadTerminated) {
[self launchThread];
}
}
// Builds the processing state for the current input format: a resampler to
// 44100 Hz when the input rate differs, and a mono float downmixer feeding
// the visualization. Returns NO if either allocation fails.
- (BOOL)setup {
// Only resample when the input is not already (close enough to) 44100 Hz.
if(fabs(inputFormat.mSampleRate - 44100.0) > 1e-6) {
rs = rsstate_new(1, inputFormat.mSampleRate, 44100.0);
if(!rs) {
return NO;
}
resamplerRemain = 0;
}
// Vis format: same as input but collapsed to a single float channel.
visFormat = inputFormat;
visFormat.mChannelsPerFrame = 1;
visFormat.mBytesPerFrame = sizeof(float);
visFormat.mBytesPerPacket = visFormat.mBytesPerFrame * visFormat.mFramesPerPacket;
visChannelConfig = AudioChannelFrontCenter;
downmixer = [[DownmixProcessor alloc] initWithInputFormat:inputFormat inputConfig:inputChannelConfig andOutputFormat:visFormat outputConfig:visChannelConfig];
if(!downmixer) {
return NO;
}
return YES;
}
// Stops processing: raises the stopping flag, spin-waits for any in-flight
// processVis: call to exit, then frees the resampler and downmixer.
- (void)cleanUp {
stopping = YES;
while(processEntered) {
usleep(500);
}
[self fullShutdown];
}
// Frees the resampler state and drops the downmixer; setup must run again
// before further processing.
- (void)fullShutdown {
if(rs) {
rsstate_delete(rs);
rs = NULL;
}
downmixer = nil;
}
// Whether processing is temporarily suspended (see resetBuffer).
- (BOOL)paused {
return paused;
}
// Worker loop: repeatedly reads 512-frame merged float32 chunks from
// upstream, taps a copy for the visualization, and forwards the chunk to the
// next node. Idles in 500 µs sleeps while paused or out of data; exits (and
// flags endOfStream) when shouldContinue goes false.
- (void)process {
while([self shouldContinue] == YES) {
if(paused || endOfStream) {
usleep(500);
continue;
}
@autoreleasepool {
AudioChunk *chunk = nil;
chunk = [self readAndMergeChunksAsFloat32:512];
if(!chunk || ![chunk frameCount]) {
// No data: if upstream has ended, latch end of stream; otherwise retry.
if([previousNode endOfStream] == YES) {
usleep(500);
endOfStream = YES;
continue;
}
} else {
// Vis gets a copy because processVis: consumes the chunk's samples.
[self processVis:[chunk copy]];
[self writeChunk:chunk];
chunk = nil;
}
}
}
endOfStream = YES;
}
// Hands `samples` mono float frames to the shared visualization controller.
- (void)postVisPCM:(const float *)visTemp amount:(size_t)samples {
[visController postVisPCM:visTemp amount:(int)samples];
}
// Downmixes one chunk to mono and posts it to the shared visualization
// controller, resampling to 44100 Hz first when the input rate differs.
// Consumes the chunk's samples; callers pass a copy. Rebuilds the
// downmixer/resampler whenever the input format or channel config changes,
// flushing the old resampler's tail first.
- (void)processVis:(AudioChunk *)chunk {
	processEntered = YES;
	if(paused) {
		processEntered = NO;
		return;
	}
	AudioStreamBasicDescription format = [chunk format];
	uint32_t channelConfig = [chunk channelConfig];
	// The vis controller always receives 44100 Hz data.
	[visController postSampleRate:44100.0];
	if(!rs || !downmixer ||
	   memcmp(&format, &inputFormat, sizeof(format)) != 0 ||
	   channelConfig != inputChannelConfig) {
		if(rs) {
			// Drain whatever the old resampler still holds before replacing it.
			while(!stopping) {
				int samplesFlushed;
				samplesFlushed = (int)rsstate_flush(rs, &visTemp[0], 8192);
				if(samplesFlushed > 1) {
					[self postVisPCM:visTemp amount:samplesFlushed];
				} else {
					break;
				}
			}
		}
		[self fullShutdown];
		inputFormat = format;
		inputChannelConfig = channelConfig;
		if(![self setup]) {
			processEntered = NO;
			return;
		}
	}
	size_t frameCount = [chunk frameCount];
	NSData *sampleData = [chunk removeSamples:frameCount];
	// Mono mix of the chunk: one float per frame, written into visAudio.
	[downmixer process:[sampleData bytes] frameCount:frameCount output:&visAudio[0]];
	if(rs) {
		int samplesProcessed;
		size_t inDone = 0;
		size_t copiedDone = 0; // frames of visAudio staged into resamplerInput so far
		size_t visFrameCount = frameCount; // input frames not yet consumed by the resampler
		do {
			if(stopping) {
				break;
			}
			// Stage as many fresh frames as fit behind the carry-over left in
			// resamplerInput. BUGFIX: the previous expression
			// MIN(visFrameCount, resamplerRemain + visFrameCount - 8192)
			// went negative for chunks smaller than the 8192-float staging
			// buffer and always re-copied from visAudio[0]; free space is
			// 8192 - resamplerRemain and the source must skip staged frames.
			size_t freshTodo = MIN(frameCount - copiedDone, 8192 - resamplerRemain);
			if(freshTodo) {
				cblas_scopy((int)freshTodo, &visAudio[copiedDone], 1, &resamplerInput[resamplerRemain], 1);
				copiedDone += freshTodo;
			}
			size_t visTodo = freshTodo + resamplerRemain;
			resamplerRemain = 0;
			samplesProcessed = (int)rsstate_resample(rs, &resamplerInput[0], visTodo, &inDone, &visTemp[0], 8192);
			// Keep any frames the resampler did not consume for the next pass.
			resamplerRemain = visTodo - inDone;
			if(resamplerRemain && inDone) {
				memmove(&resamplerInput[0], &resamplerInput[inDone], resamplerRemain * sizeof(float));
			}
			if(samplesProcessed) {
				[self postVisPCM:&visTemp[0] amount:samplesProcessed];
			}
			visFrameCount -= inDone;
		} while(samplesProcessed && visFrameCount);
	} else {
		// Already at 44100 Hz: post the mono mix directly.
		[self postVisPCM:&visAudio[0] amount:frameCount];
	}
	processEntered = NO;
}
@end

View file

@ -1 +0,0 @@
#import "ThirdParty/deadbeef/fft.h"

View file

@ -3,12 +3,10 @@
archiveVersion = 1;
classes = {
};
objectVersion = 54;
objectVersion = 42;
objects = {
/* Begin PBXBuildFile section */
07DB5F3E0ED353A900C2E3EF /* AudioMetadataWriter.h in Headers */ = {isa = PBXBuildFile; fileRef = 07DB5F3C0ED353A900C2E3EF /* AudioMetadataWriter.h */; };
07DB5F3F0ED353A900C2E3EF /* AudioMetadataWriter.m in Sources */ = {isa = PBXBuildFile; fileRef = 07DB5F3D0ED353A900C2E3EF /* AudioMetadataWriter.m */; };
17A2D3C50B8D1D37000778C4 /* AudioDecoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 17A2D3C30B8D1D37000778C4 /* AudioDecoder.h */; settings = {ATTRIBUTES = (Public, ); }; };
17A2D3C60B8D1D37000778C4 /* AudioDecoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 17A2D3C40B8D1D37000778C4 /* AudioDecoder.m */; };
17ADB13C0B97926D00257CA2 /* AudioSource.h in Headers */ = {isa = PBXBuildFile; fileRef = 17ADB13A0B97926D00257CA2 /* AudioSource.h */; };
@ -25,9 +23,15 @@
17D21CA80B8BE4BA00D1EBDE /* Node.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21C7D0B8BE4BA00D1EBDE /* Node.m */; };
17D21CA90B8BE4BA00D1EBDE /* OutputNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21C7E0B8BE4BA00D1EBDE /* OutputNode.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21CAA0B8BE4BA00D1EBDE /* OutputNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21C7F0B8BE4BA00D1EBDE /* OutputNode.m */; };
17D21CC50B8BE4BA00D1EBDE /* OutputCoreAudio.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21C9C0B8BE4BA00D1EBDE /* OutputCoreAudio.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21CC60B8BE4BA00D1EBDE /* OutputCoreAudio.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21C9D0B8BE4BA00D1EBDE /* OutputCoreAudio.m */; };
17D21CC70B8BE4BA00D1EBDE /* Status.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21C9E0B8BE4BA00D1EBDE /* Status.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21CF30B8BE5EF00D1EBDE /* CogSemaphore.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21CF10B8BE5EF00D1EBDE /* CogSemaphore.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21CF40B8BE5EF00D1EBDE /* CogSemaphore.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21CF20B8BE5EF00D1EBDE /* CogSemaphore.m */; };
17D21CDF0B8BE5B400D1EBDE /* VirtualRingBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21CDA0B8BE5B400D1EBDE /* VirtualRingBuffer.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21CE00B8BE5B400D1EBDE /* VirtualRingBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21CDB0B8BE5B400D1EBDE /* VirtualRingBuffer.m */; };
17D21CE10B8BE5B400D1EBDE /* DBLog.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21CDD0B8BE5B400D1EBDE /* DBLog.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21CE20B8BE5B400D1EBDE /* DBLog.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21CDE0B8BE5B400D1EBDE /* DBLog.m */; };
17D21CF30B8BE5EF00D1EBDE /* Semaphore.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21CF10B8BE5EF00D1EBDE /* Semaphore.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21CF40B8BE5EF00D1EBDE /* Semaphore.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21CF20B8BE5EF00D1EBDE /* Semaphore.m */; };
17D21DAD0B8BE76800D1EBDE /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17D21DA90B8BE76800D1EBDE /* AudioToolbox.framework */; };
17D21DAE0B8BE76800D1EBDE /* AudioUnit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17D21DAA0B8BE76800D1EBDE /* AudioUnit.framework */; };
17D21DAF0B8BE76800D1EBDE /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17D21DAB0B8BE76800D1EBDE /* CoreAudio.framework */; };
@ -37,89 +41,15 @@
17D21EBD0B8BF44000D1EBDE /* AudioPlayer.h in Headers */ = {isa = PBXBuildFile; fileRef = 17D21EBB0B8BF44000D1EBDE /* AudioPlayer.h */; settings = {ATTRIBUTES = (Public, ); }; };
17D21EBE0B8BF44000D1EBDE /* AudioPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 17D21EBC0B8BF44000D1EBDE /* AudioPlayer.m */; };
17F94DD50B8D0F7000A34E87 /* PluginController.h in Headers */ = {isa = PBXBuildFile; fileRef = 17F94DD30B8D0F7000A34E87 /* PluginController.h */; settings = {ATTRIBUTES = (Public, ); }; };
17F94DD60B8D0F7000A34E87 /* PluginController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 17F94DD40B8D0F7000A34E87 /* PluginController.mm */; };
17F94DD60B8D0F7000A34E87 /* PluginController.m in Sources */ = {isa = PBXBuildFile; fileRef = 17F94DD40B8D0F7000A34E87 /* PluginController.m */; };
17F94DDD0B8D101100A34E87 /* Plugin.h in Headers */ = {isa = PBXBuildFile; fileRef = 17F94DDC0B8D101100A34E87 /* Plugin.h */; settings = {ATTRIBUTES = (Public, ); }; };
831A50142865A7FD0049CFE4 /* rsstate.hpp in Headers */ = {isa = PBXBuildFile; fileRef = 831A50132865A7FD0049CFE4 /* rsstate.hpp */; };
831A50162865A8800049CFE4 /* rsstate.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 831A50152865A8800049CFE4 /* rsstate.cpp */; };
831A50182865A8B30049CFE4 /* rsstate.h in Headers */ = {isa = PBXBuildFile; fileRef = 831A50172865A8B30049CFE4 /* rsstate.h */; };
8328995327CB511000D7F028 /* RedundantPlaylistDataStore.m in Sources */ = {isa = PBXBuildFile; fileRef = 8328995127CB510F00D7F028 /* RedundantPlaylistDataStore.m */; };
8328995427CB511000D7F028 /* RedundantPlaylistDataStore.h in Headers */ = {isa = PBXBuildFile; fileRef = 8328995227CB511000D7F028 /* RedundantPlaylistDataStore.h */; };
8328995727CB51B700D7F028 /* SHA256Digest.h in Headers */ = {isa = PBXBuildFile; fileRef = 8328995527CB51B700D7F028 /* SHA256Digest.h */; };
8328995827CB51B700D7F028 /* SHA256Digest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8328995627CB51B700D7F028 /* SHA256Digest.m */; };
8328995A27CB51C900D7F028 /* Security.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8328995927CB51C900D7F028 /* Security.framework */; };
833442422D6EFA6700C51D38 /* VisualizationController.h in Headers */ = {isa = PBXBuildFile; fileRef = 833442402D6EFA6700C51D38 /* VisualizationController.h */; settings = {ATTRIBUTES = (Public, ); }; };
833442432D6EFA6700C51D38 /* VisualizationController.m in Sources */ = {isa = PBXBuildFile; fileRef = 833442412D6EFA6700C51D38 /* VisualizationController.m */; };
833738EA2D5EA52500278628 /* DSPDownmixNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 833738E92D5EA52500278628 /* DSPDownmixNode.h */; settings = {ATTRIBUTES = (Public, ); }; };
833738EC2D5EA53500278628 /* DSPDownmixNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 833738EB2D5EA53500278628 /* DSPDownmixNode.m */; };
833738EF2D5EA5B700278628 /* Downmix.m in Sources */ = {isa = PBXBuildFile; fileRef = 833738EE2D5EA5B700278628 /* Downmix.m */; };
833738F02D5EA5B700278628 /* Downmix.h in Headers */ = {isa = PBXBuildFile; fileRef = 833738ED2D5EA5B700278628 /* Downmix.h */; settings = {ATTRIBUTES = (Public, ); }; };
8347C7412796C58800FA8A7D /* NSFileHandle+CreateFile.h in Headers */ = {isa = PBXBuildFile; fileRef = 8347C73F2796C58800FA8A7D /* NSFileHandle+CreateFile.h */; };
8347C7422796C58800FA8A7D /* NSFileHandle+CreateFile.m in Sources */ = {isa = PBXBuildFile; fileRef = 8347C7402796C58800FA8A7D /* NSFileHandle+CreateFile.m */; };
834A41A9287A90AB00EB9D9B /* freesurround_decoder.h in Headers */ = {isa = PBXBuildFile; fileRef = 834A41A5287A90AB00EB9D9B /* freesurround_decoder.h */; };
834A41AA287A90AB00EB9D9B /* freesurround_decoder.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 834A41A6287A90AB00EB9D9B /* freesurround_decoder.cpp */; };
834A41AB287A90AB00EB9D9B /* channelmaps.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 834A41A7287A90AB00EB9D9B /* channelmaps.cpp */; };
834A41AC287A90AB00EB9D9B /* channelmaps.h in Headers */ = {isa = PBXBuildFile; fileRef = 834A41A8287A90AB00EB9D9B /* channelmaps.h */; };
834FD4EB27AF8F380063BC83 /* AudioChunk.h in Headers */ = {isa = PBXBuildFile; fileRef = 834FD4EA27AF8F380063BC83 /* AudioChunk.h */; settings = {ATTRIBUTES = (Public, ); }; };
834FD4ED27AF91220063BC83 /* AudioChunk.m in Sources */ = {isa = PBXBuildFile; fileRef = 834FD4EC27AF91220063BC83 /* AudioChunk.m */; };
834FD4F027AF93680063BC83 /* ChunkList.h in Headers */ = {isa = PBXBuildFile; fileRef = 834FD4EE27AF93680063BC83 /* ChunkList.h */; settings = {ATTRIBUTES = (Public, ); }; };
834FD4F127AF93680063BC83 /* ChunkList.m in Sources */ = {isa = PBXBuildFile; fileRef = 834FD4EF27AF93680063BC83 /* ChunkList.m */; };
8350416D28646149006B32CC /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8350416C28646149006B32CC /* CoreMedia.framework */; };
835C88B1279811A500E28EAE /* hdcd_decode2.h in Headers */ = {isa = PBXBuildFile; fileRef = 835C88AF279811A500E28EAE /* hdcd_decode2.h */; };
835C88B2279811A500E28EAE /* hdcd_decode2.c in Sources */ = {isa = PBXBuildFile; fileRef = 835C88B0279811A500E28EAE /* hdcd_decode2.c */; };
835DD2672ACAF1D90057E319 /* OutputCoreAudio.m in Sources */ = {isa = PBXBuildFile; fileRef = 835DD2652ACAF1D90057E319 /* OutputCoreAudio.m */; };
835DD2682ACAF1D90057E319 /* OutputCoreAudio.h in Headers */ = {isa = PBXBuildFile; fileRef = 835DD2662ACAF1D90057E319 /* OutputCoreAudio.h */; settings = {ATTRIBUTES = (Public, ); }; };
835DD2722ACAF5AD0057E319 /* lpc.h in Headers */ = {isa = PBXBuildFile; fileRef = 835DD26D2ACAF5AD0057E319 /* lpc.h */; };
835DD2732ACAF5AD0057E319 /* util.h in Headers */ = {isa = PBXBuildFile; fileRef = 835DD26E2ACAF5AD0057E319 /* util.h */; };
835DD2742ACAF5AD0057E319 /* lpc.c in Sources */ = {isa = PBXBuildFile; fileRef = 835DD26F2ACAF5AD0057E319 /* lpc.c */; };
835FAC5E27BCA14D00BA8562 /* BadSampleCleaner.h in Headers */ = {isa = PBXBuildFile; fileRef = 835FAC5C27BCA14D00BA8562 /* BadSampleCleaner.h */; };
835FAC5F27BCA14D00BA8562 /* BadSampleCleaner.m in Sources */ = {isa = PBXBuildFile; fileRef = 835FAC5D27BCA14D00BA8562 /* BadSampleCleaner.m */; };
83725A9027AA16C90003F694 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 83725A7B27AA0D8A0003F694 /* Accelerate.framework */; };
83725A9127AA16D50003F694 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 83725A7C27AA0D8E0003F694 /* AVFoundation.framework */; };
8377C64C27B8C51500E8BC0F /* fft_accelerate.c in Sources */ = {isa = PBXBuildFile; fileRef = 8377C64B27B8C51500E8BC0F /* fft_accelerate.c */; };
8377C64E27B8C54400E8BC0F /* fft.h in Headers */ = {isa = PBXBuildFile; fileRef = 8377C64D27B8C54400E8BC0F /* fft.h */; };
8384912718080FF100E7332D /* Logging.h in Headers */ = {isa = PBXBuildFile; fileRef = 8384912618080FF100E7332D /* Logging.h */; };
838A33722D06A97D00D0D770 /* librubberband.3.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 838A33712D06A97D00D0D770 /* librubberband.3.dylib */; };
839065F32853338700636FBB /* dsd2float.h in Headers */ = {isa = PBXBuildFile; fileRef = 839065F22853338700636FBB /* dsd2float.h */; };
839366671815923C006DD712 /* CogPluginMulti.h in Headers */ = {isa = PBXBuildFile; fileRef = 839366651815923C006DD712 /* CogPluginMulti.h */; };
839366681815923C006DD712 /* CogPluginMulti.m in Sources */ = {isa = PBXBuildFile; fileRef = 839366661815923C006DD712 /* CogPluginMulti.m */; };
8399CF2C27B5D1D5008751F1 /* NSDictionary+Merge.h in Headers */ = {isa = PBXBuildFile; fileRef = 8399CF2A27B5D1D4008751F1 /* NSDictionary+Merge.h */; };
8399CF2D27B5D1D5008751F1 /* NSDictionary+Merge.m in Sources */ = {isa = PBXBuildFile; fileRef = 8399CF2B27B5D1D4008751F1 /* NSDictionary+Merge.m */; };
839E56E52879450300DFB5F4 /* HrtfData.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E12879450300DFB5F4 /* HrtfData.h */; };
839E56E62879450300DFB5F4 /* Endianness.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E22879450300DFB5F4 /* Endianness.h */; };
839E56E72879450300DFB5F4 /* HrtfData.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 839E56E32879450300DFB5F4 /* HrtfData.cpp */; };
839E56E82879450300DFB5F4 /* IHrtfData.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E42879450300DFB5F4 /* IHrtfData.h */; };
839E56EA28794F6300DFB5F4 /* HrtfTypes.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56E928794F6300DFB5F4 /* HrtfTypes.h */; };
839E56F7287974A100DFB5F4 /* SandboxBroker.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E56F6287974A100DFB5F4 /* SandboxBroker.h */; };
839E899E2D5DB9D500A13526 /* VisualizationNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 839E899D2D5DB9D500A13526 /* VisualizationNode.h */; settings = {ATTRIBUTES = (Public, ); }; };
839E89A02D5DBA1700A13526 /* VisualizationNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 839E899F2D5DBA1700A13526 /* VisualizationNode.m */; };
83A3496A2D5C3F430096D530 /* DSPRubberbandNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83A349682D5C3F430096D530 /* DSPRubberbandNode.m */; };
83A3496B2D5C3F430096D530 /* DSPRubberbandNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A349672D5C3F430096D530 /* DSPRubberbandNode.h */; settings = {ATTRIBUTES = (Public, ); }; };
83A3496D2D5C40490096D530 /* DSPFSurroundNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A3496C2D5C40490096D530 /* DSPFSurroundNode.h */; settings = {ATTRIBUTES = (Public, ); }; };
83A3496F2D5C405E0096D530 /* DSPFSurroundNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83A3496E2D5C405E0096D530 /* DSPFSurroundNode.m */; };
83A349722D5C41810096D530 /* FSurroundFilter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 83A349712D5C41810096D530 /* FSurroundFilter.mm */; };
83A349732D5C41810096D530 /* FSurroundFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A349702D5C41810096D530 /* FSurroundFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };
83A349752D5C50A10096D530 /* DSPHRTFNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83A349742D5C50A10096D530 /* DSPHRTFNode.h */; settings = {ATTRIBUTES = (Public, ); }; };
83A349772D5C50B20096D530 /* DSPHRTFNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83A349762D5C50B20096D530 /* DSPHRTFNode.m */; };
83B74281289E027F005AAC28 /* CogAudio-Bridging-Header.h in Headers */ = {isa = PBXBuildFile; fileRef = 83B74280289E027F005AAC28 /* CogAudio-Bridging-Header.h */; };
83F843202D5C6272008C123B /* HeadphoneFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F8431E2D5C6272008C123B /* HeadphoneFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };
83F843212D5C6272008C123B /* HeadphoneFilter.mm in Sources */ = {isa = PBXBuildFile; fileRef = 83F8431F2D5C6272008C123B /* HeadphoneFilter.mm */; };
83F843232D5C66DA008C123B /* DSPEqualizerNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F843222D5C66DA008C123B /* DSPEqualizerNode.h */; settings = {ATTRIBUTES = (Public, ); }; };
83F843252D5C66E9008C123B /* DSPEqualizerNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83F843242D5C66E9008C123B /* DSPEqualizerNode.m */; };
83F9FFF62D6EC43900026576 /* soxr.h in Headers */ = {isa = PBXBuildFile; fileRef = 83F9FFF02D6EC43900026576 /* soxr.h */; settings = {ATTRIBUTES = (Public, ); }; };
83F9FFF82D6EC43900026576 /* libsoxr.0.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 83F9FFF22D6EC43900026576 /* libsoxr.0.dylib */; };
83FFED512D5B08BC0044CCAF /* DSPNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 83FFED502D5B08BC0044CCAF /* DSPNode.h */; settings = {ATTRIBUTES = (Public, ); }; };
83FFED532D5B09320044CCAF /* DSPNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 83FFED522D5B09320044CCAF /* DSPNode.m */; };
8DC2EF570486A6940098B216 /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1058C7B1FEA5585E11CA2CBB /* Cocoa.framework */; };
8E8D3D2F0CBAEE6E00135C1B /* AudioContainer.h in Headers */ = {isa = PBXBuildFile; fileRef = 8E8D3D2D0CBAEE6E00135C1B /* AudioContainer.h */; settings = {ATTRIBUTES = (Public, ); }; };
8E8D3D300CBAEE6E00135C1B /* AudioContainer.m in Sources */ = {isa = PBXBuildFile; fileRef = 8E8D3D2E0CBAEE6E00135C1B /* AudioContainer.m */; };
8EC1225F0B993BD500C5B3AD /* ConverterNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 8EC1225D0B993BD500C5B3AD /* ConverterNode.h */; settings = {ATTRIBUTES = (Public, ); }; };
8EC122600B993BD500C5B3AD /* ConverterNode.m in Sources */ = {isa = PBXBuildFile; fileRef = 8EC1225E0B993BD500C5B3AD /* ConverterNode.m */; };
B0575F2D0D687A0800411D77 /* Helper.h in Headers */ = {isa = PBXBuildFile; fileRef = B0575F2C0D687A0800411D77 /* Helper.h */; settings = {ATTRIBUTES = (Public, ); }; };
B0575F300D687A4000411D77 /* Helper.m in Sources */ = {isa = PBXBuildFile; fileRef = B0575F2F0D687A4000411D77 /* Helper.m */; };
8EC1225F0B993BD500C5B3AD /* Converter.h in Headers */ = {isa = PBXBuildFile; fileRef = 8EC1225D0B993BD500C5B3AD /* Converter.h */; };
8EC122600B993BD500C5B3AD /* Converter.m in Sources */ = {isa = PBXBuildFile; fileRef = 8EC1225E0B993BD500C5B3AD /* Converter.m */; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
83725A8D27AA0DDB0003F694 /* CopyFiles */ = {
17D21D2B0B8BE6A200D1EBDE /* CopyFiles */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = "";
@ -131,8 +61,6 @@
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
07DB5F3C0ED353A900C2E3EF /* AudioMetadataWriter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioMetadataWriter.h; sourceTree = "<group>"; };
07DB5F3D0ED353A900C2E3EF /* AudioMetadataWriter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AudioMetadataWriter.m; sourceTree = "<group>"; };
0867D69BFE84028FC02AAC07 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = /System/Library/Frameworks/Foundation.framework; sourceTree = "<absolute>"; };
0867D6A5FE840307C02AAC07 /* AppKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AppKit.framework; path = /System/Library/Frameworks/AppKit.framework; sourceTree = "<absolute>"; };
1058C7B1FEA5585E11CA2CBB /* Cocoa.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Cocoa.framework; path = /System/Library/Frameworks/Cocoa.framework; sourceTree = "<absolute>"; };
@ -152,9 +80,15 @@
17D21C7D0B8BE4BA00D1EBDE /* Node.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = Node.m; sourceTree = "<group>"; };
17D21C7E0B8BE4BA00D1EBDE /* OutputNode.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = OutputNode.h; sourceTree = "<group>"; };
17D21C7F0B8BE4BA00D1EBDE /* OutputNode.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = OutputNode.m; sourceTree = "<group>"; };
17D21C9C0B8BE4BA00D1EBDE /* OutputCoreAudio.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = OutputCoreAudio.h; sourceTree = "<group>"; };
17D21C9D0B8BE4BA00D1EBDE /* OutputCoreAudio.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = OutputCoreAudio.m; sourceTree = "<group>"; };
17D21C9E0B8BE4BA00D1EBDE /* Status.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = Status.h; sourceTree = "<group>"; };
17D21CF10B8BE5EF00D1EBDE /* CogSemaphore.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = CogSemaphore.h; sourceTree = "<group>"; };
17D21CF20B8BE5EF00D1EBDE /* CogSemaphore.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = CogSemaphore.m; sourceTree = "<group>"; };
17D21CDA0B8BE5B400D1EBDE /* VirtualRingBuffer.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = VirtualRingBuffer.h; sourceTree = "<group>"; };
17D21CDB0B8BE5B400D1EBDE /* VirtualRingBuffer.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = VirtualRingBuffer.m; sourceTree = "<group>"; };
17D21CDD0B8BE5B400D1EBDE /* DBLog.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = DBLog.h; sourceTree = "<group>"; };
17D21CDE0B8BE5B400D1EBDE /* DBLog.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = DBLog.m; sourceTree = "<group>"; };
17D21CF10B8BE5EF00D1EBDE /* Semaphore.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = Semaphore.h; sourceTree = "<group>"; };
17D21CF20B8BE5EF00D1EBDE /* Semaphore.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = Semaphore.m; sourceTree = "<group>"; };
17D21DA90B8BE76800D1EBDE /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = /System/Library/Frameworks/AudioToolbox.framework; sourceTree = "<absolute>"; };
17D21DAA0B8BE76800D1EBDE /* AudioUnit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioUnit.framework; path = /System/Library/Frameworks/AudioUnit.framework; sourceTree = "<absolute>"; };
17D21DAB0B8BE76800D1EBDE /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = /System/Library/Frameworks/CoreAudio.framework; sourceTree = "<absolute>"; };
@ -164,90 +98,13 @@
17D21EBB0B8BF44000D1EBDE /* AudioPlayer.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = AudioPlayer.h; sourceTree = "<group>"; };
17D21EBC0B8BF44000D1EBDE /* AudioPlayer.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = AudioPlayer.m; sourceTree = "<group>"; };
17F94DD30B8D0F7000A34E87 /* PluginController.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = PluginController.h; sourceTree = "<group>"; };
17F94DD40B8D0F7000A34E87 /* PluginController.mm */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.cpp.objcpp; path = PluginController.mm; sourceTree = "<group>"; };
17F94DD40B8D0F7000A34E87 /* PluginController.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = PluginController.m; sourceTree = "<group>"; };
17F94DDC0B8D101100A34E87 /* Plugin.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = Plugin.h; sourceTree = "<group>"; };
32DBCF5E0370ADEE00C91783 /* CogAudio_Prefix.pch */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CogAudio_Prefix.pch; sourceTree = "<group>"; };
831A50132865A7FD0049CFE4 /* rsstate.hpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.h; path = rsstate.hpp; sourceTree = "<group>"; };
831A50152865A8800049CFE4 /* rsstate.cpp */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = rsstate.cpp; sourceTree = "<group>"; };
831A50172865A8B30049CFE4 /* rsstate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = rsstate.h; sourceTree = "<group>"; };
8328995127CB510F00D7F028 /* RedundantPlaylistDataStore.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = RedundantPlaylistDataStore.m; path = ../../Utils/RedundantPlaylistDataStore.m; sourceTree = "<group>"; };
8328995227CB511000D7F028 /* RedundantPlaylistDataStore.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = RedundantPlaylistDataStore.h; path = ../../Utils/RedundantPlaylistDataStore.h; sourceTree = "<group>"; };
8328995527CB51B700D7F028 /* SHA256Digest.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = SHA256Digest.h; path = ../../Utils/SHA256Digest.h; sourceTree = "<group>"; };
8328995627CB51B700D7F028 /* SHA256Digest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = SHA256Digest.m; path = ../../Utils/SHA256Digest.m; sourceTree = "<group>"; };
8328995927CB51C900D7F028 /* Security.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Security.framework; path = System/Library/Frameworks/Security.framework; sourceTree = SDKROOT; };
833442402D6EFA6700C51D38 /* VisualizationController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisualizationController.h; sourceTree = "<group>"; };
833442412D6EFA6700C51D38 /* VisualizationController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisualizationController.m; sourceTree = "<group>"; };
833738E92D5EA52500278628 /* DSPDownmixNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPDownmixNode.h; sourceTree = "<group>"; };
833738EB2D5EA53500278628 /* DSPDownmixNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPDownmixNode.m; sourceTree = "<group>"; };
833738ED2D5EA5B700278628 /* Downmix.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Downmix.h; sourceTree = "<group>"; };
833738EE2D5EA5B700278628 /* Downmix.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Downmix.m; sourceTree = "<group>"; };
8347C73F2796C58800FA8A7D /* NSFileHandle+CreateFile.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSFileHandle+CreateFile.h"; path = "../../Utils/NSFileHandle+CreateFile.h"; sourceTree = "<group>"; };
8347C7402796C58800FA8A7D /* NSFileHandle+CreateFile.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSFileHandle+CreateFile.m"; path = "../../Utils/NSFileHandle+CreateFile.m"; sourceTree = "<group>"; };
834A41A5287A90AB00EB9D9B /* freesurround_decoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = freesurround_decoder.h; sourceTree = "<group>"; };
834A41A6287A90AB00EB9D9B /* freesurround_decoder.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = freesurround_decoder.cpp; sourceTree = "<group>"; };
834A41A7287A90AB00EB9D9B /* channelmaps.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = channelmaps.cpp; sourceTree = "<group>"; };
834A41A8287A90AB00EB9D9B /* channelmaps.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = channelmaps.h; sourceTree = "<group>"; };
834FD4EA27AF8F380063BC83 /* AudioChunk.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AudioChunk.h; sourceTree = "<group>"; };
834FD4EC27AF91220063BC83 /* AudioChunk.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AudioChunk.m; sourceTree = "<group>"; };
834FD4EE27AF93680063BC83 /* ChunkList.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ChunkList.h; sourceTree = "<group>"; };
834FD4EF27AF93680063BC83 /* ChunkList.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ChunkList.m; sourceTree = "<group>"; };
8350416C28646149006B32CC /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; };
835C88AF279811A500E28EAE /* hdcd_decode2.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = hdcd_decode2.h; sourceTree = "<group>"; };
835C88B0279811A500E28EAE /* hdcd_decode2.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = hdcd_decode2.c; sourceTree = "<group>"; };
835DD2652ACAF1D90057E319 /* OutputCoreAudio.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OutputCoreAudio.m; sourceTree = "<group>"; };
835DD2662ACAF1D90057E319 /* OutputCoreAudio.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OutputCoreAudio.h; sourceTree = "<group>"; };
835DD26B2ACAF5AD0057E319 /* LICENSE.LGPL */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = LICENSE.LGPL; sourceTree = "<group>"; };
835DD26C2ACAF5AD0057E319 /* License.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = License.txt; sourceTree = "<group>"; };
835DD26D2ACAF5AD0057E319 /* lpc.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = lpc.h; sourceTree = "<group>"; };
835DD26E2ACAF5AD0057E319 /* util.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = util.h; sourceTree = "<group>"; };
835DD26F2ACAF5AD0057E319 /* lpc.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = lpc.c; sourceTree = "<group>"; };
835FAC5C27BCA14D00BA8562 /* BadSampleCleaner.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = BadSampleCleaner.h; path = Utils/BadSampleCleaner.h; sourceTree = SOURCE_ROOT; };
835FAC5D27BCA14D00BA8562 /* BadSampleCleaner.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; name = BadSampleCleaner.m; path = Utils/BadSampleCleaner.m; sourceTree = SOURCE_ROOT; };
83725A7B27AA0D8A0003F694 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
83725A7C27AA0D8E0003F694 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
8377C64B27B8C51500E8BC0F /* fft_accelerate.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; path = fft_accelerate.c; sourceTree = "<group>"; };
8377C64D27B8C54400E8BC0F /* fft.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = fft.h; sourceTree = "<group>"; };
8384912618080FF100E7332D /* Logging.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = Logging.h; path = ../../Utils/Logging.h; sourceTree = "<group>"; };
838A33712D06A97D00D0D770 /* librubberband.3.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = librubberband.3.dylib; path = ../ThirdParty/rubberband/lib/librubberband.3.dylib; sourceTree = SOURCE_ROOT; };
839065F22853338700636FBB /* dsd2float.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = dsd2float.h; sourceTree = "<group>"; };
839366651815923C006DD712 /* CogPluginMulti.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CogPluginMulti.h; sourceTree = "<group>"; };
839366661815923C006DD712 /* CogPluginMulti.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CogPluginMulti.m; sourceTree = "<group>"; };
8399CF2A27B5D1D4008751F1 /* NSDictionary+Merge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "NSDictionary+Merge.h"; path = "../../Utils/NSDictionary+Merge.h"; sourceTree = "<group>"; };
8399CF2B27B5D1D4008751F1 /* NSDictionary+Merge.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "NSDictionary+Merge.m"; path = "../../Utils/NSDictionary+Merge.m"; sourceTree = "<group>"; };
839E56E12879450300DFB5F4 /* HrtfData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HrtfData.h; sourceTree = "<group>"; };
839E56E22879450300DFB5F4 /* Endianness.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Endianness.h; sourceTree = "<group>"; };
839E56E32879450300DFB5F4 /* HrtfData.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = HrtfData.cpp; sourceTree = "<group>"; };
839E56E42879450300DFB5F4 /* IHrtfData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = IHrtfData.h; sourceTree = "<group>"; };
839E56E928794F6300DFB5F4 /* HrtfTypes.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HrtfTypes.h; sourceTree = "<group>"; };
839E56F6287974A100DFB5F4 /* SandboxBroker.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = SandboxBroker.h; path = ../Utils/SandboxBroker.h; sourceTree = "<group>"; };
839E899D2D5DB9D500A13526 /* VisualizationNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisualizationNode.h; sourceTree = "<group>"; };
839E899F2D5DBA1700A13526 /* VisualizationNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisualizationNode.m; sourceTree = "<group>"; };
83A349672D5C3F430096D530 /* DSPRubberbandNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPRubberbandNode.h; sourceTree = "<group>"; };
83A349682D5C3F430096D530 /* DSPRubberbandNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPRubberbandNode.m; sourceTree = "<group>"; };
83A3496C2D5C40490096D530 /* DSPFSurroundNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPFSurroundNode.h; sourceTree = "<group>"; };
83A3496E2D5C405E0096D530 /* DSPFSurroundNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPFSurroundNode.m; sourceTree = "<group>"; };
83A349702D5C41810096D530 /* FSurroundFilter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FSurroundFilter.h; sourceTree = "<group>"; };
83A349712D5C41810096D530 /* FSurroundFilter.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FSurroundFilter.mm; sourceTree = "<group>"; };
83A349742D5C50A10096D530 /* DSPHRTFNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPHRTFNode.h; sourceTree = "<group>"; };
83A349762D5C50B20096D530 /* DSPHRTFNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPHRTFNode.m; sourceTree = "<group>"; };
83B74280289E027F005AAC28 /* CogAudio-Bridging-Header.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "CogAudio-Bridging-Header.h"; sourceTree = "<group>"; };
83F8431E2D5C6272008C123B /* HeadphoneFilter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = HeadphoneFilter.h; sourceTree = "<group>"; };
83F8431F2D5C6272008C123B /* HeadphoneFilter.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = HeadphoneFilter.mm; sourceTree = "<group>"; };
83F843222D5C66DA008C123B /* DSPEqualizerNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPEqualizerNode.h; sourceTree = "<group>"; };
83F843242D5C66E9008C123B /* DSPEqualizerNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPEqualizerNode.m; sourceTree = "<group>"; };
83F9FFF02D6EC43900026576 /* soxr.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = soxr.h; sourceTree = "<group>"; };
83F9FFF22D6EC43900026576 /* libsoxr.0.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; path = libsoxr.0.dylib; sourceTree = "<group>"; };
83F9FFF42D6EC43900026576 /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = "<group>"; };
83FFED502D5B08BC0044CCAF /* DSPNode.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DSPNode.h; sourceTree = "<group>"; };
83FFED522D5B09320044CCAF /* DSPNode.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DSPNode.m; sourceTree = "<group>"; };
8DC2EF5A0486A6940098B216 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist; path = Info.plist; sourceTree = "<group>"; };
8DC2EF5B0486A6940098B216 /* CogAudio.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = CogAudio.framework; sourceTree = BUILT_PRODUCTS_DIR; };
8E8D3D2D0CBAEE6E00135C1B /* AudioContainer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioContainer.h; sourceTree = "<group>"; };
8E8D3D2E0CBAEE6E00135C1B /* AudioContainer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AudioContainer.m; sourceTree = "<group>"; };
8EC1225D0B993BD500C5B3AD /* ConverterNode.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = ConverterNode.h; sourceTree = "<group>"; };
8EC1225E0B993BD500C5B3AD /* ConverterNode.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = ConverterNode.m; sourceTree = "<group>"; };
B0575F2C0D687A0800411D77 /* Helper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Helper.h; sourceTree = "<group>"; };
B0575F2F0D687A4000411D77 /* Helper.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = Helper.m; sourceTree = "<group>"; };
8EC1225D0B993BD500C5B3AD /* Converter.h */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.h; path = Converter.h; sourceTree = "<group>"; };
8EC1225E0B993BD500C5B3AD /* Converter.m */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.c.objc; path = Converter.m; sourceTree = "<group>"; };
D2F7E79907B2D74100F64583 /* CoreData.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreData.framework; path = /System/Library/Frameworks/CoreData.framework; sourceTree = "<absolute>"; };
/* End PBXFileReference section */
@ -256,17 +113,11 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
8328995A27CB51C900D7F028 /* Security.framework in Frameworks */,
83725A9127AA16D50003F694 /* AVFoundation.framework in Frameworks */,
83F9FFF82D6EC43900026576 /* libsoxr.0.dylib in Frameworks */,
8DC2EF570486A6940098B216 /* Cocoa.framework in Frameworks */,
8350416D28646149006B32CC /* CoreMedia.framework in Frameworks */,
83725A9027AA16C90003F694 /* Accelerate.framework in Frameworks */,
17D21DAD0B8BE76800D1EBDE /* AudioToolbox.framework in Frameworks */,
17D21DAE0B8BE76800D1EBDE /* AudioUnit.framework in Frameworks */,
17D21DAF0B8BE76800D1EBDE /* CoreAudio.framework in Frameworks */,
17D21DB00B8BE76800D1EBDE /* CoreAudioKit.framework in Frameworks */,
838A33722D06A97D00D0D770 /* librubberband.3.dylib in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@ -289,7 +140,6 @@
089C1665FE841158C02AAC07 /* Resources */,
0867D69AFE84028FC02AAC07 /* External Frameworks and Libraries */,
034768DFFF38A50411DB9C8B /* Products */,
83725A8F27AA16C90003F694 /* Frameworks */,
);
name = CogAudio;
sourceTree = "<group>";
@ -314,33 +164,22 @@
08FB77AEFE84172EC02AAC07 /* Classes */ = {
isa = PBXGroup;
children = (
83B74280289E027F005AAC28 /* CogAudio-Bridging-Header.h */,
8377C64F27B8CAAB00E8BC0F /* Visualization */,
17F94DDC0B8D101100A34E87 /* Plugin.h */,
17D21EBB0B8BF44000D1EBDE /* AudioPlayer.h */,
17D21EBC0B8BF44000D1EBDE /* AudioPlayer.m */,
8E8D3D2D0CBAEE6E00135C1B /* AudioContainer.h */,
8E8D3D2E0CBAEE6E00135C1B /* AudioContainer.m */,
17A2D3C30B8D1D37000778C4 /* AudioDecoder.h */,
17A2D3C40B8D1D37000778C4 /* AudioDecoder.m */,
17C940210B900909008627D6 /* AudioMetadataReader.h */,
17C940220B900909008627D6 /* AudioMetadataReader.m */,
07DB5F3C0ED353A900C2E3EF /* AudioMetadataWriter.h */,
07DB5F3D0ED353A900C2E3EF /* AudioMetadataWriter.m */,
17B6192E0B909BC300BC003F /* AudioPropertiesReader.h */,
17B6192F0B909BC300BC003F /* AudioPropertiesReader.m */,
17ADB13A0B97926D00257CA2 /* AudioSource.h */,
17ADB13B0B97926D00257CA2 /* AudioSource.m */,
839366651815923C006DD712 /* CogPluginMulti.h */,
839366661815923C006DD712 /* CogPluginMulti.m */,
17F94DD30B8D0F7000A34E87 /* PluginController.h */,
17F94DD40B8D0F7000A34E87 /* PluginController.mm */,
17F94DD40B8D0F7000A34E87 /* PluginController.m */,
17D21C750B8BE4BA00D1EBDE /* Chain */,
17D21C9B0B8BE4BA00D1EBDE /* Output */,
839E56F6287974A100DFB5F4 /* SandboxBroker.h */,
17D21C9E0B8BE4BA00D1EBDE /* Status.h */,
B0575F2C0D687A0800411D77 /* Helper.h */,
B0575F2F0D687A4000411D77 /* Helper.m */,
17D21CD80B8BE5B400D1EBDE /* ThirdParty */,
17D21CDC0B8BE5B400D1EBDE /* Utils */,
);
@ -358,11 +197,8 @@
1058C7B2FEA5585E11CA2CBB /* Other Frameworks */ = {
isa = PBXGroup;
children = (
838A33712D06A97D00D0D770 /* librubberband.3.dylib */,
83725A7B27AA0D8A0003F694 /* Accelerate.framework */,
17D21DAA0B8BE76800D1EBDE /* AudioUnit.framework */,
17D21DA90B8BE76800D1EBDE /* AudioToolbox.framework */,
83725A7C27AA0D8E0003F694 /* AVFoundation.framework */,
17D21DAA0B8BE76800D1EBDE /* AudioUnit.framework */,
17D21DAB0B8BE76800D1EBDE /* CoreAudio.framework */,
17D21DAC0B8BE76800D1EBDE /* CoreAudioKit.framework */,
0867D6A5FE840307C02AAC07 /* AppKit.framework */,
@ -375,25 +211,16 @@
17D21C750B8BE4BA00D1EBDE /* Chain */ = {
isa = PBXGroup;
children = (
83A349692D5C3F430096D530 /* DSP */,
834FD4EA27AF8F380063BC83 /* AudioChunk.h */,
834FD4EC27AF91220063BC83 /* AudioChunk.m */,
17D21C760B8BE4BA00D1EBDE /* BufferChain.h */,
17D21C770B8BE4BA00D1EBDE /* BufferChain.m */,
834FD4EE27AF93680063BC83 /* ChunkList.h */,
834FD4EF27AF93680063BC83 /* ChunkList.m */,
8EC1225D0B993BD500C5B3AD /* ConverterNode.h */,
8EC1225E0B993BD500C5B3AD /* ConverterNode.m */,
8EC1225D0B993BD500C5B3AD /* Converter.h */,
8EC1225E0B993BD500C5B3AD /* Converter.m */,
17D21C7A0B8BE4BA00D1EBDE /* InputNode.h */,
17D21C7B0B8BE4BA00D1EBDE /* InputNode.m */,
17D21C7C0B8BE4BA00D1EBDE /* Node.h */,
17D21C7D0B8BE4BA00D1EBDE /* Node.m */,
17D21C7E0B8BE4BA00D1EBDE /* OutputNode.h */,
17D21C7F0B8BE4BA00D1EBDE /* OutputNode.m */,
83FFED502D5B08BC0044CCAF /* DSPNode.h */,
83FFED522D5B09320044CCAF /* DSPNode.m */,
839E899D2D5DB9D500A13526 /* VisualizationNode.h */,
839E899F2D5DBA1700A13526 /* VisualizationNode.m */,
);
path = Chain;
sourceTree = "<group>";
@ -401,8 +228,8 @@
17D21C9B0B8BE4BA00D1EBDE /* Output */ = {
isa = PBXGroup;
children = (
835DD2662ACAF1D90057E319 /* OutputCoreAudio.h */,
835DD2652ACAF1D90057E319 /* OutputCoreAudio.m */,
17D21C9C0B8BE4BA00D1EBDE /* OutputCoreAudio.h */,
17D21C9D0B8BE4BA00D1EBDE /* OutputCoreAudio.m */,
);
path = Output;
sourceTree = "<group>";
@ -410,37 +237,28 @@
17D21CD80B8BE5B400D1EBDE /* ThirdParty */ = {
isa = PBXGroup;
children = (
83F9FFF52D6EC43900026576 /* soxr */,
835DD2692ACAF5AD0057E319 /* lvqcl */,
834A41A4287A90AB00EB9D9B /* fsurround */,
839E56E02879450300DFB5F4 /* hrtf */,
831A50152865A8800049CFE4 /* rsstate.cpp */,
831A50172865A8B30049CFE4 /* rsstate.h */,
831A50132865A7FD0049CFE4 /* rsstate.hpp */,
8377C64A27B8C51500E8BC0F /* deadbeef */,
835C88AE279811A500E28EAE /* hdcd */,
17D21DC40B8BE79700D1EBDE /* CoreAudioUtils */,
17D21CD90B8BE5B400D1EBDE /* VirtualRingBuffer */,
);
path = ThirdParty;
sourceTree = "<group>";
};
17D21CD90B8BE5B400D1EBDE /* VirtualRingBuffer */ = {
isa = PBXGroup;
children = (
17D21CDA0B8BE5B400D1EBDE /* VirtualRingBuffer.h */,
17D21CDB0B8BE5B400D1EBDE /* VirtualRingBuffer.m */,
);
path = VirtualRingBuffer;
sourceTree = "<group>";
};
17D21CDC0B8BE5B400D1EBDE /* Utils */ = {
isa = PBXGroup;
children = (
839065F22853338700636FBB /* dsd2float.h */,
8328995527CB51B700D7F028 /* SHA256Digest.h */,
8328995627CB51B700D7F028 /* SHA256Digest.m */,
8328995227CB511000D7F028 /* RedundantPlaylistDataStore.h */,
8328995127CB510F00D7F028 /* RedundantPlaylistDataStore.m */,
835FAC5C27BCA14D00BA8562 /* BadSampleCleaner.h */,
835FAC5D27BCA14D00BA8562 /* BadSampleCleaner.m */,
8399CF2A27B5D1D4008751F1 /* NSDictionary+Merge.h */,
8399CF2B27B5D1D4008751F1 /* NSDictionary+Merge.m */,
8347C73F2796C58800FA8A7D /* NSFileHandle+CreateFile.h */,
8347C7402796C58800FA8A7D /* NSFileHandle+CreateFile.m */,
8384912618080FF100E7332D /* Logging.h */,
17D21CF10B8BE5EF00D1EBDE /* CogSemaphore.h */,
17D21CF20B8BE5EF00D1EBDE /* CogSemaphore.m */,
17D21CDD0B8BE5B400D1EBDE /* DBLog.h */,
17D21CDE0B8BE5B400D1EBDE /* DBLog.m */,
17D21CF10B8BE5EF00D1EBDE /* Semaphore.h */,
17D21CF20B8BE5EF00D1EBDE /* Semaphore.m */,
);
path = Utils;
sourceTree = "<group>";
@ -462,135 +280,6 @@
name = "Other Sources";
sourceTree = "<group>";
};
834A41A4287A90AB00EB9D9B /* fsurround */ = {
isa = PBXGroup;
children = (
834A41A5287A90AB00EB9D9B /* freesurround_decoder.h */,
834A41A6287A90AB00EB9D9B /* freesurround_decoder.cpp */,
834A41A7287A90AB00EB9D9B /* channelmaps.cpp */,
834A41A8287A90AB00EB9D9B /* channelmaps.h */,
);
path = fsurround;
sourceTree = "<group>";
};
835C88AE279811A500E28EAE /* hdcd */ = {
isa = PBXGroup;
children = (
835C88AF279811A500E28EAE /* hdcd_decode2.h */,
835C88B0279811A500E28EAE /* hdcd_decode2.c */,
);
path = hdcd;
sourceTree = "<group>";
};
835DD2692ACAF5AD0057E319 /* lvqcl */ = {
isa = PBXGroup;
children = (
835DD26A2ACAF5AD0057E319 /* License */,
835DD26D2ACAF5AD0057E319 /* lpc.h */,
835DD26E2ACAF5AD0057E319 /* util.h */,
835DD26F2ACAF5AD0057E319 /* lpc.c */,
);
path = lvqcl;
sourceTree = "<group>";
};
835DD26A2ACAF5AD0057E319 /* License */ = {
isa = PBXGroup;
children = (
835DD26B2ACAF5AD0057E319 /* LICENSE.LGPL */,
835DD26C2ACAF5AD0057E319 /* License.txt */,
);
path = License;
sourceTree = "<group>";
};
83725A8F27AA16C90003F694 /* Frameworks */ = {
isa = PBXGroup;
children = (
8350416C28646149006B32CC /* CoreMedia.framework */,
8328995927CB51C900D7F028 /* Security.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
8377C64A27B8C51500E8BC0F /* deadbeef */ = {
isa = PBXGroup;
children = (
8377C64D27B8C54400E8BC0F /* fft.h */,
8377C64B27B8C51500E8BC0F /* fft_accelerate.c */,
);
path = deadbeef;
sourceTree = "<group>";
};
8377C64F27B8CAAB00E8BC0F /* Visualization */ = {
isa = PBXGroup;
children = (
833442402D6EFA6700C51D38 /* VisualizationController.h */,
833442412D6EFA6700C51D38 /* VisualizationController.m */,
);
path = Visualization;
sourceTree = "<group>";
};
839E56E02879450300DFB5F4 /* hrtf */ = {
isa = PBXGroup;
children = (
839E56E22879450300DFB5F4 /* Endianness.h */,
839E56E32879450300DFB5F4 /* HrtfData.cpp */,
839E56E12879450300DFB5F4 /* HrtfData.h */,
839E56E928794F6300DFB5F4 /* HrtfTypes.h */,
839E56E42879450300DFB5F4 /* IHrtfData.h */,
);
path = hrtf;
sourceTree = "<group>";
};
83A349692D5C3F430096D530 /* DSP */ = {
isa = PBXGroup;
children = (
833738ED2D5EA5B700278628 /* Downmix.h */,
833738EE2D5EA5B700278628 /* Downmix.m */,
83F8431E2D5C6272008C123B /* HeadphoneFilter.h */,
83F8431F2D5C6272008C123B /* HeadphoneFilter.mm */,
83A349702D5C41810096D530 /* FSurroundFilter.h */,
83A349712D5C41810096D530 /* FSurroundFilter.mm */,
83A349672D5C3F430096D530 /* DSPRubberbandNode.h */,
83A349682D5C3F430096D530 /* DSPRubberbandNode.m */,
83A3496C2D5C40490096D530 /* DSPFSurroundNode.h */,
83A3496E2D5C405E0096D530 /* DSPFSurroundNode.m */,
83A349742D5C50A10096D530 /* DSPHRTFNode.h */,
83A349762D5C50B20096D530 /* DSPHRTFNode.m */,
83F843222D5C66DA008C123B /* DSPEqualizerNode.h */,
83F843242D5C66E9008C123B /* DSPEqualizerNode.m */,
833738E92D5EA52500278628 /* DSPDownmixNode.h */,
833738EB2D5EA53500278628 /* DSPDownmixNode.m */,
);
path = DSP;
sourceTree = "<group>";
};
83F9FFF12D6EC43900026576 /* include */ = {
isa = PBXGroup;
children = (
83F9FFF02D6EC43900026576 /* soxr.h */,
);
path = include;
sourceTree = "<group>";
};
83F9FFF32D6EC43900026576 /* lib */ = {
isa = PBXGroup;
children = (
83F9FFF22D6EC43900026576 /* libsoxr.0.dylib */,
);
path = lib;
sourceTree = "<group>";
};
83F9FFF52D6EC43900026576 /* soxr */ = {
isa = PBXGroup;
children = (
83F9FFF12D6EC43900026576 /* include */,
83F9FFF32D6EC43900026576 /* lib */,
83F9FFF42D6EC43900026576 /* README.md */,
);
name = soxr;
path = ../ThirdParty/soxr;
sourceTree = SOURCE_ROOT;
};
/* End PBXGroup section */
/* Begin PBXHeadersBuildPhase section */
@ -598,82 +287,45 @@
isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647;
files = (
833442422D6EFA6700C51D38 /* VisualizationController.h in Headers */,
833738F02D5EA5B700278628 /* Downmix.h in Headers */,
834FD4EB27AF8F380063BC83 /* AudioChunk.h in Headers */,
83F843202D5C6272008C123B /* HeadphoneFilter.h in Headers */,
83A349732D5C41810096D530 /* FSurroundFilter.h in Headers */,
839E56E82879450300DFB5F4 /* IHrtfData.h in Headers */,
17D21CA10B8BE4BA00D1EBDE /* BufferChain.h in Headers */,
831A50142865A7FD0049CFE4 /* rsstate.hpp in Headers */,
835DD2682ACAF1D90057E319 /* OutputCoreAudio.h in Headers */,
834A41AC287A90AB00EB9D9B /* channelmaps.h in Headers */,
83A3496D2D5C40490096D530 /* DSPFSurroundNode.h in Headers */,
83A3496B2D5C3F430096D530 /* DSPRubberbandNode.h in Headers */,
17D21CA50B8BE4BA00D1EBDE /* InputNode.h in Headers */,
833738EA2D5EA52500278628 /* DSPDownmixNode.h in Headers */,
83F843232D5C66DA008C123B /* DSPEqualizerNode.h in Headers */,
834A41A9287A90AB00EB9D9B /* freesurround_decoder.h in Headers */,
834FD4F027AF93680063BC83 /* ChunkList.h in Headers */,
835DD2732ACAF5AD0057E319 /* util.h in Headers */,
17D21CA70B8BE4BA00D1EBDE /* Node.h in Headers */,
8399CF2C27B5D1D5008751F1 /* NSDictionary+Merge.h in Headers */,
17D21CA90B8BE4BA00D1EBDE /* OutputNode.h in Headers */,
8EC1225F0B993BD500C5B3AD /* ConverterNode.h in Headers */,
8328995427CB511000D7F028 /* RedundantPlaylistDataStore.h in Headers */,
839E56E52879450300DFB5F4 /* HrtfData.h in Headers */,
83FFED512D5B08BC0044CCAF /* DSPNode.h in Headers */,
839E899E2D5DB9D500A13526 /* VisualizationNode.h in Headers */,
83A349752D5C50A10096D530 /* DSPHRTFNode.h in Headers */,
83F9FFF62D6EC43900026576 /* soxr.h in Headers */,
17D21CC50B8BE4BA00D1EBDE /* OutputCoreAudio.h in Headers */,
17D21CC70B8BE4BA00D1EBDE /* Status.h in Headers */,
17D21CF30B8BE5EF00D1EBDE /* CogSemaphore.h in Headers */,
839E56E62879450300DFB5F4 /* Endianness.h in Headers */,
17D21CDF0B8BE5B400D1EBDE /* VirtualRingBuffer.h in Headers */,
17D21CE10B8BE5B400D1EBDE /* DBLog.h in Headers */,
17D21CF30B8BE5EF00D1EBDE /* Semaphore.h in Headers */,
17D21DC70B8BE79700D1EBDE /* CoreAudioUtils.h in Headers */,
835DD2722ACAF5AD0057E319 /* lpc.h in Headers */,
17D21EBD0B8BF44000D1EBDE /* AudioPlayer.h in Headers */,
831A50182865A8B30049CFE4 /* rsstate.h in Headers */,
17F94DD50B8D0F7000A34E87 /* PluginController.h in Headers */,
17F94DDD0B8D101100A34E87 /* Plugin.h in Headers */,
8328995727CB51B700D7F028 /* SHA256Digest.h in Headers */,
17A2D3C50B8D1D37000778C4 /* AudioDecoder.h in Headers */,
8347C7412796C58800FA8A7D /* NSFileHandle+CreateFile.h in Headers */,
83B74281289E027F005AAC28 /* CogAudio-Bridging-Header.h in Headers */,
17C940230B900909008627D6 /* AudioMetadataReader.h in Headers */,
839E56F7287974A100DFB5F4 /* SandboxBroker.h in Headers */,
839065F32853338700636FBB /* dsd2float.h in Headers */,
17B619300B909BC300BC003F /* AudioPropertiesReader.h in Headers */,
839366671815923C006DD712 /* CogPluginMulti.h in Headers */,
17ADB13C0B97926D00257CA2 /* AudioSource.h in Headers */,
835C88B1279811A500E28EAE /* hdcd_decode2.h in Headers */,
8384912718080FF100E7332D /* Logging.h in Headers */,
8377C64E27B8C54400E8BC0F /* fft.h in Headers */,
835FAC5E27BCA14D00BA8562 /* BadSampleCleaner.h in Headers */,
8E8D3D2F0CBAEE6E00135C1B /* AudioContainer.h in Headers */,
B0575F2D0D687A0800411D77 /* Helper.h in Headers */,
07DB5F3E0ED353A900C2E3EF /* AudioMetadataWriter.h in Headers */,
839E56EA28794F6300DFB5F4 /* HrtfTypes.h in Headers */,
8EC1225F0B993BD500C5B3AD /* Converter.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXHeadersBuildPhase section */
/* Begin PBXNativeTarget section */
8DC2EF4F0486A6940098B216 /* CogAudio */ = {
8DC2EF4F0486A6940098B216 /* CogAudio Framework */ = {
isa = PBXNativeTarget;
buildConfigurationList = 1DEB91AD08733DA50010E9CD /* Build configuration list for PBXNativeTarget "CogAudio" */;
buildConfigurationList = 1DEB91AD08733DA50010E9CD /* Build configuration list for PBXNativeTarget "CogAudio Framework" */;
buildPhases = (
17D21D2B0B8BE6A200D1EBDE /* CopyFiles */,
8DC2EF500486A6940098B216 /* Headers */,
8DC2EF540486A6940098B216 /* Sources */,
8DC2EF560486A6940098B216 /* Frameworks */,
8DC2EF520486A6940098B216 /* Resources */,
83725A8D27AA0DDB0003F694 /* CopyFiles */,
);
buildRules = (
);
dependencies = (
);
name = CogAudio;
name = "CogAudio Framework";
productInstallPath = "$(HOME)/Library/Frameworks";
productName = CogAudio;
productReference = 8DC2EF5B0486A6940098B216 /* CogAudio.framework */;
@ -684,30 +336,13 @@
/* Begin PBXProject section */
0867D690FE84028FC02AAC07 /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = YES;
LastUpgradeCheck = 1620;
TargetAttributes = {
8DC2EF4F0486A6940098B216 = {
LastSwiftMigration = 1330;
ProvisioningStyle = Manual;
};
};
};
buildConfigurationList = 1DEB91B108733DA50010E9CD /* Build configuration list for PBXProject "CogAudio" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = en;
hasScannedForEncodings = 1;
knownRegions = (
en,
Base,
);
mainGroup = 0867D691FE84028FC02AAC07 /* CogAudio */;
productRefGroup = 034768DFFF38A50411DB9C8B /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
8DC2EF4F0486A6940098B216 /* CogAudio */,
8DC2EF4F0486A6940098B216 /* CogAudio Framework */,
);
};
/* End PBXProject section */
@ -728,48 +363,21 @@
buildActionMask = 2147483647;
files = (
17D21CA20B8BE4BA00D1EBDE /* BufferChain.m in Sources */,
83A349772D5C50B20096D530 /* DSPHRTFNode.m in Sources */,
17D21CA60B8BE4BA00D1EBDE /* InputNode.m in Sources */,
83A3496A2D5C3F430096D530 /* DSPRubberbandNode.m in Sources */,
8399CF2D27B5D1D5008751F1 /* NSDictionary+Merge.m in Sources */,
83F843252D5C66E9008C123B /* DSPEqualizerNode.m in Sources */,
834A41AB287A90AB00EB9D9B /* channelmaps.cpp in Sources */,
833738EC2D5EA53500278628 /* DSPDownmixNode.m in Sources */,
833442432D6EFA6700C51D38 /* VisualizationController.m in Sources */,
831A50162865A8800049CFE4 /* rsstate.cpp in Sources */,
17D21CA80B8BE4BA00D1EBDE /* Node.m in Sources */,
17D21CAA0B8BE4BA00D1EBDE /* OutputNode.m in Sources */,
835C88B2279811A500E28EAE /* hdcd_decode2.c in Sources */,
835FAC5F27BCA14D00BA8562 /* BadSampleCleaner.m in Sources */,
834FD4ED27AF91220063BC83 /* AudioChunk.m in Sources */,
833738EF2D5EA5B700278628 /* Downmix.m in Sources */,
17D21CF40B8BE5EF00D1EBDE /* CogSemaphore.m in Sources */,
839E89A02D5DBA1700A13526 /* VisualizationNode.m in Sources */,
8347C7422796C58800FA8A7D /* NSFileHandle+CreateFile.m in Sources */,
83A3496F2D5C405E0096D530 /* DSPFSurroundNode.m in Sources */,
17D21CC60B8BE4BA00D1EBDE /* OutputCoreAudio.m in Sources */,
17D21CE00B8BE5B400D1EBDE /* VirtualRingBuffer.m in Sources */,
17D21CE20B8BE5B400D1EBDE /* DBLog.m in Sources */,
17D21CF40B8BE5EF00D1EBDE /* Semaphore.m in Sources */,
17D21DC80B8BE79700D1EBDE /* CoreAudioUtils.m in Sources */,
8328995327CB511000D7F028 /* RedundantPlaylistDataStore.m in Sources */,
8377C64C27B8C51500E8BC0F /* fft_accelerate.c in Sources */,
839366681815923C006DD712 /* CogPluginMulti.m in Sources */,
17D21EBE0B8BF44000D1EBDE /* AudioPlayer.m in Sources */,
17F94DD60B8D0F7000A34E87 /* PluginController.mm in Sources */,
839E56E72879450300DFB5F4 /* HrtfData.cpp in Sources */,
17F94DD60B8D0F7000A34E87 /* PluginController.m in Sources */,
17A2D3C60B8D1D37000778C4 /* AudioDecoder.m in Sources */,
8328995827CB51B700D7F028 /* SHA256Digest.m in Sources */,
17C940240B900909008627D6 /* AudioMetadataReader.m in Sources */,
17B619310B909BC300BC003F /* AudioPropertiesReader.m in Sources */,
83F843212D5C6272008C123B /* HeadphoneFilter.mm in Sources */,
17ADB13D0B97926D00257CA2 /* AudioSource.m in Sources */,
834FD4F127AF93680063BC83 /* ChunkList.m in Sources */,
83FFED532D5B09320044CCAF /* DSPNode.m in Sources */,
8EC122600B993BD500C5B3AD /* ConverterNode.m in Sources */,
835DD2672ACAF1D90057E319 /* OutputCoreAudio.m in Sources */,
83A349722D5C41810096D530 /* FSurroundFilter.mm in Sources */,
8E8D3D300CBAEE6E00135C1B /* AudioContainer.m in Sources */,
B0575F300D687A4000411D77 /* Helper.m in Sources */,
835DD2742ACAF5AD0057E319 /* lpc.c in Sources */,
834A41AA287A90AB00EB9D9B /* freesurround_decoder.cpp in Sources */,
07DB5F3F0ED353A900C2E3EF /* AudioMetadataWriter.m in Sources */,
8EC122600B993BD500C5B3AD /* Converter.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@ -779,76 +387,46 @@
1DEB91AE08733DA50010E9CD /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
CLANG_CXX_LANGUAGE_STANDARD = "c++17";
CLANG_ENABLE_MODULES = YES;
COMBINE_HIDPI_IMAGES = YES;
COPY_PHASE_STRIP = NO;
DEAD_CODE_STRIPPING = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
ENABLE_MODULE_VERIFIER = YES;
FRAMEWORK_SEARCH_PATHS = "";
FRAMEWORK_VERSION = A;
GCC_DYNAMIC_NO_PIC = NO;
GCC_ENABLE_OBJC_EXCEPTIONS = YES;
GCC_ENABLE_FIX_AND_CONTINUE = YES;
GCC_MODEL_TUNING = G5;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = CogAudio_Prefix.pch;
GCC_PREPROCESSOR_DEFINITIONS = "DEBUG=1";
HEADER_SEARCH_PATHS = (
../ThirdParty/soxr/include,
../ThirdParty/rubberband/include,
);
INFOPLIST_FILE = Info.plist;
INSTALL_PATH = "@executable_path/../Frameworks";
LD_RUNPATH_SEARCH_PATHS = "@loader_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
../ThirdParty/soxr/lib,
../ThirdParty/rubberband/lib,
);
MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 c++17";
OTHER_LDFLAGS = "";
PRODUCT_BUNDLE_IDENTIFIER = org.cogx.cogaudio;
PRODUCT_NAME = CogAudio;
SKIP_INSTALL = YES;
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 5.0;
WARNING_LDFLAGS = "";
WRAPPER_EXTENSION = framework;
ZERO_LINK = YES;
};
name = Debug;
};
1DEB91AF08733DA50010E9CD /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
CLANG_CXX_LANGUAGE_STANDARD = "c++17";
CLANG_ENABLE_MODULES = YES;
COMBINE_HIDPI_IMAGES = YES;
DEAD_CODE_STRIPPING = YES;
ARCHS = (
ppc,
i386,
);
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
ENABLE_MODULE_VERIFIER = YES;
FRAMEWORK_SEARCH_PATHS = "";
FRAMEWORK_VERSION = A;
GCC_ENABLE_OBJC_EXCEPTIONS = YES;
GCC_GENERATE_DEBUGGING_SYMBOLS = NO;
GCC_MODEL_TUNING = G5;
GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = CogAudio_Prefix.pch;
GCC_PREPROCESSOR_DEFINITIONS = "";
HEADER_SEARCH_PATHS = (
../ThirdParty/soxr/include,
../ThirdParty/rubberband/include,
);
INFOPLIST_FILE = Info.plist;
INSTALL_PATH = "@executable_path/../Frameworks";
LD_RUNPATH_SEARCH_PATHS = "@loader_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
../ThirdParty/soxr/lib,
../ThirdParty/rubberband/lib,
);
MODULE_VERIFIER_SUPPORTED_LANGUAGE_STANDARDS = "gnu17 c++17";
OTHER_LDFLAGS = "";
PRODUCT_BUNDLE_IDENTIFIER = org.cogx.cogaudio;
PRODUCT_NAME = CogAudio;
SKIP_INSTALL = YES;
SWIFT_VERSION = 5.0;
WARNING_LDFLAGS = "";
WRAPPER_EXTENSION = framework;
};
@ -857,52 +435,10 @@
1DEB91B208733DA50010E9CD /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEAD_CODE_STRIPPING = YES;
DEFINES_MODULE = YES;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_NO_COMMON_BLOCKS = YES;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"HAVE_CONFIG_H=1",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.13;
ONLY_ACTIVE_ARCH = YES;
OTHER_CFLAGS = "-Wframe-larger-than=4000";
OTHER_CPLUSPLUSFLAGS = "-Wframe-larger-than=16000";
PRODUCT_MODULE_NAME = CogAudio;
SDKROOT = macosx;
SWIFT_OBJC_BRIDGING_HEADER = "CogAudio-Bridging-Header.h";
PREBINDING = NO;
SDKROOT = /Developer/SDKs/MacOSX10.4u.sdk;
SYMROOT = ../build;
};
name = Debug;
@ -910,48 +446,14 @@
1DEB91B308733DA50010E9CD /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
DEAD_CODE_STRIPPING = YES;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEFINES_MODULE = YES;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_NO_COMMON_BLOCKS = YES;
GCC_PREPROCESSOR_DEFINITIONS = "HAVE_CONFIG_H=1";
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
ARCHS = (
ppc,
i386,
);
GCC_WARN_ABOUT_RETURN_TYPE = YES;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.13;
OTHER_CFLAGS = "-Wframe-larger-than=4000";
OTHER_CPLUSPLUSFLAGS = "-Wframe-larger-than=16000";
PRODUCT_MODULE_NAME = CogAudio;
SDKROOT = macosx;
SWIFT_COMPILATION_MODE = wholemodule;
SWIFT_OBJC_BRIDGING_HEADER = "CogAudio-Bridging-Header.h";
PREBINDING = NO;
SDKROOT = /Developer/SDKs/MacOSX10.4u.sdk;
SYMROOT = ../build;
};
name = Release;
@ -959,7 +461,7 @@
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
1DEB91AD08733DA50010E9CD /* Build configuration list for PBXNativeTarget "CogAudio" */ = {
1DEB91AD08733DA50010E9CD /* Build configuration list for PBXNativeTarget "CogAudio Framework" */ = {
isa = XCConfigurationList;
buildConfigurations = (
1DEB91AE08733DA50010E9CD /* Debug */,

View file

@ -1,76 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1620"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "8DC2EF4F0486A6940098B216"
BuildableName = "CogAudio.framework"
BlueprintName = "CogAudio"
ReferencedContainer = "container:CogAudio.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "8DC2EF4F0486A6940098B216"
BuildableName = "CogAudio.framework"
BlueprintName = "CogAudio"
ReferencedContainer = "container:CogAudio.xcodeproj">
</BuildableReference>
</MacroExpansion>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "8DC2EF4F0486A6940098B216"
BuildableName = "CogAudio.framework"
BlueprintName = "CogAudio"
ReferencedContainer = "container:CogAudio.xcodeproj">
</BuildableReference>
</MacroExpansion>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>

View file

@ -3,6 +3,5 @@
//
#ifdef __OBJC__
#import <AssertMacros.h>
#import <Cocoa/Cocoa.h>
#endif

View file

@ -1,42 +0,0 @@
//
// CogPluginMulti.h
// CogAudio
//
// Created by Christopher Snowhill on 10/21/13.
//
//
#import "Plugin.h"
#import <Cocoa/Cocoa.h>
// Decoder facade: tries a prioritized list of decoder plugin classes and
// delegates all CogDecoder calls to the first one that opens successfully.
@interface CogDecoderMulti : NSObject <CogDecoder> {
NSArray *theDecoders;
id<CogDecoder> theDecoder;
BOOL observersAdded;
}
// decoders: array of decoder class-name strings; sorted by +priority on init.
- (id)initWithDecoders:(NSArray *)decoders;
@end
// Container facade: queries each container plugin class in priority order.
@interface CogContainerMulti : NSObject {
}
+ (NSArray *)urlsForContainerURL:(NSURL *)url containers:(NSArray *)containers;
+ (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url containers:(NSArray *)containers;
@end
// Metadata-reader facade: first reader class returning non-empty data wins.
@interface CogMetadataReaderMulti : NSObject {
}
+ (NSDictionary *)metadataForURL:(NSURL *)url readers:(NSArray *)readers;
@end
// Properties-reader facade: first reader class returning non-empty data wins.
@interface CogPropertiesReaderMulti : NSObject {
}
+ (NSDictionary *)propertiesForSource:(id<CogSource>)source readers:(NSArray *)readers;
@end

View file

@ -1,219 +0,0 @@
//
// CogPluginMulti.m
// CogAudio
//
// Created by Christopher Snowhill on 10/21/13.
//
//
#import "CogPluginMulti.h"
// Returns a copy of `theClasses` (an array of class-name strings) ordered so
// that classes with a higher +priority value come first. Equal priorities
// keep their relative comparator result of NSOrderedSame.
NSArray *sortClassesByPriority(NSArray *theClasses) {
NSMutableArray *ordered = [NSMutableArray arrayWithArray:theClasses];
[ordered sortUsingComparator:
         ^NSComparisonResult(id lhs, id rhs) {
	         // Resolve each name to its class and compare advertised priorities.
	         float lhsPriority = [NSClassFromString((NSString *)lhs) priority];
	         float rhsPriority = [NSClassFromString((NSString *)rhs) priority];
	         if(lhsPriority > rhsPriority) return NSOrderedAscending;
	         if(lhsPriority < rhsPriority) return NSOrderedDescending;
	         return NSOrderedSame;
         }];
return ordered;
}
// Private KVO plumbing: the wrapper observes the active decoder's
// "properties"/"metadata" keys and re-broadcasts changes on itself.
@interface CogDecoderMulti (Private)
- (void)registerObservers;
- (void)removeObservers;
- (void)observeValueForKeyPath:(NSString *)keyPath
ofObject:(id)object
change:(NSDictionary *)change
context:(void *)context;
@end
// Delegating decoder: holds a priority-sorted list of decoder class names
// and forwards every CogDecoder call to the first instance that opens the
// source. KVO changes on the active decoder are re-broadcast on self.
@implementation CogDecoderMulti
// Unique context pointer so we only handle observations we registered.
static void *kCogDecoderMultiContext = &kCogDecoderMultiContext;
// The facade matches by trial-opening, not by static type lists.
+ (NSArray *)mimeTypes {
return nil;
}
+ (NSArray *)fileTypes {
return nil;
}
// Negative priority keeps this wrapper below every concrete decoder.
+ (float)priority {
return -1.0;
}
+ (NSArray *)fileTypeAssociations {
return nil;
}
// decoders: array of class-name strings, sorted descending by +priority.
- (id)initWithDecoders:(NSArray *)decoders {
self = [super init];
if(self) {
theDecoders = sortClassesByPriority(decoders);
theDecoder = nil;
}
return self;
}
- (NSDictionary *)properties {
if(theDecoder != nil) return [theDecoder properties];
return nil;
}
// Note: returns an empty dictionary (not nil) when no decoder is active.
- (NSDictionary *)metadata {
if(theDecoder != nil) return [theDecoder metadata];
return @{};
}
- (AudioChunk *)readAudio {
if(theDecoder != nil) return [theDecoder readAudio];
return nil;
}
// Tries each decoder class in priority order. Observers are attached before
// the open attempt and detached again on failure; the source is rewound
// between attempts so every candidate reads from the start.
- (BOOL)open:(id<CogSource>)source {
for(NSString *classString in theDecoders) {
Class decoder = NSClassFromString(classString);
theDecoder = [[decoder alloc] init];
[self registerObservers];
if([theDecoder open:source])
return YES;
[self removeObservers];
// HTTP reader supports limited rewinding
[source seek:0 whence:SEEK_SET];
}
theDecoder = nil;
return NO;
}
// Returns -1 when no decoder is active.
- (long)seek:(long)frame {
if(theDecoder != nil) return [theDecoder seek:frame];
return -1;
}
// Detaches observers before closing so no KVO fires on a closed decoder.
- (void)close {
if(theDecoder != nil) {
[self removeObservers];
[theDecoder close];
theDecoder = nil;
}
}
- (void)dealloc {
[self close];
}
// Guarded by observersAdded so repeated calls never double-register.
- (void)registerObservers {
if(!observersAdded) {
[theDecoder addObserver:self
forKeyPath:@"properties"
options:(NSKeyValueObservingOptionNew)
context:kCogDecoderMultiContext];
[theDecoder addObserver:self
forKeyPath:@"metadata"
options:(NSKeyValueObservingOptionNew)
context:kCogDecoderMultiContext];
observersAdded = YES;
}
}
// Flag is cleared first so a failure mid-removal cannot cause a re-remove.
- (void)removeObservers {
if(observersAdded) {
observersAdded = NO;
[theDecoder removeObserver:self forKeyPath:@"properties" context:kCogDecoderMultiContext];
[theDecoder removeObserver:self forKeyPath:@"metadata" context:kCogDecoderMultiContext];
}
}
// Forwarded only when the active decoder implements the optional selector.
- (BOOL)setTrack:(NSURL *)track {
if(theDecoder != nil && [theDecoder respondsToSelector:@selector(setTrack:)]) return [theDecoder setTrack:track];
return NO;
}
// Re-broadcasts the decoder's property/metadata change on self so callers
// observing this facade see the same key paths fire.
- (void)observeValueForKeyPath:(NSString *)keyPath
ofObject:(id)object
change:(NSDictionary *)change
context:(void *)context {
if(context == kCogDecoderMultiContext) {
[self willChangeValueForKey:keyPath];
[self didChangeValueForKey:keyPath];
} else {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
}
@end
// Dispatches container queries to plugin classes in descending priority order.
@implementation CogContainerMulti
// Returns the track URLs reported by the first container class that finds
// any for `url`, or nil when none do.
+ (NSArray *)urlsForContainerURL:(NSURL *)url containers:(NSArray *)containers {
	for(NSString *name in sortClassesByPriority(containers)) {
		NSArray *found = [NSClassFromString(name) urlsForContainerURL:url];
		if([found count] > 0)
			return found;
	}
	return nil;
}
// Same lookup for dependency URLs; classes lacking the optional
// +dependencyUrlsForContainerURL: selector are skipped.
+ (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url containers:(NSArray *)containers {
	for(NSString *name in sortClassesByPriority(containers)) {
		Class cls = NSClassFromString(name);
		if(![cls respondsToSelector:@selector(dependencyUrlsForContainerURL:)])
			continue;
		NSArray *found = [cls dependencyUrlsForContainerURL:url];
		if([found count] > 0)
			return found;
	}
	return nil;
}
@end
@implementation CogMetadataReaderMulti

// Tries each metadata reader plugin in priority order and returns the first
// non-empty metadata dictionary for `url`, or nil if every reader came up
// empty.
+ (NSDictionary *)metadataForURL:(NSURL *)url readers:(NSArray *)readers {
	for(NSString *classString in sortClassesByPriority(readers)) {
		Class reader = NSClassFromString(classString);
		NSDictionary *result = [reader metadataForURL:url];
		if([result count] > 0) {
			return result;
		}
	}
	return nil;
}
@end
@implementation CogPropertiesReaderMulti

// Tries each properties reader plugin in priority order; returns the first
// non-empty properties dictionary for `source`, or nil if none succeeded.
+ (NSDictionary *)propertiesForSource:(id<CogSource>)source readers:(NSArray *)readers {
	NSArray *sortedReaders = sortClassesByPriority(readers);
	for(NSString *classString in sortedReaders) {
		Class reader = NSClassFromString(classString);
		NSDictionary *data = [reader propertiesForSource:source];
		if([data count])
			return data;
		// A failed reader may have consumed part of the stream; rewind so the
		// next candidate sees the source from the beginning.
		if([source seekable])
			[source seek:0 whence:SEEK_SET];
	}
	return nil;
}
@end

View file

@ -1,11 +0,0 @@
/*
 *  Helper.h
 *  CogAudio
 *
 *  Created by Andre Reffhaug on 2/17/08.
 *  Copyright 2008 __MyCompanyName__. All rights reserved.
 *
 */

/* Converts a logarithmic-scale volume (as used by the audio player) to the
 * linear percentage shown to the user. Inverse of linearToLogarithmic(). */
double logarithmicToLinear(const double logarithmic, double MAX_VOLUME);
/* Converts a linear user-facing volume percentage to the logarithmic scale
 * used internally. MAX_VOLUME selects the curve (100% -> x^2, else x^4). */
double linearToLogarithmic(const double linear, double MAX_VOLUME);

View file

@ -1,25 +0,0 @@
/*
* Helper.c
* CogAudio
*
* Created by Andre Reffhaug on 2/17/08.
* Copyright 2008 __MyCompanyName__. All rights reserved.
*
*/
#include "Helper.h"
#include <math.h>
// These functions are helpers for the process of converting volume from a linear to logarithmic scale.
// Numbers that go into audioPlayer should be logarithmic. Numbers that are displayed to the user should be linear.
// Here's why: http://www.dr-lex.34sp.com/info-stuff/volumecontrols.html
// We are using the approximation of X^2 when volume is limited to 100% and X^4 when volume is limited to 800%.
// Input/Output values are in percents.
double logarithmicToLinear(const double logarithmic, double MAX_VOLUME) {
return (MAX_VOLUME == 100.0) ? pow((logarithmic / MAX_VOLUME), 0.5) * 100.0 : pow((logarithmic / MAX_VOLUME), 0.25) * 100.0;
}
/* Maps a linear 0..100 percentage to the logarithmic scale the player uses.
 * x^2 approximation when volume is capped at 100%, x^4 when boosted (800%).
 * Multiplication order matches the original left-to-right evaluation. */
double linearToLogarithmic(const double linear, double MAX_VOLUME) {
	const double fraction = linear / 100.0;
	if(MAX_VOLUME == 100.0) {
		return fraction * fraction * MAX_VOLUME;
	}
	return fraction * fraction * fraction * fraction * MAX_VOLUME;
}
// End helper volume function thingies. ONWARDS TO GLORY!

View file

@ -1,19 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>English</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundleIconFile</key>
<string></string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<string>com.yourcompany.yourcocoaframework</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleSignature</key>

View file

@ -1,160 +0,0 @@
//
// OutputAVFoundation.h
// Cog
//
// Created by Christopher Snowhill on 6/23/22.
// Copyright 2022 Christopher Snowhill. All rights reserved.
//
#import <AssertMacros.h>
#import <Cocoa/Cocoa.h>
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <CoreAudio/AudioHardware.h>
#import <CoreAudio/CoreAudioTypes.h>
#ifdef __cplusplus
#import <atomic>
using std::atomic_long;
#else
#import <stdatomic.h>
#endif
#import "Downmix.h"
#import <CogAudio/CogAudio-Swift.h>
#import "HeadphoneFilter.h"
//#define OUTPUT_LOG
#ifdef OUTPUT_LOG
#import <stdio.h>
#endif
@class OutputNode;
@class FSurroundFilter;
// Audio output backend built on AVSampleBufferAudioRenderer /
// AVSampleBufferRenderSynchronizer, fed by an OutputNode. Handles resampling,
// optional FreeSurround upmixing, HRTF filtering, EQ, and visualization taps.
@interface OutputAVFoundation : NSObject {
	OutputNode *outputController;

	// Resampler state (rsstate/rsold are opaque handles; rsvis feeds the vis).
	BOOL rsDone;
	void *rsstate, *rsold;
	double lastClippedSampleRate;

	void *rsvis;
	double lastVisRate;

	// Lifecycle flags for the output thread / stop handshake.
	BOOL stopInvoked;
	BOOL stopCompleted;
	BOOL running;
	BOOL stopping;
	BOOL stopped;
	BOOL started;
	BOOL paused;
	BOOL restarted;
	BOOL commandStop;

	BOOL eqEnabled;
	BOOL eqInitialized;

	BOOL streamFormatStarted;
	BOOL streamFormatChanged;

	double secondsHdcdSustained;

	// CoreAudio property-listener / KVO bookkeeping.
	BOOL defaultdevicelistenerapplied;
	BOOL currentdevicelistenerapplied;
	BOOL devicealivelistenerapplied;
	BOOL observersapplied;
	BOOL outputdevicechanged;

	float volume;
	float eqPreamp;

	AudioDeviceID outputDeviceID;
	AudioStreamBasicDescription realStreamFormat; // stream format pre-hrtf
	AudioStreamBasicDescription streamFormat; // stream format last seen in render callback
	AudioStreamBasicDescription realNewFormat; // in case of resampler flush
	AudioStreamBasicDescription newFormat; // in case of resampler flush

	AudioStreamBasicDescription visFormat; // Mono format for vis

	uint32_t realStreamChannelConfig;
	uint32_t streamChannelConfig;
	uint32_t realNewChannelConfig;
	uint32_t newChannelConfig;

	// AVFoundation rendering pipeline and presentation-time tracking.
	AVSampleBufferAudioRenderer *audioRenderer;
	AVSampleBufferRenderSynchronizer *renderSynchronizer;

	CMAudioFormatDescriptionRef audioFormatDescription;

	id currentPtsObserver;
	NSLock *currentPtsLock;
	CMTime currentPts, lastPts;
	double secondsLatency;

	CMTime outputPts, trackPts, lastCheckpointPts;
	AudioTimeStamp timeStamp;

	size_t _bufferSize;

	AudioUnit _eq;

	DownmixProcessor *downmixerForVis;
	VisualizationController *visController;

	BOOL enableHrtf;
	HeadphoneFilter *hrtf;

	BOOL enableFSurround;
	BOOL FSurroundDelayRemoved;
	int inputBufferLastTime;
	FSurroundFilter *fsurround;

	BOOL resetStreamFormat;

	BOOL shouldPlayOutBuffer;

	// Scratch buffers for each processing stage (sized for worst case).
	float *samplePtr;
	float tempBuffer[512 * 32];
	float rsTempBuffer[4096 * 32];
	float inputBuffer[4096 * 32]; // 4096 samples times maximum supported channel count
	float fsurroundBuffer[8192 * 6];
	float hrtfBuffer[4096 * 2];
	float eqBuffer[4096 * 32];

	float visAudio[4096];
	float visTemp[8192];

#ifdef OUTPUT_LOG
	FILE *_logFile;
#endif
}

- (id)initWithController:(OutputNode *)c;

- (BOOL)setup;
- (OSStatus)setOutputDeviceByID:(AudioDeviceID)deviceID;
- (BOOL)setOutputDeviceWithDeviceDict:(NSDictionary *)deviceDict;
- (void)start;
- (void)pause;
- (void)resume;
- (void)stop;

- (double)latency;

- (void)setVolume:(double)v;

- (void)setEqualizerEnabled:(BOOL)enabled;

- (void)setShouldPlayOutBuffer:(BOOL)enabled;

- (void)sustainHDCD;

@end

File diff suppressed because it is too large Load diff

View file

@ -2,131 +2,35 @@
// OutputCoreAudio.h
// Cog
//
// Created by Christopher Snowhill on 7/25/23.
// Copyright 2023-2024 Christopher Snowhill. All rights reserved.
// Created by Vincent Spader on 8/2/05.
// Copyright 2005 Vincent Spader. All rights reserved.
//
#import <AssertMacros.h>
#import <Cocoa/Cocoa.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreAudio/AudioHardware.h>
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <CoreAudio/AudioHardware.h>
#import <CoreAudio/CoreAudioTypes.h>
#ifdef __cplusplus
#import <atomic>
using std::atomic_long;
#else
#import <stdatomic.h>
#endif
#import <simd/simd.h>
#import <CogAudio/ChunkList.h>
#import <CogAudio/HeadphoneFilter.h>
//#define OUTPUT_LOG
@class OutputNode;
@class AudioChunk;
@interface OutputCoreAudio : NSObject {
OutputNode *outputController;
dispatch_semaphore_t writeSemaphore;
dispatch_semaphore_t readSemaphore;
NSLock *outputLock;
double streamTimestamp;
BOOL stopInvoked;
BOOL stopCompleted;
BOOL running;
BOOL stopping;
BOOL stopped;
BOOL started;
BOOL paused;
BOOL restarted;
BOOL commandStop;
BOOL resetting;
BOOL cutOffInput;
BOOL fading, faded;
float fadeLevel;
float fadeStep;
float fadeTarget;
BOOL eqEnabled;
BOOL eqInitialized;
BOOL streamFormatStarted;
BOOL streamFormatChanged;
double secondsHdcdSustained;
BOOL defaultdevicelistenerapplied;
BOOL currentdevicelistenerapplied;
BOOL devicealivelistenerapplied;
BOOL observersapplied;
BOOL outputdevicechanged;
float volume;
float eqPreamp;
AVAudioFormat *_deviceFormat;
AudioDeviceID outputDeviceID;
AudioStreamBasicDescription deviceFormat;
AudioStreamBasicDescription realStreamFormat; // stream format pre-hrtf
AudioStreamBasicDescription streamFormat; // stream format last seen in render callback
uint32_t deviceChannelConfig;
uint32_t realStreamChannelConfig;
uint32_t streamChannelConfig;
AUAudioUnit *_au;
size_t _bufferSize;
BOOL resetStreamFormat;
OutputNode * outputController;
BOOL shouldPlayOutBuffer;
ChunkList *outputBuffer;
#ifdef OUTPUT_LOG
NSFileHandle *_logFile;
#endif
AudioUnit outputUnit;
AURenderCallbackStruct renderCallback;
AudioStreamBasicDescription deviceFormat; // info about the default device
}
- (id)initWithController:(OutputNode *)c;
- (BOOL)setup;
- (OSStatus)setOutputDeviceByID:(int)deviceID;
- (BOOL)setOutputDeviceWithDeviceDict:(NSDictionary *)deviceDict;
- (BOOL)setOutputDevice:(AudioDeviceID)outputDevice;
- (void)start;
- (void)pause;
- (void)resume;
- (void)stop;
- (void)fadeOut;
- (void)fadeOutBackground;
- (void)fadeIn;
- (double)latency;
- (double)volume;
- (void)setVolume:(double)v;
- (void)setShouldPlayOutBuffer:(BOOL)enabled;
- (void)sustainHDCD;
- (AudioStreamBasicDescription)deviceFormat;
- (uint32_t)deviceChannelConfig;
- (void)setVolume:(double) v;
@end

File diff suppressed because it is too large Load diff

View file

@ -1,119 +1,55 @@
// Plugins! HOORAY!
/*
Are defines really appropriate for this?
We want something easily insertable into a dictionary.
Maybe should extern these, and shove the instances in PluginController.m, but will that cause linking problems?
*/
#if __has_include(<CogAudio/AudioChunk.h>)
# import <CogAudio/AudioChunk.h>
#else
# import "AudioChunk.h"
#endif
#define kCogSource @"CogSource"
#define kCogDecoder @"CogDecoder"
#define kCogMetadataReader @"CogMetadataReader"
#define kCogPropertiesReader @"CogPropertiesReader"
@protocol CogPlugin <NSObject>
//Dictionary containing classname/plugintype pairs. ex: @"VorbisDecoder": kCogDecoder, @"VorbisPropertiesReader": kCogPropertiesReader
+ (NSDictionary *)pluginInfo;
@end
@protocol CogSource <NSObject>
+ (NSArray *)schemes; // http, file, etc
+ (NSArray *)schemes; //http, file, etc
- (NSURL *)url;
- (NSString *)mimeType;
- (BOOL)open:(NSURL *)url;
- (NSDictionary *)properties; //Perhaps contains header info for HTTP stream, or path for a regular file.
- (BOOL)seekable;
- (BOOL)seek:(long)position whence:(int)whence;
- (long)tell;
- (long)read:(void *)buffer amount:(long)amount; // reads UP TO amount, returns amount read.
- (int)read:(void *)buffer amount:(int)amount; //reads UP TO amount, returns amount read.
- (void)close;
- (void)dealloc;
@end
@protocol CogVersionCheck <NSObject>
+ (BOOL)shouldLoadForOSVersion:(NSOperatingSystemVersion)version;
@end
@protocol CogDecoder <NSObject>
+ (NSArray *)fileTypes; //mp3, ogg, etc
@protocol CogContainer <NSObject>
+ (NSArray *)fileTypes; // mp3, ogg, etc
+ (NSArray *)mimeTypes;
+ (float)priority;
+ (NSArray *)urlsForContainerURL:(NSURL *)url;
@optional
+ (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url;
@end
@protocol CogDecoder <NSObject>
@required
+ (NSArray *)mimeTypes;
+ (NSArray *)fileTypes; // mp3, ogg, etc
+ (NSArray *)fileTypeAssociations; // array of NSArray of NSString, where first item in array is the type name, the second is the icon name, and the rest are the extensions
+ (float)priority; // should be 0.0 ... 1.0, higher means you get selected first, should default to 1.0 unless you know a reason why any of your extensions may behave badly, ie. greedily taking over some file type extension without performing any header validation on it
// For KVO
//For KVO
//- (void)setProperties:(NSDictionary *)p;
- (NSDictionary *)properties;
- (NSDictionary *)metadata; // Only to be implemented for dynamic metadata, send events on change
- (AudioChunk *)readAudio;
- (BOOL)open:(id<CogSource>)source;
- (long)seek:(long)frame;
- (double)seekToTime:(double)time; //time is in milliseconds, should return the time actually seeked to.
- (int)fillBuffer:(void *)buf ofSize:(UInt32)size;
- (void)close;
@optional
- (void)dealloc;
- (BOOL)setTrack:(NSURL *)track;
// These are in NSObject, so as long as you are a subclass of that, you are ok.
- (void)addObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath options:(NSKeyValueObservingOptions)options context:(void *)context;
- (void)removeObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath;
- (void)removeObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath context:(void *)context;
- (BOOL)isSilence;
@end
@protocol CogMetadataReader <NSObject>
+ (NSArray *)fileTypes;
+ (NSArray *)mimeTypes;
+ (float)priority;
+ (NSDictionary *)metadataForURL:(NSURL *)url;
@end
@protocol CogMetadataWriter <NSObject>
//+ (NSArray *)fileTypes;
//+ (NSArray *)mimeTypes;
+ (int)putMetadataInURL:(NSURL *)url tagData:(NSDictionary *)tagData;
+ (NSDictionary *)metadataForURL;
@end
@protocol CogPropertiesReader <NSObject>
+ (NSArray *)fileTypes;
+ (NSArray *)mimeTypes;
+ (float)priority;
+ (NSDictionary *)propertiesForSource:(id<CogSource>)source;
@end
@protocol CogPluginController <NSObject>
+ (id<CogPluginController>)sharedPluginController;
- (NSDictionary *)sources;
- (NSDictionary *)containers;
- (NSDictionary *)metadataReaders;
- (NSDictionary *)propertiesReadersByExtension;
- (NSDictionary *)propertiesReadersByMimeType;
- (NSDictionary *)decodersByExtension;
- (NSDictionary *)decodersByMimeType;
- (id<CogSource>)audioSourceForURL:(NSURL *)url;
- (NSArray *)urlsForContainerURL:(NSURL *)url;
- (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url;
- (NSDictionary *)metadataForURL:(NSURL *)url skipCue:(BOOL)skip;
- (NSDictionary *)propertiesForURL:(NSURL *)url skipCue:(BOOL)skip;
- (id<CogDecoder>)audioDecoderForSource:(id<CogSource>)source skipCue:(BOOL)skip;
- (int)putMetadataInURL:(NSURL *)url;
@end
#ifdef __cplusplus
extern "C" {
#endif
extern NSString *guess_encoding_of_string(const char *input);
#ifdef __cplusplus
}
#endif

View file

@ -2,45 +2,31 @@
#import <Cocoa/Cocoa.h>
#import <CogAudio/Plugin.h>
// Singletonish
@interface PluginController : NSObject <CogPluginController> {
//Singleton
@interface PluginController : NSObject
{
NSMutableDictionary *sources;
NSMutableDictionary *containers;
NSMutableDictionary *decoders;
NSMutableDictionary *metadataReaders;
NSMutableDictionary *propertiesReadersByExtension;
NSMutableDictionary *propertiesReadersByMimeType;
NSMutableDictionary *decodersByExtension;
NSMutableDictionary *decodersByMimeType;
BOOL configured;
NSMutableDictionary *propertiesReaders;
}
@property(retain) NSMutableDictionary *sources;
@property(retain) NSMutableDictionary *containers;
@property(retain) NSMutableDictionary *metadataReaders;
@property(retain) NSMutableDictionary *propertiesReadersByExtension;
@property(retain) NSMutableDictionary *propertiesReadersByMimeType;
@property(retain) NSMutableDictionary *decodersByExtension;
@property(retain) NSMutableDictionary *decodersByMimeType;
@property BOOL configured;
+ (PluginController *)sharedPluginController; //Use this to get the instance.
- (void)setup;
- (void)printPluginInfo;
- (void)loadPlugins;
- (void)loadPlugins;
- (void)loadPluginsAtPath:(NSString *)path;
- (void)setupSource:(NSString *)className;
- (void)setupContainer:(NSString *)className;
- (void)setupDecoder:(NSString *)className;
- (void)setupMetadataReader:(NSString *)className;
- (void)setupPropertiesReader:(NSString *)className;
- (NSDictionary *)sources;
- (NSDictionary *)decoders;
- (NSDictionary *)metadataReaders;
- (NSDictionary *)propertiesReaders;
@end

230
Audio/PluginController.m Normal file
View file

@ -0,0 +1,230 @@
#import "PluginController.h"
#import "Plugin.h"
@implementation PluginController

//Start of singleton-related stuff.
// Pre-ARC (manual retain/release) singleton pattern: allocWithZone: assigns
// the shared instance, and retain/release/autorelease are overridden so the
// object can never be deallocated.
static PluginController *sharedPluginController = nil;

// Returns the process-wide shared instance, creating it on first call.
+ (PluginController*)sharedPluginController
{
	@synchronized(self) {
		if (sharedPluginController == nil) {
			[[self alloc] init]; // assignment not done here
		}
	}
	return sharedPluginController;
}

+ (id)allocWithZone:(NSZone *)zone
{
	@synchronized(self) {
		if (sharedPluginController == nil) {
			sharedPluginController = [super allocWithZone:zone];
			return sharedPluginController; // assignment and return on first allocation
		}
	}
	return nil; //on subsequent allocation attempts return nil
}

- (id)copyWithZone:(NSZone *)zone
{
	return self;
}

- (id)retain
{
	return self;
}

- (unsigned)retainCount
{
	return UINT_MAX; //denotes an object that cannot be released
}

- (void)release
{
	//do nothing
}

- (id)autorelease
{
	return self;
}
//End of singleton-related stuff

// Creates the empty registries that the setup* methods populate.
- (id)init {
	self = [super init];
	if (self) {
		sources = [[NSMutableDictionary alloc] init];
		decoders = [[NSMutableDictionary alloc] init];
		metadataReaders = [[NSMutableDictionary alloc] init];
		propertiesReaders = [[NSMutableDictionary alloc] init];
	}
	return self;
}

// Scans the plugin directories and registers everything found. Wrapped in an
// autorelease pool because this runs from a C constructor, possibly before
// any pool exists.
- (void)setup
{
	NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
	[self loadPlugins];
	[self printPluginInfo];
	[pool release];
}

// Loads every *.bundle in `path`, asks its principal class for +pluginInfo,
// and dispatches each declared class to the matching setup* registrar.
- (void)loadPluginsAtPath:(NSString *)path
{
	NSArray *dirContents = [[NSFileManager defaultManager] directoryContentsAtPath:path];
	NSEnumerator *dirEnum = [dirContents objectEnumerator];
	NSString *pname;
	while (pname = [dirEnum nextObject])
	{
		NSString *ppath;
		ppath = [NSString pathWithComponents:[NSArray arrayWithObjects:path,pname,nil]];

		if ([[pname pathExtension] isEqualToString:@"bundle"])
		{
			NSBundle *b = [NSBundle bundleWithPath:ppath];
			if (b)
			{
				Class plugin = [b principalClass];
				if ([plugin respondsToSelector:@selector(pluginInfo)])
				{
					//PluginInfo is a dictionary that contains keys/values like pluginClass,classType...ex: VorbisDecoder, Decoder
					NSDictionary *pluginInfo = [plugin pluginInfo];
					NSEnumerator *e = [pluginInfo keyEnumerator];
					id className;
					while (className = [e nextObject]) {
						id pluginType = [pluginInfo objectForKey:className];
						if ([pluginType isEqualToString:kCogDecoder]) {
							[self setupDecoder:className];
						}
						else if ([pluginType isEqualToString:kCogMetadataReader]) {
							[self setupMetadataReader:className];
						}
						else if ([pluginType isEqualToString:kCogPropertiesReader]) {
							[self setupPropertiesReader:className];
						}
						else if ([pluginType isEqualToString:kCogSource]) {
							[self setupSource:className];
						}
						else {
							NSLog(@"Unknown plugin type!!");
						}
					}
				}
			}
		}
	}
}

// Searches both the app's built-in PlugIns directory and the per-user
// Application Support plugin directory.
- (void)loadPlugins
{
	[self loadPluginsAtPath:[[NSBundle mainBundle] builtInPlugInsPath]];
	[self loadPluginsAtPath:[@"~/Library/Application Support/Cog/Plugins" stringByExpandingTildeInPath]];
}

// Maps each of the decoder class's lowercased file extensions to its class
// name. Later registrations overwrite earlier ones for the same extension.
- (void)setupDecoder:(NSString *)className
{
	Class decoder = NSClassFromString(className);
	if (decoder && [decoder respondsToSelector:@selector(fileTypes)]) {
		NSEnumerator *fileTypesEnum = [[decoder fileTypes] objectEnumerator];
		id fileType;
		while (fileType = [fileTypesEnum nextObject])
		{
			[decoders setObject:className forKey:[fileType lowercaseString]];
		}
	}
}

// Same extension -> class-name mapping, for metadata readers.
- (void)setupMetadataReader:(NSString *)className
{
	Class metadataReader = NSClassFromString(className);
	if (metadataReader && [metadataReader respondsToSelector:@selector(fileTypes)]) {
		NSEnumerator *fileTypesEnum = [[metadataReader fileTypes] objectEnumerator];
		id fileType;
		while (fileType = [fileTypesEnum nextObject])
		{
			[metadataReaders setObject:className forKey:[fileType lowercaseString]];
		}
	}
}

// Same extension -> class-name mapping, for properties readers.
- (void)setupPropertiesReader:(NSString *)className
{
	Class propertiesReader = NSClassFromString(className);
	if (propertiesReader && [propertiesReader respondsToSelector:@selector(fileTypes)]) {
		NSEnumerator *fileTypesEnum = [[propertiesReader fileTypes] objectEnumerator];
		id fileType;
		while (fileType = [fileTypesEnum nextObject])
		{
			[propertiesReaders setObject:className forKey:[fileType lowercaseString]];
		}
	}
}

// Maps each URL scheme (http, file, ...) the source supports to its class name.
- (void)setupSource:(NSString *)className
{
	Class source = NSClassFromString(className);
	if (source && [source respondsToSelector:@selector(schemes)]) {
		NSEnumerator *schemeEnum = [[source schemes] objectEnumerator];
		id scheme;
		while (scheme = [schemeEnum nextObject])
		{
			[sources setObject:className forKey:scheme];
		}
	}
}

// Debug dump of the registries; logging is currently disabled.
- (void)printPluginInfo
{
	/* NSLog(@"Sources: %@", sources);
	NSLog(@"Decoders: %@", decoders);
	NSLog(@"Metadata Readers: %@", metadataReaders);
	NSLog(@"Properties Readers: %@", propertiesReaders); */
}

// Accessors expose the mutable registries as plain NSDictionary.
- (NSDictionary *)sources
{
	return sources;
}

- (NSDictionary *)decoders
{
	return decoders;
}

- (NSDictionary *)propertiesReaders
{
	return propertiesReaders;
}

- (NSDictionary *)metadataReaders
{
	return metadataReaders;
}

@end
//This is called when the framework is loaded.
// Runs automatically via the ((constructor)) attribute at library load time,
// before main(), and bootstraps the shared PluginController.
void __attribute__ ((constructor)) InitializePlugins(void) {
	static BOOL wasInitialized = NO;
	if (!wasInitialized) {
		// safety in case we get called twice.
		[[PluginController sharedPluginController] setup];
		wasInitialized = YES;
	}
}

View file

@ -1,853 +0,0 @@
#import "PluginController.h"
#import "CogPluginMulti.h"
#import "Plugin.h"
#import "Logging.h"
#import "NSFileHandle+CreateFile.h"
#import "NSDictionary+Merge.h"
#import "RedundantPlaylistDataStore.h"
#import <chrono>
#import <map>
#import <mutex>
#import <thread>
// One per-URL cache slot: the most recent properties and metadata
// dictionaries read for a track, plus the last-access timestamp that the
// cache_run() worker uses for expiry.
struct Cached_Metadata {
	std::chrono::steady_clock::time_point time_accessed;
	NSDictionary *properties;
	NSDictionary *metadata;

	Cached_Metadata()
	: properties(nil), metadata(nil) {
	}
};
static std::mutex Cache_Lock;
static std::map<std::string, Cached_Metadata> Cache_List;
static RedundantPlaylistDataStore *Cache_Data_Store = nil;
static bool Cache_Running = false;
static bool Cache_Stopped = false;
static std::thread *Cache_Thread = NULL;
static void cache_run();
// Creates the deduplicating data store and spawns the cache expiry worker.
// Called once from -[PluginController init]; paired with cache_deinit().
static void cache_init() {
	Cache_Data_Store = [[RedundantPlaylistDataStore alloc] init];
	Cache_Thread = new std::thread(cache_run);
}
// Signals the expiry worker to stop, waits for it, and releases the thread
// and data store.
// NOTE(review): join() already waits for cache_run() to return, so the
// Cache_Stopped spin below should never actually loop — confirm before
// simplifying. Cache_Running/Cache_Stopped are plain bools shared between
// threads; std::atomic<bool> would make the handshake race-free.
static void cache_deinit() {
	Cache_Running = false;
	Cache_Thread->join();
	while(!Cache_Stopped)
		usleep(500);
	delete Cache_Thread;
	Cache_Data_Store = nil;
}
// Stores (or replaces) the cached properties for `url` and refreshes the
// entry's expiry timestamp. The dictionary is deduplicated through the
// shared data store first. nil input is ignored.
static void cache_insert_properties(NSURL *url, NSDictionary *properties) {
	if(properties == nil) return;

	std::lock_guard<std::mutex> lock(Cache_Lock);

	// Keyed by the full absolute URL string.
	std::string path = [[url absoluteString] UTF8String];

	properties = [Cache_Data_Store coalesceEntryInfo:properties];

	Cached_Metadata &entry = Cache_List[path];

	entry.properties = properties;
	entry.time_accessed = std::chrono::steady_clock::now();
}
// Same as cache_insert_properties(), but fills the metadata side of the
// entry. nil input is ignored.
static void cache_insert_metadata(NSURL *url, NSDictionary *metadata) {
	if(metadata == nil) return;

	std::lock_guard<std::mutex> lock(Cache_Lock);

	std::string path = [[url absoluteString] UTF8String];

	metadata = [Cache_Data_Store coalesceEntryInfo:metadata];

	Cached_Metadata &entry = Cache_List[path];

	entry.metadata = metadata;
	entry.time_accessed = std::chrono::steady_clock::now();
}
// Returns the cached properties for `url` (refreshing its expiry timestamp),
// or nil on a cache miss.
// Fix: the previous implementation used Cache_List[path], which
// default-constructs and inserts an empty placeholder entry on every miss,
// growing the map until the expiry thread collects them. find() leaves the
// map untouched on a miss.
static NSDictionary *cache_access_properties(NSURL *url) {
	std::lock_guard<std::mutex> lock(Cache_Lock);

	std::string path = [[url absoluteString] UTF8String];

	auto it = Cache_List.find(path);
	if(it != Cache_List.end() && it->second.properties) {
		it->second.time_accessed = std::chrono::steady_clock::now();
		return it->second.properties;
	}

	return nil;
}
// Returns the cached metadata for `url` (refreshing its expiry timestamp),
// or nil on a cache miss.
// Fix: use find() instead of operator[], which inserted an empty placeholder
// entry into the map on every miss.
static NSDictionary *cache_access_metadata(NSURL *url) {
	std::lock_guard<std::mutex> lock(Cache_Lock);

	std::string path = [[url absoluteString] UTF8String];

	auto it = Cache_List.find(path);
	if(it != Cache_List.end() && it->second.metadata) {
		it->second.time_accessed = std::chrono::steady_clock::now();
		return it->second.metadata;
	}

	return nil;
}
// Expiry worker thread body: every 250 ms, drops cache entries that have not
// been touched for ten seconds or more; when the sweep empties a previously
// non-empty cache, also resets the deduplication store. Exits after
// cache_deinit() clears Cache_Running.
static void cache_run() {
	std::chrono::milliseconds dura(250);

	Cache_Running = true;

	while(Cache_Running) {
		std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now();

		// Autorelease pool scopes the NSDictionary references released by erase().
		@autoreleasepool {
			std::lock_guard<std::mutex> lock(Cache_Lock);

			size_t cacheListOriginalSize = Cache_List.size();

			for(auto it = Cache_List.begin(); it != Cache_List.end();) {
				auto elapsed = std::chrono::duration_cast<std::chrono::seconds>(now - it->second.time_accessed);
				if(elapsed.count() >= 10) {
					// erase() returns the next valid iterator.
					it = Cache_List.erase(it);
					continue;
				}
				++it;
			}

			if(cacheListOriginalSize && Cache_List.size() == 0) {
				[Cache_Data_Store reset];
			}
		}

		std::this_thread::sleep_for(dura);
	}

	Cache_Stopped = true;
}
@implementation PluginController
@synthesize sources;
@synthesize containers;
@synthesize metadataReaders;
@synthesize propertiesReadersByExtension;
@synthesize propertiesReadersByMimeType;
@synthesize decodersByExtension;
@synthesize decodersByMimeType;
@synthesize configured;
static PluginController *sharedPluginController = nil;
// Returns the process-wide plugin controller, lazily created under a class
// lock. ARC-era singleton: no alloc/retain overrides needed.
+ (id<CogPluginController>)sharedPluginController {
	@synchronized(self) {
		if(sharedPluginController == nil) {
			sharedPluginController = [[self alloc] init];
		}
	}

	return sharedPluginController;
}
// Creates the empty plugin registries, loads all plugins immediately via
// -setup, and starts the metadata cache worker.
- (id)init {
	self = [super init];
	if(self) {
		self.sources = [[NSMutableDictionary alloc] init];
		self.containers = [[NSMutableDictionary alloc] init];
		self.metadataReaders = [[NSMutableDictionary alloc] init];

		self.propertiesReadersByExtension = [[NSMutableDictionary alloc] init];
		self.propertiesReadersByMimeType = [[NSMutableDictionary alloc] init];

		self.decodersByExtension = [[NSMutableDictionary alloc] init];
		self.decodersByMimeType = [[NSMutableDictionary alloc] init];

		[self setup];

		cache_init();
	}

	return self;
}
// Stops and joins the cache worker thread. In practice the singleton lives
// for the process lifetime, so this mainly matters at orderly shutdown.
- (void)dealloc {
	cache_deinit();
}
// One-shot plugin discovery: listens for NSBundleDidLoadNotification only
// for the duration of -loadPlugins, so -bundleDidLoad: can register each
// bundle's classes as it loads. Guarded by the configured flag.
- (void)setup {
	if(self.configured == NO) {
		self.configured = YES;

		[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(bundleDidLoad:) name:NSBundleDidLoadNotification object:nil];

		[self loadPlugins];

		[[NSNotificationCenter defaultCenter] removeObserver:self name:NSBundleDidLoadNotification object:nil];

		[self printPluginInfo];
	}
}
// Invoked for each plugin bundle as it loads (NSBundleDidLoadNotification).
// First pass: if any class in the bundle declares a CogVersionCheck and
// rejects the current OS, the early return skips registration of the ENTIRE
// bundle — presumably intentional (all-or-nothing per bundle); confirm.
// Second pass: registers every class with each plugin protocol it conforms
// to (a class may register under several roles).
- (void)bundleDidLoad:(NSNotification *)notification {
	NSArray *classNames = [[notification userInfo] objectForKey:@"NSLoadedClasses"];
	for(NSString *className in classNames) {
		Class bundleClass = NSClassFromString(className);
		if([bundleClass conformsToProtocol:@protocol(CogVersionCheck)]) {
			DLog(@"Component has version check: %@", className);
			if(![bundleClass shouldLoadForOSVersion:[[NSProcessInfo processInfo] operatingSystemVersion]]) {
				DLog(@"Plugin fails OS version check, ignoring");
				return;
			}
		}
	}
	for(NSString *className in classNames) {
		DLog(@"Class loaded: %@", className);
		Class bundleClass = NSClassFromString(className);
		if([bundleClass conformsToProtocol:@protocol(CogContainer)]) {
			[self setupContainer:className];
		}
		if([bundleClass conformsToProtocol:@protocol(CogDecoder)]) {
			[self setupDecoder:className];
		}
		if([bundleClass conformsToProtocol:@protocol(CogMetadataReader)]) {
			[self setupMetadataReader:className];
		}
		if([bundleClass conformsToProtocol:@protocol(CogPropertiesReader)]) {
			[self setupPropertiesReader:className];
		}
		if([bundleClass conformsToProtocol:@protocol(CogSource)]) {
			[self setupSource:className];
		}
	}
}
// Loads every *.bundle found directly inside `path`; each -load triggers
// NSBundleDidLoadNotification, which -bundleDidLoad: uses to register the
// bundle's classes. Missing or unreadable directories yield no entries and
// are silently skipped.
- (void)loadPluginsAtPath:(NSString *)path {
	NSArray *entries = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:path error:nil];
	for(NSString *entry in entries) {
		if(![[entry pathExtension] isEqualToString:@"bundle"]) continue;
		NSString *bundlePath = [NSString pathWithComponents:@[path, entry]];
		[[NSBundle bundleWithPath:bundlePath] load];
	}
}
// Loads plugins from the app's built-in PlugIns directory and from the
// per-user "Application Support/Cog/Plugins" directory.
- (void)loadPlugins {
	NSArray *paths = NSSearchPathForDirectoriesInDomains(NSApplicationSupportDirectory, NSUserDomainMask, YES);
	NSString *basePath = [[paths firstObject] stringByAppendingPathComponent:@"Cog"];

	[self loadPluginsAtPath:[[NSBundle mainBundle] builtInPlugInsPath]];
	[self loadPluginsAtPath:[basePath stringByAppendingPathComponent:@"Plugins"]];
}
// Registers a container plugin class under each of its lowercased file
// extensions. Multiple containers may claim the same extension, so each
// registry value is a mutable array of class names (created on first use).
- (void)setupContainer:(NSString *)className {
	Class container = NSClassFromString(className);
	if(container && [container respondsToSelector:@selector(fileTypes)]) {
		for(id fileType in [container fileTypes]) {
			NSString *ext = [fileType lowercaseString];
			NSMutableArray *containerSet;
			if(![containers objectForKey:ext]) {
				containerSet = [[NSMutableArray alloc] init];
				[containers setObject:containerSet forKey:ext];
			} else
				containerSet = [containers objectForKey:ext];
			[containerSet addObject:className];
		}
	}
}
// Registers a decoder plugin class under both of its lookup keys: each
// lowercased file extension and each lowercased MIME type. Registry values
// are arrays of class names, since several decoders may handle one type.
- (void)setupDecoder:(NSString *)className {
	Class decoder = NSClassFromString(className);
	if(decoder && [decoder respondsToSelector:@selector(fileTypes)]) {
		for(id fileType in [decoder fileTypes]) {
			NSString *ext = [fileType lowercaseString];
			NSMutableArray *decoders;
			if(![decodersByExtension objectForKey:ext]) {
				decoders = [[NSMutableArray alloc] init];
				[decodersByExtension setObject:decoders forKey:ext];
			} else
				decoders = [decodersByExtension objectForKey:ext];

			[decoders addObject:className];
		}
	}
	if(decoder && [decoder respondsToSelector:@selector(mimeTypes)]) {
		for(id mimeType in [decoder mimeTypes]) {
			NSString *mimetype = [mimeType lowercaseString];
			NSMutableArray *decoders;
			if(![decodersByMimeType objectForKey:mimetype]) {
				decoders = [[NSMutableArray alloc] init];
				[decodersByMimeType setObject:decoders forKey:mimetype];
			} else
				decoders = [decodersByMimeType objectForKey:mimetype];

			[decoders addObject:className];
		}
	}
}
// Registers a metadata reader plugin class under each of its lowercased file
// extensions; registry values are arrays of class names.
- (void)setupMetadataReader:(NSString *)className {
	Class metadataReader = NSClassFromString(className);
	if(metadataReader && [metadataReader respondsToSelector:@selector(fileTypes)]) {
		for(id fileType in [metadataReader fileTypes]) {
			NSString *ext = [fileType lowercaseString];
			NSMutableArray *readers;
			if(![metadataReaders objectForKey:ext]) {
				readers = [[NSMutableArray alloc] init];
				[metadataReaders setObject:readers forKey:ext];
			} else
				readers = [metadataReaders objectForKey:ext];

			[readers addObject:className];
		}
	}
}
// Registers a properties reader plugin class under both of its lookup keys:
// each lowercased file extension and each lowercased MIME type. Registry
// values are arrays of class names.
- (void)setupPropertiesReader:(NSString *)className {
	Class propertiesReader = NSClassFromString(className);
	if(propertiesReader && [propertiesReader respondsToSelector:@selector(fileTypes)]) {
		for(id fileType in [propertiesReader fileTypes]) {
			NSString *ext = [fileType lowercaseString];
			NSMutableArray *readers;
			if(![propertiesReadersByExtension objectForKey:ext]) {
				readers = [[NSMutableArray alloc] init];
				[propertiesReadersByExtension setObject:readers forKey:ext];
			} else
				readers = [propertiesReadersByExtension objectForKey:ext];

			[readers addObject:className];
		}
	}
	if(propertiesReader && [propertiesReader respondsToSelector:@selector(mimeTypes)]) {
		for(id mimeType in [propertiesReader mimeTypes]) {
			NSString *mimetype = [mimeType lowercaseString];
			NSMutableArray *readers;
			if(![propertiesReadersByMimeType objectForKey:mimetype]) {
				readers = [[NSMutableArray alloc] init];
				[propertiesReadersByMimeType setObject:readers forKey:mimetype];
			} else
				readers = [propertiesReadersByMimeType objectForKey:mimetype];

			[readers addObject:className];
		}
	}
}
// Registers a source plugin class under each URL scheme (http, file, ...) it
// supports. Schemes map one-to-one to a single class name.
- (void)setupSource:(NSString *)className {
	Class source = NSClassFromString(className);
	if(!source || ![source respondsToSelector:@selector(schemes)]) return;
	for(id scheme in [source schemes]) {
		[sources setObject:className forKey:scheme];
	}
}
// Returns `string` with XML entities (&, <, >, quotes) escaped, or @"" when
// escaping fails (e.g. nil input).
// Idiom fix: CFXMLCreateStringByEscapingEntities follows the CF Create rule
// (+1 reference). CFBridgingRelease transfers that reference to ARC in one
// step, replacing the previous manual __bridge cast + CFRelease pair and
// making the ownership handoff explicit.
static NSString *xmlEscapeString(NSString *string) {
	CFStringRef textXML = CFXMLCreateStringByEscapingEntities(kCFAllocatorDefault, (CFStringRef)string, nil);
	if(textXML) {
		return (NSString *)CFBridgingRelease(textXML);
	}
	return @"";
}
- (void)printPluginInfo {
	// Debug helper: dump every plugin registry this controller maintains.
	ALog(@"Sources: %@", self.sources);
	ALog(@"Containers: %@", self.containers);
	ALog(@"Metadata Readers: %@", self.metadataReaders);
	ALog(@"Properties Readers By Extension: %@", self.propertiesReadersByExtension);
	ALog(@"Properties Readers By Mime Type: %@", self.propertiesReadersByMimeType);
	ALog(@"Decoders by Extension: %@", self.decodersByExtension);
	ALog(@"Decoders by Mime Type: %@", self.decodersByMimeType);
	// Disabled developer tool: regenerates the CFBundleDocumentTypes section
	// of Info.plist from the decoders registered at runtime and writes the
	// result to <NSTemporaryDirectory()>/Cog_Info.plist for manual diffing.
#if 0
	// XXX Keep in sync with Info.plist on disk!
	NSString * plistHeader = @"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\
<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n\
<plist version=\"1.0\">\n\
<dict>\n\
\t<key>SUEnableInstallerLauncherService</key>\n\
\t<true/>\n\
\t<key>CFBundleDevelopmentRegion</key>\n\
\t<string>en_US</string>\n\
\t<key>CFBundleDocumentTypes</key>\n\
\t<array>\n\
\t\t<dict>\n\
\t\t\t<key>CFBundleTypeExtensions</key>\n\
\t\t\t<array>\n\
\t\t\t\t<string>*</string>\n\
\t\t\t</array>\n\
\t\t\t<key>CFBundleTypeIconFile</key>\n\
\t\t\t<string>song.icns</string>\n\
\t\t\t<key>CFBundleTypeIconSystemGenerated</key>\n\
\t\t\t<integer>1</integer>\n\
\t\t\t<key>CFBundleTypeName</key>\n\
\t\t\t<string>Folder</string>\n\
\t\t\t<key>CFBundleTypeOSTypes</key>\n\
\t\t\t<array>\n\
\t\t\t\t<string>****</string>\n\
\t\t\t\t<string>fold</string>\n\
\t\t\t\t<string>disk</string>\n\
\t\t\t</array>\n\
\t\t\t<key>CFBundleTypeRole</key>\n\
\t\t\t<string>None</string>\n\
\t\t\t<key>LSHandlerRank</key>\n\
\t\t\t<string>Default</string>\n\
\t\t</dict>\n";
	NSString * plistFooter = @"\t</array>\n\
\t<key>CFBundleExecutable</key>\n\
\t<string>Cog</string>\n\
\t<key>CFBundleHelpBookName</key>\n\
\t<string>org.cogx.cog.help</string>\n\
\t<key>CFBundleIdentifier</key>\n\
\t<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>\n\
\t<key>CFBundleInfoDictionaryVersion</key>\n\
\t<string>6.0</string>\n\
\t<key>CFBundleName</key>\n\
\t<string>$(PRODUCT_NAME)</string>\n\
\t<key>CFBundleDisplayName</key>\n\
\t<string>$(PRODUCT_NAME)</string>\n\
\t<key>CFBundlePackageType</key>\n\
\t<string>APPL</string>\n\
\t<key>CFBundleShortVersionString</key>\n\
\t<string>0.08</string>\n\
\t<key>CFBundleSignature</key>\n\
\t<string>????</string>\n\
\t<key>CFBundleVersion</key>\n\
\t<string>r516</string>\n\
\t<key>LSApplicationCategoryType</key>\n\
\t<string>public.app-category.music</string>\n\
\t<key>LSMinimumSystemVersion</key>\n\
\t<string>$(MACOSX_DEPLOYMENT_TARGET)</string>\n\
\t<key>NSAppTransportSecurity</key>\n\
\t<dict>\n\
\t\t<key>NSAllowsArbitraryLoads</key>\n\
\t\t<true/>\n\
\t</dict>\n\
\t<key>NSAppleScriptEnabled</key>\n\
\t<string>YES</string>\n\
\t<key>NSCalendarsUsageDescription</key>\n\
\t<string>Cog has no use for your calendar information. Why are you trying to open your Calendar with an audio player?</string>\n\
\t<key>NSCameraUsageDescription</key>\n\
\t<string>Cog is an audio player. It will never use your camera. Why is it asking for permission to use your camera?</string>\n\
\t<key>NSContactsUsageDescription</key>\n\
\t<string>Cog has no use for your contacts information. Why are you trying to open your contacts with an audio player?</string>\n\
\t<key>NSLocationUsageDescription</key>\n\
\t<string>Cog has no use for your location information. Something is obviously wrong with the application.</string>\n\
\t<key>NSMainNibFile</key>\n\
\t<string>MainMenu</string>\n\
\t<key>NSMicrophoneUsageDescription</key>\n\
\t<string>Cog is an audio player. It does not, however, record audio. Why is it asking for permission to use your microphone?</string>\n\
\t<key>NSPhotoLibraryUsageDescription</key>\n\
\t<string>Cog is an audio player. Why are you trying to access your Photos Library with an audio player?</string>\n\
\t<key>NSPrincipalClass</key>\n\
\t<string>MediaKeysApplication</string>\n\
\t<key>NSRemindersUsageDescription</key>\n\
\t<string>Cog has no use for your reminders. Why are you trying to access them with an audio player?</string>\n\
\t<key>NSDownloadsFolderUsageDescription</key>\n\
\t<string>We may request related audio files from this folder for playback purposes. We will only play back files you specifically add, unless you enable the option to add an entire folder. Granting permission either for individual files or for parent folders ensures their contents will remain playable in future sessions.</string>\n\
\t<key>NSDocumentsFolderUsageDescription</key>\n\
\t<string>We may request related audio files from this folder for playback purposes. We will only play back files you specifically add, unless you enable the option to add an entire folder. Granting permission either for individual files or for parent folders ensures their contents will remain playable in future sessions.</string>\n\
\t<key>NSDesktopFolderUsageDescription</key>\n\
\t<string>We may request related audio files from this folder for playback purposes. We will only play back files you specifically add, unless you enable the option to add an entire folder. Granting permission either for individual files or for parent folders ensures their contents will remain playable in future sessions.</string>\n\
\t<key>NSMotionUsageDescription</key>\n\
\t<string>Cog optionally supports motion tracking headphones for head tracked positional audio, using its own low latency positioning model.</string>\n\
\t<key>OSAScriptingDefinition</key>\n\
\t<string>Cog.sdef</string>\n\
\t<key>SUFeedURL</key>\n\
\t<string>https://cogcdn.cog.losno.co/mercury.xml</string>\n\
\t<key>SUPublicEDKey</key>\n\
\t<string>omxG7Rp0XK9/YEvKbVy7cd44eVAh1LJB6CmjQwjOJz4=</string>\n\
\t<key>ITSAppUsesNonExemptEncryption</key>\n\
\t<false/>\n\
</dict>\n\
</plist>\n";
	// Collect the unique set of decoder class names across all extensions.
	NSMutableArray * decodersRegistered = [[NSMutableArray alloc] init];
	NSArray * allKeys = [self.decodersByExtension allKeys];
	for (NSString * ext in allKeys) {
		NSArray * decoders = [self.decodersByExtension objectForKey:ext];
		for (NSString * decoder in decoders) {
			if (![decodersRegistered containsObject:decoder]) {
				[decodersRegistered addObject:decoder];
			}
		}
	}
	NSMutableArray * stringList = [[NSMutableArray alloc] init];
	[stringList addObject:plistHeader];
	// These aren't handled by decoders, but as containers
	// Each entry: [0] type name, [1] icon file, [2...] extensions.
	NSArray * staticTypes = @[
		@[@"M3U Playlist File", @"m3u.icns", @"m3u", @"m3u8"],
		@[@"PLS Playlist File", @"pls.icns", @"pls"],
		@[@"RAR Archive of SPC Files", @"vg.icns", @"rsn"],
		@[@"7Z Archive of VGM Files", @"vg.icns", @"vgm7z"]
	];
	NSMutableArray * assocTypes = [[NSMutableArray alloc] init];
	[assocTypes addObjectsFromArray:staticTypes];
	// Append each registered decoder's own file type associations.
	for (NSString * decoderString in decodersRegistered) {
		Class decoder = NSClassFromString(decoderString);
		if (decoder && [decoder respondsToSelector:@selector(fileTypeAssociations)]) {
			NSArray * types = [decoder fileTypeAssociations];
			[assocTypes addObjectsFromArray:types];
		}
	}
	// Emit one CFBundleDocumentTypes <dict> per association.
	for (NSArray * type in assocTypes) {
		[stringList addObject:@"\t\t<dict>\n\
\t\t\t<key>CFBundleTypeExtensions</key>\n\
\t\t\t<array>\n\
"];
		for (size_t i = 2; i < [type count]; ++i) {
			[stringList addObject:@"\t\t\t\t<string>"];
			[stringList addObject:[[type objectAtIndex:i] lowercaseString]];
			[stringList addObject:@"</string>\n"];
		}
		[stringList addObject:@"\t\t\t</array>\n\
\t\t\t<key>CFBundleTypeIconFile</key>\n\
\t\t\t<string>"];
		[stringList addObject:[type objectAtIndex:1]];
		[stringList addObject:@"</string>\n\
\t\t\t<key>CFBundleTypeIconSystemGenerated</key>\n\
\t\t\t<integer>1</integer>\n\
\t\t\t<key>CFBundleTypeName</key>\n\
\t\t\t<string>"];
		// Type names may contain XML-reserved characters; escape them.
		[stringList addObject:xmlEscapeString([type objectAtIndex:0])];
		[stringList addObject:@"</string>\n\
\t\t\t<key>CFBundleTypeRole</key>\n\
\t\t\t<string>Viewer</string>\n\
\t\t\t<key>LSHandlerRank</key>\n\
\t\t\t<string>Default</string>\n\
\t\t\t<key>LSTypeIsPackage</key>\n\
\t\t\t<false/>\n\
\t\t</dict>\n"];
	}
	[stringList addObject:plistFooter];
	// Write the assembled plist to a scratch file in the temp directory.
	NSFileHandle *fileHandle = [NSFileHandle fileHandleForWritingAtPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"Cog_Info.plist"] createFile:YES];
	if (!fileHandle) {
		DLog(@"Error saving Info.plist!");
		return;
	}
	[fileHandle truncateFileAtOffset:0];
	[fileHandle writeData:[[stringList componentsJoinedByString:@""] dataUsingEncoding:NSUTF8StringEncoding]];
	[fileHandle closeFile];
#endif
}
- (id<CogSource>)audioSourceForURL:(NSURL *)url {
	// Instantiate the source plugin registered for this URL's scheme.
	// An unregistered scheme leaves sourceClass == Nil, and messaging Nil
	// yields nil, so unknown schemes return nil.
	NSString *className = [sources objectForKey:[url scheme]];
	Class sourceClass = NSClassFromString(className);
	return [[sourceClass alloc] init];
}
- (NSArray *)urlsForContainerURL:(NSURL *)url {
	// Expand a container file (playlist/archive) into the URLs it holds.
	// Multiple registered containers for one extension are multiplexed
	// through CogContainerMulti; unknown extensions yield nil.
	NSString *ext = [[url pathExtension] lowercaseString];
	NSArray *containerSet = [containers objectForKey:ext];
	if(!containerSet) {
		return nil;
	}
	if([containerSet count] > 1) {
		return [CogContainerMulti urlsForContainerURL:url containers:containerSet];
	}
	Class container = NSClassFromString([containerSet objectAtIndex:0]);
	return [container urlsForContainerURL:url];
}
- (NSArray *)dependencyUrlsForContainerURL:(NSURL *)url {
	// List auxiliary files a container depends on, mirroring
	// -urlsForContainerURL:. Returns nil for unknown extensions or when the
	// container class does not implement the dependency query.
	NSString *ext = [[url pathExtension] lowercaseString];
	NSArray *containerSet = [containers objectForKey:ext];
	if(!containerSet) {
		return nil;
	}
	if([containerSet count] > 1) {
		return [CogContainerMulti dependencyUrlsForContainerURL:url containers:containerSet];
	}
	Class container = NSClassFromString([containerSet objectAtIndex:0]);
	if(![container respondsToSelector:@selector(dependencyUrlsForContainerURL:)]) {
		return nil;
	}
	return [container dependencyUrlsForContainerURL:url];
}
// Note: Source is assumed to already be opened.
- (id<CogDecoder>)audioDecoderForSource:(id<CogSource>)source skipCue:(BOOL)skip {
	// Pick a decoder for the source: extension matches first, then MIME-type
	// matches, finally falling back to SilenceDecoder. When `skip` is set,
	// CueSheetDecoder is excluded (used when resolving a cue sheet itself).
	NSString *ext = [[source url] pathExtension];
	NSArray *decoders = [decodersByExtension objectForKey:[ext lowercaseString]];
	NSString *classString;
	if(decoders) {
		if([decoders count] > 1) {
			if(skip) {
				// -removeObject: removes every element equal (via isEqual:)
				// to the given string — same effect as the old index-walk
				// loop, without its signed `int` vs NSUInteger comparison.
				NSMutableArray *_decoders = [decoders mutableCopy];
				[_decoders removeObject:@"CueSheetDecoder"];
				return [[CogDecoderMulti alloc] initWithDecoders:_decoders];
			}
			return [[CogDecoderMulti alloc] initWithDecoders:decoders];
		} else {
			classString = [decoders objectAtIndex:0];
		}
	} else {
		decoders = [decodersByMimeType objectForKey:[[source mimeType] lowercaseString]];
		if(decoders) {
			if([decoders count] > 1) {
				return [[CogDecoderMulti alloc] initWithDecoders:decoders];
			} else {
				classString = [decoders objectAtIndex:0];
			}
		} else {
			// Nothing claims this file; decode silence rather than fail.
			classString = @"SilenceDecoder";
		}
	}
	if(skip && [classString isEqualToString:@"CueSheetDecoder"]) {
		classString = @"SilenceDecoder";
	}
	Class decoder = NSClassFromString(classString);
	return [[decoder alloc] init];
}
+ (BOOL)isCoverFile:(NSString *)fileName {
	// A file counts as cover art when its base name (last path component,
	// extension stripped, lowercased) ends with one of +coverNames.
	NSString *base = [[[fileName lastPathComponent] stringByDeletingPathExtension] lowercaseString];
	for(NSString *candidate in [PluginController coverNames]) {
		if([base hasSuffix:candidate]) {
			return YES;
		}
	}
	return NO;
}
+ (NSArray *)coverNames {
	// Base-name suffixes recognized by +isCoverFile: when scanning a
	// directory for external album art.
	NSArray *names = @[@"cover", @"folder", @"album", @"front"];
	return names;
}
- (NSDictionary *)metadataForURL:(NSURL *)url skipCue:(BOOL)skip {
	// Read tag metadata for a local URL, consulting the in-memory cache
	// first. `skip` excludes CueSheetMetadataReader (used when the caller is
	// itself resolving a cue sheet). Remote (http/https) URLs are never
	// probed. Returns a dictionary (possibly empty) which is also cached;
	// nil only for remote URLs.
	NSString *urlScheme = [url scheme];
	if([urlScheme isEqualToString:@"http"] ||
	   [urlScheme isEqualToString:@"https"])
		return nil;
	NSDictionary *cacheData = cache_access_metadata(url);
	if(cacheData) return cacheData;
	// do/while(0) gives the reader-selection sequence a single exit (break).
	do {
		NSString *ext = [url pathExtension];
		NSArray *readers = [metadataReaders objectForKey:[ext lowercaseString]];
		NSString *classString;
		if(readers) {
			if([readers count] > 1) {
				if(skip) {
					// Strip every CueSheetMetadataReader entry before
					// delegating to the multi-reader.
					NSMutableArray *_readers = [readers mutableCopy];
					for(int i = 0; i < [_readers count];) {
						if([[_readers objectAtIndex:i] isEqualToString:@"CueSheetMetadataReader"])
							[_readers removeObjectAtIndex:i];
						else
							++i;
					}
					cacheData = [CogMetadataReaderMulti metadataForURL:url readers:_readers];
					break;
				}
				cacheData = [CogMetadataReaderMulti metadataForURL:url readers:readers];
				break;
			} else {
				classString = [readers objectAtIndex:0];
			}
		} else {
			// No metadata reader registered for this extension.
			cacheData = nil;
			break;
		}
		if(skip && [classString isEqualToString:@"CueSheetMetadataReader"]) {
			cacheData = nil;
			break;
		}
		Class metadataReader = NSClassFromString(classString);
		cacheData = [metadataReader metadataForURL:url];
	} while(0);
	// Normalize failure to an empty dictionary so the miss is still cached.
	if(cacheData == nil) {
		cacheData = [NSDictionary dictionary];
	}
	if(cacheData) {
		NSData *image = [cacheData objectForKey:@"albumArt"];
		if(nil == image) {
			// Try to load image from external file
			NSString *path = [[url path] stringByDeletingLastPathComponent];
			// Gather list of candidate image files
			NSArray *fileNames = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:path error:nil];
			NSArray *types = @[@"jpg", @"jpeg", @"png", @"gif", @"webp", @"avif", @"heic"];
			NSArray *imageFileNames = [fileNames pathsMatchingExtensions:types];
			// First file whose base name matches +isCoverFile: wins.
			for(NSString *fileName in imageFileNames) {
				if([PluginController isCoverFile:fileName]) {
					image = [NSData dataWithContentsOfFile:[path stringByAppendingPathComponent:fileName]];
					break;
				}
			}
			if(image) {
				NSMutableDictionary *data = [cacheData mutableCopy];
				[data setValue:image forKey:@"albumArt"];
				cacheData = data;
			}
		}
	}
	cache_insert_metadata(url, cacheData);
	return cacheData;
}
// If no properties reader is defined, use the decoder's properties.
- (NSDictionary *)propertiesForURL:(NSURL *)url skipCue:(BOOL)skip {
	// Determine stream properties for a local URL, cache-first: try the
	// properties readers registered by extension, then by MIME type, and
	// finally fall back to opening a decoder and merging its properties with
	// its metadata. Returns nil for remote URLs or when the source/decoder
	// cannot be opened; successful results are cached.
	NSString *urlScheme = [url scheme];
	if([urlScheme isEqualToString:@"http"] ||
	   [urlScheme isEqualToString:@"https"])
		return nil;
	NSDictionary *properties = nil;
	properties = cache_access_properties(url);
	if(properties) return properties;
	NSString *ext = [url pathExtension];
	id<CogSource> source = [self audioSourceForURL:url];
	if(![source open:url])
		return nil;
	// NOTE(review): the source is never explicitly closed on any path below;
	// presumably its dealloc releases the underlying handle — confirm.
	NSArray *readers = [propertiesReadersByExtension objectForKey:[ext lowercaseString]];
	NSString *classString = nil;
	if(readers) {
		if([readers count] > 1) {
			// Multiple candidates: let the multiplexer try each in turn.
			properties = [CogPropertiesReaderMulti propertiesForSource:source readers:readers];
			if(properties != nil && [properties count]) {
				cache_insert_properties(url, properties);
				return properties;
			}
		} else {
			classString = [readers objectAtIndex:0];
		}
	} else {
		// No extension match; fall back to the MIME type reported by the source.
		readers = [propertiesReadersByMimeType objectForKey:[[source mimeType] lowercaseString]];
		if(readers) {
			if([readers count] > 1) {
				properties = [CogPropertiesReaderMulti propertiesForSource:source readers:readers];
				if(properties != nil && [properties count]) {
					cache_insert_properties(url, properties);
					return properties;
				}
			} else {
				classString = [readers objectAtIndex:0];
			}
		}
	}
	// Single registered reader (from either lookup above).
	if(classString) {
		Class propertiesReader = NSClassFromString(classString);
		properties = [propertiesReader propertiesForSource:source];
		if(properties != nil && [properties count]) {
			cache_insert_properties(url, properties);
			return properties;
		}
	}
	// Last resort: open a decoder and use its properties plus metadata.
	{
		id<CogDecoder> decoder = [self audioDecoderForSource:source skipCue:skip];
		if(![decoder open:source]) {
			return nil;
		}
		NSDictionary *properties = [decoder properties];
		NSDictionary *metadata = [decoder metadata];
		[decoder close];
		NSDictionary *cacheData = [NSDictionary dictionaryByMerging:properties with:metadata];
		cache_insert_properties(url, cacheData);
		return cacheData;
	}
}
- (int)putMetadataInURL:(NSURL *)url {
	// Stub: metadata writing is not implemented; always returns 0.
	return 0;
}
@end
// Convert a C string of unknown encoding to an NSString.
// Tries UTF-8 first; on failure falls back to Foundation's encoding
// detection. Always returns a non-nil string — empty on total failure or
// empty/NULL input.
NSString *guess_encoding_of_string(const char *input) {
	if(!input || !*input) {
		return @"";
	}

	NSString *result = nil;
	@try {
		result = [NSString stringWithUTF8String:input];
	}
	@catch(NSException *e) {
		result = nil;
	}
	if(result) {
		return result;
	}

	// This method is incredibly slow
	NSData *bytes = [NSData dataWithBytes:input length:strlen(input)];
	[NSString stringEncodingForData:bytes encodingOptions:nil convertedString:&result usedLossyConversion:nil];
	return result ? result : @"";
}

View file

@ -7,11 +7,12 @@
*
*/
#import <Foundation/Foundation.h>
typedef NS_ENUM(NSInteger, CogStatus) {
CogStatusStopped = 0,
CogStatusPaused,
CogStatusPlaying,
CogStatusStopping,
enum
{
kCogStatusPaused = 0,
kCogStatusStopped,
kCogStatusPlaying,
// kCogStatusEndOfFile,
// kCogStatusEndOfPlaylist,
// kCogStatusPlaybackEnded
};

View file

@ -22,7 +22,7 @@
#include <AudioToolbox/AudioFile.h>
static BOOL hostIsBigEndian(void)
BOOL hostIsBigEndian()
{
#ifdef __BIG_ENDIAN__
return YES;
@ -42,27 +42,21 @@ AudioStreamBasicDescription propertiesToASBD(NSDictionary *properties)
asbd.mBitsPerChannel = [[properties objectForKey:@"bitsPerSample"] intValue];
asbd.mChannelsPerFrame = [[properties objectForKey:@"channels"] intValue];;
asbd.mBytesPerFrame = ((asbd.mBitsPerChannel+7)/8)*asbd.mChannelsPerFrame;
asbd.mBytesPerFrame = (asbd.mBitsPerChannel/8)*asbd.mChannelsPerFrame;
asbd.mFramesPerPacket = 1;
asbd.mBytesPerPacket = asbd.mBytesPerFrame * asbd.mFramesPerPacket;
asbd.mReserved = 0;
BOOL isFloat = [[properties objectForKey:@"floatingPoint"] boolValue];
if ([[properties objectForKey:@"endian"] isEqualToString:@"big"] || ([[properties objectForKey:@"endian"] isEqualToString:@"host"] && hostIsBigEndian() ))
{
asbd.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
asbd.mFormatFlags |= kLinearPCMFormatFlagIsAlignedHigh;
}
if(isFloat == NO && [[properties objectForKey:@"unSigned"] boolValue] == NO) {
if ([[properties objectForKey:@"unsigned"] boolValue] == NO) {
asbd.mFormatFlags |= kLinearPCMFormatFlagIsSignedInteger;
}
if (isFloat) {
asbd.mFormatFlags |= kLinearPCMFormatFlagIsFloat | kAudioFormatFlagIsPacked;
}
return asbd;
}

View file

@ -0,0 +1,88 @@
//
// VirtualRingBuffer.h
// PlayBufferedSoundFile
//
/*
Copyright (c) 2002, Kurt Revis. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of Snoize nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
//
// VirtualRingBuffer implements a classic ring buffer (or circular buffer), with a couple of twists.
//
// * It allows reads and writes to happen in different threads, with no explicit locking,
// so readers and writers will never block. This is useful if either thread uses the
// time-constraint scheduling policy, since it is bad for such threads to block for
// indefinite amounts of time.
//
// * It uses a virtual memory trick to allow the client to read or write using just one
// operation, even if the data involved wraps around the end of the buffer. We allocate
// our buffer normally, and then place a VM region immediately after it in the address
// space which maps back to the "real" buffer. So reads and writes into both sections
// are transparently translated into the same physical memory.
// This makes the API much simpler to use, and saves us from doing some math to
// calculate the wraparound points.
// The tradeoff is that we use twice as much address space for the buffer than we would
// otherwise. Address space is not typically constrained for most applications, though,
// so this isn't a big problem.
// The idea for this trick came from <http://phil.ipal.org/freeware/vrb/> (via sweetcode.org),
// although none of that code is used here. (We use the Mach VM API directly.)
//
// Threading note:
// It is expected that this object will be shared between exactly two threads; one will
// always read and the other will always write. In that situation, the implementation is
// thread-safe, and this object will never block or yield.
// It will also work in one thread, of course (although I don't know why you'd bother).
// However, if you have multiple reader or writer threads, all bets are off!
#import <Foundation/Foundation.h>
// Lock-free single-reader/single-writer ring buffer backed by a mirrored VM
// mapping (see the long comment above and VirtualRingBuffer.m).
@interface VirtualRingBuffer : NSObject
{
    void *buffer;
    void *bufferEnd;
    UInt32 bufferLength;
    // buffer is the start of the ring buffer's address space.
    // bufferEnd is the end of the "real" buffer (always buffer + bufferLength).
    // Note that the "virtual" portion of the buffer extends from bufferEnd to bufferEnd+bufferLength.

    // Both pointers are NULL while the buffer is empty; they are shared
    // between the reader and writer threads without locks (see the .m for
    // the ordering assumptions).
    void *readPointer;
    void *writePointer;
}

- (id)initWithLength:(UInt32)length;
// Note: The specified length will be rounded up to an integral number of VM pages.
// Returns nil if the backing VM allocation fails.

// Empties the buffer. It is NOT safe to do this while anyone is reading from or writing to the buffer.
- (void)empty;
// Checks if the buffer is empty or not. This is safe in any thread.
- (BOOL)isEmpty;

// Returns the actual (page-rounded) capacity in bytes.
- (UInt32)bufferLength;

// Read operations:
// The reading thread must call this method first.
- (UInt32)lengthAvailableToReadReturningPointer:(void **)returnedReadPointer;
// Iff a value > 0 is returned, the reading thread may go on to read that much data from the returned pointer.
// Afterwards, the reading thread must call didReadLength:.
- (void)didReadLength:(UInt32)length;

// Write operations:
// The writing thread must call this method first.
- (UInt32)lengthAvailableToWriteReturningPointer:(void **)returnedWritePointer;
// Iff a value > 0 is returned, the writing thread may then write that much data into the returned pointer.
// Afterwards, the writing thread must call didWriteLength:.
- (void)didWriteLength:(UInt32)length;

@end

View file

@ -0,0 +1,309 @@
//
// VirtualRingBuffer.m
// PlayBufferedSoundFile
//
/*
Copyright (c) 2002, Kurt Revis. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of Snoize nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "VirtualRingBuffer.h"
#include <mach/mach.h>
#include <mach/mach_error.h>
@implementation VirtualRingBuffer
static void *allocateVirtualBuffer(UInt32 bufferLength);
static void deallocateVirtualBuffer(void *buffer, UInt32 bufferLength);
- (id)initWithLength:(UInt32)length
{
    // Designated initializer. `length` is rounded up to whole VM pages;
    // returns nil when the mirrored buffer cannot be allocated.
    // BUG FIX: the result of [super init] must be assigned back to self —
    // the previous code only nil-checked it and kept using the original
    // self, which is wrong if the superclass returns a replacement object.
    if (!(self = [super init]))
        return nil;

    // We need to allocate entire VM pages, so round the specified length up to the next page if necessary.
    bufferLength = round_page(length);

    buffer = allocateVirtualBuffer(bufferLength);
    if (buffer) {
        bufferEnd = buffer + bufferLength;
    } else {
        // Manual retain/release file (see -dealloc): release self on failure.
        [self release];
        return nil;
    }

    readPointer = NULL;
    writePointer = NULL;

    return self;
}
- (void)dealloc
{
    // Manual retain/release class: unmap the mirrored VM region (both the
    // real and the aliased half) before the superclass tears us down.
    if (buffer)
        deallocateVirtualBuffer(buffer, bufferLength);

    [super dealloc];
}
- (void)empty
{
    // Resets the buffer to the empty state (both pointers NULL).
    // Assumption:
    // No one is reading or writing from the buffer, in any thread, when this method is called.
    readPointer = NULL;
    writePointer = NULL;
}
- (BOOL)isEmpty
{
    // The buffer is empty when either pointer is NULL — the same condition
    // -lengthAvailableToReadReturningPointer: treats as "empty", and the
    // state -empty and -didReadLength: reset to.
    // BUG FIX: the previous implementation returned
    // (readPointer != NULL && writePointer != NULL), i.e. YES exactly when
    // the buffer CONTAINED data — the inverse of "is empty".
    return (readPointer == NULL || writePointer == NULL);
}
- (UInt32)bufferLength
{
    // Actual capacity in bytes — the requested length rounded up to whole
    // VM pages in -initWithLength:.
    return bufferLength;
}
//
// Theory of operation:
//
// This class keeps a pointer to the next byte to be read (readPointer) and a pointer to the next byte to be written (writePointer).
// readPointer is only advanced in the reading thread (except for one case: when the buffer first has data written to it).
// writePointer is only advanced in the writing thread.
//
// Since loading and storing word length data is atomic, each pointer can safely be modified in one thread while the other thread
// uses it, IF each thread is careful to make a local copy of the "opposite" pointer when necessary.
//
//
// Read operations
//
- (UInt32)lengthAvailableToReadReturningPointer:(void **)returnedReadPointer
{
    // Returns how many bytes may currently be read, and where from. Thanks
    // to the mirrored mapping the readable region is always contiguous,
    // even when it wraps past bufferEnd. Reader-thread only.
    // Assumptions:
    // returnedReadPointer != NULL

    UInt32 length;
    // Read this pointer exactly once, so we're safe in case it is changed in another thread
    void *localWritePointer = writePointer;

    // Depending on out-of-order execution and memory storage, either one of these may be NULL when the buffer is empty. So we must check both.
    if (!readPointer || !localWritePointer) {
        // The buffer is empty
        length = 0;
    } else if (localWritePointer > readPointer) {
        // Write is ahead of read in the buffer
        length = localWritePointer - readPointer;
    } else {
        // Write has wrapped around past read, OR write == read (the buffer is full)
        length = bufferLength - (readPointer - localWritePointer);
    }

    *returnedReadPointer = readPointer;
    return length;
}
- (void)didReadLength:(UInt32)length
{
    // Advances the read pointer after the reader has consumed `length`
    // bytes, wrapping at bufferEnd. Reader-thread only.
    // Assumptions:
    // [self lengthAvailableToReadReturningPointer:] currently returns a value >= length
    // length > 0

    void *newReadPointer;

    newReadPointer = readPointer + length;
    if (newReadPointer >= bufferEnd)
        newReadPointer -= bufferLength;

    if (newReadPointer == writePointer) {
        // We just read the last data out of the buffer, so it is now empty.
        newReadPointer = NULL;
    }

    // Store the new read pointer. This is the only place this happens in the read thread.
    readPointer = newReadPointer;
}
//
// Write operations
//
- (UInt32)lengthAvailableToWriteReturningPointer:(void **)returnedWritePointer
{
    // Returns how many bytes may currently be written, and where to. The
    // mirrored mapping makes the writable region contiguous even when it
    // wraps. Writer-thread only.
    // Assumptions:
    // returnedWritePointer != NULL

    UInt32 length;
    // Read this pointer exactly once, so we're safe in case it is changed in another thread
    void *localReadPointer = readPointer;

    // Either one of these may be NULL when the buffer is empty. So we must check both.
    if (!localReadPointer || !writePointer) {
        // The buffer is empty. Set it up to be written into.
        // This is one of the two places the write pointer can change; both are in the write thread.
        writePointer = buffer;
        length = bufferLength;
    } else if (writePointer <= localReadPointer) {
        // Write is before read in the buffer, OR write == read (meaning that the buffer is full).
        length = localReadPointer - writePointer;
    } else {
        // Write is behind read in the buffer. The available space wraps around.
        length = (bufferEnd - writePointer) + (localReadPointer - buffer);
    }

    *returnedWritePointer = writePointer;
    return length;
}
- (void)didWriteLength:(UInt32)length
{
    // Advances the write pointer after the writer has produced `length`
    // bytes, wrapping at bufferEnd; also publishes the read pointer when
    // writing into a previously empty buffer. Writer-thread only.
    // Assumptions:
    // [self lengthAvailableToWriteReturningPointer:] currently returns a value >= length
    // length > 0

    void *oldWritePointer = writePointer;
    void *newWritePointer;

    // Advance the write pointer, wrapping around if necessary.
    newWritePointer = writePointer + length;
    if (newWritePointer >= bufferEnd)
        newWritePointer -= bufferLength;

    // This is one of the two places the write pointer can change; both are in the write thread.
    writePointer = newWritePointer;

    // Also, if the read pointer is NULL, then we just wrote into a previously empty buffer, so set the read pointer.
    // This is the only place the read pointer is changed in the write thread.
    // The read thread should never change the read pointer when it is NULL, so this is safe.
    if (!readPointer)
        readPointer = oldWritePointer;
}
@end
// Allocates bufferLength bytes of real memory and maps a second, aliased
// view of the same physical pages immediately after it, so the 2*bufferLength
// address range wraps transparently. Returns the base address, or NULL on
// any failure (partial allocations are torn down).
void *allocateVirtualBuffer(UInt32 bufferLength)
{
    kern_return_t error;
    vm_address_t originalAddress = (vm_address_t)NULL;
    vm_address_t realAddress = (vm_address_t)NULL;
    mach_port_t memoryEntry;
    vm_size_t memoryEntryLength;
    vm_address_t virtualAddress = (vm_address_t)NULL;

    // We want to find where we can get 2 * bufferLength bytes of contiguous address space.
    // So let's just allocate that space, remember its address, and deallocate it.
    // (This doesn't actually have to touch all of that memory so it's not terribly expensive.)
    error = vm_allocate(mach_task_self(), &originalAddress, 2 * bufferLength, TRUE);
    if (error) {
#if DEBUG
        mach_error("vm_allocate initial chunk", error);
#endif
        return NULL;
    }

    error = vm_deallocate(mach_task_self(), originalAddress, 2 * bufferLength);
    if (error) {
#if DEBUG
        mach_error("vm_deallocate initial chunk", error);
#endif
        return NULL;
    }

    // Then allocate a "real" block of memory at the same address, but with the normal bufferLength.
    realAddress = originalAddress;
    error = vm_allocate(mach_task_self(), &realAddress, bufferLength, FALSE);
    if (error) {
#if DEBUG
        mach_error("vm_allocate real chunk", error);
#endif
        return NULL;
    }

    if (realAddress != originalAddress) {
#if DEBUG
        NSLog(@"allocateVirtualBuffer: vm_allocate 2nd time didn't return same address (%p vs %p)", originalAddress, realAddress);
#endif
        goto errorReturn;
    }

    // Then make a memory entry for the area we just allocated.
    memoryEntryLength = bufferLength;
    error = mach_make_memory_entry(mach_task_self(), &memoryEntryLength, realAddress, VM_PROT_READ | VM_PROT_WRITE, &memoryEntry, (vm_address_t)NULL);
    if (error) {
#if DEBUG
        mach_error("mach_make_memory_entry", error);
#endif
        goto errorReturn;
    }
    if (!memoryEntry) {
#if DEBUG
        NSLog(@"mach_make_memory_entry: returned memoryEntry of NULL");
#endif
        goto errorReturn;
    }
    if (memoryEntryLength != bufferLength) {
#if DEBUG
        NSLog(@"mach_make_memory_entry: size changed (from %0x to %0x)", bufferLength, memoryEntryLength);
#endif
        goto errorReturn;
    }

    // And map the area immediately after the first block, with length bufferLength, to that memory entry.
    virtualAddress = realAddress + bufferLength;
    error = vm_map(mach_task_self(), &virtualAddress, bufferLength, 0, FALSE, memoryEntry, 0, FALSE, VM_PROT_READ | VM_PROT_WRITE, VM_PROT_READ | VM_PROT_WRITE, VM_INHERIT_DEFAULT);
    if (error) {
#if DEBUG
        mach_error("vm_map", error);
#endif
        // TODO Retry from the beginning, instead of failing completely. There is a tiny (but > 0) probability that someone
        // will allocate this space out from under us.
        virtualAddress = (vm_address_t)NULL;
        goto errorReturn;
    }
    if (virtualAddress != realAddress + bufferLength) {
#if DEBUG
        NSLog(@"vm_map: didn't return correct address (%p vs %p)", realAddress + bufferLength, virtualAddress);
#endif
        goto errorReturn;
    }

    // Success!
    return (void *)realAddress;

errorReturn:
    // Unwind whichever of the two mappings were established before failing.
    if (realAddress)
        vm_deallocate(mach_task_self(), realAddress, bufferLength);
    if (virtualAddress)
        vm_deallocate(mach_task_self(), virtualAddress, bufferLength);

    return NULL;
}
// Releases a buffer created by allocateVirtualBuffer. The real allocation and
// the aliased mapping are adjacent, so one vm_deallocate of 2*bufferLength
// covers both.
void deallocateVirtualBuffer(void *buffer, UInt32 bufferLength)
{
    kern_return_t error;

    // We can conveniently deallocate both the vm_allocated memory and
    // the vm_mapped region at the same time.
    error = vm_deallocate(mach_task_self(), (vm_address_t)buffer, bufferLength * 2);
    if (error) {
#if DEBUG
        mach_error("vm_deallocate in dealloc", error);
#endif
    }
}

View file

@ -1,40 +0,0 @@
/*
DeaDBeeF -- the music player
Copyright (C) 2009-2021 Alexey Yakovenko and other contributors
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
#ifndef FFT_H
#define FFT_H

// Public interface for the Accelerate-backed FFT helpers.
// Bug fix: the original had a stray '}' directly after `extern "C" {`,
// which closed the linkage block immediately and left the later closing
// brace unmatched when this header is included from C++.
#ifdef __cplusplus
extern "C" {
#endif

// Compute an fft_size-bin magnitude spectrum from 2*fft_size input samples.
void fft_calculate(const float *data, float *freq, int fft_size);

// Release all internal FFT buffers and the DFT setup.
void fft_free(void);

#ifdef __cplusplus
}
#endif

#endif

View file

@ -1,106 +0,0 @@
/*
DeaDBeeF -- the music player
Copyright (C) 2009-2021 Alexey Yakovenko and other contributors
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
#include "fft.h"
#include <Accelerate/Accelerate.h>
// Cached FFT state, (re)allocated by _init_buffers when the size changes.
static int _fft_size;            // size the buffers below were allocated for (0 = none)
static float *_input_real;       // windowed real input, 2*_fft_size floats
static float *_input_imaginary;  // imaginary input; calloc'd to zero and never written
static float *_output_real;      // DFT output (real part), 2*_fft_size+1 floats
static float *_output_imaginary; // DFT output (imaginary part), 2*_fft_size+1 floats
static float *_hamming;          // precomputed Hamming window, 2*_fft_size floats
static float *_sq_mags;          // per-bin squared magnitudes, then magnitudes, _fft_size floats
static vDSP_DFT_Setup _dft_setup; // vDSP plan for a forward 2*_fft_size complex DFT
// Apparently _mm_malloc is Intel-only on newer macOS targets, so use supported posix_memalign
static void *_memalign_calloc(size_t count, size_t size, size_t align) {
size *= count;
void *ret = NULL;
if(posix_memalign(&ret, align, size) != 0) {
return NULL;
}
bzero(ret, size);
return ret;
}
// Ensure the module-level buffers and DFT setup match fft_size, reallocating
// them (via fft_free + fresh allocations) only when the size changed.
// NOTE(review): allocation results are not checked here, matching the
// original behavior — a failed allocation would surface later in vDSP calls.
static void
_init_buffers(int fft_size) {
    // Fast path: buffers already sized for this request.
    if (fft_size == _fft_size) {
        return;
    }
    fft_free();
    const int dft_size = fft_size * 2; // the DFT operates on 2*fft_size samples
    _input_real = _memalign_calloc(dft_size, sizeof(float), 16);
    _input_imaginary = _memalign_calloc(dft_size, sizeof(float), 16);
    _hamming = _memalign_calloc(dft_size, sizeof(float), 16);
    _sq_mags = _memalign_calloc(fft_size, sizeof(float), 16);
    _output_real = _memalign_calloc(dft_size + 1, sizeof(float), 16);
    _output_imaginary = _memalign_calloc(dft_size + 1, sizeof(float), 16);
    _dft_setup = vDSP_DFT_zop_CreateSetup(NULL, dft_size, FFT_FORWARD);
    vDSP_hamm_window(_hamming, dft_size, 0);
    _fft_size = fft_size;
}
// Compute an fft_size-bin magnitude spectrum into freq from 2*fft_size real
// samples in data: window -> forward DFT -> magnitude -> scale by 2/fft_size.
void fft_calculate(const float *data, float *freq, int fft_size) {
    const int sample_count = fft_size * 2;
    _init_buffers(fft_size);

    // Apply the Hamming window to the real input; the imaginary input stays zero.
    vDSP_vmul(data, 1, _hamming, 1, _input_real, 1, sample_count);

    // Forward complex DFT into the split real/imaginary output buffers.
    vDSP_DFT_Execute(_dft_setup, _input_real, _input_imaginary, _output_real, _output_imaginary);

    // Squared magnitude per bin ...
    DSPSplitComplex spectrum = {
        .realp = _output_real,
        .imagp = _output_imaginary
    };
    vDSP_zvmags(&spectrum, 1, _sq_mags, 1, fft_size);

    // ... then square-root in place to get magnitudes ...
    int bin_count = fft_size;
    vvsqrtf(_sq_mags, _sq_mags, &bin_count);

    // ... and scale by 2/fft_size into the caller's output buffer.
    const float scale = 2.f / fft_size;
    vDSP_vsmul(_sq_mags, 1, &scale, freq, 1, fft_size);
}
void fft_free(void) {
free(_input_real);
free(_input_imaginary);
free(_hamming);
free(_sq_mags);
free(_output_real);
free(_output_imaginary);
if(_dft_setup != NULL) {
vDSP_DFT_DestroySetup(_dft_setup);
}
_input_real = NULL;
_input_imaginary = NULL;
_hamming = NULL;
_sq_mags = NULL;
_dft_setup = NULL;
_output_real = NULL;
_output_imaginary = NULL;
}

Some files were not shown because too many files have changed in this diff Show more