Compare commits

..

1 Commits

Author SHA1 Message Date
Thong Nguyen bf13ba318f Buffering branch 2014-03-13 18:15:27 +00:00
45 changed files with 696 additions and 1373 deletions

View File

@ -230,7 +230,7 @@
A1115929188D686000641365 /* Project object */ = { A1115929188D686000641365 /* Project object */ = {
isa = PBXProject; isa = PBXProject;
attributes = { attributes = {
LastUpgradeCheck = 0710; LastUpgradeCheck = 0510;
ORGANIZATIONNAME = "Thong Nguyen"; ORGANIZATIONNAME = "Thong Nguyen";
TargetAttributes = { TargetAttributes = {
A111594B188D686000641365 = { A111594B188D686000641365 = {
@ -346,7 +346,6 @@
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO; COPY_PHASE_STRIP = NO;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99; GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO; GCC_DYNAMIC_NO_PIC = NO;
GCC_OPTIMIZATION_LEVEL = 0; GCC_OPTIMIZATION_LEVEL = 0;
@ -366,7 +365,7 @@
/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include, /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include,
"$(SRCROOT)/../StreamingKit/StreamingKit", "$(SRCROOT)/../StreamingKit/StreamingKit",
); );
IPHONEOS_DEPLOYMENT_TARGET = 4.3; IPHONEOS_DEPLOYMENT_TARGET = 7.0;
ONLY_ACTIVE_ARCH = YES; ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos; SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2"; TARGETED_DEVICE_FAMILY = "1,2";
@ -404,7 +403,7 @@
/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include, /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/include,
"$(SRCROOT)/../StreamingKit/StreamingKit", "$(SRCROOT)/../StreamingKit/StreamingKit",
); );
IPHONEOS_DEPLOYMENT_TARGET = 4.3; IPHONEOS_DEPLOYMENT_TARGET = 7.0;
SDKROOT = iphoneos; SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2"; TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES; VALIDATE_PRODUCT = YES;
@ -423,7 +422,6 @@
IPHONEOS_DEPLOYMENT_TARGET = 6.0; IPHONEOS_DEPLOYMENT_TARGET = 6.0;
LLVM_LTO = YES; LLVM_LTO = YES;
OTHER_LDFLAGS = "-ObjC"; OTHER_LDFLAGS = "-ObjC";
PRODUCT_BUNDLE_IDENTIFIER = "abstractpath.com.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
WRAPPER_EXTENSION = app; WRAPPER_EXTENSION = app;
}; };
@ -440,7 +438,6 @@
IPHONEOS_DEPLOYMENT_TARGET = 6.0; IPHONEOS_DEPLOYMENT_TARGET = 6.0;
LLVM_LTO = YES; LLVM_LTO = YES;
OTHER_LDFLAGS = "-ObjC"; OTHER_LDFLAGS = "-ObjC";
PRODUCT_BUNDLE_IDENTIFIER = "abstractpath.com.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
WRAPPER_EXTENSION = app; WRAPPER_EXTENSION = app;
}; };
@ -462,7 +459,6 @@
"$(inherited)", "$(inherited)",
); );
INFOPLIST_FILE = "ExampleAppTests/ExampleAppTests-Info.plist"; INFOPLIST_FILE = "ExampleAppTests/ExampleAppTests-Info.plist";
PRODUCT_BUNDLE_IDENTIFIER = "abstractpath.com.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
TEST_HOST = "$(BUNDLE_LOADER)"; TEST_HOST = "$(BUNDLE_LOADER)";
WRAPPER_EXTENSION = xctest; WRAPPER_EXTENSION = xctest;
@ -481,7 +477,6 @@
GCC_PRECOMPILE_PREFIX_HEADER = YES; GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = "ExampleApp/ExampleApp-Prefix.pch"; GCC_PREFIX_HEADER = "ExampleApp/ExampleApp-Prefix.pch";
INFOPLIST_FILE = "ExampleAppTests/ExampleAppTests-Info.plist"; INFOPLIST_FILE = "ExampleAppTests/ExampleAppTests-Info.plist";
PRODUCT_BUNDLE_IDENTIFIER = "abstractpath.com.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
TEST_HOST = "$(BUNDLE_LOADER)"; TEST_HOST = "$(BUNDLE_LOADER)";
WRAPPER_EXTENSION = xctest; WRAPPER_EXTENSION = xctest;

View File

@ -32,7 +32,6 @@
AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof(bufferLength), &bufferLength); AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof(bufferLength), &bufferLength);
self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
self.window.rootViewController = [[UIViewController alloc] init];
self.window.backgroundColor = [UIColor whiteColor]; self.window.backgroundColor = [UIColor whiteColor];
@ -47,9 +46,9 @@
[[UIApplication sharedApplication] beginReceivingRemoteControlEvents]; [[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
[self becomeFirstResponder]; [self becomeFirstResponder];
[self.window makeKeyAndVisible]; [self.window addSubview:audioPlayerView];
[self.window.rootViewController.view addSubview:audioPlayerView]; [self.window makeKeyAndVisible];
return YES; return YES;
} }
@ -61,22 +60,13 @@
-(void) audioPlayerViewPlayFromHTTPSelected:(AudioPlayerView*)audioPlayerView -(void) audioPlayerViewPlayFromHTTPSelected:(AudioPlayerView*)audioPlayerView
{ {
NSURL* url = [NSURL URLWithString:@"http://www.abstractpath.com/files/audiosamples/sample.mp3"]; NSURL* url = [NSURL URLWithString:@"http://fs.bloom.fm/oss/audiosamples/sample.mp3"];
STKDataSource* dataSource = [STKAudioPlayer dataSourceFromURL:url]; STKDataSource* dataSource = [STKAudioPlayer dataSourceFromURL:url];
[audioPlayer setDataSource:dataSource withQueueItemId:[[SampleQueueId alloc] initWithUrl:url andCount:0]]; [audioPlayer setDataSource:dataSource withQueueItemId:[[SampleQueueId alloc] initWithUrl:url andCount:0]];
} }
-(void) audioPlayerViewPlayFromIcecastSelected:(AudioPlayerView *)audioPlayerView
{
NSURL* url = [NSURL URLWithString:@"http://shoutmedia.abc.net.au:10326"];
STKDataSource* dataSource = [STKAudioPlayer dataSourceFromURL:url];
[audioPlayer setDataSource:dataSource withQueueItemId:[[SampleQueueId alloc] initWithUrl:url andCount:0]];
}
-(void) audioPlayerViewQueueShortFileSelected:(AudioPlayerView*)audioPlayerView -(void) audioPlayerViewQueueShortFileSelected:(AudioPlayerView*)audioPlayerView
{ {
NSString* path = [[NSBundle mainBundle] pathForResource:@"airplane" ofType:@"aac"]; NSString* path = [[NSBundle mainBundle] pathForResource:@"airplane" ofType:@"aac"];
@ -99,7 +89,7 @@
-(void) audioPlayerViewQueuePcmWaveFileSelected:(AudioPlayerView*)audioPlayerView -(void) audioPlayerViewQueuePcmWaveFileSelected:(AudioPlayerView*)audioPlayerView
{ {
NSURL* url = [NSURL URLWithString:@"http://www.abstractpath.com/files/audiosamples/perfectly.wav"]; NSURL* url = [NSURL URLWithString:@"http://fs.bloom.fm/oss/audiosamples/perfectly.wav"];
STKDataSource* dataSource = [STKAudioPlayer dataSourceFromURL:url]; STKDataSource* dataSource = [STKAudioPlayer dataSourceFromURL:url];

View File

@ -39,7 +39,6 @@
@protocol AudioPlayerViewDelegate<NSObject> @protocol AudioPlayerViewDelegate<NSObject>
-(void) audioPlayerViewPlayFromHTTPSelected:(AudioPlayerView*)audioPlayerView; -(void) audioPlayerViewPlayFromHTTPSelected:(AudioPlayerView*)audioPlayerView;
-(void) audioPlayerViewPlayFromIcecastSelected:(AudioPlayerView*)audioPlayerView;
-(void) audioPlayerViewQueueShortFileSelected:(AudioPlayerView*)audioPlayerView; -(void) audioPlayerViewQueueShortFileSelected:(AudioPlayerView*)audioPlayerView;
-(void) audioPlayerViewPlayFromLocalFileSelected:(AudioPlayerView*)audioPlayerView; -(void) audioPlayerViewPlayFromLocalFileSelected:(AudioPlayerView*)audioPlayerView;
-(void) audioPlayerViewQueuePcmWaveFileSelected:(AudioPlayerView*)audioPlayerView; -(void) audioPlayerViewQueuePcmWaveFileSelected:(AudioPlayerView*)audioPlayerView;
@ -58,7 +57,6 @@
UIButton* playButton; UIButton* playButton;
UIButton* stopButton; UIButton* stopButton;
UIButton* playFromHTTPButton; UIButton* playFromHTTPButton;
UIButton* playFromIcecastButton;
UIButton* queueShortFileButton; UIButton* queueShortFileButton;
UIButton* queuePcmWaveFileFromHTTPButton; UIButton* queuePcmWaveFileFromHTTPButton;
UIButton* playFromLocalFileButton; UIButton* playFromLocalFileButton;

View File

@ -58,47 +58,42 @@
CGSize size = CGSizeMake(220, 50); CGSize size = CGSizeMake(220, 50);
playFromHTTPButton = [UIButton buttonWithType:UIButtonTypeRoundedRect]; playFromHTTPButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
playFromHTTPButton.frame = CGRectMake((frame.size.width - size.width) / 2, frame.size.height * 0.10, size.width, size.height); playFromHTTPButton.frame = CGRectMake((320 - size.width) / 2, frame.size.height * 0.10, size.width, size.height);
[playFromHTTPButton addTarget:self action:@selector(playFromHTTPButtonTouched) forControlEvents:UIControlEventTouchUpInside]; [playFromHTTPButton addTarget:self action:@selector(playFromHTTPButtonTouched) forControlEvents:UIControlEventTouchUpInside];
[playFromHTTPButton setTitle:@"Play from HTTP" forState:UIControlStateNormal]; [playFromHTTPButton setTitle:@"Play from HTTP" forState:UIControlStateNormal];
playFromIcecastButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
playFromIcecastButton.frame = CGRectMake((frame.size.width - size.width) / 2, frame.size.height * 0.10 + 35, size.width, size.height);
[playFromIcecastButton addTarget:self action:@selector(playFromIcecasButtonTouched) forControlEvents:UIControlEventTouchUpInside];
[playFromIcecastButton setTitle:@"Play from Icecast" forState:UIControlStateNormal];
playFromLocalFileButton = [UIButton buttonWithType:UIButtonTypeRoundedRect]; playFromLocalFileButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
playFromLocalFileButton.frame = CGRectMake((frame.size.width - size.width) / 2, frame.size.height * 0.10 + 70, size.width, size.height); playFromLocalFileButton.frame = CGRectMake((320 - size.width) / 2, frame.size.height * 0.10 + 50, size.width, size.height);
[playFromLocalFileButton addTarget:self action:@selector(playFromLocalFileButtonTouched) forControlEvents:UIControlEventTouchUpInside]; [playFromLocalFileButton addTarget:self action:@selector(playFromLocalFileButtonTouched) forControlEvents:UIControlEventTouchUpInside];
[playFromLocalFileButton setTitle:@"Play from Local File" forState:UIControlStateNormal]; [playFromLocalFileButton setTitle:@"Play from Local File" forState:UIControlStateNormal];
queueShortFileButton = [UIButton buttonWithType:UIButtonTypeRoundedRect]; queueShortFileButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
queueShortFileButton.frame = CGRectMake((frame.size.width - size.width) / 2, frame.size.height * 0.10 + 105, size.width, size.height); queueShortFileButton.frame = CGRectMake((320 - size.width) / 2, frame.size.height * 0.10 + 100, size.width, size.height);
[queueShortFileButton addTarget:self action:@selector(queueShortFileButtonTouched) forControlEvents:UIControlEventTouchUpInside]; [queueShortFileButton addTarget:self action:@selector(queueShortFileButtonTouched) forControlEvents:UIControlEventTouchUpInside];
[queueShortFileButton setTitle:@"Queue short file" forState:UIControlStateNormal]; [queueShortFileButton setTitle:@"Queue short file" forState:UIControlStateNormal];
queuePcmWaveFileFromHTTPButton = [UIButton buttonWithType:UIButtonTypeRoundedRect]; queuePcmWaveFileFromHTTPButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
queuePcmWaveFileFromHTTPButton.frame = CGRectMake((frame.size.width - size.width) / 2, frame.size.height * 0.10 + 140, size.width, size.height); queuePcmWaveFileFromHTTPButton.frame = CGRectMake((320 - size.width) / 2, frame.size.height * 0.10 + 150, size.width, size.height);
[queuePcmWaveFileFromHTTPButton addTarget:self action:@selector(queuePcmWaveFileButtonTouched) forControlEvents:UIControlEventTouchUpInside]; [queuePcmWaveFileFromHTTPButton addTarget:self action:@selector(queuePcmWaveFileButtonTouched) forControlEvents:UIControlEventTouchUpInside];
[queuePcmWaveFileFromHTTPButton setTitle:@"Queue PCM/WAVE from HTTP" forState:UIControlStateNormal]; [queuePcmWaveFileFromHTTPButton setTitle:@"Queue PCM/WAVE from HTTP" forState:UIControlStateNormal];
size = CGSizeMake(90, 40); size = CGSizeMake(90, 40);
playButton = [UIButton buttonWithType:UIButtonTypeRoundedRect]; playButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
playButton.frame = CGRectMake(30, 400, size.width, size.height); playButton.frame = CGRectMake(30, 380, size.width, size.height);
[playButton addTarget:self action:@selector(playButtonPressed) forControlEvents:UIControlEventTouchUpInside]; [playButton addTarget:self action:@selector(playButtonPressed) forControlEvents:UIControlEventTouchUpInside];
stopButton = [UIButton buttonWithType:UIButtonTypeRoundedRect]; stopButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
stopButton.frame = CGRectMake((frame.size.width - size.width) - 30, 400, size.width, size.height); stopButton.frame = CGRectMake((320 - size.width) - 30, 380, size.width, size.height);
[stopButton addTarget:self action:@selector(stopButtonPressed) forControlEvents:UIControlEventTouchUpInside]; [stopButton addTarget:self action:@selector(stopButtonPressed) forControlEvents:UIControlEventTouchUpInside];
[stopButton setTitle:@"Stop" forState:UIControlStateNormal]; [stopButton setTitle:@"Stop" forState:UIControlStateNormal];
muteButton = [UIButton buttonWithType:UIButtonTypeRoundedRect]; muteButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
muteButton.frame = CGRectMake((frame.size.width - size.width) - 30, 430, size.width, size.height); muteButton.frame = CGRectMake((320 - size.width) - 30, 410, size.width, size.height);
[muteButton addTarget:self action:@selector(muteButtonPressed) forControlEvents:UIControlEventTouchUpInside]; [muteButton addTarget:self action:@selector(muteButtonPressed) forControlEvents:UIControlEventTouchUpInside];
[muteButton setTitle:@"Mute" forState:UIControlStateNormal]; [muteButton setTitle:@"Mute" forState:UIControlStateNormal];
slider = [[UISlider alloc] initWithFrame:CGRectMake(20, 320, queuePcmWaveFileFromHTTPButton.frame.origin.y + queuePcmWaveFileFromHTTPButton.frame.size.height + 20, 20)]; slider = [[UISlider alloc] initWithFrame:CGRectMake(20, 320, 280, 20)];
slider.continuous = YES; slider.continuous = YES;
[slider addTarget:self action:@selector(sliderChanged) forControlEvents:UIControlEventValueChanged]; [slider addTarget:self action:@selector(sliderChanged) forControlEvents:UIControlEventValueChanged];
@ -106,16 +101,16 @@
repeatSwitch = [[UISwitch alloc] initWithFrame:CGRectMake(30, frame.size.height * 0.15 + 180, size.width, size.height)]; repeatSwitch = [[UISwitch alloc] initWithFrame:CGRectMake(30, frame.size.height * 0.15 + 180, size.width, size.height)];
enableEqSwitch = [[UISwitch alloc] initWithFrame:CGRectMake(frame.size.width - size.width - 30, frame.size.height * 0.15 + 180, size.width, size.height)]; enableEqSwitch = [[UISwitch alloc] initWithFrame:CGRectMake(320 - size.width - 30, frame.size.height * 0.15 + 180, size.width, size.height)];
enableEqSwitch.on = audioPlayer.equalizerEnabled; enableEqSwitch.on = audioPlayer.equalizerEnabled;
[enableEqSwitch addTarget:self action:@selector(onEnableEqSwitch) forControlEvents:UIControlEventAllTouchEvents]; [enableEqSwitch addTarget:self action:@selector(onEnableEqSwitch) forControlEvents:UIControlEventAllTouchEvents];
label = [[UILabel alloc] initWithFrame:CGRectMake(0, slider.frame.origin.y + slider.frame.size.height + 40, frame.size.width, 25)]; label = [[UILabel alloc] initWithFrame:CGRectMake(0, slider.frame.origin.y + slider.frame.size.height + 10, frame.size.width, 25)];
label.textAlignment = NSTextAlignmentCenter; label.textAlignment = NSTextAlignmentCenter;
statusLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, slider.frame.origin.y + slider.frame.size.height + label.frame.size.height + 50, frame.size.width, 50)]; statusLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, slider.frame.origin.y + slider.frame.size.height + label.frame.size.height + 8, frame.size.width, 50)];
statusLabel.textAlignment = NSTextAlignmentCenter; statusLabel.textAlignment = NSTextAlignmentCenter;
@ -126,7 +121,6 @@
[self addSubview:slider]; [self addSubview:slider];
[self addSubview:playButton]; [self addSubview:playButton];
[self addSubview:playFromHTTPButton]; [self addSubview:playFromHTTPButton];
[self addSubview:playFromIcecastButton];
[self addSubview:playFromLocalFileButton]; [self addSubview:playFromLocalFileButton];
[self addSubview:queueShortFileButton]; [self addSubview:queueShortFileButton];
[self addSubview:queuePcmWaveFileFromHTTPButton]; [self addSubview:queuePcmWaveFileFromHTTPButton];
@ -180,17 +174,6 @@
return; return;
} }
if (audioPlayer.currentlyPlayingQueueItemId == nil)
{
slider.value = 0;
slider.minimumValue = 0;
slider.maximumValue = 0;
label.text = @"";
return;
}
if (audioPlayer.duration != 0) if (audioPlayer.duration != 0)
{ {
slider.minimumValue = 0; slider.minimumValue = 0;
@ -205,7 +188,7 @@
slider.minimumValue = 0; slider.minimumValue = 0;
slider.maximumValue = 0; slider.maximumValue = 0;
label.text = [NSString stringWithFormat:@"Live stream %@", [self formatTimeFromSeconds:audioPlayer.progress]]; label.text = @"";
} }
statusLabel.text = audioPlayer.state == STKAudioPlayerStateBuffering ? @"buffering" : @""; statusLabel.text = audioPlayer.state == STKAudioPlayerStateBuffering ? @"buffering" : @"";
@ -220,11 +203,6 @@
[self.delegate audioPlayerViewPlayFromHTTPSelected:self]; [self.delegate audioPlayerViewPlayFromHTTPSelected:self];
} }
-(void) playFromIcecasButtonTouched
{
[self.delegate audioPlayerViewPlayFromIcecastSelected:self];
}
-(void) playFromLocalFileButtonTouched -(void) playFromLocalFileButtonTouched
{ {
[self.delegate audioPlayerViewPlayFromLocalFileSelected:self]; [self.delegate audioPlayerViewPlayFromLocalFileSelected:self];

View File

@ -9,7 +9,7 @@
<key>CFBundleExecutable</key> <key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string> <string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key> <key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string> <string>abstractpath.com.${PRODUCT_NAME:rfc1034identifier}</string>
<key>CFBundleInfoDictionaryVersion</key> <key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string> <string>6.0</string>
<key>CFBundleName</key> <key>CFBundleName</key>

View File

@ -5,31 +5,16 @@
"size" : "29x29", "size" : "29x29",
"scale" : "2x" "scale" : "2x"
}, },
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "3x"
},
{ {
"idiom" : "iphone", "idiom" : "iphone",
"size" : "40x40", "size" : "40x40",
"scale" : "2x" "scale" : "2x"
}, },
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "3x"
},
{ {
"idiom" : "iphone", "idiom" : "iphone",
"size" : "60x60", "size" : "60x60",
"scale" : "2x" "scale" : "2x"
}, },
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "3x"
},
{ {
"idiom" : "ipad", "idiom" : "ipad",
"size" : "29x29", "size" : "29x29",

View File

@ -1,6 +0,0 @@
{
"info" : {
"version" : 1,
"author" : "xcode"
}
}

View File

@ -1,31 +1,5 @@
{ {
"images" : [ "images" : [
{
"extent" : "full-screen",
"idiom" : "iphone",
"subtype" : "736h",
"filename" : "TX6sV.png",
"minimum-system-version" : "8.0",
"orientation" : "portrait",
"scale" : "3x"
},
{
"orientation" : "landscape",
"idiom" : "iphone",
"extent" : "full-screen",
"minimum-system-version" : "8.0",
"subtype" : "736h",
"scale" : "3x"
},
{
"extent" : "full-screen",
"idiom" : "iphone",
"subtype" : "667h",
"filename" : "dBEHd.png",
"minimum-system-version" : "8.0",
"orientation" : "portrait",
"scale" : "2x"
},
{ {
"orientation" : "portrait", "orientation" : "portrait",
"idiom" : "iphone", "idiom" : "iphone",
@ -34,12 +8,11 @@
"scale" : "2x" "scale" : "2x"
}, },
{ {
"extent" : "full-screen", "orientation" : "portrait",
"idiom" : "iphone", "idiom" : "iphone",
"subtype" : "retina4", "subtype" : "retina4",
"filename" : "TX6sV-2.png", "extent" : "full-screen",
"minimum-system-version" : "7.0", "minimum-system-version" : "7.0",
"orientation" : "portrait",
"scale" : "2x" "scale" : "2x"
}, },
{ {
@ -69,26 +42,6 @@
"extent" : "full-screen", "extent" : "full-screen",
"minimum-system-version" : "7.0", "minimum-system-version" : "7.0",
"scale" : "2x" "scale" : "2x"
},
{
"orientation" : "portrait",
"idiom" : "iphone",
"extent" : "full-screen",
"scale" : "1x"
},
{
"orientation" : "portrait",
"idiom" : "iphone",
"extent" : "full-screen",
"scale" : "2x"
},
{
"orientation" : "portrait",
"idiom" : "iphone",
"filename" : "TX6sV-1.png",
"extent" : "full-screen",
"subtype" : "retina4",
"scale" : "2x"
} }
], ],
"info" : { "info" : {

Binary file not shown.

Before

Width:  |  Height:  |  Size: 17 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 17 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 16 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 8.3 KiB

View File

@ -7,7 +7,7 @@
<key>CFBundleExecutable</key> <key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string> <string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key> <key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string> <string>abstractpath.com.${PRODUCT_NAME:rfc1034identifier}</string>
<key>CFBundleInfoDictionaryVersion</key> <key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string> <string>6.0</string>
<key>CFBundlePackageType</key> <key>CFBundlePackageType</key>

View File

@ -58,7 +58,7 @@
-(void) playFromHTTP -(void) playFromHTTP
{ {
[audioPlayer play:@"http://www.abstractpath.com/files/audiosamples/sample.mp3"]; [audioPlayer play:@"http://fs.bloom.fm/oss/audiosamples/sample.mp3"];
} }
-(void) tick:(NSTimer*)timer -(void) tick:(NSTimer*)timer
@ -72,7 +72,7 @@
CGFloat meterWidth = 0; CGFloat meterWidth = 0;
if (audioPlayer.currentlyPlayingQueueItemId != nil) if (audioPlayer.duration != 0)
{ {
slider.minValue = 0; slider.minValue = 0;
slider.maxValue = audioPlayer.duration; slider.maxValue = audioPlayer.duration;

View File

@ -4,7 +4,7 @@
Inspired by Matt Gallagher's AudioStreamer: Inspired by Matt Gallagher's AudioStreamer:
https://github.com/mattgallagher/AudioStreamer https://github.com/mattgallagher/AudioStreamer
Copyright (c) 2015 Thong Nguyen (tumtumtum@gmail.com). All rights reserved. Copyright (c) 2012 Thong Nguyen (tumtumtum@gmail.com). All rights reserved.
Redistribution and use in source and binary forms, with or without Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met: modification, are permitted provided that the following conditions are met:
@ -15,12 +15,12 @@
documentation and/or other materials provided with the distribution. documentation and/or other materials provided with the distribution.
3. All advertising materials mentioning features or use of this software 3. All advertising materials mentioning features or use of this software
must display the following acknowledgement: must display the following acknowledgement:
This product includes software developed by Thong Nguyen. This product includes software developed by the <organization>.
4. Neither the name of the <organization> nor the 4. Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission. derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THONG NGUYEN ''AS IS'' AND ANY THIS SOFTWARE IS PROVIDED BY <COPYRIGHT HOLDER> ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY

View File

@ -2,7 +2,7 @@
StreamingKit (formally Audjustable) is an audio playback and streaming library for iOS and Mac OSX. StreamingKit uses CoreAudio to decompress and playback audio (using hardware or software codecs) whilst providing a clean and simple object-oriented API. StreamingKit (formally Audjustable) is an audio playback and streaming library for iOS and Mac OSX. StreamingKit uses CoreAudio to decompress and playback audio (using hardware or software codecs) whilst providing a clean and simple object-oriented API.
The primary motivation of this project was to decouple the input data sources from the actual player logic in order to allow advanced customizable input handling such as HTTP progressive download based streaming, encryption/decryption, auto-recovery, dynamic-buffering. StreamingKit is the only streaming and playback library that supports dead-easy [gapless playback](https://github.com/tumtumtum/StreamingKit/wiki/Gapless-playback) between audio files of differing formats. The primary motivation of this project was to decouple the input data sources from the actual player logic in order to allow advanced customizable input handling such as HTTP streaming, encryption/decryption, auto-recovery, dynamic-buffering. StreamingKit is the only streaming and playback library that supports dead-easy [gapless playback](https://github.com/tumtumtum/StreamingKit/wiki/Gapless-playback) between audio files of differing formats.
## Main Features ## Main Features
@ -11,7 +11,7 @@ The primary motivation of this project was to decouple the input data sources fr
* Easy to read source. * Easy to read source.
* Carefully multi-threaded to provide a responsive API that won't block your UI thread nor starve the audio buffers. * Carefully multi-threaded to provide a responsive API that won't block your UI thread nor starve the audio buffers.
* Buffered and gapless playback between all format types. * Buffered and gapless playback between all format types.
* Easy to implement audio data sources (Local, HTTP, AutoRecoveringHTTP DataSources are provided). * Easy to implement audio data sources (Local, HTTP, AutoRecoveryingHTTP DataSources are provided).
* Easy to extend DataSource to support adaptive buffering, encryption, etc. * Easy to extend DataSource to support adaptive buffering, encryption, etc.
* Optimised for low CPU/battery usage (0% - 1% CPU usage when streaming). * Optimised for low CPU/battery usage (0% - 1% CPU usage when streaming).
* Optimised for linear data sources. Random access sources are required only for seeking. * Optimised for linear data sources. Random access sources are required only for seeking.
@ -34,7 +34,7 @@ There are two main classes. The `STKDataSource` class which is the abstract bas
```objective-c ```objective-c
STKAudioPlayer* audioPlayer = [[STKAudioPlayer alloc] init]; STKAudioPlayer* audioPlayer = [[STKAudioPlayer alloc] init];
[audioPlayer play:@"http://www.abstractpath.com/files/audiosamples/sample.mp3"]; [audioPlayer play:@"http://fs.bloom.fm/oss/audiosamples/sample.mp3"];
``` ```
### Gapless playback ### Gapless playback
@ -42,8 +42,8 @@ STKAudioPlayer* audioPlayer = [[STKAudioPlayer alloc] init];
```objective-c ```objective-c
STKAudioPlayer* audioPlayer = [[STKAudioPlayer alloc] init]; STKAudioPlayer* audioPlayer = [[STKAudioPlayer alloc] init];
[audioPlayer queue:@"http://www.abstractpath.com/files/audiosamples/sample.mp3"]; [audioPlayer queue:@"http://fs.bloom.fm/oss/audiosamples/sample.mp3"];
[audioPlayer queue:@"http://www.abstractpath.com/files/audiosamples/airplane.aac"]; [audioPlayer queue:@"http://fs.bloom.fm/oss/audiosamples/airplane.aac"];
``` ```

View File

@ -1,6 +1,6 @@
Pod::Spec.new do |s| Pod::Spec.new do |s|
s.name = "StreamingKit" s.name = "StreamingKit"
s.version = "0.1.29" s.version = "0.1.19"
s.summary = "A fast and extensible audio streamer for iOS and OSX with support for gapless playback and custom (non-HTTP) sources." s.summary = "A fast and extensible audio streamer for iOS and OSX with support for gapless playback and custom (non-HTTP) sources."
s.homepage = "https://github.com/tumtumtum/StreamingKit/" s.homepage = "https://github.com/tumtumtum/StreamingKit/"
s.license = 'MIT' s.license = 'MIT'

View File

@ -10,29 +10,29 @@
<string>StreamingKit</string> <string>StreamingKit</string>
<key>IDESourceControlProjectOriginsDictionary</key> <key>IDESourceControlProjectOriginsDictionary</key>
<dict> <dict>
<key>3E9414865BAE5433092B9D136FFC1F054EA505C2</key> <key>DD310C30-B3D0-4BD7-9565-9F29F09CC4F8</key>
<string>https://github.com/tumtumtum/StreamingKit.git</string> <string>https://github.com/tumtumtum/StreamingKit.git</string>
</dict> </dict>
<key>IDESourceControlProjectPath</key> <key>IDESourceControlProjectPath</key>
<string>StreamingKit.xcworkspace</string> <string>StreamingKit.xcworkspace</string>
<key>IDESourceControlProjectRelativeInstallPathDictionary</key> <key>IDESourceControlProjectRelativeInstallPathDictionary</key>
<dict> <dict>
<key>3E9414865BAE5433092B9D136FFC1F054EA505C2</key> <key>DD310C30-B3D0-4BD7-9565-9F29F09CC4F8</key>
<string>..</string> <string>..</string>
</dict> </dict>
<key>IDESourceControlProjectURL</key> <key>IDESourceControlProjectURL</key>
<string>https://github.com/tumtumtum/StreamingKit.git</string> <string>https://github.com/tumtumtum/StreamingKit.git</string>
<key>IDESourceControlProjectVersion</key> <key>IDESourceControlProjectVersion</key>
<integer>111</integer> <integer>110</integer>
<key>IDESourceControlProjectWCCIdentifier</key> <key>IDESourceControlProjectWCCIdentifier</key>
<string>3E9414865BAE5433092B9D136FFC1F054EA505C2</string> <string>DD310C30-B3D0-4BD7-9565-9F29F09CC4F8</string>
<key>IDESourceControlProjectWCConfigurations</key> <key>IDESourceControlProjectWCConfigurations</key>
<array> <array>
<dict> <dict>
<key>IDESourceControlRepositoryExtensionIdentifierKey</key> <key>IDESourceControlRepositoryExtensionIdentifierKey</key>
<string>public.vcs.git</string> <string>public.vcs.git</string>
<key>IDESourceControlWCCIdentifierKey</key> <key>IDESourceControlWCCIdentifierKey</key>
<string>3E9414865BAE5433092B9D136FFC1F054EA505C2</string> <string>DD310C30-B3D0-4BD7-9565-9F29F09CC4F8</string>
<key>IDESourceControlWCCName</key> <key>IDESourceControlWCCName</key>
<string>StreamingKit</string> <string>StreamingKit</string>
</dict> </dict>

View File

@ -7,16 +7,8 @@
objects = { objects = {
/* Begin PBXBuildFile section */ /* Begin PBXBuildFile section */
5B949CD21A1140E4005675A0 /* STKAudioPlayer.h in Headers */ = {isa = PBXBuildFile; fileRef = A1E7C4F1188D5E550010896F /* STKAudioPlayer.h */; settings = {ATTRIBUTES = (Public, ); }; }; A1682FA318B3903900F29FEC /* STKBufferingDataSource.m in Sources */ = {isa = PBXBuildFile; fileRef = A1682FA218B3903900F29FEC /* STKBufferingDataSource.m */; };
5B949CD31A1140E4005675A0 /* STKAutoRecoveringHTTPDataSource.h in Headers */ = {isa = PBXBuildFile; fileRef = A1E7C4F3188D5E550010896F /* STKAutoRecoveringHTTPDataSource.h */; settings = {ATTRIBUTES = (Public, ); }; }; A168C6F118BB67DC003D170D /* STKBufferChunk.m in Sources */ = {isa = PBXBuildFile; fileRef = A168C6F018BB67DC003D170D /* STKBufferChunk.m */; };
5B949CD41A1140E4005675A0 /* STKCoreFoundationDataSource.h in Headers */ = {isa = PBXBuildFile; fileRef = A1E7C4F5188D5E550010896F /* STKCoreFoundationDataSource.h */; settings = {ATTRIBUTES = (Public, ); }; };
5B949CD51A1140E4005675A0 /* STKDataSource.h in Headers */ = {isa = PBXBuildFile; fileRef = A1E7C4F7188D5E550010896F /* STKDataSource.h */; settings = {ATTRIBUTES = (Public, ); }; };
5B949CD61A1140E4005675A0 /* STKDataSourceWrapper.h in Headers */ = {isa = PBXBuildFile; fileRef = A1E7C4F9188D5E550010896F /* STKDataSourceWrapper.h */; settings = {ATTRIBUTES = (Public, ); }; };
5B949CD71A1140E4005675A0 /* STKHTTPDataSource.h in Headers */ = {isa = PBXBuildFile; fileRef = A1E7C4FB188D5E550010896F /* STKHTTPDataSource.h */; settings = {ATTRIBUTES = (Public, ); }; };
5B949CD81A1140E4005675A0 /* STKLocalFileDataSource.h in Headers */ = {isa = PBXBuildFile; fileRef = A1E7C4FD188D5E550010896F /* STKLocalFileDataSource.h */; settings = {ATTRIBUTES = (Public, ); }; };
5B949CD91A1140E4005675A0 /* STKQueueEntry.h in Headers */ = {isa = PBXBuildFile; fileRef = A1BF65D0189A6582004DD08C /* STKQueueEntry.h */; settings = {ATTRIBUTES = (Public, ); }; };
95F80F4E1C68EE2300DB24B3 /* STKFloatConverter.h in Headers */ = {isa = PBXBuildFile; fileRef = 95F80F4C1C68EE2300DB24B3 /* STKFloatConverter.h */; };
95F80F4F1C68EE2300DB24B3 /* STKFloatConverter.m in Sources */ = {isa = PBXBuildFile; fileRef = 95F80F4D1C68EE2300DB24B3 /* STKFloatConverter.m */; };
A1A4996B189E744400E2A2E2 /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A1A4996A189E744400E2A2E2 /* Cocoa.framework */; }; A1A4996B189E744400E2A2E2 /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A1A4996A189E744400E2A2E2 /* Cocoa.framework */; };
A1A49975189E744500E2A2E2 /* StreamingKitMac.m in Sources */ = {isa = PBXBuildFile; fileRef = A1A49974189E744500E2A2E2 /* StreamingKitMac.m */; }; A1A49975189E744500E2A2E2 /* StreamingKitMac.m in Sources */ = {isa = PBXBuildFile; fileRef = A1A49974189E744500E2A2E2 /* StreamingKitMac.m */; };
A1A4997B189E744500E2A2E2 /* XCTest.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A1E7C4D9188D57F60010896F /* XCTest.framework */; }; A1A4997B189E744500E2A2E2 /* XCTest.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A1E7C4D9188D57F60010896F /* XCTest.framework */; };
@ -93,8 +85,10 @@
/* End PBXCopyFilesBuildPhase section */ /* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */ /* Begin PBXFileReference section */
95F80F4C1C68EE2300DB24B3 /* STKFloatConverter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = STKFloatConverter.h; sourceTree = "<group>"; }; A1682FA118B3903900F29FEC /* STKBufferingDataSource.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = STKBufferingDataSource.h; sourceTree = "<group>"; };
95F80F4D1C68EE2300DB24B3 /* STKFloatConverter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = STKFloatConverter.m; sourceTree = "<group>"; }; A1682FA218B3903900F29FEC /* STKBufferingDataSource.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = STKBufferingDataSource.m; sourceTree = "<group>"; };
A168C6EF18BB67DC003D170D /* STKBufferChunk.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = STKBufferChunk.h; sourceTree = "<group>"; };
A168C6F018BB67DC003D170D /* STKBufferChunk.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = STKBufferChunk.m; sourceTree = "<group>"; };
A1A49969189E744400E2A2E2 /* libStreamingKitMac.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libStreamingKitMac.a; sourceTree = BUILT_PRODUCTS_DIR; }; A1A49969189E744400E2A2E2 /* libStreamingKitMac.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libStreamingKitMac.a; sourceTree = BUILT_PRODUCTS_DIR; };
A1A4996A189E744400E2A2E2 /* Cocoa.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Cocoa.framework; path = Library/Frameworks/Cocoa.framework; sourceTree = DEVELOPER_DIR; }; A1A4996A189E744400E2A2E2 /* Cocoa.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Cocoa.framework; path = Library/Frameworks/Cocoa.framework; sourceTree = DEVELOPER_DIR; };
A1A4996D189E744500E2A2E2 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; }; A1A4996D189E744500E2A2E2 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; };
@ -274,12 +268,14 @@
A1E7C4CD188D57F50010896F /* StreamingKit */ = { A1E7C4CD188D57F50010896F /* StreamingKit */ = {
isa = PBXGroup; isa = PBXGroup;
children = ( children = (
95F80F4C1C68EE2300DB24B3 /* STKFloatConverter.h */,
95F80F4D1C68EE2300DB24B3 /* STKFloatConverter.m */,
A1E7C4F1188D5E550010896F /* STKAudioPlayer.h */, A1E7C4F1188D5E550010896F /* STKAudioPlayer.h */,
A1E7C4F2188D5E550010896F /* STKAudioPlayer.m */, A1E7C4F2188D5E550010896F /* STKAudioPlayer.m */,
A1E7C4F3188D5E550010896F /* STKAutoRecoveringHTTPDataSource.h */, A1E7C4F3188D5E550010896F /* STKAutoRecoveringHTTPDataSource.h */,
A1E7C4F4188D5E550010896F /* STKAutoRecoveringHTTPDataSource.m */, A1E7C4F4188D5E550010896F /* STKAutoRecoveringHTTPDataSource.m */,
A168C6EF18BB67DC003D170D /* STKBufferChunk.h */,
A168C6F018BB67DC003D170D /* STKBufferChunk.m */,
A1682FA118B3903900F29FEC /* STKBufferingDataSource.h */,
A1682FA218B3903900F29FEC /* STKBufferingDataSource.m */,
A1E7C4F5188D5E550010896F /* STKCoreFoundationDataSource.h */, A1E7C4F5188D5E550010896F /* STKCoreFoundationDataSource.h */,
A1E7C4F6188D5E550010896F /* STKCoreFoundationDataSource.m */, A1E7C4F6188D5E550010896F /* STKCoreFoundationDataSource.m */,
A1E7C4F7188D5E550010896F /* STKDataSource.h */, A1E7C4F7188D5E550010896F /* STKDataSource.h */,
@ -328,22 +324,6 @@
/* End PBXGroup section */ /* End PBXGroup section */
/* Begin PBXHeadersBuildPhase section */ /* Begin PBXHeadersBuildPhase section */
5B949CD11A1140CF005675A0 /* Headers */ = {
isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647;
files = (
5B949CD21A1140E4005675A0 /* STKAudioPlayer.h in Headers */,
5B949CD31A1140E4005675A0 /* STKAutoRecoveringHTTPDataSource.h in Headers */,
5B949CD41A1140E4005675A0 /* STKCoreFoundationDataSource.h in Headers */,
5B949CD51A1140E4005675A0 /* STKDataSource.h in Headers */,
5B949CD61A1140E4005675A0 /* STKDataSourceWrapper.h in Headers */,
95F80F4E1C68EE2300DB24B3 /* STKFloatConverter.h in Headers */,
5B949CD71A1140E4005675A0 /* STKHTTPDataSource.h in Headers */,
5B949CD81A1140E4005675A0 /* STKLocalFileDataSource.h in Headers */,
5B949CD91A1140E4005675A0 /* STKQueueEntry.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
};
A1A49967189E744400E2A2E2 /* Headers */ = { A1A49967189E744400E2A2E2 /* Headers */ = {
isa = PBXHeadersBuildPhase; isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647; buildActionMask = 2147483647;
@ -396,7 +376,6 @@
A1E7C4C4188D57F50010896F /* Sources */, A1E7C4C4188D57F50010896F /* Sources */,
A1E7C4C5188D57F50010896F /* Frameworks */, A1E7C4C5188D57F50010896F /* Frameworks */,
A1E7C4C6188D57F50010896F /* CopyFiles */, A1E7C4C6188D57F50010896F /* CopyFiles */,
5B949CD11A1140CF005675A0 /* Headers */,
); );
buildRules = ( buildRules = (
); );
@ -432,7 +411,7 @@
isa = PBXProject; isa = PBXProject;
attributes = { attributes = {
CLASSPREFIX = STK; CLASSPREFIX = STK;
LastUpgradeCheck = 0710; LastUpgradeCheck = 0510;
ORGANIZATIONNAME = "Thong Nguyen"; ORGANIZATIONNAME = "Thong Nguyen";
}; };
buildConfigurationList = A1E7C4C3188D57F50010896F /* Build configuration list for PBXProject "StreamingKit" */; buildConfigurationList = A1E7C4C3188D57F50010896F /* Build configuration list for PBXProject "StreamingKit" */;
@ -532,9 +511,10 @@
A1E7C505188D5E550010896F /* STKLocalFileDataSource.m in Sources */, A1E7C505188D5E550010896F /* STKLocalFileDataSource.m in Sources */,
A1BF65D2189A6582004DD08C /* STKQueueEntry.m in Sources */, A1BF65D2189A6582004DD08C /* STKQueueEntry.m in Sources */,
A1E7C504188D5E550010896F /* STKHTTPDataSource.m in Sources */, A1E7C504188D5E550010896F /* STKHTTPDataSource.m in Sources */,
95F80F4F1C68EE2300DB24B3 /* STKFloatConverter.m in Sources */,
A1E7C503188D5E550010896F /* STKDataSourceWrapper.m in Sources */, A1E7C503188D5E550010896F /* STKDataSourceWrapper.m in Sources */,
A1682FA318B3903900F29FEC /* STKBufferingDataSource.m in Sources */,
A1E7C502188D5E550010896F /* STKDataSource.m in Sources */, A1E7C502188D5E550010896F /* STKDataSource.m in Sources */,
A168C6F118BB67DC003D170D /* STKBufferChunk.m in Sources */,
A1BF65D5189A65C6004DD08C /* NSMutableArray+STKAudioPlayer.m in Sources */, A1BF65D5189A65C6004DD08C /* NSMutableArray+STKAudioPlayer.m in Sources */,
A1E7C500188D5E550010896F /* STKAutoRecoveringHTTPDataSource.m in Sources */, A1E7C500188D5E550010896F /* STKAutoRecoveringHTTPDataSource.m in Sources */,
); );
@ -586,7 +566,6 @@
A1A49988189E744500E2A2E2 /* Debug */ = { A1A49988189E744500E2A2E2 /* Debug */ = {
isa = XCBuildConfiguration; isa = XCBuildConfiguration;
buildSettings = { buildSettings = {
COMBINE_HIDPI_IMAGES = YES;
FRAMEWORK_SEARCH_PATHS = ( FRAMEWORK_SEARCH_PATHS = (
"$(inherited)", "$(inherited)",
"$(DEVELOPER_FRAMEWORKS_DIR)", "$(DEVELOPER_FRAMEWORKS_DIR)",
@ -600,7 +579,6 @@
); );
MACOSX_DEPLOYMENT_TARGET = ""; MACOSX_DEPLOYMENT_TARGET = "";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
PUBLIC_HEADERS_FOLDER_PATH = include/StreamingKit;
SDKROOT = macosx; SDKROOT = macosx;
}; };
name = Debug; name = Debug;
@ -608,7 +586,6 @@
A1A49989189E744500E2A2E2 /* Release */ = { A1A49989189E744500E2A2E2 /* Release */ = {
isa = XCBuildConfiguration; isa = XCBuildConfiguration;
buildSettings = { buildSettings = {
COMBINE_HIDPI_IMAGES = YES;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
FRAMEWORK_SEARCH_PATHS = ( FRAMEWORK_SEARCH_PATHS = (
"$(inherited)", "$(inherited)",
@ -619,7 +596,6 @@
GCC_PREFIX_HEADER = "StreamingKitMac/StreamingKitMac-Prefix.pch"; GCC_PREFIX_HEADER = "StreamingKitMac/StreamingKitMac-Prefix.pch";
MACOSX_DEPLOYMENT_TARGET = ""; MACOSX_DEPLOYMENT_TARGET = "";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
PUBLIC_HEADERS_FOLDER_PATH = include/StreamingKit;
SDKROOT = macosx; SDKROOT = macosx;
}; };
name = Release; name = Release;
@ -641,7 +617,6 @@
); );
INFOPLIST_FILE = "StreamingKitMacTests/StreamingKitMacTests-Info.plist"; INFOPLIST_FILE = "StreamingKitMacTests/StreamingKitMacTests-Info.plist";
MACOSX_DEPLOYMENT_TARGET = 10.8; MACOSX_DEPLOYMENT_TARGET = 10.8;
PRODUCT_BUNDLE_IDENTIFIER = "com.abstractpath.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = macosx; SDKROOT = macosx;
WRAPPER_EXTENSION = xctest; WRAPPER_EXTENSION = xctest;
@ -662,7 +637,6 @@
GCC_PREFIX_HEADER = "StreamingKitMac/StreamingKitMac-Prefix.pch"; GCC_PREFIX_HEADER = "StreamingKitMac/StreamingKitMac-Prefix.pch";
INFOPLIST_FILE = "StreamingKitMacTests/StreamingKitMacTests-Info.plist"; INFOPLIST_FILE = "StreamingKitMacTests/StreamingKitMacTests-Info.plist";
MACOSX_DEPLOYMENT_TARGET = 10.8; MACOSX_DEPLOYMENT_TARGET = 10.8;
PRODUCT_BUNDLE_IDENTIFIER = "com.abstractpath.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = macosx; SDKROOT = macosx;
WRAPPER_EXTENSION = xctest; WRAPPER_EXTENSION = xctest;
@ -686,7 +660,6 @@
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO; COPY_PHASE_STRIP = NO;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99; GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO; GCC_DYNAMIC_NO_PIC = NO;
GCC_OPTIMIZATION_LEVEL = 0; GCC_OPTIMIZATION_LEVEL = 0;
@ -701,7 +674,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES;
GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES; GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 4.3; IPHONEOS_DEPLOYMENT_TARGET = 6.0;
ONLY_ACTIVE_ARCH = YES; ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos; SDKROOT = iphoneos;
}; };
@ -732,7 +705,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES;
GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES; GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 4.3; IPHONEOS_DEPLOYMENT_TARGET = 6.0;
SDKROOT = iphoneos; SDKROOT = iphoneos;
VALIDATE_PRODUCT = YES; VALIDATE_PRODUCT = YES;
}; };
@ -750,7 +723,6 @@
GCC_PREFIX_HEADER = "StreamingKit/StreamingKit-Prefix.pch"; GCC_PREFIX_HEADER = "StreamingKit/StreamingKit-Prefix.pch";
OTHER_LDFLAGS = "-ObjC"; OTHER_LDFLAGS = "-ObjC";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
PUBLIC_HEADERS_FOLDER_PATH = include/StreamingKit;
SKIP_INSTALL = YES; SKIP_INSTALL = YES;
}; };
name = Debug; name = Debug;
@ -767,7 +739,6 @@
GCC_PREFIX_HEADER = "StreamingKit/StreamingKit-Prefix.pch"; GCC_PREFIX_HEADER = "StreamingKit/StreamingKit-Prefix.pch";
OTHER_LDFLAGS = "-ObjC"; OTHER_LDFLAGS = "-ObjC";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
PUBLIC_HEADERS_FOLDER_PATH = include/StreamingKit;
SKIP_INSTALL = YES; SKIP_INSTALL = YES;
}; };
name = Release; name = Release;
@ -787,7 +758,6 @@
"$(inherited)", "$(inherited)",
); );
INFOPLIST_FILE = "StreamingKitTests/StreamingKitTests-Info.plist"; INFOPLIST_FILE = "StreamingKitTests/StreamingKitTests-Info.plist";
PRODUCT_BUNDLE_IDENTIFIER = "abstractpath.com.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
WRAPPER_EXTENSION = xctest; WRAPPER_EXTENSION = xctest;
}; };
@ -804,7 +774,6 @@
GCC_PRECOMPILE_PREFIX_HEADER = YES; GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = "StreamingKit/StreamingKit-Prefix.pch"; GCC_PREFIX_HEADER = "StreamingKit/StreamingKit-Prefix.pch";
INFOPLIST_FILE = "StreamingKitTests/StreamingKitTests-Info.plist"; INFOPLIST_FILE = "StreamingKitTests/StreamingKitTests-Info.plist";
PRODUCT_BUNDLE_IDENTIFIER = "abstractpath.com.${PRODUCT_NAME:rfc1034identifier}";
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
WRAPPER_EXTENSION = xctest; WRAPPER_EXTENSION = xctest;
}; };

View File

@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>

View File

@ -8,14 +8,10 @@
#import <Foundation/Foundation.h> #import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
@interface NSMutableArray (STKAudioPlayer) @interface NSMutableArray (STKAudioPlayer)
-(void) enqueue:(id)obj; -(void) enqueue:(id)obj;
-(void) skipQueue:(id)obj; -(void) skipQueue:(id)obj;
-(void) skipQueueWithQueue:(NSMutableArray*)queue; -(void) skipQueueWithQueue:(NSMutableArray*)queue;
-(nullable id) dequeue; -(id) dequeue;
-(nullable id) peek; -(id) peek;
@end @end
NS_ASSUME_NONNULL_END

View File

@ -44,9 +44,7 @@
#include "UIKit/UIApplication.h" #include "UIKit/UIApplication.h"
#endif #endif
NS_ASSUME_NONNULL_BEGIN typedef enum
typedef NS_OPTIONS(NSInteger, STKAudioPlayerState)
{ {
STKAudioPlayerStateReady, STKAudioPlayerStateReady,
STKAudioPlayerStateRunning = 1, STKAudioPlayerStateRunning = 1,
@ -56,9 +54,10 @@ typedef NS_OPTIONS(NSInteger, STKAudioPlayerState)
STKAudioPlayerStateStopped = (1 << 4), STKAudioPlayerStateStopped = (1 << 4),
STKAudioPlayerStateError = (1 << 5), STKAudioPlayerStateError = (1 << 5),
STKAudioPlayerStateDisposed = (1 << 6) STKAudioPlayerStateDisposed = (1 << 6)
}; }
STKAudioPlayerState;
typedef NS_ENUM(NSInteger, STKAudioPlayerStopReason) typedef enum
{ {
STKAudioPlayerStopReasonNone = 0, STKAudioPlayerStopReasonNone = 0,
STKAudioPlayerStopReasonEof, STKAudioPlayerStopReasonEof,
@ -66,9 +65,10 @@ typedef NS_ENUM(NSInteger, STKAudioPlayerStopReason)
STKAudioPlayerStopReasonPendingNext, STKAudioPlayerStopReasonPendingNext,
STKAudioPlayerStopReasonDisposed, STKAudioPlayerStopReasonDisposed,
STKAudioPlayerStopReasonError = 0xffff STKAudioPlayerStopReasonError = 0xffff
}; }
STKAudioPlayerStopReason;
typedef NS_ENUM(NSInteger, STKAudioPlayerErrorCode) typedef enum
{ {
STKAudioPlayerErrorNone = 0, STKAudioPlayerErrorNone = 0,
STKAudioPlayerErrorDataSource, STKAudioPlayerErrorDataSource,
@ -77,13 +77,9 @@ typedef NS_ENUM(NSInteger, STKAudioPlayerErrorCode)
STKAudioPlayerErrorCodecError, STKAudioPlayerErrorCodecError,
STKAudioPlayerErrorDataNotFound, STKAudioPlayerErrorDataNotFound,
STKAudioPlayerErrorOther = 0xffff STKAudioPlayerErrorOther = 0xffff
}; }
STKAudioPlayerErrorCode;
///
/// Options to initiailise the Audioplayer with.
/// By default if you set buffer size or seconds to 0, the non-zero default will be used
/// If you would like to disable the buffer option completely set to STK_DISABLE_BUFFER
///
typedef struct typedef struct
{ {
/// If YES then seeking a track will cause all pending items to be flushed from the queue /// If YES then seeking a track will cause all pending items to be flushed from the queue
@ -95,7 +91,7 @@ typedef struct
/// The size of the internal I/O read buffer. This data in this buffer is transient and does not need to be larger. /// The size of the internal I/O read buffer. This data in this buffer is transient and does not need to be larger.
UInt32 readBufferSize; UInt32 readBufferSize;
/// The size of the decompressed buffer (Default is 10 seconds which uses about 1.7MB of RAM) /// The size of the decompressed buffer (Default is 10 seconds which uses about 1.7MB of RAM)
float bufferSizeInSeconds; UInt32 bufferSizeInSeconds;
/// Number of seconds of decompressed audio is required before playback first starts for each item (Default is 0.5 seconds. Must be larger than bufferSizeInSeconds) /// Number of seconds of decompressed audio is required before playback first starts for each item (Default is 0.5 seconds. Must be larger than bufferSizeInSeconds)
Float32 secondsRequiredToStartPlaying; Float32 secondsRequiredToStartPlaying;
/// Seconds after a seek is performed before data needs to come in (after which the state will change to playing/buffering) /// Seconds after a seek is performed before data needs to come in (after which the state will change to playing/buffering)
@ -105,9 +101,7 @@ typedef struct
} }
STKAudioPlayerOptions; STKAudioPlayerOptions;
#define STK_DISABLE_BUFFER (0xffffffff) typedef void(^STKFrameFilter)(UInt32 channelsPerFrame, UInt32 bytesPerFrame, UInt32 frameCount, void* frames);
typedef void(^STKFrameFilter)(UInt32 channelsPerFrame, UInt32 bytesPerFrame, UInt32 frameCount, float* frames);
@interface STKFrameFilterEntry : NSObject @interface STKFrameFilterEntry : NSObject
@property (readonly) NSString* name; @property (readonly) NSString* name;
@ -135,8 +129,6 @@ typedef void(^STKFrameFilter)(UInt32 channelsPerFrame, UInt32 bytesPerFrame, UIn
/// Raised when items queued items are cleared (usually because of a call to play, setDataSource or stop) /// Raised when items queued items are cleared (usually because of a call to play, setDataSource or stop)
-(void) audioPlayer:(STKAudioPlayer*)audioPlayer didCancelQueuedItems:(NSArray*)queuedItems; -(void) audioPlayer:(STKAudioPlayer*)audioPlayer didCancelQueuedItems:(NSArray*)queuedItems;
-(void) plotGraphWithBuffer:(float*)buffer andLength:(UInt32)count;
@end @end
@interface STKAudioPlayer : NSObject<STKDataSourceDelegate> @interface STKAudioPlayer : NSObject<STKDataSourceDelegate>
@ -155,13 +147,13 @@ typedef void(^STKFrameFilter)(UInt32 channelsPerFrame, UInt32 bytesPerFrame, UIn
/// Enables or disables the EQ /// Enables or disables the EQ
@property (readwrite) BOOL equalizerEnabled; @property (readwrite) BOOL equalizerEnabled;
/// Returns an array of STKFrameFilterEntry objects representing the filters currently in use /// Returns an array of STKFrameFilterEntry objects representing the filters currently in use
@property (readonly, nullable) NSArray* frameFilters; @property (readonly) NSArray* frameFilters;
/// Returns the items pending to be played (includes buffering and upcoming items but does not include the current item) /// Returns the items pending to be played (includes buffering and upcoming items but does not include the current item)
@property (readonly) NSArray* pendingQueue; @property (readonly) NSArray* pendingQueue;
/// The number of items pending to be played (includes buffering and upcoming items but does not include the current item) /// The number of items pending to be played (includes buffering and upcoming items but does not include the current item)
@property (readonly) NSUInteger pendingQueueCount; @property (readonly) NSUInteger pendingQueueCount;
/// Gets the most recently queued item that is still pending to play /// Gets the most recently queued item that is still pending to play
@property (readonly, nullable) NSObject* mostRecentlyQueuedStillPendingItem; @property (readonly) NSObject* mostRecentlyQueuedStillPendingItem;
/// Gets the current state of the player /// Gets the current state of the player
@property (readwrite) STKAudioPlayerState state; @property (readwrite) STKAudioPlayerState state;
/// Gets the options provided to the player on startup /// Gets the options provided to the player on startup
@ -178,10 +170,10 @@ typedef void(^STKFrameFilter)(UInt32 channelsPerFrame, UInt32 bytesPerFrame, UIn
+(STKDataSource*) dataSourceFromURL:(NSURL*)url; +(STKDataSource*) dataSourceFromURL:(NSURL*)url;
/// Initializes a new STKAudioPlayer with the default options /// Initializes a new STKAudioPlayer with the default options
-(instancetype) init; -(id) init;
/// Initializes a new STKAudioPlayer with the given options /// Initializes a new STKAudioPlayer with the given options
-(instancetype) initWithOptions:(STKAudioPlayerOptions)optionsIn; -(id) initWithOptions:(STKAudioPlayerOptions)optionsIn;
/// Plays an item from the given URL string (all pending queued items are removed). /// Plays an item from the given URL string (all pending queued items are removed).
/// The NSString is used as the queue item ID /// The NSString is used as the queue item ID
@ -258,19 +250,17 @@ typedef void(^STKFrameFilter)(UInt32 channelsPerFrame, UInt32 bytesPerFrame, UIn
/// Appends a frame filter with the given name and filter block just after the filter with the given name. /// Appends a frame filter with the given name and filter block just after the filter with the given name.
/// If the given name is nil, the filter will be inserted at the beginning of the filter change /// If the given name is nil, the filter will be inserted at the beginning of the filter change
-(void) addFrameFilterWithName:(NSString*)name afterFilterWithName:(nullable NSString*)afterFilterWithName block:(STKFrameFilter)block; -(void) addFrameFilterWithName:(NSString*)name afterFilterWithName:(NSString*)afterFilterWithName block:(STKFrameFilter)block;
/// Reads the peak power in decibals for the given channel (0 or 1). /// Reads the peak power in decibals for the given channel (0 or 1).
/// Return values are between -60 (low) and 0 (high). /// Return values are between -60 (low) and 0 (high).
//-(float) peakPowerInDecibelsForChannel:(NSUInteger)channelNumber; -(float) peakPowerInDecibelsForChannel:(NSUInteger)channelNumber;
/// Reads the average power in decibals for the given channel (0 or 1) /// Reads the average power in decibals for the given channel (0 or 1)
/// Return values are between -60 (low) and 0 (high). /// Return values are between -60 (low) and 0 (high).
//-(float) averagePowerInDecibelsForChannel:(NSUInteger)channelNumber; -(float) averagePowerInDecibelsForChannel:(NSUInteger)channelNumber;
/// Sets the gain value (from -96 low to +24 high) for an equalizer band (0 based index) /// Sets the gain value (from -96 low to +24 high) for an equalizer band (0 based index)
-(void) setGain:(float)gain forEqualizerBand:(int)bandIndex; -(void) setGain:(float)gain forEqualizerBand:(int)bandIndex;
@end @end
NS_ASSUME_NONNULL_END

505
StreamingKit/StreamingKit/STKAudioPlayer.m Executable file → Normal file
View File

@ -41,32 +41,24 @@
#import "NSMutableArray+STKAudioPlayer.h" #import "NSMutableArray+STKAudioPlayer.h"
#import "libkern/OSAtomic.h" #import "libkern/OSAtomic.h"
#import <float.h> #import <float.h>
#import "STKFloatConverter.h"
#ifndef DBL_MAX
#define DBL_MAX 1.7976931348623157e+308
#endif
#pragma mark Defines #pragma mark Defines
#define kOutputBus 0 #define kOutputBus 0
#define kInputBus 1 #define kInputBus 1
//#define STK_DBMIN (-60) #define STK_DBMIN (-60)
//#define STK_DBOFFSET (-74.0) #define STK_DBOFFSET (-74.0)
#define STK_LOWPASSFILTERTIMESLICE (0.0005) #define STK_LOWPASSFILTERTIMESLICE (0.0005)
#define STK_DEFAULT_PCM_BUFFER_SIZE_IN_SECONDS (0.1) #define STK_DEFAULT_PCM_BUFFER_SIZE_IN_SECONDS (10)
#define STK_DEFAULT_SECONDS_REQUIRED_TO_START_PLAYING (1) #define STK_DEFAULT_SECONDS_REQUIRED_TO_START_PLAYING (1)
#define STK_DEFAULT_SECONDS_REQUIRED_TO_START_PLAYING_AFTER_BUFFER_UNDERRUN (7.5) #define STK_DEFAULT_SECONDS_REQUIRED_TO_START_PLAYING_AFTER_BUFFER_UNDERRUN (7.5)
#define STK_MAX_COMPRESSED_PACKETS_FOR_BITRATE_CALCULATION (4096) #define STK_MAX_COMPRESSED_PACKETS_FOR_BITRATE_CALCULATION (4096)
#define STK_DEFAULT_READ_BUFFER_SIZE (128 * 1024) #define STK_DEFAULT_READ_BUFFER_SIZE (64 * 1024)
#define STK_DEFAULT_PACKET_BUFFER_SIZE (2048) #define STK_DEFAULT_PACKET_BUFFER_SIZE (2048)
#define STK_DEFAULT_GRACE_PERIOD_AFTER_SEEK_SECONDS (0.5) #define STK_DEFAULT_GRACE_PERIOD_AFTER_SEEK_SECONDS (0.5)
#define OSSTATUS_PRINTF_PLACEHOLDER @"%c%c%c%c"
#define OSSTATUS_PRINTF_VALUE(status) (char)(((status) >> 24) & 0xFF), (char)(((status) >> 16) & 0xFF), (char)(((status) >> 8) & 0xFF), (char)((status) & 0xFF)
#define LOGINFO(x) [self logInfo:[NSString stringWithFormat:@"%s %@", sel_getName(_cmd), x]]; #define LOGINFO(x) [self logInfo:[NSString stringWithFormat:@"%s %@", sel_getName(_cmd), x]];
static void PopulateOptionsWithDefault(STKAudioPlayerOptions* options) static void PopulateOptionsWithDefault(STKAudioPlayerOptions* options)
@ -97,34 +89,6 @@ static void PopulateOptionsWithDefault(STKAudioPlayerOptions* options)
} }
} }
static void NormalizeDisabledBuffers(STKAudioPlayerOptions* options)
{
if (options->bufferSizeInSeconds == STK_DISABLE_BUFFER)
{
options->bufferSizeInSeconds = 0;
}
if (options->readBufferSize == STK_DISABLE_BUFFER)
{
options->readBufferSize = 0;
}
if (options->secondsRequiredToStartPlaying == STK_DISABLE_BUFFER)
{
options->secondsRequiredToStartPlaying = 0;
}
if (options->secondsRequiredToStartPlayingAfterBufferUnderun == STK_DISABLE_BUFFER)
{
options->secondsRequiredToStartPlayingAfterBufferUnderun = 0;
}
if (options->gracePeriodAfterSeekInSeconds == STK_DISABLE_BUFFER)
{
options->gracePeriodAfterSeekInSeconds = 0;
}
}
#define CHECK_STATUS_AND_REPORT(call) \ #define CHECK_STATUS_AND_REPORT(call) \
if ((status = (call))) \ if ((status = (call))) \
{ \ { \
@ -174,7 +138,7 @@ STKAudioPlayerInternalState;
@end @end
@implementation STKFrameFilterEntry @implementation STKFrameFilterEntry
-(instancetype) initWithFilter:(STKFrameFilter)filterIn andName:(NSString*)nameIn -(id) initWithFilter:(STKFrameFilter)filterIn andName:(NSString*)nameIn
{ {
if (self = [super init]) if (self = [super init])
{ {
@ -205,7 +169,6 @@ static AudioComponentDescription nbandUnitDescription;
static AudioComponentDescription outputUnitDescription; static AudioComponentDescription outputUnitDescription;
static AudioComponentDescription convertUnitDescription; static AudioComponentDescription convertUnitDescription;
static AudioStreamBasicDescription canonicalAudioStreamBasicDescription; static AudioStreamBasicDescription canonicalAudioStreamBasicDescription;
static AudioStreamBasicDescription recordAudioStreamBasicDescription;
@interface STKAudioPlayer() @interface STKAudioPlayer()
{ {
@ -275,15 +238,6 @@ static AudioStreamBasicDescription recordAudioStreamBasicDescription;
NSConditionLock* threadStartedLock; NSConditionLock* threadStartedLock;
NSConditionLock* threadFinishedCondLock; NSConditionLock* threadFinishedCondLock;
AudioFileID recordAudioFileId;
UInt32 recordFilePacketPosition;
AudioConverterRef recordAudioConverterRef;
UInt32 recordOutputBufferSize;
UInt8 *recordOutputBuffer;
UInt32 recordPacketsPerBuffer;
UInt32 recordPacketSize;
AudioStreamPacketDescription *recordPacketDescriptions;
void(^stopBackBackgroundTaskBlock)(); void(^stopBackBackgroundTaskBlock)();
int32_t seekVersion; int32_t seekVersion;
@ -300,9 +254,6 @@ static AudioStreamBasicDescription recordAudioStreamBasicDescription;
volatile BOOL disposeWasRequested; volatile BOOL disposeWasRequested;
volatile BOOL seekToTimeWasRequested; volatile BOOL seekToTimeWasRequested;
volatile STKAudioPlayerStopReason stopReason; volatile STKAudioPlayerStopReason stopReason;
float **_floatBuffers;
STKFloatConverter *_floatConverter;
} }
@property (readwrite) STKAudioPlayerInternalState internalState; @property (readwrite) STKAudioPlayerInternalState internalState;
@ -337,15 +288,9 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
.componentSubType = kAudioUnitSubType_AUConverter, .componentSubType = kAudioUnitSubType_AUConverter,
.componentFlags = 0, .componentFlags = 0,
.componentFlagsMask = 0 .componentFlagsMask = 0
}; };
#ifdef CA_CANONICAL_DEPRECATED
const int bytesPerSample = sizeof(SInt16);
#elif __IPHONE_OS_VERSION_MIN_REQUIRED >= 80000
const int bytesPerSample = sizeof(SInt16);
#else
const int bytesPerSample = sizeof(AudioSampleType); const int bytesPerSample = sizeof(AudioSampleType);
#endif
canonicalAudioStreamBasicDescription = (AudioStreamBasicDescription) canonicalAudioStreamBasicDescription = (AudioStreamBasicDescription)
{ {
@ -508,12 +453,12 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
} }
} }
-(instancetype) init -(id) init
{ {
return [self initWithOptions:(STKAudioPlayerOptions){}]; return [self initWithOptions:(STKAudioPlayerOptions){}];
} }
-(instancetype) initWithOptions:(STKAudioPlayerOptions)optionsIn -(id) initWithOptions:(STKAudioPlayerOptions)optionsIn
{ {
if (self = [super init]) if (self = [super init])
{ {
@ -523,7 +468,6 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
self->equalizerEnabled = optionsIn.equalizerBandFrequencies[0] != 0; self->equalizerEnabled = optionsIn.equalizerBandFrequencies[0] != 0;
PopulateOptionsWithDefault(&options); PopulateOptionsWithDefault(&options);
NormalizeDisabledBuffers(&options);
framesRequiredToStartPlaying = canonicalAudioStreamBasicDescription.mSampleRate * options.secondsRequiredToStartPlaying; framesRequiredToStartPlaying = canonicalAudioStreamBasicDescription.mSampleRate * options.secondsRequiredToStartPlaying;
framesRequiredToPlayAfterRebuffering = canonicalAudioStreamBasicDescription.mSampleRate * options.secondsRequiredToStartPlayingAfterBufferUnderun; framesRequiredToPlayAfterRebuffering = canonicalAudioStreamBasicDescription.mSampleRate * options.secondsRequiredToStartPlayingAfterBufferUnderun;
@ -560,20 +504,6 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
upcomingQueue = [[NSMutableArray alloc] init]; upcomingQueue = [[NSMutableArray alloc] init];
bufferingQueue = [[NSMutableArray alloc] init]; bufferingQueue = [[NSMutableArray alloc] init];
//initialie the float converter
// Allocate the float buffers
_floatConverter = [[STKFloatConverter alloc] initWithSourceFormat:canonicalAudioStreamBasicDescription];
size_t sizeToAllocate = sizeof(float*) * canonicalAudioStreamBasicDescription.mChannelsPerFrame;
sizeToAllocate = MAX(8, sizeToAllocate);
_floatBuffers = (float**)malloc( sizeToAllocate );
UInt32 outputBufferSize = 32 * 1024; // 32 KB
for ( int i=0; i< canonicalAudioStreamBasicDescription.mChannelsPerFrame; i++ ) {
_floatBuffers[i] = (float*)malloc(outputBufferSize);
}
[self resetPcmBuffers]; [self resetPcmBuffers];
[self createAudioGraph]; [self createAudioGraph];
[self createPlaybackThread]; [self createPlaybackThread];
@ -596,16 +526,9 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
{ {
currentlyPlayingEntry.dataSource.delegate = nil; currentlyPlayingEntry.dataSource.delegate = nil;
[currentlyReadingEntry.dataSource unregisterForEvents]; [currentlyReadingEntry.dataSource unregisterForEvents];
OSSpinLockLock(&currentEntryReferencesLock);
currentlyPlayingEntry = nil; currentlyPlayingEntry = nil;
OSSpinLockUnlock(&currentEntryReferencesLock);
} }
[self closeRecordAudioFile];
[self stopAudioUnitWithReason:STKAudioPlayerStopReasonDisposed]; [self stopAudioUnitWithReason:STKAudioPlayerStopReasonDisposed];
[self clearQueue]; [self clearQueue];
@ -646,7 +569,6 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
pthread_cond_destroy(&mainThreadSyncCallReadyCondition); pthread_cond_destroy(&mainThreadSyncCallReadyCondition);
free(readBuffer); free(readBuffer);
free(pcmAudioBufferList.mBuffers[0].mData);
} }
-(void) startSystemBackgroundTask -(void) startSystemBackgroundTask
@ -874,11 +796,7 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
pthread_mutex_lock(&playerMutex); pthread_mutex_lock(&playerMutex);
if (entryToUpdate->audioStreamBasicDescription.mFormatID == 0) entryToUpdate->audioStreamBasicDescription = newBasicDescription;
{
entryToUpdate->audioStreamBasicDescription = newBasicDescription;
}
entryToUpdate->sampleRate = entryToUpdate->audioStreamBasicDescription.mSampleRate; entryToUpdate->sampleRate = entryToUpdate->audioStreamBasicDescription.mSampleRate;
entryToUpdate->packetDuration = entryToUpdate->audioStreamBasicDescription.mFramesPerPacket / entryToUpdate->sampleRate; entryToUpdate->packetDuration = entryToUpdate->audioStreamBasicDescription.mFramesPerPacket / entryToUpdate->sampleRate;
@ -925,7 +843,7 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
} }
case kAudioFileStreamProperty_ReadyToProducePackets: case kAudioFileStreamProperty_ReadyToProducePackets:
{ {
if (audioConverterAudioStreamBasicDescription.mFormatID != kAudioFormatLinearPCM) if (!audioConverterAudioStreamBasicDescription.mFormatID == kAudioFormatLinearPCM)
{ {
discontinuous = YES; discontinuous = YES;
} }
@ -959,8 +877,13 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
if (pasbd.mFormatID == kAudioFormatMPEG4AAC_HE || pasbd.mFormatID == kAudioFormatMPEG4AAC_HE_V2) if (pasbd.mFormatID == kAudioFormatMPEG4AAC_HE || pasbd.mFormatID == kAudioFormatMPEG4AAC_HE_V2)
{ {
//
// We've found HE-AAC, remember this to tell the audio queue
// when we construct it.
//
#if !TARGET_IPHONE_SIMULATOR
currentlyReadingEntry->audioStreamBasicDescription = pasbd; currentlyReadingEntry->audioStreamBasicDescription = pasbd;
#endif
break; break;
} }
} }
@ -1001,15 +924,20 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
} }
OSSpinLockLock(&currentEntryReferencesLock); OSSpinLockLock(&currentEntryReferencesLock);
STKQueueEntry* entry = currentlyPlayingEntry; STKQueueEntry* entry = currentlyPlayingEntry;
OSSpinLockUnlock(&currentEntryReferencesLock);
if (entry == nil) if (entry == nil)
{ {
return 0; OSSpinLockUnlock(&currentEntryReferencesLock);
return 0;
} }
double retval = [entry duration]; double retval = [entry duration];
OSSpinLockUnlock(&currentEntryReferencesLock);
double progress = [self progress]; double progress = [self progress];
if (retval < progress && retval > 0) if (retval < progress && retval > 0)
@ -1032,9 +960,7 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
return 0; return 0;
} }
OSSpinLockLock(&currentEntryReferencesLock);
STKQueueEntry* entry = currentlyPlayingEntry; STKQueueEntry* entry = currentlyPlayingEntry;
OSSpinLockUnlock(&currentEntryReferencesLock);
if (entry == nil) if (entry == nil)
{ {
@ -1164,8 +1090,6 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
[currentlyReadingEntry.dataSource registerForEvents:[NSRunLoop currentRunLoop]]; [currentlyReadingEntry.dataSource registerForEvents:[NSRunLoop currentRunLoop]];
[currentlyReadingEntry.dataSource seekToOffset:0]; [currentlyReadingEntry.dataSource seekToOffset:0];
[self closeRecordAudioFile];
if (startPlaying) if (startPlaying)
{ {
if (clearQueue) if (clearQueue)
@ -1424,6 +1348,7 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
} }
pthread_mutex_unlock(&playerMutex); pthread_mutex_unlock(&playerMutex);
return YES; return YES;
} }
@ -1469,14 +1394,10 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
OSSpinLockUnlock(&currentEntryReferencesLock); OSSpinLockUnlock(&currentEntryReferencesLock);
pthread_mutex_unlock(&playerMutex); pthread_mutex_unlock(&playerMutex);
[self closeRecordAudioFile];
self.internalState = STKAudioPlayerInternalStateDisposed; self.internalState = STKAudioPlayerInternalStateDisposed;
playbackThreadRunLoop = nil; playbackThreadRunLoop = nil;
[self destroyAudioResources];
[threadFinishedCondLock lock]; [threadFinishedCondLock lock];
[threadFinishedCondLock unlockWithCondition:1]; [threadFinishedCondLock unlockWithCondition:1];
} }
@ -1532,11 +1453,6 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
AudioConverterReset(audioConverterRef); AudioConverterReset(audioConverterRef);
} }
if (recordAudioConverterRef)
{
AudioConverterReset(recordAudioConverterRef);
}
[currentEntry reset]; [currentEntry reset];
[currentEntry.dataSource seekToOffset:seekByteOffset]; [currentEntry.dataSource seekToOffset:seekByteOffset];
@ -1656,8 +1572,6 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
NSObject* queueItemId = currentlyReadingEntry.queueItemId; NSObject* queueItemId = currentlyReadingEntry.queueItemId;
[self closeRecordAudioFile];
[self dispatchSyncOnMainThread:^ [self dispatchSyncOnMainThread:^
{ {
[self.delegate audioPlayer:self didFinishBufferingSourceWithQueueItemId:queueItemId]; [self.delegate audioPlayer:self didFinishBufferingSourceWithQueueItemId:queueItemId];
@ -1763,10 +1677,10 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
self->pcmBufferFrameStartIndex = 0; self->pcmBufferFrameStartIndex = 0;
self->pcmBufferUsedFrameCount = 0; self->pcmBufferUsedFrameCount = 0;
// self->peakPowerDb[0] = STK_DBMIN; self->peakPowerDb[0] = STK_DBMIN;
// self->peakPowerDb[1] = STK_DBMIN; self->peakPowerDb[1] = STK_DBMIN;
// self->averagePowerDb[0] = STK_DBMIN; self->averagePowerDb[0] = STK_DBMIN;
// self->averagePowerDb[1] = STK_DBMIN; self->averagePowerDb[1] = STK_DBMIN;
OSSpinLockUnlock(&pcmBufferSpinLock); OSSpinLockUnlock(&pcmBufferSpinLock);
} }
@ -1782,8 +1696,6 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
return; return;
} }
[self closeRecordAudioFile];
[self stopAudioUnitWithReason:STKAudioPlayerStopReasonUserAction]; [self stopAudioUnitWithReason:STKAudioPlayerStopReasonUserAction];
[self resetPcmBuffers]; [self resetPcmBuffers];
@ -1878,35 +1790,6 @@ static void AudioFileStreamPacketsProc(void* clientData, UInt32 numberBytes, UIn
self.muted = NO; self.muted = NO;
} }
-(void) closeRecordAudioFile
{
if (recordAudioFileId)
{
AudioFileClose(recordAudioFileId);
recordAudioFileId = NULL;
}
if (recordAudioConverterRef)
{
AudioConverterDispose(recordAudioConverterRef);
recordAudioConverterRef = nil;
}
if (recordOutputBuffer)
{
free(recordOutputBuffer);
recordOutputBuffer = NULL;
}
if (recordPacketDescriptions)
{
free(recordPacketDescriptions);
recordPacketDescriptions = NULL;
}
recordFilePacketPosition = 0;
}
-(void) dispose -(void) dispose
{ {
[self stop]; [self stop];
@ -1979,42 +1862,17 @@ static BOOL GetHardwareCodecClassDesc(UInt32 formatId, AudioClassDescription* cl
{ {
OSStatus status; OSStatus status;
Boolean writable; Boolean writable;
UInt32 cookieSize = 0; UInt32 cookieSize;
if (memcmp(asbd, &audioConverterAudioStreamBasicDescription, sizeof(AudioStreamBasicDescription)) == 0) if (memcmp(asbd, &audioConverterAudioStreamBasicDescription, sizeof(AudioStreamBasicDescription)) == 0)
{ {
AudioConverterReset(audioConverterRef); AudioConverterReset(audioConverterRef);
if (recordAudioConverterRef)
{
AudioConverterReset(recordAudioConverterRef);
}
return; return;
} }
[self destroyAudioConverter]; [self destroyAudioConverter];
BOOL isRecording = currentlyReadingEntry.dataSource.recordToFileUrl != nil;
if (isRecording)
{
recordAudioStreamBasicDescription = (AudioStreamBasicDescription)
{
.mFormatID = kAudioFormatMPEG4AAC,
.mFormatFlags = kMPEG4Object_AAC_LC,
.mChannelsPerFrame = canonicalAudioStreamBasicDescription.mChannelsPerFrame,
.mSampleRate = canonicalAudioStreamBasicDescription.mSampleRate,
};
UInt32 dataSize = sizeof(recordAudioStreamBasicDescription);
AudioFormatGetProperty(kAudioFormatProperty_FormatInfo,
0,
NULL,
&dataSize,
&recordAudioStreamBasicDescription);
}
AudioClassDescription classDesc; AudioClassDescription classDesc;
if (GetHardwareCodecClassDesc(asbd->mFormatID, &classDesc)) if (GetHardwareCodecClassDesc(asbd->mFormatID, &classDesc))
@ -2034,28 +1892,13 @@ static BOOL GetHardwareCodecClassDesc(UInt32 formatId, AudioClassDescription* cl
} }
} }
if (isRecording && !recordAudioConverterRef)
{
status = AudioConverterNew(&canonicalAudioStreamBasicDescription, &recordAudioStreamBasicDescription, &recordAudioConverterRef);
if (status)
{
NSLog(@"STKAudioPlayer failed to create a recording audio converter");
}
}
audioConverterAudioStreamBasicDescription = *asbd; audioConverterAudioStreamBasicDescription = *asbd;
if (self->currentlyReadingEntry.dataSource.audioFileTypeHint != kAudioFileAAC_ADTSType) status = AudioFileStreamGetPropertyInfo(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, &writable);
{
status = AudioFileStreamGetPropertyInfo(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, &writable);
if (status) if (!status)
{ {
return; void* cookieData = alloca(cookieSize);
}
void* cookieData = alloca(cookieSize);
status = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, cookieData); status = AudioFileStreamGetProperty(audioFileStream, kAudioFileStreamProperty_MagicCookieData, &cookieSize, cookieData);
@ -2068,87 +1911,9 @@ static BOOL GetHardwareCodecClassDesc(UInt32 formatId, AudioClassDescription* cl
if (status) if (status)
{ {
[self unexpectedError:STKAudioPlayerErrorAudioSystemError];
return; return;
} }
} }
if (recordAudioConverterRef)
{
if (recordAudioFileId)
{
AudioFileClose(recordAudioFileId);
recordAudioFileId = NULL;
}
if (recordOutputBuffer)
{
free(recordOutputBuffer);
recordOutputBuffer = NULL;
}
if (recordPacketDescriptions)
{
free(recordPacketDescriptions);
recordPacketDescriptions = NULL;
}
recordOutputBufferSize = 32 * 1024;
recordPacketSize = canonicalAudioStreamBasicDescription.mBytesPerPacket;
if (recordPacketSize == 0)
{
UInt32 size = sizeof(recordPacketSize);
if (0 == AudioConverterGetProperty(recordAudioConverterRef, kAudioConverterPropertyMaximumOutputPacketSize, &size, &recordPacketSize))
{
if (recordPacketSize > recordOutputBufferSize)
{
recordOutputBufferSize = recordPacketSize;
}
recordPacketsPerBuffer = recordOutputBufferSize / recordPacketSize;
}
else
{
AudioConverterDispose(recordAudioConverterRef);
recordAudioConverterRef = NULL;
NSLog(@"STKAudioPlayer: Can't support this output format for recording");
}
}
else
{
recordPacketsPerBuffer = recordOutputBufferSize / recordPacketSize;
}
UInt32 propertySize = sizeof(UInt32);
UInt32 externallyFramed = 0;
OSStatus error = AudioFormatGetProperty(kAudioFormatProperty_FormatIsExternallyFramed, sizeof(recordAudioStreamBasicDescription), &recordAudioStreamBasicDescription, &propertySize, &externallyFramed);
if (externallyFramed)
{
recordPacketDescriptions = (AudioStreamPacketDescription *)malloc(sizeof(AudioStreamPacketDescription) * recordPacketsPerBuffer);
}
recordOutputBuffer = (UInt8 *)malloc(sizeof(UInt8) * recordOutputBufferSize);
error = AudioFileCreateWithURL(
(__bridge CFURLRef)(currentlyReadingEntry.dataSource.recordToFileUrl),
kAudioFileCAFType,
&recordAudioStreamBasicDescription,
kAudioFileFlags_EraseFile,
&recordAudioFileId);
recordFilePacketPosition = 0;
if (error)
{
NSLog(@"STKAudioPlayer failed to create a recording audio file at %@", currentlyReadingEntry.dataSource.recordToFileUrl);
[self closeRecordAudioFile];
}
}
} }
-(void) createOutputUnit -(void) createOutputUnit
@ -2459,9 +2224,6 @@ static BOOL GetHardwareCodecClassDesc(UInt32 formatId, AudioClassDescription* cl
} }
else if (!isRunning) else if (!isRunning)
{ {
stopReason = stopReasonIn;
self.internalState = STKAudioPlayerInternalStateStopped;
return; return;
} }
@ -2644,11 +2406,6 @@ OSStatus AudioConverterCallback(AudioConverterRef inAudioConverter, UInt32* ioNu
framesAdded = framesToDecode; framesAdded = framesToDecode;
if ((status == 100 || status == 0) && recordAudioFileId && recordAudioConverterRef)
{
[self handleRecordingOfAudioPackets:framesToDecode audioBuffer:&localPcmBufferList.mBuffers[0]];
}
if (status == 100) if (status == 100)
{ {
OSSpinLockLock(&pcmBufferSpinLock); OSSpinLockLock(&pcmBufferSpinLock);
@ -2691,11 +2448,6 @@ OSStatus AudioConverterCallback(AudioConverterRef inAudioConverter, UInt32* ioNu
framesAdded += framesToDecode; framesAdded += framesToDecode;
if ((status == 100 || status == 0) && recordAudioFileId && recordAudioConverterRef)
{
[self handleRecordingOfAudioPackets:framesToDecode audioBuffer:&localPcmBufferList.mBuffers[0]];
}
if (status == 100) if (status == 100)
{ {
OSSpinLockLock(&pcmBufferSpinLock); OSSpinLockLock(&pcmBufferSpinLock);
@ -2740,11 +2492,6 @@ OSStatus AudioConverterCallback(AudioConverterRef inAudioConverter, UInt32* ioNu
framesAdded = framesToDecode; framesAdded = framesToDecode;
if ((status == 100 || status == 0) && recordAudioFileId && recordAudioConverterRef)
{
[self handleRecordingOfAudioPackets:framesToDecode audioBuffer:&localPcmBufferList.mBuffers[0]];
}
if (status == 100) if (status == 100)
{ {
OSSpinLockLock(&pcmBufferSpinLock); OSSpinLockLock(&pcmBufferSpinLock);
@ -2779,79 +2526,15 @@ OSStatus AudioConverterCallback(AudioConverterRef inAudioConverter, UInt32* ioNu
} }
} }
- (void)handleRecordingOfAudioPackets:(UInt32)numberOfPackets audioBuffer:(AudioBuffer *)audioBuffer
{
if (recordAudioFileId && recordAudioConverterRef)
{
AudioConvertInfo recordConvertInfo;
recordConvertInfo.done = NO;
recordConvertInfo.numberOfPackets = numberOfPackets;
recordConvertInfo.packetDescriptions = NULL;
recordConvertInfo.audioBuffer = *audioBuffer;
AudioBufferList convertedData;
convertedData.mNumberBuffers = 1;
convertedData.mBuffers[0].mNumberChannels = recordAudioStreamBasicDescription.mChannelsPerFrame;
convertedData.mBuffers[0].mDataByteSize = recordOutputBufferSize;
convertedData.mBuffers[0].mData = recordOutputBuffer;
UInt32 ioOutputDataPackets;
OSStatus status;
while (1)
{
ioOutputDataPackets = recordPacketsPerBuffer;
status = AudioConverterFillComplexBuffer(recordAudioConverterRef, AudioConverterCallback, (void*)&recordConvertInfo, &ioOutputDataPackets, &convertedData, recordPacketDescriptions);
if (status == 100 || status == 0)
{
if (ioOutputDataPackets > 0)
{
OSStatus writeError = AudioFileWritePackets(recordAudioFileId,
NO,
convertedData.mBuffers[0].mDataByteSize,
recordPacketDescriptions,
recordFilePacketPosition,
&ioOutputDataPackets,
convertedData.mBuffers[0].mData);
if (writeError)
{
NSLog(@"STKAudioPlayer:handleRecordingOfAudioPackets failed on AudioFileWritePackets with error \"" OSSTATUS_PRINTF_PLACEHOLDER "\"", OSSTATUS_PRINTF_VALUE(writeError));
}
else
{
recordFilePacketPosition += ioOutputDataPackets;
}
}
}
else
{
NSLog(@"STKAudioPlayer: Unexpected error during recording audio file conversion");
}
if (status == 100)
{
break;
}
}
}
}
static OSStatus OutputRenderCallback(void* inRefCon, AudioUnitRenderActionFlags* ioActionFlags, const AudioTimeStamp* inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList* ioData) static OSStatus OutputRenderCallback(void* inRefCon, AudioUnitRenderActionFlags* ioActionFlags, const AudioTimeStamp* inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList* ioData)
{ {
STKAudioPlayer* audioPlayer = (__bridge STKAudioPlayer*)inRefCon; STKAudioPlayer* audioPlayer = (__bridge STKAudioPlayer*)inRefCon;
OSSpinLockLock(&audioPlayer->currentEntryReferencesLock);
STKQueueEntry* entry = audioPlayer->currentlyPlayingEntry;
STKQueueEntry* currentlyReadingEntry = audioPlayer->currentlyReadingEntry;
OSSpinLockUnlock(&audioPlayer->currentEntryReferencesLock);
OSSpinLockLock(&audioPlayer->pcmBufferSpinLock); OSSpinLockLock(&audioPlayer->pcmBufferSpinLock);
BOOL waitForBuffer = NO; BOOL waitForBuffer = NO;
BOOL muted = audioPlayer->muted; BOOL muted = audioPlayer->muted;
STKQueueEntry* entry = audioPlayer->currentlyPlayingEntry;
AudioBuffer* audioBuffer = audioPlayer->pcmAudioBuffer; AudioBuffer* audioBuffer = audioPlayer->pcmAudioBuffer;
UInt32 frameSizeInBytes = audioPlayer->pcmBufferFrameSizeInBytes; UInt32 frameSizeInBytes = audioPlayer->pcmBufferFrameSizeInBytes;
UInt32 used = audioPlayer->pcmBufferUsedFrameCount; UInt32 used = audioPlayer->pcmBufferUsedFrameCount;
@ -2869,10 +2552,10 @@ static OSStatus OutputRenderCallback(void* inRefCon, AudioUnitRenderActionFlags*
if (entry->lastFrameQueued >= 0) if (entry->lastFrameQueued >= 0)
{ {
framesRequiredToStartPlaying = MIN(framesRequiredToStartPlaying, entry->lastFrameQueued); framesRequiredToStartPlaying = MIN(framesRequiredToStartPlaying, audioPlayer->currentlyPlayingEntry->lastFrameQueued);
} }
if (entry && currentlyReadingEntry == entry if (entry && audioPlayer->currentlyReadingEntry == entry
&& entry->framesQueued < framesRequiredToStartPlaying) && entry->framesQueued < framesRequiredToStartPlaying)
{ {
waitForBuffer = YES; waitForBuffer = YES;
@ -3082,19 +2765,13 @@ static OSStatus OutputRenderCallback(void* inRefCon, AudioUnitRenderActionFlags*
{ {
pthread_mutex_lock(&audioPlayer->playerMutex); pthread_mutex_lock(&audioPlayer->playerMutex);
OSSpinLockLock(&audioPlayer->currentEntryReferencesLock); if (lastFramePlayed && entry == audioPlayer->currentlyPlayingEntry)
STKQueueEntry* currentlyPlayingEntry = audioPlayer->currentlyPlayingEntry;
OSSpinLockUnlock(&audioPlayer->currentEntryReferencesLock);
if (lastFramePlayed && entry == currentlyPlayingEntry)
{ {
[audioPlayer audioQueueFinishedPlaying:entry]; [audioPlayer audioQueueFinishedPlaying:entry];
while (extraFramesPlayedNotAssigned > 0) while (extraFramesPlayedNotAssigned > 0)
{ {
OSSpinLockLock(&audioPlayer->currentEntryReferencesLock);
STKQueueEntry* newEntry = audioPlayer->currentlyPlayingEntry; STKQueueEntry* newEntry = audioPlayer->currentlyPlayingEntry;
OSSpinLockUnlock(&audioPlayer->currentEntryReferencesLock);
if (newEntry != nil) if (newEntry != nil)
{ {
@ -3255,66 +2932,60 @@ static OSStatus OutputRenderCallback(void* inRefCon, AudioUnitRenderActionFlags*
} }
else else
{ {
[self appendFrameFilterWithName:@"STKMeteringFilter" block:^(UInt32 channelsPerFrame, UInt32 bytesPerFrame, UInt32 frameCount, float* frames) [self appendFrameFilterWithName:@"STKMeteringFilter" block:^(UInt32 channelsPerFrame, UInt32 bytesPerFrame, UInt32 frameCount, void* frames)
{ {
STKFloatConverterToFloat(_floatConverter,&(pcmAudioBufferList),_floatBuffers,frameCount); SInt16* samples16 = (SInt16*)frames;
SInt32* samples32 = (SInt32*)frames;
UInt32 countLeft = 0;
UInt32 countRight = 0;
Float32 decibelsLeft = STK_DBMIN;
Float32 peakValueLeft = STK_DBMIN;
Float64 totalValueLeft = 0;
Float32 previousFilteredValueOfSampleAmplitudeLeft = 0;
Float32 decibelsRight = STK_DBMIN;
Float32 peakValueRight = STK_DBMIN;
Float64 totalValueRight = 0;
Float32 previousFilteredValueOfSampleAmplitudeRight = 0;
if ([self.delegate respondsToSelector:@selector(plotGraphWithBuffer:andLength:)]) { if (bytesPerFrame / channelsPerFrame == 2)
[self.delegate plotGraphWithBuffer:*(_floatBuffers) andLength:frameCount]; {
} for (int i = 0; i < frameCount * channelsPerFrame; i += channelsPerFrame)
{
Float32 absoluteValueOfSampleAmplitudeLeft = abs(samples16[i]);
Float32 absoluteValueOfSampleAmplitudeRight = abs(samples16[i + 1]);
// SInt16* samples16 = (SInt16*)frames; CALCULATE_METER(Left);
// SInt32* samples32 = (SInt32*)frames; CALCULATE_METER(Right);
// UInt32 countLeft = 0; }
// UInt32 countRight = 0; }
// Float32 decibelsLeft = STK_DBMIN; else if (bytesPerFrame / channelsPerFrame == 4)
// Float32 peakValueLeft = STK_DBMIN; {
// Float64 totalValueLeft = 0; for (int i = 0; i < frameCount * channelsPerFrame; i += channelsPerFrame)
// Float32 previousFilteredValueOfSampleAmplitudeLeft = 0; {
// Float32 decibelsRight = STK_DBMIN; Float32 absoluteValueOfSampleAmplitudeLeft = abs(samples32[i]) / 32768.0;
// Float32 peakValueRight = STK_DBMIN; Float32 absoluteValueOfSampleAmplitudeRight = abs(samples32[i + 1]) / 32768.0;
// Float64 totalValueRight = 0;
// Float32 previousFilteredValueOfSampleAmplitudeRight = 0; CALCULATE_METER(Left);
// CALCULATE_METER(Right);
// if (bytesPerFrame / channelsPerFrame == 2) }
// { }
// for (int i = 0; i < frameCount * channelsPerFrame; i += channelsPerFrame) else
// { {
// Float32 absoluteValueOfSampleAmplitudeLeft = abs(samples16[i]); return;
// Float32 absoluteValueOfSampleAmplitudeRight = abs(samples16[i + 1]); }
//
// CALCULATE_METER(Left); peakPowerDb[0] = MIN(MAX(decibelsLeft, -60), 0);
// CALCULATE_METER(Right); peakPowerDb[1] = MIN(MAX(decibelsRight, -60), 0);
// }
// } if (countLeft > 0)
// else if (bytesPerFrame / channelsPerFrame == 4) {
// { averagePowerDb[0] = MIN(MAX(totalValueLeft / frameCount, -60), 0);
// for (int i = 0; i < frameCount * channelsPerFrame; i += channelsPerFrame) }
// {
// Float32 absoluteValueOfSampleAmplitudeLeft = abs(samples32[i]) / 32768.0; if (countRight != 0)
// Float32 absoluteValueOfSampleAmplitudeRight = abs(samples32[i + 1]) / 32768.0; {
// averagePowerDb[1] = MIN(MAX(totalValueRight / frameCount, -60), 0);
// CALCULATE_METER(Left); }
// CALCULATE_METER(Right);
// }
// }
// else
// {
// return;
// }
//
// peakPowerDb[0] = MIN(MAX(decibelsLeft, -60), 0);
// peakPowerDb[1] = MIN(MAX(decibelsRight, -60), 0);
//
// if (countLeft > 0)
// {
// averagePowerDb[0] = MIN(MAX(totalValueLeft / frameCount, -60), 0);
// }
//
// if (countRight != 0)
// {
// averagePowerDb[1] = MIN(MAX(totalValueRight / frameCount, -60), 0);
// }
}]; }];
} }
} }

View File

@ -36,8 +36,6 @@
#import "STKHTTPDataSource.h" #import "STKHTTPDataSource.h"
#import "STKDataSourceWrapper.h" #import "STKDataSourceWrapper.h"
NS_ASSUME_NONNULL_BEGIN
typedef struct typedef struct
{ {
int watchdogPeriodSeconds; int watchdogPeriodSeconds;
@ -47,10 +45,8 @@ STKAutoRecoveringHTTPDataSourceOptions;
@interface STKAutoRecoveringHTTPDataSource : STKDataSourceWrapper @interface STKAutoRecoveringHTTPDataSource : STKDataSourceWrapper
-(instancetype) initWithHTTPDataSource:(STKHTTPDataSource*)innerDataSource; -(id) initWithHTTPDataSource:(STKHTTPDataSource*)innerDataSource;
@property (readonly) STKHTTPDataSource* innerDataSource; @property (readonly) STKHTTPDataSource* innerDataSource;
@end @end
NS_ASSUME_NONNULL_END

View File

@ -101,24 +101,22 @@ static void PopulateOptionsWithDefault(STKAutoRecoveringHTTPDataSourceOptions* o
@implementation STKAutoRecoveringHTTPDataSource @implementation STKAutoRecoveringHTTPDataSource
@dynamic innerDataSource;
-(STKHTTPDataSource*) innerHTTPDataSource -(STKHTTPDataSource*) innerHTTPDataSource
{ {
return (STKHTTPDataSource*)self.innerDataSource; return (STKHTTPDataSource*)self.innerDataSource;
} }
-(instancetype) initWithDataSource:(STKDataSource *)innerDataSource -(id) initWithDataSource:(STKDataSource *)innerDataSource
{ {
return [self initWithHTTPDataSource:(STKHTTPDataSource*)innerDataSource]; return [self initWithHTTPDataSource:(STKHTTPDataSource*)innerDataSource];
} }
-(instancetype) initWithHTTPDataSource:(STKHTTPDataSource*)innerDataSourceIn -(id) initWithHTTPDataSource:(STKHTTPDataSource*)innerDataSourceIn
{ {
return [self initWithHTTPDataSource:innerDataSourceIn andOptions:(STKAutoRecoveringHTTPDataSourceOptions){}]; return [self initWithHTTPDataSource:innerDataSourceIn andOptions:(STKAutoRecoveringHTTPDataSourceOptions){}];
} }
-(instancetype) initWithHTTPDataSource:(STKHTTPDataSource*)innerDataSourceIn andOptions:(STKAutoRecoveringHTTPDataSourceOptions)optionsIn -(id) initWithHTTPDataSource:(STKHTTPDataSource*)innerDataSourceIn andOptions:(STKAutoRecoveringHTTPDataSourceOptions)optionsIn
{ {
if (self = [super initWithDataSource:innerDataSourceIn]) if (self = [super initWithDataSource:innerDataSourceIn])
{ {
@ -371,15 +369,16 @@ static void PopulateOptionsWithDefault(STKAutoRecoveringHTTPDataSourceOptions* o
[self.delegate dataSourceEof:self]; [self.delegate dataSourceEof:self];
} }
- (void)dataSourceErrorOccured:(STKDataSource*)dataSource { -(void) dataSourceErrorOccured:(STKDataSource*)dataSource
{
NSLog(@"dataSourceErrorOccured"); NSLog(@"dataSourceErrorOccured");
/* Range out of bounds */ if (self.innerDataSource.httpStatusCode == 416 /* Range out of bounds */)
if (self.innerDataSource.httpStatusCode == 416) { {
[super dataSourceEof:dataSource]; [super dataSourceEof:dataSource];
} else if ([self hasGotNetworkConnection]) { }
[super dataSourceErrorOccured:dataSource]; else
} else { {
[self processRetryOnError]; [self processRetryOnError];
} }
} }

View File

@ -0,0 +1,25 @@
//
// STKBufferChunk.h
// StreamingKit
//
// Created by Thong Nguyen on 24/02/2014.
// Copyright (c) 2014 Thong Nguyen. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface STKBufferChunk : NSObject
{
@public
UInt32 index;
UInt32 size;
UInt32 position;
UInt8* buffer;
}
@property (readonly) UInt32 absoluteStart;
@property (readonly) UInt32 absolutePosition;
-(id) initWithBufferSize:(UInt32)sizeIn;
@end

View File

@ -0,0 +1,40 @@
//
// STKBufferChunk.m
// StreamingKit
//
// Created by Thong Nguyen on 24/02/2014.
// Copyright (c) 2014 Thong Nguyen. All rights reserved.
//
#import "STKBufferChunk.h"
@implementation STKBufferChunk
-(id) initWithBufferSize:(UInt32)sizeIn
{
if (self = [super init])
{
self->size = sizeIn;
self->buffer = calloc(sizeof(UInt8), sizeIn);
}
return self;
}
-(void) dealloc
{
free(self->buffer);
}
-(UInt32) absoluteStart
{
return self->index * self->size;
}
-(UInt32) absolutePosition
{
return self.absoluteStart + self->position;
}
@end

View File

@ -0,0 +1,44 @@
/**********************************************************************************
STKBufferingDataSource.h
Created by Thong Nguyen on 16/10/2012.
https://github.com/tumtumtum/audjustable
Copyright (c) 2012-2014 Thong Nguyen (tumtumtum@gmail.com). All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. All advertising materials mentioning features or use of this software
must display the following acknowledgement:
This product includes software developed by Thong Nguyen (tumtumtum@gmail.com)
4. Neither the name of Thong Nguyen nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY Thong Nguyen ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THONG NGUYEN BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**********************************************************************************/
#import "STKDataSource.h"
@interface STKBufferingDataSource : STKDataSource
@property (readonly) SInt64 position;
@property (readonly) SInt64 length;
-(id) initWithDataSource:(STKDataSource*)dataSourceIn withMaxSize:(int)maxSizeIn;
@end

View File

@ -0,0 +1,319 @@
/**********************************************************************************
STKBufferingDataSource.m
Created by Thong Nguyen on 16/10/2012.
https://github.com/tumtumtum/audjustable
Copyright (c) 2012-2014 Thong Nguyen (tumtumtum@gmail.com). All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. All advertising materials mentioning features or use of this software
must display the following acknowledgement:
This product includes software developed by Thong Nguyen (tumtumtum@gmail.com)
4. Neither the name of Thong Nguyen nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY Thong Nguyen ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THONG NGUYEN BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**********************************************************************************/
#import "STKBufferingDataSource.h"
#import "STKBufferChunk.h"
#import <pthread.h>
#define STK_BUFFER_CHUNK_SIZE (128 * 1024)
@interface STKBufferingDataSource()
{
@private
NSRunLoop* runLoop;
SInt32 maxSize;
UInt32 chunkSize;
UInt32 chunkCount;
SInt64 position;
pthread_mutex_t mutex;
pthread_cond_t condition;
STKBufferChunk* __strong * bufferChunks;
STKDataSource* dataSource;
}
@end
@interface STKBufferingDataSourceThread : NSThread
{
@private
NSRunLoop* runLoop;
NSConditionLock* threadStartedLock;
}
@end
@implementation STKBufferingDataSourceThread
-(id) init
{
if (self = [super init])
{
threadStartedLock = [[NSConditionLock alloc] initWithCondition:0];
}
return self;
}
-(NSRunLoop*) runLoop
{
[threadStartedLock lockWhenCondition:1];
[threadStartedLock unlockWithCondition:0];
return self->runLoop;
}
-(void) main
{
runLoop = [NSRunLoop currentRunLoop];
[threadStartedLock lockWhenCondition:0];
[threadStartedLock unlockWithCondition:1];
[runLoop addPort:[NSPort port] forMode:NSDefaultRunLoopMode];
while (true)
{
NSDate* date = [[NSDate alloc] initWithTimeIntervalSinceNow:10];
[runLoop runMode:NSDefaultRunLoopMode beforeDate:date];
}
}
@end
static STKBufferingDataSourceThread* thread;
@implementation STKBufferingDataSource
+(void) initialize
{
thread = [[STKBufferingDataSourceThread alloc] init];
[thread start];
}
-(id) initWithDataSource:(STKDataSource*)dataSourceIn withMaxSize:(int)maxSizeIn
{
if (self = [super init])
{
self->maxSize = maxSizeIn;
self->dataSource = dataSourceIn;
self->chunkSize = STK_BUFFER_CHUNK_SIZE;
self->dataSource.delegate = self.delegate;
[self->dataSource registerForEvents:[thread runLoop]];
pthread_mutexattr_t attr;
pthread_mutexattr_init(&attr);
pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
pthread_mutex_init(&self->mutex, &attr);
pthread_cond_init(&self->condition, NULL);
}
return self;
}
-(void) dealloc
{
self->dataSource.delegate = nil;
for (int i = 0; i < self->chunkCount; i++)
{
self->bufferChunks[i] = nil;
}
free(self->bufferChunks);
pthread_mutex_destroy(&self->mutex);
pthread_cond_destroy(&self->condition);
}
-(void) createBuffer
{
if (self->bufferChunks == nil)
{
int length = (int)MIN(self.length == 0? 1024 * 1024 : self.length, self->maxSize);
self->chunkCount = (int)((length / self->chunkSize) + 1);
self->bufferChunks = (__strong STKBufferChunk**)calloc(sizeof(STKBufferChunk*), self->chunkCount);
}
}
-(STKBufferChunk*) chunkForPosition:(SInt64)positionIn createIfNotExist:(BOOL)createIfNotExist
{
int chunkIndex = (int)(positionIn / chunkCount);
if (self->bufferChunks[chunkIndex] == nil && createIfNotExist)
{
self->bufferChunks[chunkIndex] = [[STKBufferChunk alloc] initWithBufferSize:STK_BUFFER_CHUNK_SIZE];
}
return self->bufferChunks[chunkIndex];
}
-(SInt64) length
{
return self->dataSource.length;
}
-(void) seekToOffset:(SInt64)offset
{
pthread_mutex_lock(&mutex);
[self seekToNextGap];
pthread_mutex_unlock(&mutex);
}
-(BOOL) hasBytesAvailable
{
return NO;
}
-(int) readIntoBuffer:(UInt8*)bufferIn withSize:(int)size
{
return 0;
}
-(void) invokeBlockOnEventsRunLoop:(void(^)())block
{
if (!runLoop)
{
return;
}
block = [block copy];
CFRunLoopPerformBlock(runLoop.getCFRunLoop, NSRunLoopCommonModes, ^
{
if ([self hasBytesAvailable])
{
block();
}
});
CFRunLoopWakeUp(runLoop.getCFRunLoop);
}
-(BOOL) registerForEvents:(NSRunLoop*)runLoopIn
{
runLoop = runLoopIn;
[dataSource registerForEvents:[thread runLoop]];
return YES;
}
-(void) unregisterForEvents
{
runLoop = nil;
[dataSource unregisterForEvents];
}
-(void) close
{
[dataSource unregisterForEvents];
[dataSource close];
}
/// Seeks the inner data source to the first unfilled byte ("gap") in the
/// chunk table, scanning forward (with wrap-around) from the chunk that
/// contains the current read position. Allocates chunks on demand while
/// scanning. Caller must hold self->mutex.
-(void) seekToNextGap
{
    // BUGFIX: the starting chunk index is position / chunkSize (bytes per
    // chunk), not position / chunkCount as before.
    int startChunkIndex = (int)(self->position / self->chunkSize);

    for (int i = 0; i < self->chunkCount; i++)
    {
        int chunkIndex = (i + startChunkIndex) % self->chunkCount;
        STKBufferChunk* chunk = self->bufferChunks[chunkIndex];

        if (chunk == nil)
        {
            chunk = [[STKBufferChunk alloc] initWithBufferSize:STK_BUFFER_CHUNK_SIZE];
            chunk->index = chunkIndex;
            self->bufferChunks[chunkIndex] = chunk;
        }

        // chunk->position is the fill level; a chunk that isn't full is a gap.
        if (chunk->position < chunk->size)
        {
            [dataSource seekToOffset:(self->chunkSize * chunk->index) + chunk->position];
            // BUGFIX: stop at the FIRST gap — the old loop kept issuing a
            // seek for every incomplete chunk, so only the last one stuck.
            break;
        }
    }
}
/// Delegate callback from the inner data source: drains available bytes
/// into the chunk that covers the source's current position, or re-seeks
/// the source to the next gap when that chunk is already satisfied.
///
/// @param dataSourceIn The inner data source reporting available bytes.
-(void) dataSourceDataAvailable:(STKDataSource*)dataSourceIn
{
    if (![dataSourceIn hasBytesAvailable])
    {
        return;
    }

    pthread_mutex_lock(&mutex);

    if (self->bufferChunks == nil)
    {
        [self createBuffer];
    }

    SInt64 sourcePosition = dataSourceIn.position;
    STKBufferChunk* chunk = [self chunkForPosition:sourcePosition createIfNotExist:YES];

    // Chunk already full — point the source at the next gap instead.
    if (chunk->position >= chunk->size)
    {
        [self seekToNextGap];
        // BUGFIX: this early return previously left the mutex locked,
        // deadlocking every subsequent callback.
        pthread_mutex_unlock(&mutex);
        return;
    }

    // Byte offset of the source position within this chunk.
    int offset = (int)(dataSourceIn.position % self->chunkSize);

    // Source is behind (or at) data we already buffered — re-seek.
    if (offset >= chunk->position)
    {
        [self seekToNextGap];
        // BUGFIX: unlock on this early-return path too (see above).
        pthread_mutex_unlock(&mutex);
        return;
    }

    int bytesToRead = self->chunkSize - offset;
    int bytesRead = [dataSourceIn readIntoBuffer:(chunk->buffer + offset) withSize:bytesToRead];

    chunk->position = offset + bytesRead;

    pthread_mutex_unlock(&mutex);
}
// Delegate callback: forwards inner-source errors to our own delegate,
// substituting self as the reporting source.
-(void) dataSourceErrorOccured:(STKDataSource*)dataSourceIn
{
    [self.delegate dataSourceErrorOccured:self];
}
// Delegate callback: inner data source reached end-of-file.
// TODO(review): intentionally empty in this buffering branch? EOF is
// neither recorded nor forwarded to self.delegate — confirm.
-(void) dataSourceEof:(STKDataSource*)dataSourceIn
{
}
@end

View File

@ -34,8 +34,6 @@
#import "STKDataSource.h" #import "STKDataSource.h"
NS_ASSUME_NONNULL_BEGIN
@class STKCoreFoundationDataSource; @class STKCoreFoundationDataSource;
@interface CoreFoundationDataSourceClientInfo : NSObject @interface CoreFoundationDataSourceClientInfo : NSObject
@ -45,10 +43,9 @@ NS_ASSUME_NONNULL_BEGIN
@interface STKCoreFoundationDataSource : STKDataSource @interface STKCoreFoundationDataSource : STKDataSource
{ {
@public
CFReadStreamRef stream;
@protected @protected
BOOL isInErrorState; BOOL isInErrorState;
CFReadStreamRef stream;
NSRunLoop* eventsRunLoop; NSRunLoop* eventsRunLoop;
} }
@ -64,5 +61,3 @@ NS_ASSUME_NONNULL_BEGIN
-(CFStreamStatus) status; -(CFStreamStatus) status;
@end @end
NS_ASSUME_NONNULL_END

View File

@ -41,10 +41,8 @@ static void ReadStreamCallbackProc(CFReadStreamRef stream, CFStreamEventType eve
switch (eventType) switch (eventType)
{ {
case kCFStreamEventErrorOccurred: case kCFStreamEventErrorOccurred:
{
[datasource errorOccured]; [datasource errorOccured];
break; break;
}
case kCFStreamEventEndEncountered: case kCFStreamEventEndEncountered:
[datasource eof]; [datasource eof];
break; break;
@ -137,6 +135,8 @@ static void ReadStreamCallbackProc(CFReadStreamRef stream, CFStreamEventType eve
{ {
CFReadStreamSetClient(stream, kCFStreamEventHasBytesAvailable | kCFStreamEventErrorOccurred | kCFStreamEventEndEncountered, NULL, NULL); CFReadStreamSetClient(stream, kCFStreamEventHasBytesAvailable | kCFStreamEventErrorOccurred | kCFStreamEventEndEncountered, NULL, NULL);
CFReadStreamUnscheduleFromRunLoop(stream, [eventsRunLoop getCFRunLoop], kCFRunLoopCommonModes); CFReadStreamUnscheduleFromRunLoop(stream, [eventsRunLoop getCFRunLoop], kCFRunLoopCommonModes);
eventsRunLoop = nil;
} }
} }

9
StreamingKit/StreamingKit/STKDataSource.h Executable file → Normal file
View File

@ -35,8 +35,6 @@
#import <Foundation/Foundation.h> #import <Foundation/Foundation.h>
#include <AudioToolbox/AudioToolbox.h> #include <AudioToolbox/AudioToolbox.h>
NS_ASSUME_NONNULL_BEGIN
@class STKDataSource; @class STKDataSource;
@protocol STKDataSourceDelegate<NSObject> @protocol STKDataSourceDelegate<NSObject>
@ -47,13 +45,10 @@ NS_ASSUME_NONNULL_BEGIN
@interface STKDataSource : NSObject @interface STKDataSource : NSObject
@property (readonly) BOOL supportsSeek;
@property (readonly) SInt64 position; @property (readonly) SInt64 position;
@property (readonly) SInt64 length; @property (readonly) SInt64 length;
@property (readonly) BOOL hasBytesAvailable; @property (readonly) BOOL hasBytesAvailable;
@property (nonatomic, readwrite, assign) double durationHint; @property (readwrite, unsafe_unretained) id<STKDataSourceDelegate> delegate;
@property (readwrite, unsafe_unretained, nullable) id<STKDataSourceDelegate> delegate;
@property (nonatomic, strong, nullable) NSURL *recordToFileUrl;
-(BOOL) registerForEvents:(NSRunLoop*)runLoop; -(BOOL) registerForEvents:(NSRunLoop*)runLoop;
-(void) unregisterForEvents; -(void) unregisterForEvents;
@ -64,5 +59,3 @@ NS_ASSUME_NONNULL_BEGIN
-(AudioFileTypeID) audioFileTypeHint; -(AudioFileTypeID) audioFileTypeHint;
@end @end
NS_ASSUME_NONNULL_END

View File

@ -79,9 +79,4 @@
return 0; return 0;
} }
-(BOOL) supportsSeek
{
return YES;
}
@end @end

View File

@ -34,14 +34,10 @@
#import "STKDataSource.h" #import "STKDataSource.h"
NS_ASSUME_NONNULL_BEGIN
@interface STKDataSourceWrapper : STKDataSource<STKDataSourceDelegate> @interface STKDataSourceWrapper : STKDataSource<STKDataSourceDelegate>
-(instancetype) initWithDataSource:(STKDataSource*)innerDataSource; -(id) initWithDataSource:(STKDataSource*)innerDataSource;
@property (readonly) STKDataSource* innerDataSource; @property (readonly) STKDataSource* innerDataSource;
@end @end
NS_ASSUME_NONNULL_END

View File

@ -40,7 +40,7 @@
@implementation STKDataSourceWrapper @implementation STKDataSourceWrapper
-(instancetype) initWithDataSource:(STKDataSource*)innerDataSourceIn -(id) initWithDataSource:(STKDataSource*)innerDataSourceIn
{ {
if (self = [super init]) if (self = [super init])
{ {

View File

@ -1,124 +0,0 @@
//
// AEFloatConverter.h
// The Amazing Audio Engine
//
// Created by Michael Tyson on 25/10/2012.
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented; you must not
// claim that you wrote the original software. If you use this software
// in a product, an acknowledgment in the product documentation would be
// appreciated but is not required.
//
// 2. Altered source versions must be plainly marked as such, and must not be
// misrepresented as being the original software.
//
// 3. This notice may not be removed or altered from any source distribution.
//
#ifdef __cplusplus
extern "C" {
#endif
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
/*!
* Universal converter to float format
*
* Use this class to easily convert arbitrary audio formats to floating point
* for use with utilities like the Accelerate framework.
*/
@interface STKFloatConverter : NSObject
/*!
* Initialize
*
* @param sourceFormat The audio format to use
*/
- (id)initWithSourceFormat:(AudioStreamBasicDescription)sourceFormat;
/*!
* Convert audio to floating-point
*
* This C function, safe to use in a Core Audio realtime thread context, will take
* an audio buffer list of audio in the format you provided at initialisation, and
* convert it into a noninterleaved float array.
*
* @param converter Pointer to the converter object.
* @param sourceBuffer An audio buffer list containing the source audio.
* @param targetBuffers An array of floating-point arrays to store the converted float audio into.
* Note that you must provide the correct number of arrays, to match the number of channels.
* @param frames The number of frames to convert.
* @return YES on success; NO on failure
*/
BOOL STKFloatConverterToFloat(STKFloatConverter* converter, AudioBufferList *sourceBuffer, float * const * targetBuffers, UInt32 frames);
/*!
* Convert audio to floating-point, in a buffer list
*
* This C function, safe to use in a Core Audio realtime thread context, will take
* an audio buffer list of audio in the format you provided at initialisation, and
* convert it into a noninterleaved float format.
*
* @param converter Pointer to the converter object.
* @param sourceBuffer An audio buffer list containing the source audio.
* @param targetBuffer An audio buffer list to store the converted floating-point audio.
* @param frames The number of frames to convert.
* @return YES on success; NO on failure
*/
BOOL STKFloatConverterToFloatBufferList(STKFloatConverter* converter, AudioBufferList *sourceBuffer, AudioBufferList *targetBuffer, UInt32 frames);
/*!
* Convert audio from floating-point
*
* This C function, safe to use in a Core Audio realtime thread context, will take
* an audio buffer list of audio in the format you provided at initialisation, and
* convert it into a float array.
*
* @param converter Pointer to the converter object.
* @param sourceBuffers An array of floating-point arrays containing the floating-point audio to convert.
* Note that you must provide the correct number of arrays, to match the number of channels.
* @param targetBuffer An audio buffer list to store the converted audio into.
* @param frames The number of frames to convert.
* @return YES on success; NO on failure
*/
BOOL STKFloatConverterFromFloat(STKFloatConverter* converter, float * const * sourceBuffers, AudioBufferList *targetBuffer, UInt32 frames);
/*!
* Convert audio from floating-point, in a buffer list
*
* This C function, safe to use in a Core Audio realtime thread context, will take
* an audio buffer list of audio in the format you provided at initialisation, and
* convert it into a float array.
*
* @param converter Pointer to the converter object.
* @param sourceBuffer An audio buffer list containing the source audio.
* @param targetBuffer An audio buffer list to store the converted audio into.
* @param frames The number of frames to convert.
* @return YES on success; NO on failure
*/
BOOL STKFloatConverterFromFloatBufferList(STKFloatConverter* converter, AudioBufferList *sourceBuffer, AudioBufferList *targetBuffer, UInt32 frames);
/*!
* The AudioStreamBasicDescription representing the converted floating-point format
*/
@property (nonatomic, readonly) AudioStreamBasicDescription floatingPointAudioDescription;
/*!
* The source audio format set at initialization
*/
@property (nonatomic, readonly) AudioStreamBasicDescription sourceFormat;
@end
#ifdef __cplusplus
}
#endif

View File

@ -1,211 +0,0 @@
//
// AEFloatConverter.m
// The Amazing Audio Engine
//
// Created by Michael Tyson on 25/10/2012.
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented; you must not
// claim that you wrote the original software. If you use this software
// in a product, an acknowledgment in the product documentation would be
// appreciated but is not required.
//
// 2. Altered source versions must be plainly marked as such, and must not be
// misrepresented as being the original software.
//
// 3. This notice may not be removed or altered from any source distribution.
//
#import "STKFloatConverter.h"
#define checkResult(result,operation) (_checkResult((result),(operation),strrchr(__FILE__, '/')+1,__LINE__))
static inline BOOL _checkResult(OSStatus result, const char *operation, const char* file, int line) {
if ( result != noErr ) {
NSLog(@"%s:%d: %s result %d %08X %4.4s", file, line, operation, (int)result, (int)result, (char*)&result);
return NO;
}
return YES;
}
#define kNoMoreDataErr -2222
struct complexInputDataProc_t {
AudioBufferList *sourceBuffer;
};
@interface STKFloatConverter () {
AudioStreamBasicDescription _sourceAudioDescription;
AudioStreamBasicDescription _floatAudioDescription;
AudioConverterRef _toFloatConverter;
AudioConverterRef _fromFloatConverter;
AudioBufferList *_scratchFloatBufferList;
}
static OSStatus complexInputDataProc(AudioConverterRef inAudioConverter,
UInt32 *ioNumberDataPackets,
AudioBufferList *ioData,
AudioStreamPacketDescription **outDataPacketDescription,
void *inUserData);
@end
@implementation STKFloatConverter
@synthesize sourceFormat = _sourceAudioDescription;
-(id)initWithSourceFormat:(AudioStreamBasicDescription)sourceFormat {
if ( !(self = [super init]) ) return nil;
_floatAudioDescription.mFormatID = kAudioFormatLinearPCM;
_floatAudioDescription.mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved;
_floatAudioDescription.mChannelsPerFrame = sourceFormat.mChannelsPerFrame;
_floatAudioDescription.mBytesPerPacket = sizeof(float);
_floatAudioDescription.mFramesPerPacket = 1;
_floatAudioDescription.mBytesPerFrame = sizeof(float);
_floatAudioDescription.mBitsPerChannel = 8 * sizeof(float);
_floatAudioDescription.mSampleRate = sourceFormat.mSampleRate;
_sourceAudioDescription = sourceFormat;
if ( memcmp(&sourceFormat, &_floatAudioDescription, sizeof(AudioStreamBasicDescription)) != 0 ) {
checkResult(AudioConverterNew(&sourceFormat, &_floatAudioDescription, &_toFloatConverter), "AudioConverterNew");
checkResult(AudioConverterNew(&_floatAudioDescription, &sourceFormat, &_fromFloatConverter), "AudioConverterNew");
_scratchFloatBufferList = (AudioBufferList*)malloc(sizeof(AudioBufferList) + (_floatAudioDescription.mChannelsPerFrame-1)*sizeof(AudioBuffer));
_scratchFloatBufferList->mNumberBuffers = _floatAudioDescription.mChannelsPerFrame;
for ( int i=0; i<_scratchFloatBufferList->mNumberBuffers; i++ ) {
_scratchFloatBufferList->mBuffers[i].mNumberChannels = 1;
}
}
return self;
}
-(void)dealloc {
if ( _toFloatConverter ) AudioConverterDispose(_toFloatConverter);
if ( _fromFloatConverter ) AudioConverterDispose(_fromFloatConverter);
if ( _scratchFloatBufferList ) free(_scratchFloatBufferList);
// [super dealloc];
}
BOOL STKFloatConverterToFloat(STKFloatConverter* THIS, AudioBufferList *sourceBuffer, float * const * targetBuffers, UInt32 frames) {
if ( frames == 0 ) return YES;
if ( THIS->_toFloatConverter ) {
UInt32 priorDataByteSize = sourceBuffer->mBuffers[0].mDataByteSize;
for ( int i=0; i<sourceBuffer->mNumberBuffers; i++ ) {
sourceBuffer->mBuffers[i].mDataByteSize = frames * THIS->_sourceAudioDescription.mBytesPerFrame;
}
for ( int i=0; i<THIS->_scratchFloatBufferList->mNumberBuffers; i++ ) {
THIS->_scratchFloatBufferList->mBuffers[i].mData = targetBuffers[i];
THIS->_scratchFloatBufferList->mBuffers[i].mDataByteSize = frames * sizeof(float);
}
OSStatus result = AudioConverterFillComplexBuffer(THIS->_toFloatConverter,
complexInputDataProc,
&(struct complexInputDataProc_t) { .sourceBuffer = sourceBuffer },
&frames,
THIS->_scratchFloatBufferList,
NULL);
for ( int i=0; i<sourceBuffer->mNumberBuffers; i++ ) {
sourceBuffer->mBuffers[i].mDataByteSize = priorDataByteSize;
}
if ( !checkResult(result, "AudioConverterConvertComplexBuffer") ) {
return NO;
}
} else {
for ( int i=0; i<sourceBuffer->mNumberBuffers; i++ ) {
memcpy(targetBuffers[i], sourceBuffer->mBuffers[i].mData, frames * sizeof(float));
}
}
return YES;
}
BOOL STKFloatConverterToFloatBufferList(STKFloatConverter* converter, AudioBufferList *sourceBuffer, AudioBufferList *targetBuffer, UInt32 frames) {
assert(targetBuffer->mNumberBuffers == converter->_floatAudioDescription.mChannelsPerFrame);
float *targetBuffers[targetBuffer->mNumberBuffers];
for ( int i=0; i<targetBuffer->mNumberBuffers; i++ ) {
targetBuffers[i] = (float*)targetBuffer->mBuffers[i].mData;
}
return STKFloatConverterToFloat(converter, sourceBuffer, targetBuffers, frames);
}
BOOL STKFloatConverterFromFloat(STKFloatConverter* THIS, float * const * sourceBuffers, AudioBufferList *targetBuffer, UInt32 frames) {
if ( frames == 0 ) return YES;
if ( THIS->_fromFloatConverter ) {
for ( int i=0; i<THIS->_scratchFloatBufferList->mNumberBuffers; i++ ) {
THIS->_scratchFloatBufferList->mBuffers[i].mData = sourceBuffers[i];
THIS->_scratchFloatBufferList->mBuffers[i].mDataByteSize = frames * sizeof(float);
}
UInt32 priorDataByteSize = targetBuffer->mBuffers[0].mDataByteSize;
for ( int i=0; i<targetBuffer->mNumberBuffers; i++ ) {
targetBuffer->mBuffers[i].mDataByteSize = frames * THIS->_sourceAudioDescription.mBytesPerFrame;
}
OSStatus result = AudioConverterFillComplexBuffer(THIS->_fromFloatConverter,
complexInputDataProc,
&(struct complexInputDataProc_t) { .sourceBuffer = THIS->_scratchFloatBufferList },
&frames,
targetBuffer,
NULL);
for ( int i=0; i<targetBuffer->mNumberBuffers; i++ ) {
targetBuffer->mBuffers[i].mDataByteSize = priorDataByteSize;
}
if ( !checkResult(result, "AudioConverterConvertComplexBuffer") ) {
return NO;
}
} else {
for ( int i=0; i<targetBuffer->mNumberBuffers; i++ ) {
memcpy(targetBuffer->mBuffers[i].mData, sourceBuffers[i], frames * sizeof(float));
}
}
return YES;
}
BOOL STKFloatConverterFromFloatBufferList(STKFloatConverter* converter, AudioBufferList *sourceBuffer, AudioBufferList *targetBuffer, UInt32 frames) {
assert(sourceBuffer->mNumberBuffers == converter->_floatAudioDescription.mChannelsPerFrame);
float *sourceBuffers[sourceBuffer->mNumberBuffers];
for ( int i=0; i<sourceBuffer->mNumberBuffers; i++ ) {
sourceBuffers[i] = (float*)sourceBuffer->mBuffers[i].mData;
}
return STKFloatConverterFromFloat(converter, sourceBuffers, targetBuffer, frames);
}
static OSStatus complexInputDataProc(AudioConverterRef inAudioConverter,
UInt32 *ioNumberDataPackets,
AudioBufferList *ioData,
AudioStreamPacketDescription **outDataPacketDescription,
void *inUserData) {
struct complexInputDataProc_t *arg = (struct complexInputDataProc_t*)inUserData;
if ( !arg->sourceBuffer ) {
return kNoMoreDataErr;
}
memcpy(ioData, arg->sourceBuffer, sizeof(AudioBufferList) + (arg->sourceBuffer->mNumberBuffers-1)*sizeof(AudioBuffer));
arg->sourceBuffer = NULL;
return noErr;
}
-(AudioStreamBasicDescription)floatingPointAudioDescription {
return _floatAudioDescription;
}
@end

View File

@ -34,12 +34,10 @@
#import "STKCoreFoundationDataSource.h" #import "STKCoreFoundationDataSource.h"
NS_ASSUME_NONNULL_BEGIN
@class STKHTTPDataSource; @class STKHTTPDataSource;
typedef void(^STKURLBlock)(NSURL* url); typedef void(^STKURLBlock)(NSURL* url);
typedef NSURL* _Nonnull (^STKURLProvider)(); typedef NSURL*(^STKURLProvider)();
typedef void(^STKAsyncURLProvider)(STKHTTPDataSource* dataSource, BOOL forSeek, STKURLBlock callback); typedef void(^STKAsyncURLProvider)(STKHTTPDataSource* dataSource, BOOL forSeek, STKURLBlock callback);
@interface STKHTTPDataSource : STKCoreFoundationDataSource @interface STKHTTPDataSource : STKCoreFoundationDataSource
@ -48,13 +46,10 @@ typedef void(^STKAsyncURLProvider)(STKHTTPDataSource* dataSource, BOOL forSeek,
@property (readonly) UInt32 httpStatusCode; @property (readonly) UInt32 httpStatusCode;
+(AudioFileTypeID) audioFileTypeHintFromMimeType:(NSString*)fileExtension; +(AudioFileTypeID) audioFileTypeHintFromMimeType:(NSString*)fileExtension;
-(instancetype) initWithURL:(NSURL*)url; -(id) initWithURL:(NSURL*)url;
-(instancetype) initWithURL:(NSURL*)url httpRequestHeaders:(NSDictionary*)httpRequestHeaders; -(id) initWithURLProvider:(STKURLProvider)urlProvider;
-(instancetype) initWithURLProvider:(STKURLProvider)urlProvider; -(id) initWithAsyncURLProvider:(STKAsyncURLProvider)asyncUrlProvider;
-(instancetype) initWithAsyncURLProvider:(STKAsyncURLProvider)asyncUrlProvider; -(NSRunLoop*) eventsRunLoop;
-(nullable NSRunLoop*) eventsRunLoop;
-(void) reconnect; -(void) reconnect;
@end @end
NS_ASSUME_NONNULL_END

357
StreamingKit/StreamingKit/STKHTTPDataSource.m Executable file → Normal file
View File

@ -38,25 +38,17 @@
@interface STKHTTPDataSource() @interface STKHTTPDataSource()
{ {
@private @private
BOOL supportsSeek;
UInt32 httpStatusCode; UInt32 httpStatusCode;
SInt64 seekStart; SInt64 seekStart;
SInt64 relativePosition; SInt64 relativePosition;
SInt64 fileLength; SInt64 fileLength;
int discontinuous; int discontinuous;
int requestSerialNumber; int requestSerialNumber;
int prefixBytesRead;
NSData* prefixBytes;
NSMutableData* iceHeaderData;
BOOL iceHeaderSearchComplete;
BOOL iceHeaderAvailable;
BOOL httpHeaderNotAvailable;
NSURL* currentUrl; NSURL* currentUrl;
STKAsyncURLProvider asyncUrlProvider; STKAsyncURLProvider asyncUrlProvider;
NSDictionary* httpHeaders; NSDictionary* httpHeaders;
AudioFileTypeID audioFileTypeHint; AudioFileTypeID audioFileTypeHint;
NSDictionary* requestHeaders;
} }
-(void) open; -(void) open;
@ -64,19 +56,12 @@
@implementation STKHTTPDataSource @implementation STKHTTPDataSource
-(instancetype) initWithURL:(NSURL*)urlIn -(id) initWithURL:(NSURL*)urlIn
{ {
return [self initWithURLProvider:^NSURL* { return urlIn; }]; return [self initWithURLProvider:^NSURL* { return urlIn; }];
} }
-(instancetype) initWithURL:(NSURL *)urlIn httpRequestHeaders:(NSDictionary *)httpRequestHeaders -(id) initWithURLProvider:(STKURLProvider)urlProviderIn
{
self = [self initWithURLProvider:^NSURL* { return urlIn; }];
self->requestHeaders = httpRequestHeaders;
return self;
}
-(instancetype) initWithURLProvider:(STKURLProvider)urlProviderIn
{ {
urlProviderIn = [urlProviderIn copy]; urlProviderIn = [urlProviderIn copy];
@ -86,7 +71,7 @@
}]; }];
} }
-(instancetype) initWithAsyncURLProvider:(STKAsyncURLProvider)asyncUrlProviderIn -(id) initWithAsyncURLProvider:(STKAsyncURLProvider)asyncUrlProviderIn
{ {
if (self = [super init]) if (self = [super init])
{ {
@ -125,8 +110,6 @@
@"audio/mpg": @(kAudioFileMP3Type), @"audio/mpg": @(kAudioFileMP3Type),
@"audio/mpeg": @(kAudioFileMP3Type), @"audio/mpeg": @(kAudioFileMP3Type),
@"audio/wav": @(kAudioFileWAVEType), @"audio/wav": @(kAudioFileWAVEType),
@"audio/x-wav": @(kAudioFileWAVEType),
@"audio/vnd.wav": @(kAudioFileWAVEType),
@"audio/aifc": @(kAudioFileAIFCType), @"audio/aifc": @(kAudioFileAIFCType),
@"audio/aiff": @(kAudioFileAIFFType), @"audio/aiff": @(kAudioFileAIFFType),
@"audio/x-m4a": @(kAudioFileM4AType), @"audio/x-m4a": @(kAudioFileM4AType),
@ -134,18 +117,10 @@
@"audio/aacp": @(kAudioFileAAC_ADTSType), @"audio/aacp": @(kAudioFileAAC_ADTSType),
@"audio/m4a": @(kAudioFileM4AType), @"audio/m4a": @(kAudioFileM4AType),
@"audio/mp4": @(kAudioFileMPEG4Type), @"audio/mp4": @(kAudioFileMPEG4Type),
@"video/mp4": @(kAudioFileMPEG4Type),
@"audio/caf": @(kAudioFileCAFType), @"audio/caf": @(kAudioFileCAFType),
@"audio/x-caf": @(kAudioFileCAFType),
@"audio/aac": @(kAudioFileAAC_ADTSType), @"audio/aac": @(kAudioFileAAC_ADTSType),
@"audio/aacp": @(kAudioFileAAC_ADTSType),
@"audio/ac3": @(kAudioFileAC3Type), @"audio/ac3": @(kAudioFileAC3Type),
@"audio/3gp": @(kAudioFile3GPType), @"audio/3gp": @(kAudioFile3GPType)
@"video/3gp": @(kAudioFile3GPType),
@"audio/3gpp": @(kAudioFile3GPType),
@"video/3gpp": @(kAudioFile3GPType),
@"audio/3gp2": @(kAudioFile3GP2Type),
@"video/3gp2": @(kAudioFile3GP2Type)
}; };
}); });
@ -164,233 +139,70 @@
return audioFileTypeHint; return audioFileTypeHint;
} }
-(NSDictionary*) parseIceHeader:(NSData*)headerData
{
NSMutableDictionary* retval = [[NSMutableDictionary alloc] init];
NSCharacterSet* characterSet = [NSCharacterSet characterSetWithCharactersInString:@"\r\n"];
NSString* fullString = [[NSString alloc] initWithData:headerData encoding:NSUTF8StringEncoding];
NSArray* strings = [fullString componentsSeparatedByCharactersInSet:characterSet];
httpHeaders = [NSMutableDictionary dictionary];
for (NSString* s in strings)
{
if (s.length == 0)
{
continue;
}
if ([s hasPrefix:@"ICY "])
{
NSArray* parts = [s componentsSeparatedByString:@" "];
if (parts.count >= 2)
{
self->httpStatusCode = [parts[1] intValue];
}
continue;
}
NSRange range = [s rangeOfString:@":"];
if (range.location == NSNotFound)
{
continue;
}
NSString* key = [s substringWithRange: (NSRange){.location = 0, .length = range.location}];
NSString* value = [s substringFromIndex:range.location + 1];
[retval setValue:value forKey:key];
}
return retval;
}
-(BOOL) parseHttpHeader
{
if (!httpHeaderNotAvailable)
{
CFTypeRef response = CFReadStreamCopyProperty(stream, kCFStreamPropertyHTTPResponseHeader);
if (response)
{
httpHeaders = (__bridge_transfer NSDictionary*)CFHTTPMessageCopyAllHeaderFields((CFHTTPMessageRef)response);
if (httpHeaders.count == 0)
{
httpHeaderNotAvailable = YES;
}
else
{
self->httpStatusCode = (UInt32)CFHTTPMessageGetResponseStatusCode((CFHTTPMessageRef)response);
}
CFRelease(response);
}
}
if (httpHeaderNotAvailable)
{
if (self->iceHeaderSearchComplete && !self->iceHeaderAvailable)
{
return YES;
}
if (!self->iceHeaderSearchComplete)
{
UInt8 byte;
UInt8 terminal1[] = { '\n', '\n' };
UInt8 terminal2[] = { '\r', '\n', '\r', '\n' };
if (iceHeaderData == nil)
{
iceHeaderData = [NSMutableData dataWithCapacity:1024];
}
while (true)
{
if (![self hasBytesAvailable])
{
break;
}
int read = [super readIntoBuffer:&byte withSize:1];
if (read <= 0)
{
break;
}
[iceHeaderData appendBytes:&byte length:read];
if (iceHeaderData.length >= sizeof(terminal1))
{
if (memcmp(&terminal1[0], [self->iceHeaderData bytes] + iceHeaderData.length - sizeof(terminal1), sizeof(terminal1)) == 0)
{
self->iceHeaderAvailable = YES;
self->iceHeaderSearchComplete = YES;
break;
}
}
if (iceHeaderData.length >= sizeof(terminal2))
{
if (memcmp(&terminal2[0], [self->iceHeaderData bytes] + iceHeaderData.length - sizeof(terminal2), sizeof(terminal2)) == 0)
{
self->iceHeaderAvailable = YES;
self->iceHeaderSearchComplete = YES;
break;
}
}
if (iceHeaderData.length >= 4)
{
if (memcmp([self->iceHeaderData bytes], "ICY ", 4) != 0 && memcmp([self->iceHeaderData bytes], "HTTP", 4) != 0)
{
self->iceHeaderAvailable = NO;
self->iceHeaderSearchComplete = YES;
prefixBytes = iceHeaderData;
return YES;
}
}
}
if (!self->iceHeaderSearchComplete)
{
return NO;
}
}
httpHeaders = [self parseIceHeader:self->iceHeaderData];
self->iceHeaderData = nil;
}
if (([httpHeaders objectForKey:@"Accept-Ranges"] ?: [httpHeaders objectForKey:@"accept-ranges"]) != nil)
{
self->supportsSeek = YES;
}
if (self.httpStatusCode == 200)
{
if (seekStart == 0)
{
id value = [httpHeaders objectForKey:@"Content-Length"] ?: [httpHeaders objectForKey:@"content-length"];
fileLength = (SInt64)[value longLongValue];
}
NSString* contentType = [httpHeaders objectForKey:@"Content-Type"] ?: [httpHeaders objectForKey:@"content-type"] ;
AudioFileTypeID typeIdFromMimeType = [STKHTTPDataSource audioFileTypeHintFromMimeType:contentType];
if (typeIdFromMimeType != 0)
{
audioFileTypeHint = typeIdFromMimeType;
}
}
else if (self.httpStatusCode == 206)
{
NSString* contentRange = [httpHeaders objectForKey:@"Content-Range"] ?: [httpHeaders objectForKey:@"content-range"];
NSArray* components = [contentRange componentsSeparatedByString:@"/"];
if (components.count == 2)
{
fileLength = [[components objectAtIndex:1] integerValue];
}
}
else if (self.httpStatusCode == 416)
{
if (self.length >= 0)
{
seekStart = self.length;
}
[self eof];
return NO;
}
else if (self.httpStatusCode >= 300)
{
[self errorOccured];
return NO;
}
return YES;
}
-(void) dataAvailable -(void) dataAvailable
{ {
if (stream == NULL) if (stream == NULL) {
{
return; return;
} }
if (self.httpStatusCode == 0) if (self.httpStatusCode == 0)
{ {
if ([self parseHttpHeader]) CFTypeRef response = CFReadStreamCopyProperty(stream, kCFStreamPropertyHTTPResponseHeader);
{
if ([self hasBytesAvailable])
{
[super dataAvailable];
}
return; if (response)
}
else
{ {
return; httpHeaders = (__bridge_transfer NSDictionary*)CFHTTPMessageCopyAllHeaderFields((CFHTTPMessageRef)response);
self->httpStatusCode = (UInt32)CFHTTPMessageGetResponseStatusCode((CFHTTPMessageRef)response);
CFRelease(response);
} }
if (self.httpStatusCode == 200)
{
if (seekStart == 0)
{
fileLength = (SInt64)[[httpHeaders objectForKey:@"Content-Length"] longLongValue];
}
NSString* contentType = [httpHeaders objectForKey:@"Content-Type"];
AudioFileTypeID typeIdFromMimeType = [STKHTTPDataSource audioFileTypeHintFromMimeType:contentType];
if (typeIdFromMimeType != 0)
{
audioFileTypeHint = typeIdFromMimeType;
}
}
else if (self.httpStatusCode == 206)
{
NSString* contentRange = [httpHeaders objectForKey:@"Content-Range"];
NSArray* components = [contentRange componentsSeparatedByString:@"/"];
if (components.count == 2)
{
fileLength = [[components objectAtIndex:1] integerValue];
}
}
else if (self.httpStatusCode == 416)
{
if (self.length >= 0)
{
seekStart = self.length;
}
[self eof];
return;
}
else if (self.httpStatusCode >= 300)
{
[self errorOccured];
return;
}
} }
else
{ [super dataAvailable];
[super dataAvailable];
}
} }
-(SInt64) position -(SInt64) position
@ -411,7 +223,7 @@
eventsRunLoop = savedEventsRunLoop; eventsRunLoop = savedEventsRunLoop;
[self seekToOffset:self->supportsSeek ? self.position : 0]; [self seekToOffset:self.position];
} }
-(void) seekToOffset:(SInt64)offset -(void) seekToOffset:(SInt64)offset
@ -430,43 +242,17 @@
self->isInErrorState = NO; self->isInErrorState = NO;
if (!self->supportsSeek && offset != self->relativePosition)
{
return;
}
[self openForSeek:YES]; [self openForSeek:YES];
} }
-(int) readIntoBuffer:(UInt8*)buffer withSize:(int)size -(int) readIntoBuffer:(UInt8*)buffer withSize:(int)size
{
return [self privateReadIntoBuffer:buffer withSize:size];
}
-(int) privateReadIntoBuffer:(UInt8*)buffer withSize:(int)size
{ {
if (size == 0) if (size == 0)
{ {
return 0; return 0;
} }
if (prefixBytes != nil) int read = (int)CFReadStreamRead(stream, buffer, size);
{
int count = MIN(size, (int)prefixBytes.length - prefixBytesRead);
[prefixBytes getBytes:buffer length:count];
prefixBytesRead += count;
if (prefixBytesRead >= prefixBytes.length)
{
prefixBytes = nil;
}
return count;
}
int read = [super readIntoBuffer:buffer withSize:size];
if (read < 0) if (read < 0)
{ {
@ -506,23 +292,13 @@
CFHTTPMessageRef message = CFHTTPMessageCreateRequest(NULL, (CFStringRef)@"GET", (__bridge CFURLRef)self->currentUrl, kCFHTTPVersion1_1); CFHTTPMessageRef message = CFHTTPMessageCreateRequest(NULL, (CFStringRef)@"GET", (__bridge CFURLRef)self->currentUrl, kCFHTTPVersion1_1);
if (seekStart > 0 && supportsSeek) if (seekStart > 0)
{ {
CFHTTPMessageSetHeaderFieldValue(message, CFSTR("Range"), (__bridge CFStringRef)[NSString stringWithFormat:@"bytes=%lld-", seekStart]); CFHTTPMessageSetHeaderFieldValue(message, CFSTR("Range"), (__bridge CFStringRef)[NSString stringWithFormat:@"bytes=%lld-", seekStart]);
discontinuous = YES; discontinuous = YES;
} }
for (NSString* key in self->requestHeaders)
{
NSString* value = [self->requestHeaders objectForKey:key];
CFHTTPMessageSetHeaderFieldValue(message, (__bridge CFStringRef)key, (__bridge CFStringRef)value);
}
CFHTTPMessageSetHeaderFieldValue(message, CFSTR("Accept"), CFSTR("*/*"));
CFHTTPMessageSetHeaderFieldValue(message, CFSTR("Ice-MetaData"), CFSTR("0"));
stream = CFReadStreamCreateForHTTPRequest(NULL, message); stream = CFReadStreamCreateForHTTPRequest(NULL, message);
if (stream == nil) if (stream == nil)
@ -534,8 +310,6 @@
return; return;
} }
CFReadStreamSetProperty(stream, (__bridge CFStringRef)NSStreamNetworkServiceTypeBackground, (__bridge CFStringRef)NSStreamNetworkServiceTypeBackground);
if (!CFReadStreamSetProperty(stream, kCFStreamPropertyHTTPShouldAutoredirect, kCFBooleanTrue)) if (!CFReadStreamSetProperty(stream, kCFStreamPropertyHTTPShouldAutoredirect, kCFBooleanTrue))
{ {
CFRelease(message); CFRelease(message);
@ -546,15 +320,20 @@
} }
// Proxy support // Proxy support
CFDictionaryRef proxySettings = CFNetworkCopySystemProxySettings(); CFDictionaryRef proxySettings = CFNetworkCopySystemProxySettings();
CFReadStreamSetProperty(stream, kCFStreamPropertyHTTPProxy, proxySettings); CFReadStreamSetProperty(stream, kCFStreamPropertyHTTPProxy, proxySettings);
CFRelease(proxySettings); CFRelease(proxySettings);
// SSL support // SSL support
if ([self->currentUrl.scheme caseInsensitiveCompare:@"https"] == NSOrderedSame) if ([self->currentUrl.scheme caseInsensitiveCompare:@"https"] == NSOrderedSame)
{ {
NSDictionary* sslSettings = [NSDictionary dictionaryWithObjectsAndKeys: NSDictionary* sslSettings = [NSDictionary dictionaryWithObjectsAndKeys:
(NSString*)kCFStreamSocketSecurityLevelNegotiatedSSL, kCFStreamSSLLevel, (NSString*)kCFStreamSocketSecurityLevelNegotiatedSSL, kCFStreamSSLLevel,
[NSNumber numberWithBool:YES], kCFStreamSSLAllowsExpiredCertificates,
[NSNumber numberWithBool:YES], kCFStreamSSLAllowsExpiredRoots,
[NSNumber numberWithBool:YES], kCFStreamSSLAllowsAnyRoot,
[NSNumber numberWithBool:NO], kCFStreamSSLValidatesCertificateChain, [NSNumber numberWithBool:NO], kCFStreamSSLValidatesCertificateChain,
[NSNull null], kCFStreamSSLPeerName, [NSNull null], kCFStreamSSLPeerName,
nil]; nil];
@ -567,6 +346,7 @@
self->httpStatusCode = 0; self->httpStatusCode = 0;
// Open // Open
if (!CFReadStreamOpen(stream)) if (!CFReadStreamOpen(stream))
{ {
CFRelease(stream); CFRelease(stream);
@ -600,9 +380,4 @@
return [NSString stringWithFormat:@"HTTP data source with file length: %lld and position: %lld", self.length, self.position]; return [NSString stringWithFormat:@"HTTP data source with file length: %lld and position: %lld", self.length, self.position];
} }
-(BOOL) supportsSeek
{
return self->supportsSeek;
}
@end @end

View File

@ -34,14 +34,10 @@
#import "STKCoreFoundationDataSource.h" #import "STKCoreFoundationDataSource.h"
NS_ASSUME_NONNULL_BEGIN
@interface STKLocalFileDataSource : STKCoreFoundationDataSource @interface STKLocalFileDataSource : STKCoreFoundationDataSource
+(AudioFileTypeID) audioFileTypeHintFromFileExtension:(NSString*)fileExtension; +(AudioFileTypeID) audioFileTypeHintFromFileExtension:(NSString*)fileExtension;
@property (readonly, copy) NSString* filePath; @property (readonly, copy) NSString* filePath;
-(instancetype) initWithFilePath:(NSString*)filePath; -(id) initWithFilePath:(NSString*)filePath;
@end @end
NS_ASSUME_NONNULL_END

View File

@ -47,13 +47,13 @@
@implementation STKLocalFileDataSource @implementation STKLocalFileDataSource
@synthesize filePath; @synthesize filePath;
-(instancetype) initWithFilePath:(NSString*)filePathIn -(id) initWithFilePath:(NSString*)filePathIn
{ {
if (self = [super init]) if (self = [super init])
{ {
self.filePath = filePathIn; self.filePath = filePathIn;
audioFileTypeHint = [[self class] audioFileTypeHintFromFileExtension:filePathIn.pathExtension]; audioFileTypeHint = [STKLocalFileDataSource audioFileTypeHintFromFileExtension:filePathIn.pathExtension];
} }
return self; return self;
@ -200,18 +200,6 @@
[self open]; [self open];
} }
if (stream == 0)
{
CFRunLoopPerformBlock(eventsRunLoop.getCFRunLoop, NSRunLoopCommonModes, ^
{
[self errorOccured];
});
CFRunLoopWakeUp(eventsRunLoop.getCFRunLoop);
return;
}
if (CFReadStreamSetProperty(stream, kCFStreamPropertyFileCurrentOffset, (__bridge CFTypeRef)[NSNumber numberWithLongLong:offset]) != TRUE) if (CFReadStreamSetProperty(stream, kCFStreamPropertyFileCurrentOffset, (__bridge CFTypeRef)[NSNumber numberWithLongLong:offset]) != TRUE)
{ {
position = 0; position = 0;

7
StreamingKit/StreamingKit/STKQueueEntry.h Executable file → Normal file
View File

@ -10,8 +10,6 @@
#import "libkern/OSAtomic.h" #import "libkern/OSAtomic.h"
#import "AudioToolbox/AudioToolbox.h" #import "AudioToolbox/AudioToolbox.h"
NS_ASSUME_NONNULL_BEGIN
@interface STKQueueEntry : NSObject @interface STKQueueEntry : NSObject
{ {
@public @public
@ -30,14 +28,13 @@ NS_ASSUME_NONNULL_BEGIN
volatile int processedPacketsCount; volatile int processedPacketsCount;
volatile int processedPacketsSizeTotal; volatile int processedPacketsSizeTotal;
AudioStreamBasicDescription audioStreamBasicDescription; AudioStreamBasicDescription audioStreamBasicDescription;
double durationHint;
} }
@property (readonly) UInt64 audioDataLengthInBytes; @property (readonly) UInt64 audioDataLengthInBytes;
@property (readwrite, retain) NSObject* queueItemId; @property (readwrite, retain) NSObject* queueItemId;
@property (readwrite, retain) STKDataSource* dataSource; @property (readwrite, retain) STKDataSource* dataSource;
-(instancetype) initWithDataSource:(STKDataSource*)dataSource andQueueItemId:(NSObject*)queueItemId; -(id) initWithDataSource:(STKDataSource*)dataSource andQueueItemId:(NSObject*)queueItemId;
-(void) reset; -(void) reset;
-(double) duration; -(double) duration;
@ -46,5 +43,3 @@ NS_ASSUME_NONNULL_BEGIN
-(BOOL) isDefinitelyCompatible:(AudioStreamBasicDescription*)basicDescription; -(BOOL) isDefinitelyCompatible:(AudioStreamBasicDescription*)basicDescription;
@end @end
NS_ASSUME_NONNULL_END

11
StreamingKit/StreamingKit/STKQueueEntry.m Executable file → Normal file
View File

@ -14,16 +14,13 @@
@implementation STKQueueEntry @implementation STKQueueEntry
-(instancetype) initWithDataSource:(STKDataSource*)dataSourceIn andQueueItemId:(NSObject*)queueItemIdIn -(id) initWithDataSource:(STKDataSource*)dataSourceIn andQueueItemId:(NSObject*)queueItemIdIn
{ {
if (self = [super init]) if (self = [super init])
{ {
self->spinLock = OS_SPINLOCK_INIT;
self.dataSource = dataSourceIn; self.dataSource = dataSourceIn;
self.queueItemId = queueItemIdIn; self.queueItemId = queueItemIdIn;
self->lastFrameQueued = -1; self->lastFrameQueued = -1;
self->durationHint = dataSourceIn.durationHint;
} }
return self; return self;
@ -46,7 +43,7 @@
{ {
if (processedPacketsCount > STK_BIT_RATE_ESTIMATION_MIN_PACKETS_PREFERRED || (audioStreamBasicDescription.mBytesPerFrame == 0 && processedPacketsCount > STK_BIT_RATE_ESTIMATION_MIN_PACKETS_MIN)) if (processedPacketsCount > STK_BIT_RATE_ESTIMATION_MIN_PACKETS_PREFERRED || (audioStreamBasicDescription.mBytesPerFrame == 0 && processedPacketsCount > STK_BIT_RATE_ESTIMATION_MIN_PACKETS_MIN))
{ {
double averagePacketByteSize = (double)processedPacketsSizeTotal / (double)processedPacketsCount; double averagePacketByteSize = processedPacketsSizeTotal / processedPacketsCount;
retval = averagePacketByteSize / packetDuration * 8; retval = averagePacketByteSize / packetDuration * 8;
@ -61,8 +58,6 @@
-(double) duration -(double) duration
{ {
if (durationHint > 0.0) return durationHint;
if (self->sampleRate <= 0) if (self->sampleRate <= 0)
{ {
return 0; return 0;
@ -110,7 +105,7 @@
-(Float64) progressInFrames -(Float64) progressInFrames
{ {
OSSpinLockLock(&self->spinLock); OSSpinLockLock(&self->spinLock);
Float64 retval = (self->seekTime * self->audioStreamBasicDescription.mSampleRate) + self->framesPlayed; Float64 retval = self->seekTime + self->framesPlayed;
OSSpinLockUnlock(&self->spinLock); OSSpinLockUnlock(&self->spinLock);
return retval; return retval;

View File

@ -7,7 +7,7 @@
<key>CFBundleExecutable</key> <key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string> <string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key> <key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string> <string>com.abstractpath.${PRODUCT_NAME:rfc1034identifier}</string>
<key>CFBundleInfoDictionaryVersion</key> <key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string> <string>6.0</string>
<key>CFBundlePackageType</key> <key>CFBundlePackageType</key>

View File

@ -7,7 +7,7 @@
<key>CFBundleExecutable</key> <key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string> <string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key> <key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string> <string>abstractpath.com.${PRODUCT_NAME:rfc1034identifier}</string>
<key>CFBundleInfoDictionaryVersion</key> <key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string> <string>6.0</string>
<key>CFBundlePackageType</key> <key>CFBundlePackageType</key>