Commit f02ca2ec cgx

引入FreeStreamer,完成播放进度条、播放按钮、单节播放、单曲循环、顺序播放功能

1 个父辈 45be73fe
正在显示 76 个修改的文件,包含 12370 行增加、21 行删除
......@@ -109,6 +109,7 @@
D09D0E96280D3FE9008DEDAB /* NSDate+Extras.m in Sources */ = {isa = PBXBuildFile; fileRef = D09D0E95280D3FE9008DEDAB /* NSDate+Extras.m */; };
D09D0E9A280D507F008DEDAB /* ProfileAlertView.m in Sources */ = {isa = PBXBuildFile; fileRef = D09D0E99280D507F008DEDAB /* ProfileAlertView.m */; };
D09D0E9D280D73B6008DEDAB /* InviteController.m in Sources */ = {isa = PBXBuildFile; fileRef = D09D0E9C280D73B6008DEDAB /* InviteController.m */; };
D0AE1E3528281B6F008CEF27 /* TimerProxy.m in Sources */ = {isa = PBXBuildFile; fileRef = D0AE1E3428281B6F008CEF27 /* TimerProxy.m */; };
D0AEFE79281781CF00230DC6 /* MyFeedModel.m in Sources */ = {isa = PBXBuildFile; fileRef = D0AEFE78281781CF00230DC6 /* MyFeedModel.m */; };
D0AEFE7C2817D13400230DC6 /* UITableViewCell+CardRadius.m in Sources */ = {isa = PBXBuildFile; fileRef = D0AEFE7A2817D13400230DC6 /* UITableViewCell+CardRadius.m */; };
D0AEFE812817DD1500230DC6 /* MyFeedCell.m in Sources */ = {isa = PBXBuildFile; fileRef = D0AEFE7E2817DD1500230DC6 /* MyFeedCell.m */; };
......@@ -369,6 +370,8 @@
D09D0E99280D507F008DEDAB /* ProfileAlertView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ProfileAlertView.m; sourceTree = "<group>"; };
D09D0E9B280D73B6008DEDAB /* InviteController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = InviteController.h; sourceTree = "<group>"; };
D09D0E9C280D73B6008DEDAB /* InviteController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = InviteController.m; sourceTree = "<group>"; };
D0AE1E3328281B6F008CEF27 /* TimerProxy.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = TimerProxy.h; sourceTree = "<group>"; };
D0AE1E3428281B6F008CEF27 /* TimerProxy.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = TimerProxy.m; sourceTree = "<group>"; };
D0AEFE77281781CF00230DC6 /* MyFeedModel.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MyFeedModel.h; sourceTree = "<group>"; };
D0AEFE78281781CF00230DC6 /* MyFeedModel.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MyFeedModel.m; sourceTree = "<group>"; };
D0AEFE7A2817D13400230DC6 /* UITableViewCell+CardRadius.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UITableViewCell+CardRadius.m"; sourceTree = "<group>"; };
......@@ -1063,6 +1066,8 @@
D0F9AC532826563400FD7A3B /* MusicPlayerController.m */,
D0F9AC5C282660CC00FD7A3B /* MusicPlayerView.h */,
D0F9AC5D282660CC00FD7A3B /* MusicPlayerView.m */,
D0AE1E3328281B6F008CEF27 /* TimerProxy.h */,
D0AE1E3428281B6F008CEF27 /* TimerProxy.m */,
);
path = Home;
sourceTree = "<group>";
......@@ -1543,6 +1548,7 @@
D0F808F52803D4E70097899F /* Track.m in Sources */,
D0E65FFC2807A654006562F2 /* NSSet+HYBUnicodeReadable.m in Sources */,
D0C50B3C27FD2EFD00DC68F0 /* PrivacyViewController.m in Sources */,
D0AE1E3528281B6F008CEF27 /* TimerProxy.m in Sources */,
D0FAC41D281B817D00D4B859 /* GKPhotoBrowser.m in Sources */,
D07A4B2A280EA6B600BA0EC0 /* UserInfoTableView.m in Sources */,
D0930F122801124E006B497A /* BaseNaviController.m in Sources */,
......
......@@ -110,9 +110,15 @@
} else {
// 跳转到播放页面
MusicPlayerController *playerVC = [[MusicPlayerController alloc] init];
playerVC.audioModel = model;
playerVC.subAudioArr = self.subAudioArr;
playerVC.currentIndex = indexPath.row;
// 筛选已经解锁的音频
NSMutableArray *tmpArr = [NSMutableArray array];
[self.subAudioArr enumerateObjectsUsingBlock:^(SubAudioModel * obj, NSUInteger idx, BOOL * _Nonnull stop) {
if (obj.is_lock == 0) {
[tmpArr addObject:obj];
}
}];
playerVC.playAudios = [tmpArr copy];
UINavigationController *naviVC = [[UINavigationController alloc] initWithRootViewController:playerVC];
[self presentViewController:naviVC animated:YES completion:nil];
}
......
......@@ -6,14 +6,14 @@
//
#import <UIKit/UIKit.h>
#import "SubAudioModel.h"
NS_ASSUME_NONNULL_BEGIN
/// Audio playback screen. Streams one audio item at a time from `playAudios`
/// via FreeStreamer and supports single-section / repeat-one / sequential modes.
@interface MusicPlayerController : UIViewController
/// NOTE(review): the commit diff removes this property; it appears here only
/// because the flattened diff keeps deleted lines. Kept for compatibility.
@property (nonatomic, strong) SubAudioModel *audioModel;
/// NOTE(review): also removed by the commit diff; superseded by `playAudios`.
/// `copy` (not `strong`) so a caller-supplied NSMutableArray cannot mutate it.
@property (nonatomic, copy) NSArray<SubAudioModel *> *subAudioArr;
/// Unlocked audio items the player iterates over.
@property (nonatomic, copy) NSArray<SubAudioModel *> *playAudios;
/// Index of the currently playing item within `playAudios`.
@property (nonatomic, assign) NSInteger currentIndex;
@end
......
......@@ -7,9 +7,18 @@
#import "MusicPlayerController.h"
#import "MusicPlayerView.h"
#import <FSAudioController.h>
#import "TimerProxy.h"
#import "SubAudioModel.h"
/// Private state for MusicPlayerController.
/// (The flattened diff showed two @interface lines — old and new — merged here
/// into the single post-commit declaration with the delegate conformance.)
@interface MusicPlayerController () <MusicPlayerViewDelegate>
/// The player UI (progress slider, play button, mode button).
@property (nonatomic, strong) MusicPlayerView *playerView;
/// FreeStreamer stream doing the actual audio playback.
@property (nonatomic, strong) FSAudioStream *audioStream;
/// Display link driving progress-bar updates.
@property (nonatomic, strong) CADisplayLink *progressTimer;
/// YES while the user is dragging the slider; progress-UI updates are
/// suppressed so the thumb does not fight the drag.
/// NOTE(review): name is misspelled ("isDragging") but referenced throughout
/// this file — not renamed to keep each change self-contained.
@property (nonatomic, assign) BOOL isDraging;
@end
@implementation MusicPlayerController
......@@ -21,7 +30,142 @@
/// Loads the first audio item, wires FreeStreamer state callbacks, and starts
/// the progress display link.
- (void)viewDidLoad {
    [super viewDidLoad];
    // Nothing to play (empty playlist / bad index) — leave the screen inert.
    // Signed compare guards a negative currentIndex as well.
    if (self.currentIndex < 0 || self.currentIndex >= (NSInteger)self.playAudios.count) { return; }
    SubAudioModel *currentAudioModel = self.playAudios[self.currentIndex];
    [self.playerView updatePlayerView:currentAudioModel];
    WS(weakSelf);
    [self.audioStream playFromURL:[NSURL URLWithString:currentAudioModel.audio_url]];
    // Weak capture: the block is stored by the stream, which self owns —
    // a strong capture would create a retain cycle.
    [self.audioStream setOnStateChange:^(FSAudioStreamState state) {
        // Keep the play/pause button in sync with the stream state.
        weakSelf.playerView.isPlaying = (state == kFsAudioStreamPlaying);
        switch (state) {
            case kFsAudioStreamRetrievingURL:
                [UIApplication sharedApplication].networkActivityIndicatorVisible = YES;
                DSLog(@"retrieving URL -- 检索文件");
                break;
            case kFsAudioStreamStopped:
                [UIApplication sharedApplication].networkActivityIndicatorVisible = NO;
                DSLog(@"kFsAudioStreamStopped --- 停止播放了 ");
                break;
            case kFsAudioStreamBuffering:
                [UIApplication sharedApplication].networkActivityIndicatorVisible = YES;
                DSLog(@"buffering --- 缓存中");
                break;
            case kFsAudioStreamPaused:
                DSLog(@"暂停了");
                break;
            case kFsAudioStreamSeeking:
                [UIApplication sharedApplication].networkActivityIndicatorVisible = YES;
                DSLog(@"kFsAudioStreamSeeking -- 快进 或者 快退");
                break;
            case kFsAudioStreamPlaying:
                [UIApplication sharedApplication].networkActivityIndicatorVisible = NO;
                DSLog(@"播放ing -- ");
                break;
            case kFsAudioStreamFailed:
                [UIApplication sharedApplication].networkActivityIndicatorVisible = NO;
                DSLog(@"音频文件加载失败");
                break;
            case kFsAudioStreamPlaybackCompleted:
                // Advance according to the selected play mode.
                [weakSelf audioStreamPlaybackCompleted];
                break;
            case kFsAudioStreamRetryingStarted:
                DSLog(@"回放失败");
                break;
            case kFsAudioStreamRetryingSucceeded:
                DSLog(@"重试成功");
                break;
            case kFsAudioStreamRetryingFailed:
                DSLog(@"Failed to retry playback -- 重试失败");
                break;
            default:
                break;
        }
    }];
    self.audioStream.onFailure = ^(FSAudioStreamError error, NSString *errorDescription) {
        // Cast to long so the %ld format specifier is correct on all architectures.
        DSLog(@"音频加载失败:%ld", (long)error);
    };
    // NSProxy-based weak target so the display link does not retain self
    // (a CADisplayLink strongly retains its target until invalidated).
    TimerProxy *proxy = [TimerProxy proxyWithTarget:self];
    self.progressTimer = [CADisplayLink displayLinkWithTarget:proxy selector:@selector(updateProgress)];
    [self.progressTimer addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}
/// Invalidate the display link so it is removed from the run loop.
/// TimerProxy already prevents a retain cycle, but the link still fires
/// until invalidated. Direct ivar access: property accessors should not
/// be messaged from dealloc.
- (void)dealloc {
    [_progressTimer invalidate];
    _progressTimer = nil;
}
/// Display-link callback: pushes the current playback position into the UI.
- (void)updateProgress {
    // Skip updates while the user is scrubbing (idiomatic BOOL test, not == YES).
    if (self.isDraging) return;
    FSStreamPosition cur = self.audioStream.currentTimePlayed;
    FSStreamPosition end = self.audioStream.duration;
    // Progress ratio (0-1), elapsed-time label, and total-duration label.
    [self.playerView updateProgress:cur.position
                        currentTime:[NSString stringWithFormat:@"%02i:%02i", cur.minute, cur.second]
                          totalTime:[NSString stringWithFormat:@"%02i:%02i", end.minute, end.second]];
}
/// Called when FreeStreamer reports playback completion; decides what to play
/// next according to the mode selected in the player view.
- (void)audioStreamPlaybackCompleted {
    DSLog(@"kFsAudioStreamPlaybackCompleted -- 回放完成");
    [self.progressTimer setPaused:YES];
    switch (self.playerView.mode) {
        case SoundPlayModeSingle: { // play through once, stop after the last item
            // Signed compare avoids the NSUInteger underflow of `count - 1`
            // when the playlist is empty.
            if (self.currentIndex + 1 < (NSInteger)self.playAudios.count) {
                self.currentIndex++;
                [self playAudioAtCurrentIndex];
            } else {
                // Last item: rewind to the start and reset the progress UI.
                FSStreamPosition pos = {0};
                [self.audioStream seekToPosition:pos];
                FSStreamPosition end = self.audioStream.duration;
                [self.playerView updateProgress:0
                                    currentTime:@"00:00"
                                      totalTime:[NSString stringWithFormat:@"%02i:%02i", end.minute, end.second]];
            }
            break;
        }
        case SoundPlayModeCycle: // repeat the same track
            [self.audioStream play];
            [self.progressTimer setPaused:NO];
            break;
        case SoundPlayModeOrder: { // sequential play, wrapping back to the first track
            if (self.currentIndex + 1 >= (NSInteger)self.playAudios.count) {
                self.currentIndex = 0;
            } else {
                self.currentIndex++;
            }
            [self playAudioAtCurrentIndex];
            break;
        }
        default:
            break;
    }
}

/// Loads playAudios[currentIndex] into the UI, starts streaming it, and
/// resumes the progress timer. Bounds-checked so an empty list is a no-op.
- (void)playAudioAtCurrentIndex {
    if (self.currentIndex < 0 || self.currentIndex >= (NSInteger)self.playAudios.count) { return; }
    SubAudioModel *audioModel = self.playAudios[self.currentIndex];
    [self.playerView updatePlayerView:audioModel];
    [self.audioStream playFromURL:[NSURL URLWithString:audioModel.audio_url]];
    [self.progressTimer setPaused:NO];
}
#pragma mark - MusicPlayerViewDelegate
/// MusicPlayerViewDelegate: slider drag began — suppress timer-driven
/// progress updates until the drag ends.
- (void)didSliderTouchBegan:(float)value {
self.isDraging = YES;
}
/// MusicPlayerViewDelegate: slider drag ended — resume progress updates and
/// seek the stream to the released position (0-1).
- (void)didSliderTouchEnded:(float)value {
    self.isDraging = NO;
    // Clamp away the exact endpoints: seeking to precisely 0.0 or 1.0 makes
    // the audio file fail to load. Range checks replace exact float equality,
    // which could miss out-of-range values.
    if (value <= 0) value = 0.001;
    if (value >= 1) value = 0.999;
    FSStreamPosition pos = {0};
    pos.position = value;
    [self.audioStream seekToPosition:pos];
}
/// MusicPlayerViewDelegate: slider value changed mid-drag — keep progress
/// updates suppressed (touch-began may be missed for tap-initiated changes).
- (void)didSliderValueChange:(float)value {
self.isDraging = YES;
}
/// MusicPlayerViewDelegate: play/pause button tapped.
/// FSAudioStream's -pause toggles: it pauses when playing and resumes when
/// paused, so a single call handles both states.
- (void)didClickPlayBtn {
// 播放和暂停是同一个方法 (play and pause share one method)
[self.audioStream pause];
}
#pragma mark - 隐藏导航栏
......@@ -38,8 +182,18 @@
/// Lazily creates the player view and registers self as its delegate.
/// Uses alloc/init rather than +new per Cocoa convention.
- (MusicPlayerView *)playerView {
    if (!_playerView) {
        _playerView = [[MusicPlayerView alloc] init];
        _playerView.delegate = self;
    }
    return _playerView;
}
/// Lazily builds the FreeStreamer stream used for playback.
- (FSAudioStream *)audioStream {
    if (!_audioStream) {
        FSAudioStream *stream = [[FSAudioStream alloc] init];
        // Servers may report loose MIME types for audio URLs; accept them and
        // fall back to MP3 when no content type is given.
        stream.strictContentTypeChecking = NO;
        stream.defaultContentType = @"audio/mpeg";
        _audioStream = stream;
    }
    return _audioStream;
}
@end
......@@ -16,11 +16,35 @@ typedef NS_ENUM(NSInteger, SoundPlayMode) {
NS_ASSUME_NONNULL_BEGIN
/// Callbacks from MusicPlayerView to its controller. `value` is the slider
/// position in the 0-1 range.
@protocol MusicPlayerViewDelegate <NSObject>
/// The user put a finger down on the progress slider.
- (void)didSliderTouchBegan:(float)value;
/// The user released the progress slider (or tapped the track).
- (void)didSliderTouchEnded:(float)value;
/// The slider value changed while dragging.
- (void)didSliderValueChange:(float)value;
/// The play/pause button was tapped.
- (void)didClickPlayBtn;
@end
/// Audio player view: artwork/title area, progress slider with time labels,
/// play button, and play-mode button.
@interface MusicPlayerView : UIView
@property (nonatomic, weak) id<MusicPlayerViewDelegate> delegate;
/// Playback state; setting it mirrors onto the play button's selected state.
@property (nonatomic, assign) BOOL isPlaying;
/// Current play mode (single / cycle / order).
@property (nonatomic, assign) SoundPlayMode mode;
/// Updates the title and image shown for the given audio item.
/// @param model the audio item to display
- (void)updatePlayerView:(SubAudioModel *)model;
/// Updates the progress slider and time labels.
/// @param progress playback fraction in the 0-1 range
/// @param currentTime formatted elapsed time (e.g. "01:23")
/// @param totalTime formatted total duration
- (void)updateProgress:(float)progress currentTime:(NSString *)currentTime totalTime:(NSString *)totalTime;
@end
NS_ASSUME_NONNULL_END
......@@ -122,6 +122,12 @@
self.audioNameLab.text = model.audio_name;
}
/// Pushes playback progress into the UI: slider value plus the two time labels.
- (void)updateProgress:(float)progress currentTime:(NSString *)currentTime totalTime:(NSString *)totalTime {
self.progressV.value = progress;
self.proLeftLb.text = currentTime;
self.proRightLb.text = totalTime;
}
- (void)circelBtnClick:(UIButton *)sender {
SoundPlayMode mode = sender.tag;
NSString *title = sender.titleLabel.text;
......@@ -148,15 +154,57 @@
[sender dk_setImage:DKImagePickerWithNames(normalImgName, dkImgName, normalImgName) forState:UIControlStateNormal];
}
#pragma mark - 滑块事件
/// UIControlEventValueChanged handler for the progress slider; forwards the
/// new value to the delegate while the user drags.
/// NOTE(review): selector name "progresssBtnClick" is misspelled but is
/// registered via addTarget: elsewhere — do not rename in isolation.
- (void)progresssBtnClick:(UISlider *)sender {
DSLog(@"progresssBtnClick:%f", sender.value);
if (self.delegate && [self.delegate respondsToSelector:@selector(didSliderValueChange:)]) {
[self.delegate didSliderValueChange:sender.value];
}
}
/// Touch-down on the slider thumb: disable the track tap recognizer so a drag
/// is not also interpreted as a tap, then tell the delegate the drag began.
- (void)sliderTouchDown:(UISlider *)sender {
DSLog(@"sliderTouchDown");
_tapGesture.enabled = NO;
if (self.delegate && [self.delegate respondsToSelector:@selector(didSliderTouchBegan:)]) {
[self.delegate didSliderTouchBegan:sender.value];
}
}
/// Touch-up inside the slider: re-enable the track tap recognizer and tell the
/// delegate the drag ended so it can seek the stream.
- (void)sliderTouchUpInSide:(UISlider *)sender {
DSLog(@"sliderTouchUpInSide");
_tapGesture.enabled = YES;
if (self.delegate && [self.delegate respondsToSelector:@selector(didSliderTouchEnded:)]) {
[self.delegate didSliderTouchEnded:sender.value];
}
}
/// Play/pause button tapped; the controller decides whether to play or pause.
/// The button's selected state is driven separately via setIsPlaying:.
- (void)playerBtnClick:(UIButton *)sender {
if (self.delegate && [self.delegate respondsToSelector:@selector(didClickPlayBtn)]) {
[self.delegate didClickPlayBtn];
}
}
#pragma mark - UIGestureRecognizerDelegate
/// Tap on the slider track: jump the thumb (and playback) to the tapped spot.
/// (The flattened diff contained both the old CGFloat and new float
/// declarations of `value` — a redefinition; merged into one.)
- (void)actionTapGesture:(UITapGestureRecognizer *)sender {
    CGPoint touchPoint = [sender locationInView:self.progressV];
    // Map the horizontal touch position into the slider's value range.
    float value = (self.progressV.maximumValue - self.progressV.minimumValue) * (touchPoint.x / self.progressV.width);
    [self.progressV setValue:value animated:YES];
    // Treat a tap like the end of a drag so the controller performs the seek.
    if (self.delegate && [self.delegate respondsToSelector:@selector(didSliderTouchEnded:)]) {
        [self.delegate didSliderTouchEnded:value];
    }
    DSLog(@"actionTapGestureactionTapGesture:%f", value);
}
#pragma mark - public
/// Custom setter: mirror the playback state onto the play button's selected
/// state (selected shows the pause artwork, normal shows the play artwork).
- (void)setIsPlaying:(BOOL)isPlaying {
_isPlaying = isPlaying;
self.playerBtn.selected = isPlaying;
}
/// Custom getter: the play mode is stored as the cycle button's tag
/// (updated in circelBtnClick:), so read it back from there.
- (SoundPlayMode)mode {
return self.circleBtn.tag;
}
#pragma mark - lazy
......@@ -195,6 +243,8 @@
_progressV.dk_maximumTrackTintColorPicker = DKColorPickerWithColors(ColorFromHex(0xE3E1E1), ColorFromHex(0x131724), DSWhite);
[_progressV setThumbImage:[UIImage imageNamed:@"muse_slider_thumbImage"] forState:UIControlStateNormal];
[_progressV addTarget:self action:@selector(progresssBtnClick:) forControlEvents:UIControlEventValueChanged];
[_progressV addTarget:self action:@selector(sliderTouchDown:) forControlEvents:UIControlEventTouchDown];
[_progressV addTarget:self action:@selector(sliderTouchUpInSide:) forControlEvents:UIControlEventTouchUpInside];
// 为UISlider添加点击事件
_tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(actionTapGesture:)];
......@@ -226,7 +276,8 @@
if (!_playerBtn) {
_playerBtn = [UIButton new];
[_playerBtn addTarget:self action:@selector(playerBtnClick:) forControlEvents:UIControlEventTouchUpInside];
[_playerBtn dk_setBackgroundImage:DKImagePickerWithNames(@"audio_play_icon", @"dk_audio_play_icon", DSWhite) forState:UIControlStateNormal];
[_playerBtn dk_setBackgroundImage:DKImagePickerWithNames(@"audio_play_icon", @"dk_audio_play_icon", @"dk_audio_play_icon") forState:UIControlStateNormal];
[_playerBtn dk_setBackgroundImage:DKImagePickerWithNames(@"audio_pause", @"dk_audio_pause", @"dk_audio_pause") forState:UIControlStateSelected];
}
return _playerBtn;
}
......
//
// TimerProxy.h
// DreamSleep
//
// Created by peter on 2022/5/8.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// NSProxy-based weak trampoline for NSTimer / CADisplayLink targets.
/// The timer retains the proxy; the proxy holds the real target weakly, so
/// the timer no longer keeps the target (e.g. a view controller) alive.
@interface TimerProxy : NSProxy
/// Returns a proxy that forwards every message to `target` (held weakly).
+ (instancetype)proxyWithTarget:(id)target;
@end
NS_ASSUME_NONNULL_END
//
// TimerProxy.m
// DreamSleep
//
// Created by peter on 2022/5/8.
//
#import "TimerProxy.h"
/// Class extension: the weakly-held real target of the forwarded messages.
@interface TimerProxy ()
@property (nonatomic, weak) id target;
@end

@implementation TimerProxy

/// NSProxy has no -init; configure the freshly allocated instance directly.
/// Uses [self alloc] (not a hard-coded class) so the instancetype contract
/// holds for subclasses.
+ (instancetype)proxyWithTarget:(id)target {
    TimerProxy *proxy = [self alloc];
    proxy.target = target;
    return proxy;
}

/// NSProxy funnels every message here to obtain a signature before forwarding.
- (nullable NSMethodSignature *)methodSignatureForSelector:(SEL)sel {
    NSMethodSignature *signature = [self.target methodSignatureForSelector:sel];
    if (!signature) {
        // Target already deallocated (weak reference is nil): return a harmless
        // void signature so the runtime does not raise "unrecognized selector"
        // for a still-firing timer.
        signature = [NSMethodSignature signatureWithObjCTypes:"v@:"];
    }
    return signature;
}

/// Forward the captured invocation to the weak target (a no-op once it is nil).
- (void)forwardInvocation:(NSInvocation *)invocation {
    [invocation invokeWithTarget:self.target];
}

@end
......@@ -12,6 +12,7 @@ target 'DreamSleep' do
pod 'YYWebImage', '~> 1.0.5'
pod 'YYImage/WebP'
pod 'YYModel', '~> 1.0.4'
pod 'FreeStreamer', '~> 4.0.0'
end
# AFNetworking (4.0.1)
......@@ -26,3 +27,4 @@ end
# YYImage/WebP(模拟器上目前无法运行)
# YYModel (1.0.4)
# SDWebImage (5.12.5)(去掉)
# FreeStreamer(4.0.0)
......@@ -20,10 +20,13 @@ PODS:
- DKNightVersion/UIKit (2.4.3):
- DKNightVersion/Core
- DOUAudioStreamer (0.2.16)
- FreeStreamer (4.0.0):
- Reachability (~> 3.0)
- lottie-ios (2.5.3)
- Masonry (1.1.0)
- MBProgressHUD (1.2.0)
- MJRefresh (3.7.5)
- Reachability (3.2)
- YTKNetwork (3.0.6):
- AFNetworking/NSURLSession (~> 4.0)
- YYCache (1.0.4)
......@@ -40,6 +43,7 @@ PODS:
DEPENDENCIES:
- DKNightVersion (~> 2.4.3)
- DOUAudioStreamer (~> 0.2.16)
- FreeStreamer (~> 4.0.0)
- lottie-ios (~> 2.5.3)
- Masonry (~> 1.1.0)
- MBProgressHUD (~> 1.2.0)
......@@ -54,10 +58,12 @@ SPEC REPOS:
- AFNetworking
- DKNightVersion
- DOUAudioStreamer
- FreeStreamer
- lottie-ios
- Masonry
- MBProgressHUD
- MJRefresh
- Reachability
- YTKNetwork
- YYCache
- YYImage
......@@ -68,16 +74,18 @@ SPEC CHECKSUMS:
AFNetworking: 7864c38297c79aaca1500c33288e429c3451fdce
DKNightVersion: eaa80cc4014b4bae7d4b535fd87ecc6a3c2767b3
DOUAudioStreamer: c503ba2ecb9a54ff7bda0eff66963ad224f3c7dc
FreeStreamer: 7e9c976045701ac2f7e9c14c17245203c37bf2ea
lottie-ios: a50d5c0160425cd4b01b852bb9578963e6d92d31
Masonry: 678fab65091a9290e40e2832a55e7ab731aad201
MBProgressHUD: 3ee5efcc380f6a79a7cc9b363dd669c5e1ae7406
MJRefresh: fdf5e979eb406a0341468932d1dfc8b7f9fce961
Reachability: 33e18b67625424e47b6cde6d202dce689ad7af96
YTKNetwork: c16be90b06be003de9e9cd0d3b187cc8eaf35c04
YYCache: 8105b6638f5e849296c71f331ff83891a4942952
YYImage: 1e1b62a9997399593e4b9c4ecfbbabbf1d3f3b54
YYModel: 2a7fdd96aaa4b86a824e26d0c517de8928c04b30
YYWebImage: 5f7f36aee2ae293f016d418c7d6ba05c4863e928
PODFILE CHECKSUM: 5f273d0f03f58db41d7f0a6d3d96a8bd054ab744
PODFILE CHECKSUM: d78d9f7fd55a2a7be3fae24d212bdd5eab78666c
COCOAPODS: 1.11.3
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import <Foundation/Foundation.h>
#include "FSAudioStream.h"
@class FSCheckContentTypeRequest;
@class FSParsePlaylistRequest;
@class FSParseRssPodcastFeedRequest;
@class FSPlaylistItem;
@protocol FSAudioControllerDelegate;
/**
* FSAudioController is functionally equivalent to FSAudioStream with
* one addition: it can be directly fed with a playlist (PLS, M3U) URL
* or an RSS podcast feed. It determines the content type and forms
* a playlist for playback. Notice that this generates more traffic and
* is generally more slower than using an FSAudioStream directly.
*
* It is also possible to construct a playlist by yourself by providing
* the playlist items. In this case see the methods for managing the playlist.
*
* If you have a playlist with multiple items, FSAudioController attemps
* automatically preload the next item in the playlist. This helps to
* start the next item playback immediately without the need for the
* user to wait for buffering.
*
* Notice that do not attempt to set your own blocks to the audio stream
* owned by the controller. FSAudioController uses the blocks internally
* and any user set blocks will be overwritten. Instead use the blocks
* offered by FSAudioController.
*/
@interface FSAudioController : NSObject {
NSURL *_url;
NSMutableArray *_streams;
float _volume;
BOOL _readyToPlay;
FSCheckContentTypeRequest *_checkContentTypeRequest;
FSParsePlaylistRequest *_parsePlaylistRequest;
FSParseRssPodcastFeedRequest *_parseRssPodcastFeedRequest;
void (^_onStateChangeBlock)(FSAudioStreamState);
void (^_onMetaDataAvailableBlock)(NSDictionary*);
void (^_onFailureBlock)(FSAudioStreamError error, NSString *errorDescription);
}
/**
* Initializes the audio stream with an URL.
*
* @param url The URL from which the stream data is retrieved.
*/
- (id)initWithUrl:(NSURL *)url;
/**
* Starts playing the stream. Before the playback starts,
* the URL content type is checked and playlists resolved.
*/
- (void)play;
/**
* Starts playing the stream from an URL. Before the playback starts,
* the URL content type is checked and playlists resolved.
*
* @param url The URL from which the stream data is retrieved.
*/
- (void)playFromURL:(NSURL *)url;
/**
* Starts playing the stream from the given playlist. Each item in the array
* must an FSPlaylistItem.
*
* @param playlist The playlist items.
*/
- (void)playFromPlaylist:(NSArray *)playlist;
/**
* Starts playing the stream from the given playlist. Each item in the array
* must an FSPlaylistItem. The playback starts from the given index
* in the playlist.
*
* @param playlist The playlist items.
* @param index The playlist index where to start playback from.
*/
- (void)playFromPlaylist:(NSArray *)playlist itemIndex:(NSUInteger)index;
/**
* Plays a playlist item at the specified index.
*
* @param index The playlist index where to start playback from.
*/
- (void)playItemAtIndex:(NSUInteger)index;
/**
* Returns the count of playlist items.
*/
- (NSUInteger)countOfItems;
/**
* Adds an item to the playlist.
*
* @param item The playlist item to be added.
*/
- (void)addItem:(FSPlaylistItem *)item;
/**
* Adds an item to the playlist at a specific position.
*
* @param item The playlist item to be added.
* @param index The location in the playlist to place the new item
*/
- (void)insertItem:(FSPlaylistItem *)item atIndex:(NSInteger)index;
/**
* Moves an item already in the playlist to a different position in the playlist
*
* @param from The original index of the track to move
* @param to The destination of the the track at the index specified in `from`
*/
- (void)moveItemAtIndex:(NSUInteger)from toIndex:(NSUInteger)to;
/**
* Replaces a playlist item.
*
* @param index The index of the playlist item to be replaced.
* @param item The playlist item used the replace the existing one.
*/
- (void)replaceItemAtIndex:(NSUInteger)index withItem:(FSPlaylistItem *)item;
/**
* Removes a playlist item.
*
* @param index The index of the playlist item to be removed.
*/
- (void)removeItemAtIndex:(NSUInteger)index;
/**
* Stops the stream playback.
*/
- (void)stop;
/**
* If the stream is playing, the stream playback is paused upon calling pause.
* Otherwise (the stream is paused), calling pause will continue the playback.
*/
- (void)pause;
/**
* Returns the playback status: YES if the stream is playing, NO otherwise.
*/
- (BOOL)isPlaying;
/**
* Returns if the current multiple-item playlist has next item
*/
- (BOOL)hasNextItem;
/**
* Returns if the current multiple-item playlist has Previous item
*/
- (BOOL)hasPreviousItem;
/**
* Play the next item of multiple-item playlist
*/
- (void)playNextItem;
/**
* Play the previous item of multiple-item playlist
*/
- (void)playPreviousItem;
/**
* This property holds the current playback volume of the stream,
* from 0.0 to 1.0.
*
* Note that the overall volume is still constrained by the volume
* set by the user! So the actual volume cannot be higher
* than the volume currently set by the user. For example, if
* requesting a volume of 0.5, then the volume will be 50%
* lower than the current playback volume set by the user.
*/
@property (nonatomic,assign) float volume;
/**
* The controller URL.
*/
@property (nonatomic,assign) NSURL *url;
/**
* The the active playing stream, which may change
* from time to time during the playback. In this way, do not
* set your own blocks to the stream but use the blocks
* provides by FSAudioController.
*/
@property (readonly) FSAudioStream *activeStream;
/**
* The playlist item the controller is currently using.
*/
@property (nonatomic,readonly) FSPlaylistItem *currentPlaylistItem;
/**
* This property determines if the next playlist item should be loaded
* automatically. This is YES by default.
*/
@property (nonatomic,assign) BOOL preloadNextPlaylistItemAutomatically;
/**
* This property determines if the debug output is enabled. Disabled
* by default
*/
@property (nonatomic,assign) BOOL enableDebugOutput;
/**
* This property determines if automatic audio session handling is enabled.
* This is YES by default.
*/
@property (nonatomic,assign) BOOL automaticAudioSessionHandlingEnabled;
/**
* This property holds the configuration used for the streaming.
*/
@property (nonatomic,strong) FSStreamConfiguration *configuration;
/**
* Called upon a state change.
*/
@property (copy) void (^onStateChange)(FSAudioStreamState state);
/**
* Called upon a meta data is available.
*/
@property (copy) void (^onMetaDataAvailable)(NSDictionary *metadata);
/**
* Called upon a failure.
*/
@property (copy) void (^onFailure)(FSAudioStreamError error, NSString *errorDescription);
/**
* Delegate.
*/
@property (nonatomic,unsafe_unretained) IBOutlet id<FSAudioControllerDelegate> delegate;
@end
/**
* To check the preloading status, use this delegate.
*/
@protocol FSAudioControllerDelegate <NSObject>
@optional
/**
* Called when the controller wants to start preloading an item. Return YES or NO
* depending if you want this item to be preloaded.
*
* @param audioController The audio controller which is doing the preloading.
* @param stream The stream which is wanted to be preloaded.
*/
- (BOOL)audioController:(FSAudioController *)audioController allowPreloadingForStream:(FSAudioStream *)stream;
/**
* Called when the controller starts to preload an item.
*
* @param audioController The audio controller which is doing the preloading.
* @param stream The stream which is preloaded.
*/
- (void)audioController:(FSAudioController *)audioController preloadStartedForStream:(FSAudioStream *)stream;
@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import "FSAudioController.h"
#import "FSPlaylistItem.h"
#import "FSCheckContentTypeRequest.h"
#import "FSParsePlaylistRequest.h"
#import "FSParseRssPodcastFeedRequest.h"
#import <AVFoundation/AVFoundation.h>
/**
* Private interface for FSAudioController.
*/
@interface FSAudioController ()
- (void)notifyRetrievingURL;
@property (readonly) FSAudioStream *audioStream;
@property (readonly) FSCheckContentTypeRequest *checkContentTypeRequest;
@property (readonly) FSParsePlaylistRequest *parsePlaylistRequest;
@property (readonly) FSParseRssPodcastFeedRequest *parseRssPodcastFeedRequest;
@property (nonatomic,assign) BOOL readyToPlay;
@property (nonatomic,assign) NSUInteger currentPlaylistItemIndex;
@property (nonatomic,strong) NSMutableArray *playlistItems;
@property (nonatomic,strong) NSMutableArray *streams;
@property (nonatomic,assign) BOOL needToSetVolume;
@property (nonatomic,assign) BOOL songSwitchInProgress;
@property (nonatomic,assign) float outputVolume;
- (void)audioStreamStateDidChange:(NSNotification *)notification;
- (void)deactivateInactivateStreams:(NSUInteger)currentActiveStream;
- (void)setAudioSessionActive:(BOOL)active;
@end
/**
* Acts as a proxy object for FSAudioStream. Lazily initializes
* the stream when it is needed.
*
* A call to deactivate releases the stream.
*/
@interface FSAudioStreamProxy : NSObject {
FSAudioStream *_audioStream;
}
@property (readonly) FSAudioStream *audioStream;
@property (nonatomic,copy) NSURL *url;
@property (nonatomic,weak) FSAudioController *audioController;
- (void)deactivate;
@end
/*
* =======================================
* FSAudioStreamProxy implementation.
* =======================================
*/
@implementation FSAudioStreamProxy
- (id)init
{
if (self = [super init]) {
}
return self;
}
- (id)initWithAudioController:(FSAudioController *)controller
{
if (self = [self init]) {
self.audioController = controller;
}
return self;
}
- (void)dealloc
{
if (self.audioController.enableDebugOutput) {
NSLog(@"[FSAudioController.m:%i] FSAudioStreamProxy.dealloc: %@", __LINE__, self.url);
}
[self deactivate];
}
- (FSAudioStream *)audioStream
{
if (!_audioStream) {
FSStreamConfiguration *conf;
if (self.audioController.configuration) {
conf = self.audioController.configuration;
} else {
conf = [[FSStreamConfiguration alloc] init];
}
// Disable audio session handling for the audio stream; audio controller handles it
conf.automaticAudioSessionHandlingEnabled = NO;
_audioStream = [[FSAudioStream alloc] initWithConfiguration:conf];
if (self.audioController.needToSetVolume) {
_audioStream.volume = self.audioController.outputVolume;
}
if (self.url) {
_audioStream.url = self.url;
}
}
return _audioStream;
}
- (void)deactivate
{
[_audioStream stop];
_audioStream = nil;
}
@end
/*
* =======================================
* FSAudioController implementation
* =======================================
*/
@implementation FSAudioController
-(id)init
{
if (self = [super init]) {
_url = nil;
_checkContentTypeRequest = nil;
_parsePlaylistRequest = nil;
_readyToPlay = NO;
_playlistItems = [[NSMutableArray alloc] init];
_streams = [[NSMutableArray alloc] init];
self.preloadNextPlaylistItemAutomatically = YES;
self.enableDebugOutput = NO;
self.automaticAudioSessionHandlingEnabled = YES;
self.configuration = [[FSStreamConfiguration alloc] init];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(audioStreamStateDidChange:)
name:FSAudioStreamStateChangeNotification
object:nil];
}
return self;
}
- (id)initWithUrl:(NSURL *)url
{
if (self = [self init]) {
self.url = url;
}
return self;
}
- (void)dealloc
{
[[NSNotificationCenter defaultCenter] removeObserver:self];
[_checkContentTypeRequest cancel];
[_parsePlaylistRequest cancel];
[_parseRssPodcastFeedRequest cancel];
for (FSAudioStreamProxy *proxy in _streams) {
if (self.enableDebugOutput) {
NSLog(@"[FSAudioController.m:%i] dealloc. Deactivating stream %@", __LINE__, proxy.url);
}
[proxy deactivate];
}
[self setAudioSessionActive:NO];
}
- (void)audioStreamStateDidChange:(NSNotification *)notification
{
if (notification.object == self) {
// URL retrieving notification from ourselves, ignore
return;
}
if (!(notification.object == self.audioStream)) {
// This doesn't concern us, return
return;
}
NSDictionary *dict = [notification userInfo];
int state = [[dict valueForKey:FSAudioStreamNotificationKey_State] intValue];
if (state == kFSAudioStreamEndOfFile) {
if (self.enableDebugOutput) {
NSLog(@"[FSAudioController.m:%i] EOF reached for %@", __LINE__, self.audioStream.url);
}
if (!self.preloadNextPlaylistItemAutomatically) {
// No preloading wanted, skip
if (self.enableDebugOutput) {
NSLog(@"[FSAudioController.m:%i] Preloading disabled, return.", __LINE__);
}
return;
}
// Reached EOF for this stream, do we have another item waiting in the playlist?
if ([self hasNextItem]) {
FSAudioStreamProxy *proxy = [_streams objectAtIndex:self.currentPlaylistItemIndex + 1];
FSAudioStream *nextStream = proxy.audioStream;
if (self.enableDebugOutput) {
NSLog(@"[FSAudioController.m:%i] Preloading %@", __LINE__, nextStream.url);
}
if ([self.delegate respondsToSelector:@selector(audioController:allowPreloadingForStream:)]) {
if ([self.delegate audioController:self allowPreloadingForStream:nextStream]) {
[nextStream preload];
} else {
if (self.enableDebugOutput) {
NSLog(@"[FSAudioController.m:%i] Preloading disallowed for stream %@", __LINE__, nextStream.url);
}
}
} else {
// Start preloading the next stream; we can load this as there is no override
[nextStream preload];
}
if ([self.delegate respondsToSelector:@selector(audioController:preloadStartedForStream:)]) {
[self.delegate audioController:self preloadStartedForStream:nextStream];
}
}
} else if (state == kFsAudioStreamStopped && !self.songSwitchInProgress) {
if (self.enableDebugOutput) {
NSLog(@"Stream %@ stopped. No next playlist items. Deactivating audio session", self.audioStream.url);
}
[self setAudioSessionActive:NO];
} else if (state == kFsAudioStreamPlaybackCompleted && [self hasNextItem]) {
self.currentPlaylistItemIndex = self.currentPlaylistItemIndex + 1;
self.songSwitchInProgress = YES;
[self play];
} else if (state == kFsAudioStreamFailed) {
if (self.enableDebugOutput) {
NSLog(@"Stream %@ failed. Deactivating audio session", self.audioStream.url);
}
[self setAudioSessionActive:NO];
} else if (state == kFsAudioStreamBuffering) {
if (self.enableDebugOutput) {
NSLog(@"Stream buffering. Activating audio session");
}
self.songSwitchInProgress = NO;
if (self.automaticAudioSessionHandlingEnabled) {
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
#endif
}
[self setAudioSessionActive:YES];
} else if (state == kFsAudioStreamPlaying) {
self.currentPlaylistItem.audioDataByteCount = self.activeStream.audioDataByteCount;
}
}
- (void)deactivateInactivateStreams:(NSUInteger)currentActiveStream
{
NSUInteger streamIndex = 0;
for (FSAudioStreamProxy *proxy in _streams) {
if (streamIndex != currentActiveStream) {
if (self.enableDebugOutput) {
NSLog(@"[FSAudioController.m:%i] Deactivating stream %@", __LINE__, proxy.url);
}
[proxy deactivate];
}
streamIndex++;
}
}
/*
 * Activates or deactivates the shared audio session, when automatic
 * audio session handling is enabled.
 *
 * On iOS 6+ deactivation notifies other apps so they may resume their
 * audio; on iOS 4/5 the plain setActive: variant is used instead.
 *
 * @param active YES to activate the session, NO to deactivate it.
 */
- (void)setAudioSessionActive:(BOOL)active
{
    if (self.automaticAudioSessionHandlingEnabled) {
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
        [[AVAudioSession sharedInstance] setActive:active withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:nil];
#else
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
        [[AVAudioSession sharedInstance] setActive:active error:nil];
#endif
#endif
    }
}
/*
* =======================================
* Properties
* =======================================
*/
/*
 * Returns the FSAudioStream for the current playlist item, lazily
 * creating a single stream proxy when none exists yet.
 *
 * NOTE(review): when _streams is non-empty, this indexes with
 * currentPlaylistItemIndex without a bounds check; an index past the end
 * would raise NSRangeException — confirm callers keep the index valid.
 */
- (FSAudioStream *)audioStream
{
    FSAudioStream *stream = nil;
    if ([_streams count] == 0) {
        if (self.enableDebugOutput) {
            NSLog(@"[FSAudioController.m:%i] Stream count %lu, creating a proxy object", __LINE__, (unsigned long)[_streams count]);
        }
        // No proxies yet: create one so there is always a stream to return.
        FSAudioStreamProxy *proxy = [[FSAudioStreamProxy alloc] initWithAudioController:self];
        [_streams addObject:proxy];
    }
    FSAudioStreamProxy *proxy = [_streams objectAtIndex:self.currentPlaylistItemIndex];
    stream = proxy.audioStream;
    return stream;
}
/*
 * Lazily creates the request that checks the content type of self.url.
 *
 * On completion the result decides how playback proceeds: a playlist URL
 * is parsed, an XML document is treated as a potential RSS feed, and any
 * other content is played directly. On failure playback is attempted anyway.
 */
- (FSCheckContentTypeRequest *)checkContentTypeRequest
{
    if (!_checkContentTypeRequest) {
        // Weak reference avoids a retain cycle between self and the blocks below.
        __weak FSAudioController *weakSelf = self;
        _checkContentTypeRequest = [[FSCheckContentTypeRequest alloc] init];
        _checkContentTypeRequest.url = self.url;
        _checkContentTypeRequest.onCompletion = ^() {
            if (weakSelf.checkContentTypeRequest.playlist) {
                // The URL is a playlist; retrieve the contents
                [weakSelf.parsePlaylistRequest start];
            } else if (weakSelf.checkContentTypeRequest.xml) {
                // The URL may be an RSS feed, check the contents
                [weakSelf.parseRssPodcastFeedRequest start];
            } else {
                // Not a playlist; try directly playing the URL
                weakSelf.readyToPlay = YES;
                [weakSelf play];
            }
        };
        _checkContentTypeRequest.onFailure = ^() {
            // Failed to check the format; try playing anyway
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
            NSLog(@"FSAudioController: Failed to check the format, trying to play anyway, URL: %@", weakSelf.audioStream.url);
#endif
            weakSelf.readyToPlay = YES;
            [weakSelf play];
        };
    }
    return _checkContentTypeRequest;
}
/*
 * Lazily creates the playlist parsing request. On success the parsed
 * items are played as a playlist; on failure the URL is played directly
 * as a best effort.
 */
- (FSParsePlaylistRequest *)parsePlaylistRequest
{
    if (_parsePlaylistRequest) {
        return _parsePlaylistRequest;
    }
    // Weak reference avoids a retain cycle between self and the blocks below.
    __weak FSAudioController *selfRef = self;
    FSParsePlaylistRequest *request = [[FSParsePlaylistRequest alloc] init];
    request.onCompletion = ^() {
        [selfRef playFromPlaylist:selfRef.parsePlaylistRequest.playlistItems];
    };
    request.onFailure = ^() {
        // Failed to parse the playlist; try playing anyway
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
        NSLog(@"FSAudioController: Playlist parsing failed, trying to play anyway, URL: %@", selfRef.audioStream.url);
#endif
        selfRef.readyToPlay = YES;
        [selfRef play];
    };
    _parsePlaylistRequest = request;
    return _parsePlaylistRequest;
}
/*
 * Lazily creates the RSS podcast feed parsing request. On success the
 * parsed items are played as a playlist; on failure the URL is played
 * directly as a best effort.
 */
- (FSParseRssPodcastFeedRequest *)parseRssPodcastFeedRequest
{
    if (!_parseRssPodcastFeedRequest) {
        // Weak reference avoids a retain cycle between self and the blocks below.
        __weak FSAudioController *weakSelf = self;
        _parseRssPodcastFeedRequest = [[FSParseRssPodcastFeedRequest alloc] init];
        _parseRssPodcastFeedRequest.onCompletion = ^() {
            [weakSelf playFromPlaylist:weakSelf.parseRssPodcastFeedRequest.playlistItems];
        };
        _parseRssPodcastFeedRequest.onFailure = ^() {
            // Failed to parse the XML file; try playing anyway
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
            NSLog(@"FSAudioController: Failed to parse the RSS feed, trying to play anyway, URL: %@", weakSelf.audioStream.url);
#endif
            weakSelf.readyToPlay = YES;
            [weakSelf play];
        };
    }
    return _parseRssPodcastFeedRequest;
}
/*
 * Reports the retrieving-URL state through the onStateChange callback.
 * Invoked via a timer from the play method.
 */
- (void)notifyRetrievingURL
{
    void (^stateCallback)(FSAudioStreamState) = self.onStateChange;
    if (stateCallback) {
        stateCallback(kFsAudioStreamRetrievingURL);
    }
}
/*
 * YES when the currently active stream is playing, NO otherwise.
 */
- (BOOL)isPlaying
{
    return [[self audioStream] isPlaying];
}
/*
* =======================================
* Public interface
* =======================================
*/
/*
 * Starts playback.
 *
 * If the URL's content type has not been determined yet, starts the
 * content type request instead and reports the retrieving-URL state;
 * actual playback is then started from the request callbacks. Otherwise
 * resolves the stream URL from the current playlist item (or self.url)
 * and plays it.
 */
- (void)play
{
    if (!self.readyToPlay) {
        /*
         * Not ready to play; start by checking the content type of the given
         * URL.
         */
        [self.checkContentTypeRequest start];
        NSDictionary *userInfo = @{FSAudioStreamNotificationKey_State: @(kFsAudioStreamRetrievingURL)};
        NSNotification *notification = [NSNotification notificationWithName:FSAudioStreamStateChangeNotification object:self userInfo:userInfo];
        [[NSNotificationCenter defaultCenter] postNotification:notification];
        // Zero-delay timer defers the callback to the next run loop pass
        // (presumably so a handler assigned right after calling play still
        // receives the state — confirm).
        [NSTimer scheduledTimerWithTimeInterval:0
                                         target:self
                                       selector:@selector(notifyRetrievingURL)
                                       userInfo:nil
                                        repeats:NO];
        return;
    }
    if ([self.playlistItems count] > 0) {
        // Prefer the originating URL of the playlist item, if one is set.
        if (self.currentPlaylistItem.originatingUrl) {
            self.audioStream.url = self.currentPlaylistItem.originatingUrl;
        } else {
            self.audioStream.url = self.currentPlaylistItem.url;
        }
    } else {
        self.audioStream.url = self.url;
    }
    // Propagate the controller-level callbacks to the stream.
    if (self.onStateChange) {
        self.audioStream.onStateChange = self.onStateChange;
    }
    if (self.onMetaDataAvailable) {
        self.audioStream.onMetaDataAvailable = self.onMetaDataAvailable;
    }
    if (self.onFailure) {
        self.audioStream.onFailure = self.onFailure;
    }
    FSAudioStream *stream = self.audioStream;
    if (self.enableDebugOutput) {
        NSLog(@"Playing %@", stream);
    }
    [stream play];
}
/*
 * Clears the current playlist and plays the single given URL.
 * Does nothing when url is nil.
 *
 * @param url The URL to play.
 */
- (void)playFromURL:(NSURL*)url
{
    if (!url) {
        return;
    }
    [_playlistItems removeAllObjects];
    [self stop];
    // The url setter also stops playback, cancels pending requests and
    // marks local file URLs as immediately playable.
    self.url = url;
    [self play];
}
/*
 * Convenience wrapper: replaces the playlist with the given items and
 * starts playback from the first one.
 *
 * @param playlist The FSPlaylistItem objects to play.
 */
- (void)playFromPlaylist:(NSArray *)playlist
{
    [self playFromPlaylist:playlist itemIndex:0];
}
/*
 * Replaces the whole playback queue with the given items and starts
 * playing the item at the given index. A stream proxy is created for
 * each playlist item.
 *
 * @param playlist The FSPlaylistItem objects to play.
 * @param index    Index of the item to start with.
 */
- (void)playFromPlaylist:(NSArray *)playlist itemIndex:(NSUInteger)index
{
    [self stop];

    self.playlistItems = [[NSMutableArray alloc] init];
    _streams = [[NSMutableArray alloc] init];
    self.currentPlaylistItemIndex = 0;

    [self.playlistItems addObjectsFromArray:playlist];

    for (FSPlaylistItem *playlistItem in playlist) {
        FSAudioStreamProxy *streamProxy = [[FSAudioStreamProxy alloc] initWithAudioController:self];
        streamProxy.url = playlistItem.url;

        if (self.enableDebugOutput) {
            NSLog(@"[FSAudioController.m:%i] playFromPlaylist. Adding stream proxy for %@", __LINE__, streamProxy.url);
        }

        [_streams addObject:streamProxy];
    }

    [self playItemAtIndex:index];
}
/*
 * Starts playing the playlist item at the given index. Out-of-range
 * requests are silently ignored.
 *
 * @param index Index of the item to play.
 */
- (void)playItemAtIndex:(NSUInteger)index
{
    NSUInteger itemCount = [self countOfItems];
    if (itemCount == 0 || index >= itemCount) {
        return;
    }

    [self.audioStream stop];

    self.currentPlaylistItemIndex = index;
    self.readyToPlay = YES;

    [self deactivateInactivateStreams:index];
    [self play];
}
/*
 * Number of items currently in the playlist.
 */
- (NSUInteger)countOfItems
{
    return self.playlistItems.count;
}
/*
 * Appends an item to the playlist together with a matching stream proxy.
 * A nil item is ignored.
 *
 * @param item The playlist item to append.
 */
- (void)addItem:(FSPlaylistItem *)item
{
    if (item == nil) {
        return;
    }

    [self.playlistItems addObject:item];

    FSAudioStreamProxy *streamProxy = [[FSAudioStreamProxy alloc] initWithAudioController:self];
    streamProxy.url = item.url;

    if (self.enableDebugOutput) {
        NSLog(@"[FSAudioController.m:%i] addItem. Adding stream proxy for %@", __LINE__, streamProxy.url);
    }

    [_streams addObject:streamProxy];
}
/*
 * Inserts an item (and a matching stream proxy) at the given index.
 *
 * Does nothing for a nil item or an index beyond the end. When inserting
 * at or before the currently playing item, the current index is shifted
 * by one so the same item keeps playing.
 *
 * NOTE(review): index is NSInteger while playlistItems.count is
 * NSUInteger; a negative index is rejected by the unsigned comparison
 * below, but the signedness mismatch is worth confirming.
 *
 * @param item  The playlist item to insert.
 * @param index The position to insert at.
 */
- (void)insertItem:(FSPlaylistItem *)item atIndex:(NSInteger)index
{
    if (!item) {
        return;
    }
    if (index > self.playlistItems.count) {
        return;
    }
    // Inserting into an empty playlist at index 0 is a plain append.
    if(self.playlistItems.count == 0 && index == 0) {
        [self addItem:item];
        return;
    }
    [self.playlistItems insertObject:item
                             atIndex:index];
    FSAudioStreamProxy *proxy = [[FSAudioStreamProxy alloc] initWithAudioController:self];
    proxy.url = item.url;
    [_streams insertObject:proxy
                   atIndex:index];
    // Keep the current index pointing at the item that is playing.
    if(index <= self.currentPlaylistItemIndex) {
        _currentPlaylistItemIndex++;
    }
}
/*
 * Replaces the item (and its stream proxy) at the given index.
 * The replacement is refused for an out-of-range index and for the
 * item that is currently selected for playback.
 *
 * @param index Index of the item to replace.
 * @param item  The new playlist item.
 */
- (void)replaceItemAtIndex:(NSUInteger)index withItem:(FSPlaylistItem *)item
{
    NSUInteger itemCount = [self countOfItems];
    // Out-of-range, or replacing the currently playing item: refuse.
    if (itemCount == 0 || index >= itemCount || self.currentPlaylistItemIndex == index) {
        return;
    }

    [self.playlistItems replaceObjectAtIndex:index withObject:item];

    FSAudioStreamProxy *streamProxy = [[FSAudioStreamProxy alloc] initWithAudioController:self];
    streamProxy.url = item.url;
    [_streams replaceObjectAtIndex:index withObject:streamProxy];
}
/*
 * Moves a playlist item (and its stream proxy) from one index to
 * another, adjusting currentPlaylistItemIndex so it keeps pointing at
 * the same logical item. Out-of-range indices are ignored.
 *
 * @param from Source index.
 * @param to   Destination index.
 */
- (void)moveItemAtIndex:(NSUInteger)from toIndex:(NSUInteger)to {
    NSUInteger count = [self countOfItems];
    if (count == 0) {
        return;
    }
    if (from >= count || to >= count) {
        return;
    }
    // Keep the current index tracking the same item:
    if(from == self.currentPlaylistItemIndex) {
        // the current item itself is being moved
        _currentPlaylistItemIndex = to;
    }
    else if(from < self.currentPlaylistItemIndex && to > self.currentPlaylistItemIndex) {
        // an item before the current one moved past it
        _currentPlaylistItemIndex--;
    }
    else if(from > self.currentPlaylistItemIndex && to <= self.currentPlaylistItemIndex) {
        // an item after the current one moved in front of it
        _currentPlaylistItemIndex++;
    }
    id object = [self.playlistItems objectAtIndex:from];
    [self.playlistItems removeObjectAtIndex:from];
    [self.playlistItems insertObject:object atIndex:to];
    // Keep the stream proxy list in the same order as the playlist.
    id obj = [_streams objectAtIndex:from];
    [_streams removeObjectAtIndex:from];
    [_streams insertObject:obj atIndex:to];
}
/*
 * Removes the item (and its stream proxy) at the given index.
 *
 * The removal is refused for an out-of-range index and for the item
 * that is currently playing.
 *
 * @param index Index of the item to remove.
 */
- (void)removeItemAtIndex:(NSUInteger)index
{
    NSUInteger count = [self countOfItems];
    if (count == 0) {
        return;
    }
    if (index >= count) {
        return;
    }
    if (self.currentPlaylistItemIndex == index && self.isPlaying) {
        // If the item is currently playing, do not allow the removal
        return;
    }
    FSPlaylistItem *current = self.currentPlaylistItem;
    [self.playlistItems removeObjectAtIndex:index];
    if (self.enableDebugOutput) {
        FSAudioStreamProxy *proxy = [_streams objectAtIndex:index];
        NSLog(@"[FSAudioController.m:%i] removeItemAtIndex. Removing stream proxy %@", __LINE__, proxy.url);
    }
    [_streams removeObjectAtIndex:index];
    // Update the current playlist item to be correct after the removal.
    // Re-locates the previously current item by identity comparison.
    // NOTE(review): if the current item itself was removed while not
    // playing, the index is left unchanged — confirm that is intended.
    NSUInteger itemIndex = 0;
    for (FSPlaylistItem *item in self.playlistItems) {
        if (item == current) {
            self.currentPlaylistItemIndex = itemIndex;
            break;
        }
        itemIndex++;
    }
}
/*
 * Stops playback and cancels any in-flight content-type, playlist and
 * RSS feed requests. Clears readyToPlay so the next play call re-checks
 * the URL's content type.
 */
- (void)stop
{
    if ([_streams count] > 0) {
        // Avoid creating an instance if we don't have it
        // (self.audioStream would lazily create a proxy otherwise).
        [self.audioStream stop];
    }
    [_checkContentTypeRequest cancel];
    [_parsePlaylistRequest cancel];
    [_parseRssPodcastFeedRequest cancel];
    self.readyToPlay = NO;
}
/*
 * Forwards pause to the active stream. Per FSAudioStream's contract,
 * pausing an already paused stream resumes playback.
 */
- (void)pause
{
    [[self audioStream] pause];
}
/*
 * YES when the playlist holds two or more items.
 */
-(BOOL)hasMultiplePlaylistItems
{
    return self.playlistItems.count > 1;
}
/*
 * YES when the playlist has an item after the current one.
 */
-(BOOL)hasNextItem
{
    if (![self hasMultiplePlaylistItems]) {
        return NO;
    }
    return (self.currentPlaylistItemIndex + 1) < [self.playlistItems count];
}
/*
 * YES when the playlist has an item before the current one.
 */
-(BOOL)hasPreviousItem
{
    if (![self hasMultiplePlaylistItems]) {
        return NO;
    }
    return self.currentPlaylistItemIndex > 0;
}
/*
 * Advances to the next playlist item, if any: stops the current stream,
 * deactivates the other stream proxies, bumps the index and starts
 * playback of the following item.
 */
-(void)playNextItem
{
    if ([self hasNextItem]) {
        if (self.enableDebugOutput) {
            // Fixed a typo in the debug message ("playNexItem").
            NSLog(@"[FSAudioController.m:%i] playNextItem. Stopping stream %@", __LINE__, self.audioStream.url);
        }
        [self.audioStream stop];
        [self deactivateInactivateStreams:self.currentPlaylistItemIndex];
        self.currentPlaylistItemIndex = self.currentPlaylistItemIndex + 1;
        [self play];
    }
}
/*
 * Steps back to the previous playlist item, if any: stops the current
 * stream, deactivates the other stream proxies, decrements the index
 * and starts playback of the preceding item.
 */
-(void)playPreviousItem
{
    if (![self hasPreviousItem]) {
        return;
    }

    if (self.enableDebugOutput) {
        NSLog(@"[FSAudioController.m:%i] playPreviousItem. Stopping stream %@", __LINE__, self.audioStream.url);
    }

    [self.audioStream stop];
    [self deactivateInactivateStreams:self.currentPlaylistItemIndex];
    self.currentPlaylistItemIndex = self.currentPlaylistItemIndex - 1;
    [self play];
}
/*
* =======================================
* Properties
* =======================================
*/
/*
 * Stores the desired output volume and applies it to the active stream,
 * if one already exists. needToSetVolume lets a later-created stream
 * pick the value up.
 *
 * @param volume Volume from 0.0 to 1.0.
 */
- (void)setVolume:(float)volume
{
    self.outputVolume = volume;
    self.needToSetVolume = YES;

    if ([_streams count] > 0) {
        self.audioStream.volume = volume;
    }
}
/*
 * The cached output volume last set through setVolume:.
 */
- (float)volume
{
    return self.outputVolume;
}
/*
 * Sets the playback URL. Stops any current playback, stores a copy of
 * the URL and propagates it to the helper requests. Local file URLs
 * skip the content type check and are marked immediately playable.
 *
 * @param url The URL to use, or nil to clear it.
 */
- (void)setUrl:(NSURL *)url
{
    [self stop];
    if (url) {
        // Defensive copy so later mutation by the caller has no effect.
        NSURL *copyOfURL = [url copy];
        _url = copyOfURL;
        self.checkContentTypeRequest.url = _url;
        self.parsePlaylistRequest.url = _url;
        self.parseRssPodcastFeedRequest.url = _url;
        if ([_url isFileURL]) {
            /*
             * Local file URLs can be directly played
             */
            self.readyToPlay = YES;
        }
    } else {
        _url = nil;
    }
}
/*
 * Returns a copy of the stored playback URL, or nil if none is set.
 */
- (NSURL* )url
{
    // Hand out a copy so callers cannot mutate our stored URL.
    return _url ? [_url copy] : nil;
}
/*
 * The currently active stream, or nil when no stream proxy exists.
 * Unlike the audioStream getter, this never creates a proxy as a
 * side effect.
 */
- (FSAudioStream *)activeStream
{
    if ([_streams count] == 0) {
        return nil;
    }
    return self.audioStream;
}
/*
 * The playlist item currently selected for playback, or nil when the
 * controller is not ready to play or the playlist is empty.
 */
- (FSPlaylistItem *)currentPlaylistItem
{
    if (self.readyToPlay) {
        NSUInteger count = [self.playlistItems count];
        // Bounds-check the index: item removals can leave
        // currentPlaylistItemIndex pointing past the end, and indexing
        // then would raise NSRangeException. Return nil instead.
        if (count > 0 && self.currentPlaylistItemIndex < count) {
            FSPlaylistItem *playlistItem = (self.playlistItems)[self.currentPlaylistItemIndex];
            return playlistItem;
        }
    }
    return nil;
}
// The stored state change callback (the setter also forwards it to an
// existing stream).
- (void (^)(FSAudioStreamState state))onStateChange
{
    return _onStateChangeBlock;
}
// The stored metadata callback (the setter also forwards it to an
// existing stream).
- (void (^)(NSDictionary *metaData))onMetaDataAvailable
{
    return _onMetaDataAvailableBlock;
}
// The stored failure callback (the setter also forwards it to an
// existing stream).
- (void (^)(FSAudioStreamError error, NSString *errorDescription))onFailure
{
    return _onFailureBlock;
}
/*
 * Stores the state change callback and forwards it to an
 * already-created stream, if any.
 */
- (void)setOnStateChange:(void (^)(FSAudioStreamState))newOnStateValue
{
    _onStateChangeBlock = newOnStateValue;

    if ([_streams count] > 0) {
        self.audioStream.onStateChange = newOnStateValue;
    }
}
/*
 * Stores the metadata callback and forwards it to an already-created
 * stream, if any.
 */
- (void)setOnMetaDataAvailable:(void (^)(NSDictionary *))newOnMetaDataAvailableValue
{
    _onMetaDataAvailableBlock = newOnMetaDataAvailableValue;

    if ([_streams count] > 0) {
        self.audioStream.onMetaDataAvailable = newOnMetaDataAvailableValue;
    }
}
/*
 * Stores the failure callback and forwards it to an already-created
 * stream, if any.
 */
- (void)setOnFailure:(void (^)(FSAudioStreamError error, NSString *errorDescription))newOnFailureValue
{
    _onFailureBlock = newOnFailureValue;

    if ([_streams count] > 0) {
        self.audioStream.onFailure = newOnFailureValue;
    }
}
@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import <Foundation/Foundation.h>
#import <CoreAudio/CoreAudioTypes.h>
/**
* The major version of the current release.
*/
#define FREESTREAMER_VERSION_MAJOR 4
/**
* The minor version of the current release.
*/
#define FREESTREAMER_VERSION_MINOR 0
/**
 * The revision of the current release.
 */
#define FREESTREAMER_VERSION_REVISION 0
/**
* Follow this notification for the audio stream state changes.
*/
extern NSString* const FSAudioStreamStateChangeNotification;
extern NSString* const FSAudioStreamNotificationKey_State;
/**
* Follow this notification for the audio stream errors.
*/
extern NSString* const FSAudioStreamErrorNotification;
extern NSString* const FSAudioStreamNotificationKey_Error;
/**
* Follow this notification for the audio stream metadata.
*/
extern NSString* const FSAudioStreamMetaDataNotification;
extern NSString* const FSAudioStreamNotificationKey_MetaData;
/**
* The audio stream state.
*/
typedef NS_ENUM(NSInteger, FSAudioStreamState) {
    /**
     * Retrieving URL.
     */
    kFsAudioStreamRetrievingURL,
    /**
     * Stopped.
     */
    kFsAudioStreamStopped,
    /**
     * Buffering.
     */
    kFsAudioStreamBuffering,
    /**
     * Playing.
     */
    kFsAudioStreamPlaying,
    /**
     * Paused.
     */
    kFsAudioStreamPaused,
    /**
     * Seeking.
     */
    kFsAudioStreamSeeking,
    /**
     * The stream has received all the data for a file.
     * (Note the inconsistent "FS" capitalization; kept for API compatibility.)
     */
    kFSAudioStreamEndOfFile,
    /**
     * Failed.
     */
    kFsAudioStreamFailed,
    /**
     * Started retrying.
     */
    kFsAudioStreamRetryingStarted,
    /**
     * Retrying succeeded.
     */
    kFsAudioStreamRetryingSucceeded,
    /**
     * Retrying failed.
     */
    kFsAudioStreamRetryingFailed,
    /**
     * Playback completed.
     */
    kFsAudioStreamPlaybackCompleted,
    /**
     * Unknown state.
     */
    kFsAudioStreamUnknownState
};
/**
* The audio stream errors.
*/
typedef NS_ENUM(NSInteger, FSAudioStreamError) {
    /**
     * No error.
     */
    kFsAudioStreamErrorNone = 0,
    /**
     * Error opening the stream.
     */
    kFsAudioStreamErrorOpen = 1,
    /**
     * Error parsing the stream.
     */
    kFsAudioStreamErrorStreamParse = 2,
    /**
     * Network error.
     */
    kFsAudioStreamErrorNetwork = 3,
    /**
     * Unsupported format.
     */
    kFsAudioStreamErrorUnsupportedFormat = 4,
    /**
     * Stream buffered too often (see the maxBounceCount and bounceInterval
     * settings in FSStreamConfiguration).
     */
    kFsAudioStreamErrorStreamBouncing = 5,
    /**
     * Stream playback was terminated by the operating system.
     */
    kFsAudioStreamErrorTerminated = 6
};
@protocol FSPCMAudioStreamDelegate;
@class FSAudioStreamPrivate;
/**
* The audio stream playback position.
*/
typedef struct {
    /**
     * Minute component of the playback time.
     */
    unsigned minute;
    /**
     * Second component of the playback time.
     */
    unsigned second;
    /**
     * Playback time in seconds.
     */
    float playbackTimeInSeconds;
    /**
     * Position within the stream, where 0 is the beginning
     * and 1.0 is the end.
     */
    float position;
} FSStreamPosition;
/**
* The audio stream seek byte offset.
*/
typedef struct {
    /**
     * Start offset in bytes.
     */
    UInt64 start;
    /**
     * End offset in bytes.
     */
    UInt64 end;
    /**
     * Position within the stream, where 0 is the beginning
     * and 1.0 is the end.
     */
    float position;
} FSSeekByteOffset;
/**
* Audio levels.
*/
typedef struct {
    /**
     * Average power of the signal.
     */
    Float32 averagePower;
    /**
     * Peak power of the signal.
     */
    Float32 peakPower;
} FSLevelMeterState;
/**
* The low-level stream configuration.
*/
@interface FSStreamConfiguration : NSObject {
}
/**
 * The number of buffers.
 */
@property (nonatomic,assign) unsigned bufferCount;
/**
 * The size of each buffer.
 */
@property (nonatomic,assign) unsigned bufferSize;
/**
 * The number of packet descriptions.
 */
@property (nonatomic,assign) unsigned maxPacketDescs;
/**
 * The HTTP connection buffer size.
 */
@property (nonatomic,assign) unsigned httpConnectionBufferSize;
/**
 * The output sample rate.
 */
@property (nonatomic,assign) double outputSampleRate;
/**
 * The number of output channels.
 */
@property (nonatomic,assign) long outputNumChannels;
/**
 * The interval (in seconds) within which the stream may enter the
 * buffering state at most maxBounceCount times before it fails.
 */
@property (nonatomic,assign) int bounceInterval;
/**
 * The number of times the stream may enter the buffering state before it fails.
 */
@property (nonatomic,assign) int maxBounceCount;
/**
 * The stream must start within this many seconds before it fails.
 */
@property (nonatomic,assign) int startupWatchdogPeriod;
/**
 * Allow buffering of this many bytes before the cache is full.
 */
@property (nonatomic,assign) int maxPrebufferedByteCount;
/**
 * Calculate prebuffer sizes dynamically using the stream bitrate in seconds instead of bytes.
 */
@property (nonatomic,assign) BOOL usePrebufferSizeCalculationInSeconds;
/**
 * Calculate prebuffer sizes using the packet counts.
 */
@property (nonatomic,assign) BOOL usePrebufferSizeCalculationInPackets;
/**
 * Require buffering of this many seconds before the playback can start for a continuous stream.
 */
@property (nonatomic,assign) float requiredPrebufferSizeInSeconds;
/**
 * Require buffering of this many bytes before the playback can start for a continuous stream.
 */
@property (nonatomic,assign) int requiredInitialPrebufferedByteCountForContinuousStream;
/**
 * Require buffering of this many bytes before the playback can start for a non-continuous stream.
 */
@property (nonatomic,assign) int requiredInitialPrebufferedByteCountForNonContinuousStream;
/**
 * Require buffering of this many packets before the playback can start.
 */
@property (nonatomic,assign) int requiredInitialPrebufferedPacketCount;
/**
 * The HTTP user agent used for stream operations.
 */
@property (nonatomic,strong) NSString *userAgent;
/**
 * The directory used for caching the streamed files.
 */
@property (nonatomic,strong) NSString *cacheDirectory;
/**
 * The HTTP headers that are appended to the request when the streaming starts. Notice
 * that the headers override any headers previously set by FreeStreamer.
 */
@property (nonatomic,strong) NSDictionary *predefinedHttpHeaderValues;
/**
 * The property determining if caching the streams to the disk is enabled.
 */
@property (nonatomic,assign) BOOL cacheEnabled;
/**
 * The property determining if seeking from the audio packets stored in cache is enabled.
 * The benefit is that seeking is faster in the case the audio packets are already cached in memory.
 */
@property (nonatomic,assign) BOOL seekingFromCacheEnabled;
/**
 * The property determining if FreeStreamer should handle audio session automatically.
 * Leave it on if you don't want to handle the audio session by yourself.
 */
@property (nonatomic,assign) BOOL automaticAudioSessionHandlingEnabled;
/**
 * The property enables time and pitch conversion for the audio queue. Put it on
 * if you want to use the play rate setting.
 */
@property (nonatomic,assign) BOOL enableTimeAndPitchConversion;
/**
 * Requires the content type given by the server to match an audio content type.
 */
@property (nonatomic,assign) BOOL requireStrictContentTypeChecking;
/**
 * The maximum size of the disk cache in bytes.
 */
@property (nonatomic,assign) int maxDiskCacheSize;
@end
/**
* Statistics on the stream state.
*/
@interface FSStreamStatistics : NSObject {
}
/**
 * Time when the statistics were gathered.
 */
@property (nonatomic,strong) NSDate *snapshotTime;
/**
 * Snapshot time in a pretty format ("yyyy-MM-dd HH:mm:ss").
 */
@property (nonatomic,readonly) NSString *snapshotTimeFormatted;
/**
 * Audio stream packet count.
 */
@property (nonatomic,assign) NSUInteger audioStreamPacketCount;
/**
 * Audio queue used buffers count.
 */
@property (nonatomic,assign) NSUInteger audioQueueUsedBufferCount;
/**
 * Audio stream PCM packet queue count.
 */
@property (nonatomic,assign) NSUInteger audioQueuePCMPacketQueueCount;
@end
NSString* freeStreamerReleaseVersion(void);
/**
* FSAudioStream is a class for streaming audio files from an URL.
* It must be directly fed with an URL, which contains audio. That is,
* playlists or other non-audio formats yield an error.
*
* To start playback, the stream must be either initialized with an URL
* or the playback URL can be set with the url property. The playback
* is started with the play method. It is possible to pause or stop
* the stream with the respective methods.
*
* Non-continuous streams (audio streams with a known duration) can be
* seeked with the seekToPosition method.
*
* Note that FSAudioStream is not designed to be thread-safe! That means
 * that using the streamer from multiple threads without synchronization
* could cause problems. It is recommended to keep the streamer in the
* main thread and call the streamer methods only from the main thread
* (consider using performSelectorOnMainThread: if calls from multiple
* threads are needed).
*/
@interface FSAudioStream : NSObject {
    FSAudioStreamPrivate *_private;
}
/**
 * Initializes the audio stream with an URL.
 *
 * @param url The URL from which the stream data is retrieved.
 */
- (id)initWithUrl:(NSURL *)url;
/**
 * Initializes the stream with a configuration.
 *
 * @param configuration The stream configuration.
 */
- (id)initWithConfiguration:(FSStreamConfiguration *)configuration;
/**
 * Starts preloading the stream. If no URL is
 * defined, an error will occur.
 */
- (void)preload;
/**
 * Starts playing the stream. If no playback URL is
 * defined, an error will occur.
 */
- (void)play;
/**
 * Starts playing the stream from the given URL.
 *
 * @param url The URL from which the stream data is retrieved.
 */
- (void)playFromURL:(NSURL*)url;
/**
 * Starts playing the stream from the given offset.
 * The offset can be retrieved from the stream with the
 * currentSeekByteOffset property.
 *
 * @param offset The offset where to start playback from.
 */
- (void)playFromOffset:(FSSeekByteOffset)offset;
/**
 * Stops the stream playback.
 */
- (void)stop;
/**
 * If the stream is playing, the stream playback is paused upon calling pause.
 * Otherwise (the stream is paused), calling pause will continue the playback.
 */
- (void)pause;
/**
 * Rewinds the stream. Only possible for continuous streams.
 *
 * @param seconds Seconds to rewind the stream.
 */
- (void)rewind:(unsigned)seconds;
/**
 * Seeks the stream to a given position. Requires a non-continuous stream
 * (a stream with a known duration).
 *
 * @param position The stream position to seek to.
 */
- (void)seekToPosition:(FSStreamPosition)position;
/**
 * Sets the audio stream playback rate from 0.5 to 2.0.
 * Value 1.0 means the normal playback rate. Values below
 * 1.0 means a slower playback rate than usual and above
 * 1.0 a faster playback rate. Notice that using a faster
 * playback rate than 1.0 may mean that you have to increase
 * the buffer sizes for the stream still to play.
 *
 * The play rate has only effect if the stream is playing.
 *
 * @param playRate The playback rate.
 */
- (void)setPlayRate:(float)playRate;
/**
 * Returns the playback status: YES if the stream is playing, NO otherwise.
 */
- (BOOL)isPlaying;
/**
 * Cleans all cached data from the persistent storage.
 */
- (void)expungeCache;
/**
 * The stream URL.
 *
 * NOTE(review): this and some other object-typed properties below are
 * declared assign; the setters appear to be implemented manually —
 * confirm the ownership semantics in FSAudioStream.m.
 */
@property (nonatomic,assign) NSURL *url;
/**
 * Determines if strict content type checking is required. If the audio stream
 * cannot determine that the stream is actually an audio stream, the stream
 * does not play. Disabling strict content type checking bypasses the
 * stream content type checks and tries to play the stream regardless
 * of the content type information given by the server.
 */
@property (nonatomic,assign) BOOL strictContentTypeChecking;
/**
 * Set an output file to store the stream contents to a file.
 */
@property (nonatomic,assign) NSURL *outputFile;
/**
 * Sets a default content type for the stream. Used if
 * the stream content type is not available.
 */
@property (nonatomic,assign) NSString *defaultContentType;
/**
 * The property has the content type of the stream, for instance audio/mpeg.
 */
@property (nonatomic,readonly) NSString *contentType;
/**
 * The property has the suggested file extension for the stream based on the stream content type.
 */
@property (nonatomic,readonly) NSString *suggestedFileExtension;
/**
 * Sets a default content length for the stream. Used if
 * the stream content length is not available.
 */
@property (nonatomic, assign) UInt64 defaultContentLength;
/**
 * The property has the content length of the stream (in bytes). The length is zero if
 * the stream is continuous.
 */
@property (nonatomic,readonly) UInt64 contentLength;
/**
 * The number of bytes of audio data. Notice that this may differ
 * from the number of bytes the server returns for the content length!
 * For instance audio file meta data is excluded from the count.
 * Effectively you can use this property for seeking calculations.
 */
@property (nonatomic,readonly) UInt64 audioDataByteCount;
/**
 * This property has the current playback position, if the stream is non-continuous.
 * The current playback position cannot be determined for continuous streams.
 */
@property (nonatomic,readonly) FSStreamPosition currentTimePlayed;
/**
 * This property has the duration of the stream, if the stream is non-continuous.
 * Continuous streams do not have a duration.
 */
@property (nonatomic,readonly) FSStreamPosition duration;
/**
 * This property has the current seek byte offset of the stream, if the stream is non-continuous.
 * Continuous streams do not have a seek byte offset.
 */
@property (nonatomic,readonly) FSSeekByteOffset currentSeekByteOffset;
/**
 * This property has the bit rate of the stream. The bit rate is initially 0,
 * before the stream has processed enough packets to calculate the bit rate.
 */
@property (nonatomic,readonly) float bitRate;
/**
 * The property is true if the stream is continuous (no known duration).
 */
@property (nonatomic,readonly) BOOL continuous;
/**
 * The property is true if the stream has been cached locally.
 */
@property (nonatomic,readonly) BOOL cached;
/**
 * This property has the number of bytes buffered for this stream.
 */
@property (nonatomic,readonly) size_t prebufferedByteCount;
/**
 * This property holds the current playback volume of the stream,
 * from 0.0 to 1.0.
 *
 * Note that the overall volume is still constrained by the volume
 * set by the user! So the actual volume cannot be higher
 * than the volume currently set by the user. For example, if
 * requesting a volume of 0.5, then the volume will be 50%
 * lower than the current playback volume set by the user.
 */
@property (nonatomic,assign) float volume;
/**
 * The current size of the disk cache.
 */
@property (nonatomic,readonly) unsigned long long totalCachedObjectsSize;
/**
 * The property determines the amount of times the stream has tried to retry the playback
 * in case of failure.
 */
@property (nonatomic,readonly) NSUInteger retryCount;
/**
 * Holds the maximum amount of playback retries that will be
 * performed before entering kFsAudioStreamRetryingFailed state.
 * Default is 3.
 */
@property (nonatomic,assign) NSUInteger maxRetryCount;
/**
 * The property determines the current audio levels.
 */
@property (nonatomic,readonly) FSLevelMeterState levels;
/**
 * This property holds the current statistics for the stream state.
 */
@property (nonatomic,readonly) FSStreamStatistics *statistics;
/**
 * Called upon completion of the stream. Note that for continuous
 * streams this is never called.
 */
@property (copy) void (^onCompletion)(void);
/**
 * Called upon a state change.
 */
@property (copy) void (^onStateChange)(FSAudioStreamState state);
/**
 * Called when meta data is available.
 */
@property (copy) void (^onMetaDataAvailable)(NSDictionary *metadata);
/**
 * Called upon a failure.
 */
@property (copy) void (^onFailure)(FSAudioStreamError error, NSString *errorDescription);
/**
 * The property has the low-level stream configuration.
 */
@property (readonly) FSStreamConfiguration *configuration;
/**
 * Delegate.
 */
@property (nonatomic,unsafe_unretained) IBOutlet id<FSPCMAudioStreamDelegate> delegate;
@end
/**
* To access the PCM audio data, use this delegate.
*/
@protocol FSPCMAudioStreamDelegate <NSObject>
@optional
/**
 * Called when there are PCM audio samples available. Do not do any blocking operations
 * when you receive the data. Instead, copy the data and process it so that the
 * main event loop doesn't block. Failing to do so may cause glitches to the audio playback.
 *
 * Notice that the delegate callback may occur from other than the main thread so make
 * sure your delegate code is thread safe.
 *
 * @param audioStream The audio stream the samples are from.
 * @param samples The samples as a buffer list.
 * @param frames The number of frames.
 * @param description Description of the data provided.
 */
- (void)audioStream:(FSAudioStream *)audioStream samplesAvailable:(AudioBufferList *)samples frames:(UInt32)frames description: (AudioStreamPacketDescription)description;
@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import "FSAudioStream.h"
#import "Reachability.h"
#include "audio_stream.h"
#include "stream_configuration.h"
#include "input_stream.h"
#import <AVFoundation/AVFoundation.h>
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
#import <AudioToolbox/AudioToolbox.h>
#import <UIKit/UIKit.h>
#endif
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
static NSMutableDictionary *fsAudioStreamPrivateActiveSessions = nil;
#endif
/*
 * A single file in the on-disk stream cache, described by its path,
 * name and file attributes.
 */
@interface FSCacheObject : NSObject {
}
// Full path of the cached file.
@property (strong,nonatomic) NSString *path;
// File name of the cached file.
@property (strong,nonatomic) NSString *name;
// File attributes, keyed by the NSFile* attribute constants.
@property (strong,nonatomic) NSDictionary *attributes;
// File size in bytes, derived from the attributes.
@property (nonatomic,readonly) unsigned long long fileSize;
// Last modification date, derived from the attributes.
@property (nonatomic,readonly) NSDate *modificationDate;
@end
@implementation FSCacheObject

// Size of the cached file in bytes, read from the stored file attributes.
- (unsigned long long)fileSize
{
    return [self.attributes[NSFileSize] longLongValue];
}

// Last modification date of the cached file, read from the stored attributes.
- (NSDate *)modificationDate
{
    return self.attributes[NSFileModificationDate];
}

@end
/*
 * NSArray sort comparator: orders cache objects by ascending
 * modification date. The context parameter is unused.
 */
static NSInteger sortCacheObjects(id co1, id co2, void *keyForSorting)
{
    NSDate *firstDate = ((FSCacheObject *)co1).modificationDate;
    NSDate *secondDate = ((FSCacheObject *)co2).modificationDate;
    return [firstDate compare:secondDate];
}
@implementation FSStreamConfiguration

/*
 * Initializes the configuration with the library defaults. The buffer
 * sizes differ between 32/64 bit devices and between iOS and OS X, and
 * the output sample rate / channel count are taken from the shared
 * audio session when available.
 */
- (id)init
{
    self = [super init];
    if (self) {
        NSMutableString *systemVersion = [[NSMutableString alloc] init];
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
        [systemVersion appendString:@"iOS "];
        [systemVersion appendString:[[UIDevice currentDevice] systemVersion]];
#else
        [systemVersion appendString:@"OS X"];
#endif
        self.bufferCount    = 64;
        self.bufferSize     = 8192;
        self.maxPacketDescs = 512;
        self.httpConnectionBufferSize = 8192;
        self.outputSampleRate = 44100;
        self.outputNumChannels = 2;
        self.bounceInterval    = 10;
        self.maxBounceCount    = 4;   // Max number of bufferings in bounceInterval seconds
        self.startupWatchdogPeriod = 30; // If the stream doesn't start to play in this seconds, the watchdog will fail it
#ifdef __LP64__
        /* Increase the max in-memory cache to 10 MB with newer 64 bit devices */
        self.maxPrebufferedByteCount = 10000000; // 10 MB
#else
        self.maxPrebufferedByteCount = 1000000; // 1 MB
#endif
        self.userAgent = [NSString stringWithFormat:@"FreeStreamer/%@ (%@)", freeStreamerReleaseVersion(), systemVersion];
        self.cacheEnabled = YES;
        self.seekingFromCacheEnabled = YES;
        self.automaticAudioSessionHandlingEnabled = YES;
        self.enableTimeAndPitchConversion = NO;
        self.requireStrictContentTypeChecking = YES;
        self.maxDiskCacheSize = 256000000; // 256 MB
        self.usePrebufferSizeCalculationInSeconds = YES;
        self.usePrebufferSizeCalculationInPackets = NO;
        self.requiredInitialPrebufferedPacketCount = 32;
        self.requiredPrebufferSizeInSeconds = 7;
        // With dynamic calculation, these are actually the maximum sizes, the dynamic
        // calculation may lower the sizes based on the stream bitrate
        self.requiredInitialPrebufferedByteCountForContinuousStream = 256000;
        self.requiredInitialPrebufferedByteCountForNonContinuousStream = 256000;
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        if ([paths count] > 0) {
            self.cacheDirectory = [paths objectAtIndex:0];
        }
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
        // Prefer the hardware sample rate and channel count when the
        // audio session reports usable values.
        AVAudioSession *session = [AVAudioSession sharedInstance];
        double sampleRate = session.sampleRate;
        if (sampleRate > 0) {
            self.outputSampleRate = sampleRate;
        }
        NSInteger channels = session.outputNumberOfChannels;
        if (channels > 0) {
            self.outputNumChannels = channels;
        }
#endif
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
        /* iOS */
#else
        /* OS X */
        self.requiredPrebufferSizeInSeconds = 3;
        // No need to be so conservative with the cache sizes
        self.maxPrebufferedByteCount = 16000000; // 16 MB
#endif
    }
    return self;
}
@end
static NSDateFormatter *statisticsDateFormatter = nil;
@implementation FSStreamStatistics
// Returns the snapshot time formatted as "yyyy-MM-dd HH:mm:ss".
// The shared NSDateFormatter is created via dispatch_once instead of a
// plain nil check: the original lazy initialization raced when this
// getter was first hit from more than one thread (two formatters could
// be created, one leaked mid-configuration).
- (NSString *)snapshotTimeFormatted
{
    static dispatch_once_t formatterOnce;
    dispatch_once(&formatterOnce, ^{
        statisticsDateFormatter = [[NSDateFormatter alloc] init];
        [statisticsDateFormatter setDateFormat:@"yyyy-MM-dd HH:mm:ss"];
    });
    return [statisticsDateFormatter stringFromDate:self.snapshotTime];
}
// Tab-separated one-line summary of this statistics snapshot,
// suitable for logging/diagnostics.
- (NSString *)description
{
    return [[NSString alloc] initWithFormat:@"%@\t%lu\t%lu\t%lu",
            self.snapshotTimeFormatted,
            (unsigned long)self.audioStreamPacketCount,
            (unsigned long)self.audioQueueUsedBufferCount,
            (unsigned long)self.audioQueuePCMPacketQueueCount];
}
@end
// Returns the FreeStreamer library version as a "major.minor.revision"
// string, assembled from the compile-time version macros.
NSString *freeStreamerReleaseVersion()
{
    return [NSString stringWithFormat:@"%i.%i.%i",
            FREESTREAMER_VERSION_MAJOR,
            FREESTREAMER_VERSION_MINOR,
            FREESTREAMER_VERSION_REVISION];
}
// Notification names and userInfo keys posted by FSAudioStream.
// State changes:
NSString* const FSAudioStreamStateChangeNotification = @"FSAudioStreamStateChangeNotification";
NSString* const FSAudioStreamNotificationKey_Stream = @"stream";
NSString* const FSAudioStreamNotificationKey_State = @"state";
// Errors:
NSString* const FSAudioStreamErrorNotification = @"FSAudioStreamErrorNotification";
NSString* const FSAudioStreamNotificationKey_Error = @"error";
NSString* const FSAudioStreamNotificationKey_ErrorDescription = @"errorDescription";
// In-stream metadata (e.g. ICY titles):
NSString* const FSAudioStreamMetaDataNotification = @"FSAudioStreamMetaDataNotification";
NSString* const FSAudioStreamNotificationKey_MetaData = @"metadata";
// C++ bridge between the astreamer::Audio_Stream core and the
// Objective-C FSAudioStreamPrivate object: each delegate callback is
// forwarded to `priv` (the owning FSAudioStreamPrivate instance).
class AudioStreamStateObserver : public astreamer::Audio_Stream_Delegate
{
public:
astreamer::Audio_Stream *source;   // the observed C++ stream (not owned)
FSAudioStreamPrivate *priv;        // back-pointer to the ObjC wrapper (not owned)
void audioStreamErrorOccurred(int errorCode, CFStringRef errorDescription);
void audioStreamStateChanged(astreamer::Audio_Stream::State state);
void audioStreamMetaDataAvailable(std::map<CFStringRef,CFStringRef> metaData);
void samplesAvailable(AudioBufferList *samples, UInt32 frames, AudioStreamPacketDescription description);
void bitrateAvailable();
};
/*
* ===============================================================
* FSAudioStream private implementation
* ===============================================================
*/
// Private implementation object behind the public FSAudioStream facade.
// Owns the C++ astreamer::Audio_Stream, its delegate observer, and the
// reachability monitor; must be created/destroyed on the main thread.
@interface FSAudioStreamPrivate : NSObject {
astreamer::Audio_Stream *_audioStream;   // owned C++ stream core
NSURL *_url;                             // current stream URL (copied)
AudioStreamStateObserver *_observer;     // owned C++ delegate bridge
NSString *_defaultContentType;
Reachability *_reachability;             // active while playing
FSSeekByteOffset _lastSeekByteOffset;    // saved offset across interruptions
BOOL _wasPaused;
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
UIBackgroundTaskIdentifier _backgroundTask;
#endif
}
// NOTE(review): object properties declared `assign` here are backed by
// hand-written accessors that copy — the attribute is declarative only.
@property (nonatomic,assign) NSURL *url;
@property (nonatomic,assign) BOOL strictContentTypeChecking;
@property (nonatomic,assign) NSString *defaultContentType;
@property (readonly) NSString *contentType;
@property (readonly) NSString *suggestedFileExtension;
@property (nonatomic, assign) UInt64 defaultContentLength;
@property (readonly) UInt64 contentLength;
@property (nonatomic,assign) NSURL *outputFile;
// Interruption / connectivity bookkeeping used for automatic restarts.
@property (nonatomic,assign) BOOL wasInterrupted;
@property (nonatomic,assign) BOOL wasDisconnected;
@property (nonatomic,assign) BOOL wasContinuousStream;
@property (nonatomic,assign) BOOL internetConnectionAvailable;
@property (nonatomic,assign) NSUInteger maxRetryCount;
@property (nonatomic,assign) NSUInteger retryCount;
// Read-only snapshots computed from the C++ stream on each access.
@property (readonly) FSStreamStatistics *statistics;
@property (readonly) FSLevelMeterState levels;
@property (readonly) size_t prebufferedByteCount;
@property (readonly) FSSeekByteOffset currentSeekByteOffset;
@property (readonly) float bitRate;
@property (readonly) FSStreamConfiguration *configuration;
@property (readonly) NSString *formatDescription;
@property (readonly) BOOL cached;
// Client callbacks, invoked from notify* methods.
@property (copy) void (^onCompletion)();
@property (copy) void (^onStateChange)(FSAudioStreamState state);
@property (copy) void (^onMetaDataAvailable)(NSDictionary *metaData);
@property (copy) void (^onFailure)(FSAudioStreamError error, NSString *errorDescription);
@property (nonatomic,unsafe_unretained) id<FSPCMAudioStreamDelegate> delegate;
@property (nonatomic,unsafe_unretained) FSAudioStream *stream;
- (AudioStreamStateObserver *)streamStateObserver;
- (void)endBackgroundTask;
// Notification handlers.
- (void)reachabilityChanged:(NSNotification *)note;
- (void)interruptionOccurred:(NSNotification *)notification;
// State-change fan-out helpers (one per FSAudioStreamState).
- (void)notifyPlaybackStopped;
- (void)notifyPlaybackBuffering;
- (void)notifyPlaybackPlaying;
- (void)notifyPlaybackPaused;
- (void)notifyPlaybackSeeking;
- (void)notifyPlaybackEndOfFile;
- (void)notifyPlaybackFailed;
- (void)notifyPlaybackCompletion;
- (void)notifyPlaybackUnknownState;
- (void)notifyRetryingStarted;
- (void)notifyRetryingSucceeded;
- (void)notifyRetryingFailed;
- (void)notifyStateChange:(FSAudioStreamState)streamerState;
- (void)attemptRestart;
- (void)expungeCache;
// Playback control.
- (void)play;
- (void)playFromURL:(NSURL*)url;
- (void)playFromOffset:(FSSeekByteOffset)offset;
- (void)stop;
- (BOOL)isPlaying;
- (void)pause;
- (void)rewind:(unsigned)seconds;
- (void)seekToOffset:(float)offset;
- (float)currentVolume;
- (unsigned long long)totalCachedObjectsSize;
- (void)setVolume:(float)volume;
- (void)setPlayRate:(float)playRate;
- (astreamer::AS_Playback_Position)playbackPosition;
- (UInt64)audioDataByteCount;
- (float)durationInSeconds;
- (void)bitrateAvailable;
@end
@implementation FSAudioStreamPrivate
// Designated initializer. Creates the C++ stream core and its delegate
// bridge, registers for reachability (and, on iOS 6+, audio-session
// interruption) notifications, and configures the shared audio session
// for playback. Must run on the main thread.
-(id)init
{
NSAssert([NSThread isMainThread], @"FSAudioStreamPrivate.init needs to be called in the main thread");
if (self = [super init]) {
_url = nil;
// Wire up the C++ observer <-> stream <-> ObjC wrapper triangle.
_observer = new AudioStreamStateObserver();
_observer->priv = self;
_audioStream = new astreamer::Audio_Stream();
_observer->source = _audioStream;
_audioStream->m_delegate = _observer;
_reachability = nil;
_delegate = nil;
_maxRetryCount = 3;
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(reachabilityChanged:)
name:kReachabilityChangedNotification
object:nil];
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
_backgroundTask = UIBackgroundTaskInvalid;
// Lazily create the process-wide registry of active audio sessions.
@synchronized (self) {
if (!fsAudioStreamPrivateActiveSessions) {
fsAudioStreamPrivateActiveSessions = [[NSMutableDictionary alloc] init];
}
}
if (self.configuration.automaticAudioSessionHandlingEnabled) {
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
}
#endif
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(interruptionOccurred:)
name:AVAudioSessionInterruptionNotification
object:nil];
#endif
}
return self;
}
// Tears down the stream: stops playback, deletes the C++ objects,
// trims the disk cache down to maxDiskCacheSize (oldest files first),
// and deactivates the audio session when no other stream is active.
// Must run on the main thread.
- (void)dealloc
{
NSAssert([NSThread isMainThread], @"FSAudioStreamPrivate.dealloc needs to be called in the main thread");
[[NSNotificationCenter defaultCenter] removeObserver:self];
[self stop];
_delegate = nil;
delete _audioStream;
_audioStream = nil;
delete _observer;
_observer = nil;
// Clean up the disk cache.
if (!self.configuration.cacheEnabled) {
// Don't clean up if cache not enabled
return;
}
// Collect every FSCache- file, summing the total size; data files
// (not .metadata) are the eviction candidates.
unsigned long long totalCacheSize = 0;
NSMutableArray *cachedFiles = [[NSMutableArray alloc] init];
for (NSString *file in [[NSFileManager defaultManager] contentsOfDirectoryAtPath:self.configuration.cacheDirectory error:nil]) {
if ([file hasPrefix:@"FSCache-"]) {
FSCacheObject *cacheObj = [[FSCacheObject alloc] init];
cacheObj.name = file;
cacheObj.path = [NSString stringWithFormat:@"%@/%@", self.configuration.cacheDirectory, cacheObj.name];
cacheObj.attributes = [[NSFileManager defaultManager] attributesOfItemAtPath:cacheObj.path error:nil];
totalCacheSize += [cacheObj fileSize];
if (![cacheObj.name hasSuffix:@".metadata"]) {
[cachedFiles addObject:cacheObj];
}
}
}
// Sort by the modification date.
// In this way the older content will be removed first from the cache.
[cachedFiles sortUsingFunction:sortCacheObjects context:NULL];
// Evict oldest entries (metadata file first, then the data file)
// until we are under the configured limit.
for (FSCacheObject *cacheObj in cachedFiles) {
if (totalCacheSize < self.configuration.maxDiskCacheSize) {
break;
}
FSCacheObject *cachedMetaData = [[FSCacheObject alloc] init];
cachedMetaData.name = [NSString stringWithFormat:@"%@.metadata", cacheObj.name];
cachedMetaData.path = [NSString stringWithFormat:@"%@/%@", self.configuration.cacheDirectory, cachedMetaData.name];
cachedMetaData.attributes = [[NSFileManager defaultManager] attributesOfItemAtPath:cachedMetaData.path error:nil];
if (![[NSFileManager defaultManager] removeItemAtPath:cachedMetaData.path error:nil]) {
continue;
}
totalCacheSize -= [cachedMetaData fileSize];
if (![[NSFileManager defaultManager] removeItemAtPath:cacheObj.path error:nil]) {
continue;
}
totalCacheSize -= [cacheObj fileSize];
}
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
// Unregister from the active-session registry; deactivate the shared
// audio session only if this was the last active stream.
@synchronized (self) {
[fsAudioStreamPrivateActiveSessions removeObjectForKey:[NSNumber numberWithUnsignedLong:(unsigned long)self]];
if ([fsAudioStreamPrivateActiveSessions count] == 0) {
if (self.configuration.automaticAudioSessionHandlingEnabled) {
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
[[AVAudioSession sharedInstance] setActive:NO withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:nil];
#else
[[AVAudioSession sharedInstance] setActive:NO error:nil];
#endif
}
}
}
#endif
}
// Ends the UIKit background task started in -play, if one is active
// (no-op on non-iOS builds).
- (void)endBackgroundTask
{
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
if (_backgroundTask != UIBackgroundTaskInvalid) {
[[UIApplication sharedApplication] endBackgroundTask:_backgroundTask];
_backgroundTask = UIBackgroundTaskInvalid;
}
#endif
}
// Exposes the C++ delegate bridge (used by FSAudioStream internals).
- (AudioStreamStateObserver *)streamStateObserver
{
return _observer;
}
// Sets the stream URL, copying it and forwarding to the C++ core.
// If the stream was playing, playback is stopped first and resumed
// afterwards with the new URL.
- (void)setUrl:(NSURL *)url
{
    // Capture the playback state BEFORE stopping: after [self stop] the
    // stream no longer reports itself as playing, so the original code's
    // second [self isPlaying] check was always false and playback was
    // never resumed after a URL change.
    const BOOL wasPlaying = [self isPlaying];
    if (wasPlaying) {
        [self stop];
    }
    @synchronized (self) {
        if ([url isEqual:_url]) {
            // Same URL: nothing to update (and nothing to resume).
            return;
        }
        _url = [url copy];
        _audioStream->setUrl((__bridge CFURLRef)_url);
    }
    if (wasPlaying) {
        [self play];
    }
}
// Returns a defensive copy of the current stream URL, or nil when no
// URL has been set.
- (NSURL*)url
{
    return _url ? [_url copy] : nil;
}
// Forwards the strict content-type-checking flag to the C++ core.
- (void)setStrictContentTypeChecking:(BOOL)strictContentTypeChecking
{
_audioStream->setStrictContentTypeChecking(strictContentTypeChecking);
}
- (BOOL)strictContentTypeChecking
{
return _audioStream->strictContentTypeChecking();
}
// Convenience: set the URL and immediately start playback.
- (void)playFromURL:(NSURL*)url
{
[self setUrl:url];
[self play];
}
// Starts playback from a previously captured byte offset (e.g. saved
// across an audio-session interruption). When the stream is already
// preloading, only a seek is needed; otherwise the HTTP stream is
// reopened at the requested byte range.
- (void)playFromOffset:(FSSeekByteOffset)offset
{
_wasPaused = NO;
if (_audioStream->isPreloading()) {
_audioStream->seekToOffset(offset.position);
_audioStream->setPreloading(false);
} else {
astreamer::Input_Stream_Position position;
position.start = offset.start;
position.end = offset.end;
_audioStream->open(&position);
_audioStream->setSeekOffset(offset.position);
_audioStream->setContentLength(offset.end);
}
// Begin monitoring connectivity so playback can auto-restart.
if (!_reachability) {
_reachability = [Reachability reachabilityForInternetConnection];
[_reachability startNotifier];
}
}
// Stores a copy of the fallback content type and forwards it to the
// C++ core; passing nil clears the fallback.
- (void)setDefaultContentType:(NSString *)defaultContentType
{
if (defaultContentType) {
_defaultContentType = [defaultContentType copy];
_audioStream->setDefaultContentType((__bridge CFStringRef)_defaultContentType);
} else {
_audioStream->setDefaultContentType(NULL);
}
}
// Returns a defensive copy of the fallback content type, or nil.
- (NSString*)defaultContentType
{
if (!_defaultContentType) {
return nil;
}
NSString *copyOfDefaultContentType = [_defaultContentType copy];
return copyOfDefaultContentType;
}
// Content type reported by the C++ core (from HTTP headers), bridged
// to an ARC-managed copy; nil when not yet known.
- (NSString*)contentType
{
CFStringRef c = _audioStream->contentType();
if (c) {
return CFBridgingRelease(CFStringCreateCopy(kCFAllocatorDefault, c));
}
return nil;
}
// Maps the stream's MIME content type to a conventional file
// extension. Returns nil when the content type is unknown or has no
// mapping.
- (NSString*)suggestedFileExtension
{
    NSString *contentType = [self contentType];
    if (!contentType) {
        return nil;
    }
    NSDictionary *extensionForContentType = @{
        @"audio/mpeg":   @"mp3",
        @"audio/x-wav":  @"wav",
        @"audio/x-aifc": @"aifc",
        @"audio/x-aiff": @"aiff",
        @"audio/x-m4a":  @"m4a",
        @"audio/mp4":    @"mp4",
        @"audio/x-caf":  @"caf",
        @"audio/aac":    @"aac",
        @"audio/aacp":   @"aac",
    };
    return extensionForContentType[contentType];
}
// Thin forwards to the C++ core for content-length queries.
- (UInt64)defaultContentLength
{
return _audioStream->defaultContentLength();
}
- (UInt64)contentLength
{
return _audioStream->contentLength();
}
// URL of the file the stream output is copied to, or nil when output
// recording is disabled.
- (NSURL*)outputFile
{
CFURLRef url = _audioStream->outputFile();
if (url) {
NSURL *u = (__bridge NSURL*)url;
return [u copy];
}
return nil;
}
// Sets (or clears, with nil) the output recording file.
- (void)setOutputFile:(NSURL *)outputFile
{
if (!outputFile) {
_audioStream->setOutputFile(NULL);
return;
}
NSURL *copyOfURL = [outputFile copy];
_audioStream->setOutputFile((__bridge CFURLRef)copyOfURL);
}
// Fresh statistics snapshot; a new object is built on each access.
- (FSStreamStatistics *)statistics
{
FSStreamStatistics *stats = [[FSStreamStatistics alloc] init];
stats.snapshotTime = [[NSDate alloc] init];
stats.audioStreamPacketCount = _audioStream->playbackDataCount();
return stats;
}
// Current audio level meter values from the audio queue.
- (FSLevelMeterState)levels
{
AudioQueueLevelMeterState aqLevels = _audioStream->levels();
FSLevelMeterState l;
l.averagePower = aqLevels.mAveragePower;
l.peakPower = aqLevels.mPeakPower;
return l;
}
// Bytes currently buffered in memory.
- (size_t)prebufferedByteCount
{
return _audioStream->cachedDataSize();
}
// Current playback position expressed as an HTTP byte range, used to
// resume non-continuous streams. Returns zeros for continuous streams
// (those with no known duration), which cannot be resumed by offset.
- (FSSeekByteOffset)currentSeekByteOffset
{
FSSeekByteOffset offset;
offset.start = 0;
offset.end = 0;
offset.position = 0;
// If continuous
if (!([self durationInSeconds] > 0)) {
return offset;
}
offset.position = _audioStream->playbackPosition().offset;
astreamer::Input_Stream_Position httpStreamPos = _audioStream->streamPositionForOffset(offset.position);
offset.start = httpStreamPos.start;
offset.end = httpStreamPos.end;
return offset;
}
// Detected stream bitrate (bits/s) from the C++ core.
- (float)bitRate
{
return _audioStream->bitrate();
}
// Builds an Objective-C snapshot of the process-wide C++ stream
// configuration. Note: a new FSStreamConfiguration is allocated on
// every access, so mutating the returned object does not write back.
- (FSStreamConfiguration *)configuration
{
FSStreamConfiguration *config = [[FSStreamConfiguration alloc] init];
astreamer::Stream_Configuration *c = astreamer::Stream_Configuration::configuration();
config.bufferCount = c->bufferCount;
config.bufferSize = c->bufferSize;
config.maxPacketDescs = c->maxPacketDescs;
config.httpConnectionBufferSize = c->httpConnectionBufferSize;
config.outputSampleRate = c->outputSampleRate;
config.outputNumChannels = c->outputNumChannels;
config.bounceInterval = c->bounceInterval;
config.maxBounceCount = c->maxBounceCount;
config.startupWatchdogPeriod = c->startupWatchdogPeriod;
config.maxPrebufferedByteCount = c->maxPrebufferedByteCount;
config.usePrebufferSizeCalculationInSeconds = c->usePrebufferSizeCalculationInSeconds;
config.usePrebufferSizeCalculationInPackets = c->usePrebufferSizeCalculationInPackets;
config.requiredInitialPrebufferedByteCountForContinuousStream = c->requiredInitialPrebufferedByteCountForContinuousStream;
config.requiredInitialPrebufferedByteCountForNonContinuousStream = c->requiredInitialPrebufferedByteCountForNonContinuousStream;
config.requiredPrebufferSizeInSeconds = c->requiredPrebufferSizeInSeconds;
config.requiredInitialPrebufferedPacketCount = c->requiredInitialPrebufferedPacketCount;
config.cacheEnabled = c->cacheEnabled;
config.seekingFromCacheEnabled = c->seekingFromCacheEnabled;
config.automaticAudioSessionHandlingEnabled = c->automaticAudioSessionHandlingEnabled;
config.enableTimeAndPitchConversion = c->enableTimeAndPitchConversion;
config.requireStrictContentTypeChecking = c->requireStrictContentTypeChecking;
config.maxDiskCacheSize = c->maxDiskCacheSize;
if (c->userAgent) {
// Let the Objective-C side handle the memory for the copy of the original user-agent
config.userAgent = (__bridge_transfer NSString *)CFStringCreateCopy(kCFAllocatorDefault, c->userAgent);
}
if (c->cacheDirectory) {
config.cacheDirectory = (__bridge_transfer NSString *)CFStringCreateCopy(kCFAllocatorDefault, c->cacheDirectory);
}
if (c->predefinedHttpHeaderValues) {
config.predefinedHttpHeaderValues = (__bridge_transfer NSDictionary *)CFDictionaryCreateCopy(kCFAllocatorDefault, c->predefinedHttpHeaderValues);
}
return config;
}
// Human-readable description of the source audio format, bridged from
// the C++ core (ownership transferred to ARC).
- (NSString *)formatDescription
{
return CFBridgingRelease(_audioStream->sourceFormatDescription());
}
// YES when a completed disk-cache entry (its .metadata sidecar file)
// exists for the current URL.
- (BOOL)cached
{
BOOL cachedFileExists = NO;
if (self.url) {
NSString *cacheIdentifier = (NSString*)CFBridgingRelease(_audioStream->createCacheIdentifierForURL((__bridge CFURLRef)self.url));
NSString *fullPath = [NSString stringWithFormat:@"%@/%@.metadata", self.configuration.cacheDirectory, cacheIdentifier];
cachedFileExists = [[NSFileManager defaultManager] fileExistsAtPath:fullPath];
}
return cachedFileExists;
}
// Reachability notification handler: records connectivity loss while
// playing, and attempts an automatic restart once the connection
// becomes available again. Main thread only.
- (void)reachabilityChanged:(NSNotification *)note
{
NSAssert([NSThread isMainThread], @"FSAudioStreamPrivate.reachabilityChanged needs to be called in the main thread");
Reachability *reach = [note object];
NetworkStatus netStatus = [reach currentReachabilityStatus];
self.internetConnectionAvailable = (netStatus == ReachableViaWiFi || netStatus == ReachableViaWWAN);
if ([self isPlaying] && !self.internetConnectionAvailable) {
self.wasDisconnected = YES;
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
NSLog(@"FSAudioStream: Error: Internet connection disconnected while playing a stream.");
#endif
}
if (self.wasDisconnected && self.internetConnectionAvailable) {
self.wasDisconnected = NO;
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
NSLog(@"FSAudioStream: Internet connection available again.");
#endif
[self attemptRestart];
}
}
// AVAudioSession interruption handler (iOS 6+). On interruption begin,
// stops playback — saving the byte offset for non-continuous streams so
// it can be resumed. On interruption end, reactivates the session and
// resumes (from the start for continuous streams, from the saved
// offset otherwise) when the system says resuming is appropriate.
// Main thread only.
//
// Fix: the debug log strings in the non-continuous resume branch and
// the not-resuming branch wrongly said "Continuous stream"; the
// messages now describe the branch they are actually in.
- (void)interruptionOccurred:(NSNotification *)notification
{
    NSAssert([NSThread isMainThread], @"FSAudioStreamPrivate.interruptionOccurred needs to be called in the main thread");
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
    NSNumber *interruptionType = [[notification userInfo] valueForKey:AVAudioSessionInterruptionTypeKey];
    NSNumber *interruptionResume = [[notification userInfo] valueForKey:AVAudioSessionInterruptionOptionKey];
    if ([interruptionType intValue] == AVAudioSessionInterruptionTypeBegan) {
        if ([self isPlaying] && !_wasPaused) {
            self.wasInterrupted = YES;
            // Continuous streams do not have a duration.
            self.wasContinuousStream = !([self durationInSeconds] > 0);
            if (self.wasContinuousStream) {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
                NSLog(@"FSAudioStream: Interruption began. Continuous stream. Stopping the stream.");
#endif
                [self stop];
            } else {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
                NSLog(@"FSAudioStream: Interruption began. Non-continuous stream. Stopping the stream and saving the offset.");
#endif
                // Remember where we were so playback can resume here.
                _lastSeekByteOffset = [self currentSeekByteOffset];
                [self stop];
            }
        }
    } else if ([interruptionType intValue] == AVAudioSessionInterruptionTypeEnded) {
        if (self.wasInterrupted) {
            self.wasInterrupted = NO;
            if ([interruptionResume intValue] == AVAudioSessionInterruptionOptionShouldResume) {
                // Re-activate the session and re-register as an active stream.
                @synchronized (self) {
                    if (self.configuration.automaticAudioSessionHandlingEnabled) {
                        [[AVAudioSession sharedInstance] setActive:YES error:nil];
                    }
                    fsAudioStreamPrivateActiveSessions[[NSNumber numberWithUnsignedLong:(unsigned long)self]] = @"";
                }
                if (self.wasContinuousStream) {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
                    NSLog(@"FSAudioStream: Interruption ended. Continuous stream. Starting the playback.");
#endif
                    /*
                     * Resume playing.
                     */
                    [self play];
                } else {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
                    NSLog(@"FSAudioStream: Interruption ended. Non-continuous stream. Playing from the offset");
#endif
                    /*
                     * Resume playing.
                     */
                    [self playFromOffset:_lastSeekByteOffset];
                }
            } else {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
                NSLog(@"FSAudioStream: Interruption ended. Not resuming.");
#endif
            }
        }
    }
#endif
}
// Stream stopped: unregister from the active-session registry and, if
// no other stream is active, deactivate the shared audio session.
- (void)notifyPlaybackStopped
{
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
@synchronized (self) {
[fsAudioStreamPrivateActiveSessions removeObjectForKey:[NSNumber numberWithUnsignedLong:(unsigned long)self]];
if ([fsAudioStreamPrivateActiveSessions count] == 0) {
if (self.configuration.automaticAudioSessionHandlingEnabled) {
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
[[AVAudioSession sharedInstance] setActive:NO withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:nil];
#else
[[AVAudioSession sharedInstance] setActive:NO error:nil];
#endif
}
}
}
#endif
[self notifyStateChange:kFsAudioStreamStopped];
}
// Buffering implies we have connectivity.
- (void)notifyPlaybackBuffering
{
self.internetConnectionAvailable = YES;
[self notifyStateChange:kFsAudioStreamBuffering];
}
// Playback started: activate the session, register as active, and if
// this playback followed retries, schedule a "retrying succeeded"
// notification on the next run-loop pass.
- (void)notifyPlaybackPlaying
{
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
@synchronized (self) {
if (self.configuration.automaticAudioSessionHandlingEnabled) {
[[AVAudioSession sharedInstance] setActive:YES error:nil];
}
fsAudioStreamPrivateActiveSessions[[NSNumber numberWithUnsignedLong:(unsigned long)self]] = @"";
}
#endif
if (self.retryCount > 0) {
[NSTimer scheduledTimerWithTimeInterval:0.1
target:self
selector:@selector(notifyRetryingSucceeded)
userInfo:nil
repeats:NO];
}
self.retryCount = 0;
[self notifyStateChange:kFsAudioStreamPlaying];
}
- (void)notifyPlaybackPaused
{
[self notifyStateChange:kFsAudioStreamPaused];
}
- (void)notifyPlaybackSeeking
{
[self notifyStateChange:kFsAudioStreamSeeking];
}
- (void)notifyPlaybackEndOfFile
{
[self notifyStateChange:kFSAudioStreamEndOfFile];
}
// Playback failed: release this stream's claim on the audio session
// (deactivating it if no other stream is active), then notify.
- (void)notifyPlaybackFailed
{
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
@synchronized (self) {
[fsAudioStreamPrivateActiveSessions removeObjectForKey:[NSNumber numberWithUnsignedLong:(unsigned long)self]];
if ([fsAudioStreamPrivateActiveSessions count] == 0) {
if (self.configuration.automaticAudioSessionHandlingEnabled) {
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 60000)
[[AVAudioSession sharedInstance] setActive:NO withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:nil];
#else
[[AVAudioSession sharedInstance] setActive:NO error:nil];
#endif
}
}
}
#endif
[self notifyStateChange:kFsAudioStreamFailed];
}
// Playback completed: notify state change first, then run the client's
// completion block, if set.
- (void)notifyPlaybackCompletion
{
[self notifyStateChange:kFsAudioStreamPlaybackCompleted];
if (self.onCompletion) {
self.onCompletion();
}
}
- (void)notifyPlaybackUnknownState
{
[self notifyStateChange:kFsAudioStreamUnknownState];
}
// Retry lifecycle notifications.
- (void)notifyRetryingStarted
{
[self notifyStateChange:kFsAudioStreamRetryingStarted];
}
- (void)notifyRetryingSucceeded
{
[self notifyStateChange:kFsAudioStreamRetryingSucceeded];
}
- (void)notifyRetryingFailed
{
[self notifyStateChange:kFsAudioStreamRetryingFailed];
}
// Fans out a state change: first to the client's onStateChange block
// (if set), then to all NSNotificationCenter observers of
// FSAudioStreamStateChangeNotification. The userInfo carries the new
// state and an opaque pointer to the C++ stream.
- (void)notifyStateChange:(FSAudioStreamState)streamerState
{
    if (self.onStateChange) {
        self.onStateChange(streamerState);
    }
    NSDictionary *userInfo = @{FSAudioStreamNotificationKey_State: [NSNumber numberWithInt:streamerState],
                               FSAudioStreamNotificationKey_Stream: [NSValue valueWithPointer:_audioStream]};
    [[NSNotificationCenter defaultCenter] postNotificationName:FSAudioStreamStateChangeNotification
                                                        object:self.stream
                                                      userInfo:userInfo];
}
// Opens the stream in preloading mode (buffers data without starting
// audible playback; -play later switches to playback).
- (void)preload
{
_audioStream->setPreloading(true);
_audioStream->open();
}
// Tries to restart playback after a connectivity drop. Skipped when
// preloading, paused by the user, or still offline. Gives up after
// maxRetryCount attempts. The 0.1s timers defer the state
// notifications to the next run-loop pass; the 1s timer delays the
// actual replay attempt.
- (void)attemptRestart
{
if (_audioStream->isPreloading()) {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
NSLog(@"FSAudioStream: Stream is preloading. Not attempting a restart");
#endif
return;
}
if (_wasPaused) {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
NSLog(@"FSAudioStream: Stream was paused. Not attempting a restart");
#endif
return;
}
if (!self.internetConnectionAvailable) {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
NSLog(@"FSAudioStream: Internet connection not available. Not attempting a restart");
#endif
return;
}
if (self.retryCount >= self.maxRetryCount) {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
NSLog(@"FSAudioStream: Retry count %lu. Giving up.", (unsigned long)self.retryCount);
#endif
[NSTimer scheduledTimerWithTimeInterval:0.1
target:self
selector:@selector(notifyRetryingFailed)
userInfo:nil
repeats:NO];
return;
}
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
NSLog(@"FSAudioStream: Attempting restart.");
#endif
[NSTimer scheduledTimerWithTimeInterval:0.1
target:self
selector:@selector(notifyRetryingStarted)
userInfo:nil
repeats:NO];
[NSTimer scheduledTimerWithTimeInterval:1
target:self
selector:@selector(play)
userInfo:nil
repeats:NO];
self.retryCount++;
}
// Deletes every FSCache-* file from the cache directory. Failures are
// logged (debug builds only) and do not abort the sweep.
- (void)expungeCache
{
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSString *cacheDir = self.configuration.cacheDirectory;
    for (NSString *fileName in [fileManager contentsOfDirectoryAtPath:cacheDir error:nil]) {
        if (![fileName hasPrefix:@"FSCache-"]) {
            continue;
        }
        NSString *fullPath = [NSString stringWithFormat:@"%@/%@", cacheDir, fileName];
        if (![fileManager removeItemAtPath:fullPath error:nil]) {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
            NSLog(@"Failed expunging %@ from the cache", fullPath);
#endif
        }
    }
}
// Starts (or resumes, when preloaded) playback. On iOS a background
// task keeps the process alive while the stream spins up, and a
// reachability notifier is started for auto-restart support.
- (void)play
{
_wasPaused = NO;
if (_audioStream->isPreloading()) {
// Already buffered by -preload: just start playing the cached data.
_audioStream->startCachedDataPlayback();
return;
}
#if (__IPHONE_OS_VERSION_MIN_REQUIRED >= 40000)
[self endBackgroundTask];
_backgroundTask = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{
[self endBackgroundTask];
}];
#endif
_audioStream->open();
if (!_reachability) {
_reachability = [Reachability reachabilityForInternetConnection];
[_reachability startNotifier];
}
}
// Stops playback, ends the background task and tears down the
// reachability monitor.
- (void)stop
{
_audioStream->close(true);
[self endBackgroundTask];
[_reachability stopNotifier];
_reachability = nil;
}
// YES while the C++ core is in the PLAYING or END_OF_FILE state.
- (BOOL)isPlaying
{
const astreamer::Audio_Stream::State currentState = _audioStream->state();
return (currentState == astreamer::Audio_Stream::PLAYING ||
currentState == astreamer::Audio_Stream::END_OF_FILE);
}
// Pauses playback; _wasPaused suppresses automatic restarts.
- (void)pause
{
_wasPaused = YES;
_audioStream->pause();
}
// Rewinds a continuous (duration-less) stream by the given number of
// seconds. Volume is muted during the rewind and restored one second
// later to hide audible glitches.
- (void)rewind:(unsigned int)seconds
{
if (([self durationInSeconds] > 0)) {
// Rewinding only possible for continuous streams
return;
}
const float originalVolume = [self currentVolume];
// Set volume to 0 to avoid glitches
_audioStream->setVolume(0);
_audioStream->rewind(seconds);
__weak FSAudioStreamPrivate *weakSelf = self;
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
FSAudioStreamPrivate *strongSelf = weakSelf;
// Return the original volume back
strongSelf->_audioStream->setVolume(originalVolume);
});
}
// Seeks to a relative position in the stream (forwarded to the core;
// offset semantics are defined by Audio_Stream::seekToOffset).
- (void)seekToOffset:(float)offset
{
_audioStream->seekToOffset(offset);
}
- (float)currentVolume
{
return _audioStream->currentVolume();
}
// Sums the on-disk size of all FSCache-* files in the cache directory.
- (unsigned long long)totalCachedObjectsSize
{
unsigned long long totalCacheSize = 0;
for (NSString *file in [[NSFileManager defaultManager] contentsOfDirectoryAtPath:self.configuration.cacheDirectory error:nil]) {
if ([file hasPrefix:@"FSCache-"]) {
NSString *fullPath = [NSString stringWithFormat:@"%@/%@", self.configuration.cacheDirectory, file];
NSDictionary *attributes = [[NSFileManager defaultManager] attributesOfItemAtPath:fullPath error:nil];
totalCacheSize += [[attributes objectForKey:NSFileSize] longLongValue];
}
}
return totalCacheSize;
}
- (void)setVolume:(float)volume
{
_audioStream->setVolume(volume);
}
- (void)setPlayRate:(float)playRate
{
_audioStream->setPlayRate(playRate);
}
// Thin forwards to the C++ core.
- (astreamer::AS_Playback_Position)playbackPosition
{
return _audioStream->playbackPosition();
}
- (UInt64)audioDataByteCount
{
return _audioStream->audioDataByteCount();
}
// Stream duration in seconds; 0 (or non-positive) for continuous streams.
- (float)durationInSeconds
{
return _audioStream->durationInSeconds();
}
// Called when the core has detected the stream bitrate: recomputes the
// required prebuffer size (bytes) from bitrate * seconds, clamps it to
// a sane floor and to the configured maximums, and writes the result
// into both the ObjC and the process-wide C++ configuration.
- (void)bitrateAvailable
{
if (!self.configuration.usePrebufferSizeCalculationInSeconds) {
return;
}
// NOTE(review): bitrate is truncated to int before the float
// assignment — presumably intentional rounding; verify upstream.
float bitrate = (int)_audioStream->bitrate();
if (!(bitrate > 0)) {
// No bitrate provided, use the defaults
return;
}
// bits/s -> bytes/s.
const Float64 bufferSizeForSecond = bitrate / 8.0;
int bufferSize = (bufferSizeForSecond * self.configuration.requiredPrebufferSizeInSeconds);
// Check that we still got somewhat sane buffer size
if (bufferSize < 50000) {
bufferSize = 50000;
}
if (!([self durationInSeconds] > 0)) {
// continuous
if (bufferSize > self.configuration.requiredInitialPrebufferedByteCountForContinuousStream) {
bufferSize = self.configuration.requiredInitialPrebufferedByteCountForContinuousStream;
}
} else {
if (bufferSize > self.configuration.requiredInitialPrebufferedByteCountForNonContinuousStream) {
bufferSize = self.configuration.requiredInitialPrebufferedByteCountForNonContinuousStream;
}
}
// Update the configuration
self.configuration.requiredInitialPrebufferedByteCountForContinuousStream = bufferSize;
self.configuration.requiredInitialPrebufferedByteCountForNonContinuousStream = bufferSize;
astreamer::Stream_Configuration *c = astreamer::Stream_Configuration::configuration();
c->requiredInitialPrebufferedByteCountForContinuousStream = bufferSize;
c->requiredInitialPrebufferedByteCountForNonContinuousStream = bufferSize;
}
// Multi-line debug dump of the stream's URL, configuration and
// detected format/bitrate. Intended for logging only.
-(NSString *)description
{
return [NSString stringWithFormat:@"[FreeStreamer %@] URL: %@\nbufferCount: %i\nbufferSize: %i\nmaxPacketDescs: %i\nhttpConnectionBufferSize: %i\noutputSampleRate: %f\noutputNumChannels: %ld\nbounceInterval: %i\nmaxBounceCount: %i\nstartupWatchdogPeriod: %i\nmaxPrebufferedByteCount: %i\nformat: %@\nbit rate: %f\nuserAgent: %@\ncacheDirectory: %@\npredefinedHttpHeaderValues: %@\ncacheEnabled: %@\nseekingFromCacheEnabled: %@\nautomaticAudioSessionHandlingEnabled: %@\nenableTimeAndPitchConversion: %@\nrequireStrictContentTypeChecking: %@\nmaxDiskCacheSize: %i\nusePrebufferSizeCalculationInSeconds: %@\nusePrebufferSizeCalculationInPackets: %@\nrequiredPrebufferSizeInSeconds: %f\nrequiredInitialPrebufferedByteCountForContinuousStream: %i\nrequiredInitialPrebufferedByteCountForNonContinuousStream: %i\nrequiredInitialPrebufferedPacketCount: %i",
freeStreamerReleaseVersion(),
self.url,
self.configuration.bufferCount,
self.configuration.bufferSize,
self.configuration.maxPacketDescs,
self.configuration.httpConnectionBufferSize,
self.configuration.outputSampleRate,
self.configuration.outputNumChannels,
self.configuration.bounceInterval,
self.configuration.maxBounceCount,
self.configuration.startupWatchdogPeriod,
self.configuration.maxPrebufferedByteCount,
self.formatDescription,
self.bitRate,
self.configuration.userAgent,
self.configuration.cacheDirectory,
self.configuration.predefinedHttpHeaderValues,
(self.configuration.cacheEnabled ? @"YES" : @"NO"),
(self.configuration.seekingFromCacheEnabled ? @"YES" : @"NO"),
(self.configuration.automaticAudioSessionHandlingEnabled ? @"YES" : @"NO"),
(self.configuration.enableTimeAndPitchConversion ? @"YES" : @"NO"),
(self.configuration.requireStrictContentTypeChecking ? @"YES" : @"NO"),
self.configuration.maxDiskCacheSize,
(self.configuration.usePrebufferSizeCalculationInSeconds ? @"YES" : @"NO"),
(self.configuration.usePrebufferSizeCalculationInPackets ? @"YES" : @"NO"),
self.configuration.requiredPrebufferSizeInSeconds,
self.configuration.requiredInitialPrebufferedByteCountForContinuousStream,
self.configuration.requiredInitialPrebufferedByteCountForNonContinuousStream,
self.configuration.requiredInitialPrebufferedPacketCount];
}
@end
/*
* ===============================================================
* FSAudioStream public implementation, merely wraps the
* private class.
* ===============================================================
*/
@implementation FSAudioStream

/*
 * FSAudioStream is a thin, main-thread-only facade: every method asserts
 * the main thread and forwards to the private implementation object
 * (_private, an FSAudioStreamPrivate) that owns the C++ audio stream.
 */

-(id)init
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.init needs to be called in the main thread");
    FSStreamConfiguration *defaultConfiguration = [[FSStreamConfiguration alloc] init];
    if (self = [self initWithConfiguration:defaultConfiguration]) {
    }
    return self;
}

- (id)initWithUrl:(NSURL *)url
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.initWithURL needs to be called in the main thread");
    if (self = [self init]) {
        _private.url = url;
    }
    return self;
}

- (id)initWithConfiguration:(FSStreamConfiguration *)configuration
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.initWithConfiguration needs to be called in the main thread");
    if (self = [super init]) {
        // Mirror the Objective-C configuration into the process-wide C++
        // Stream_Configuration singleton used by the astreamer implementation.
        astreamer::Stream_Configuration *c = astreamer::Stream_Configuration::configuration();
        c->bufferCount = configuration.bufferCount;
        c->bufferSize = configuration.bufferSize;
        c->maxPacketDescs = configuration.maxPacketDescs;
        c->httpConnectionBufferSize = configuration.httpConnectionBufferSize;
        c->outputSampleRate = configuration.outputSampleRate;
        c->outputNumChannels = configuration.outputNumChannels;
        c->maxBounceCount = configuration.maxBounceCount;
        c->bounceInterval = configuration.bounceInterval;
        c->startupWatchdogPeriod = configuration.startupWatchdogPeriod;
        c->maxPrebufferedByteCount = configuration.maxPrebufferedByteCount;
        c->usePrebufferSizeCalculationInSeconds = configuration.usePrebufferSizeCalculationInSeconds;
        c->usePrebufferSizeCalculationInPackets = configuration.usePrebufferSizeCalculationInPackets;
        c->cacheEnabled = configuration.cacheEnabled;
        c->seekingFromCacheEnabled = configuration.seekingFromCacheEnabled;
        c->automaticAudioSessionHandlingEnabled = configuration.automaticAudioSessionHandlingEnabled;
        c->enableTimeAndPitchConversion = configuration.enableTimeAndPitchConversion;
        c->requireStrictContentTypeChecking = configuration.requireStrictContentTypeChecking;
        c->maxDiskCacheSize = configuration.maxDiskCacheSize;
        c->requiredInitialPrebufferedByteCountForContinuousStream = configuration.requiredInitialPrebufferedByteCountForContinuousStream;
        c->requiredInitialPrebufferedByteCountForNonContinuousStream = configuration.requiredInitialPrebufferedByteCountForNonContinuousStream;
        c->requiredPrebufferSizeInSeconds = configuration.requiredPrebufferSizeInSeconds;
        c->requiredInitialPrebufferedPacketCount = configuration.requiredInitialPrebufferedPacketCount;
        // The CF members are owned by the singleton; release any previous
        // value before replacing it so repeated initialization does not leak.
        if (c->userAgent) {
            CFRelease(c->userAgent);
        }
        c->userAgent = CFStringCreateCopy(kCFAllocatorDefault, (__bridge CFStringRef)configuration.userAgent);
        if (c->cacheDirectory) {
            CFRelease(c->cacheDirectory);
        }
        if (configuration.cacheDirectory) {
            c->cacheDirectory = CFStringCreateCopy(kCFAllocatorDefault, (__bridge CFStringRef)configuration.cacheDirectory);
        } else {
            c->cacheDirectory = NULL;
        }
        if (c->predefinedHttpHeaderValues) {
            CFRelease(c->predefinedHttpHeaderValues);
        }
        if (configuration.predefinedHttpHeaderValues) {
            c->predefinedHttpHeaderValues = CFDictionaryCreateCopy(kCFAllocatorDefault, (__bridge CFDictionaryRef)configuration.predefinedHttpHeaderValues);
        } else {
            c->predefinedHttpHeaderValues = NULL;
        }
        _private = [[FSAudioStreamPrivate alloc] init];
        _private.stream = self;
    }
    return self;
}

- (void)dealloc
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.dealloc needs to be called in the main thread");
    AudioStreamStateObserver *observer = [_private streamStateObserver];
    // Break the cyclic loop so that dealloc() may be called
    observer->priv = nil;
    _private.stream = nil;
    _private.delegate = nil;
    _private = nil;
}

#pragma mark - Stream properties

- (void)setUrl:(NSURL *)url
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setUrl needs to be called in the main thread");
    [_private setUrl:url];
}

- (NSURL*)url
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.url needs to be called in the main thread");
    return [_private url];
}

- (void)setStrictContentTypeChecking:(BOOL)strictContentTypeChecking
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setStrictContentTypeChecking needs to be called in the main thread");
    [_private setStrictContentTypeChecking:strictContentTypeChecking];
}

- (BOOL)strictContentTypeChecking
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.strictContentTypeChecking needs to be called in the main thread");
    return [_private strictContentTypeChecking];
}

- (NSURL*)outputFile
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.outputFile needs to be called in the main thread");
    return [_private outputFile];
}

- (void)setOutputFile:(NSURL *)outputFile
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setOutputFile needs to be called in the main thread");
    [_private setOutputFile:outputFile];
}

- (void)setDefaultContentType:(NSString *)defaultContentType
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setDefaultContentType needs to be called in the main thread");
    [_private setDefaultContentType:defaultContentType];
}

- (NSString*)defaultContentType
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.defaultContentType needs to be called in the main thread");
    return [_private defaultContentType];
}

- (NSString*)contentType
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.contentType needs to be called in the main thread");
    return [_private contentType];
}

- (NSString*)suggestedFileExtension
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.suggestedFileExtension needs to be called in the main thread");
    return [_private suggestedFileExtension];
}

- (UInt64)defaultContentLength
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.defaultContentLength needs to be called in the main thread");
    return [_private defaultContentLength];
}

- (UInt64)contentLength
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.contentLength needs to be called in the main thread");
    return [_private contentLength];
}

- (UInt64)audioDataByteCount
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.audioDataByteCount needs to be called in the main thread");
    return [_private audioDataByteCount];
}

#pragma mark - Playback control

- (void)preload
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.preload needs to be called in the main thread");
    [_private preload];
}

- (void)play
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.play needs to be called in the main thread");
    [_private play];
}

- (void)playFromURL:(NSURL*)url
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.playFromURL needs to be called in the main thread");
    [_private playFromURL:url];
}

- (void)playFromOffset:(FSSeekByteOffset)offset
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.playFromOffset needs to be called in the main thread");
    [_private playFromOffset:offset];
}

- (void)stop
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.stop needs to be called in the main thread");
    [_private stop];
}

- (void)pause
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.pause needs to be called in the main thread");
    [_private pause];
}

- (void)rewind:(unsigned int)seconds
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.rewind needs to be called in the main thread");
    [_private rewind:seconds];
}

- (void)seekToPosition:(FSStreamPosition)position
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.seekToPosition needs to be called in the main thread");
    if (!(position.position > 0)) {
        // To retain compatibility with older implementations,
        // fallback to using less accurate position.minute and position.second, if needed
        const float seekTime = position.minute * 60 + position.second;
        const float durationInSeconds = [_private durationInSeconds];
        if (!(durationInSeconds > 0)) {
            // A continuous stream reports a duration of 0 and cannot be
            // seeked; bail out instead of dividing by zero below, which
            // would produce an infinite/NaN seek offset.
            return;
        }
        position.position = seekTime / durationInSeconds;
    }
    [_private seekToOffset:position.position];
}

- (void)setPlayRate:(float)playRate
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setPlayRate needs to be called in the main thread");
    [_private setPlayRate:playRate];
}

- (BOOL)isPlaying
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.isPlaying needs to be called in the main thread");
    return [_private isPlaying];
}

- (void)expungeCache
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.expungeCache needs to be called in the main thread");
    [_private expungeCache];
}

#pragma mark - Playback state

- (NSUInteger)retryCount
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.retryCount needs to be called in the main thread");
    return _private.retryCount;
}

- (FSStreamStatistics *)statistics
{
    return _private.statistics;
}

- (FSLevelMeterState)levels
{
    return _private.levels;
}

// The current playback position, expressed both as a relative position
// (0..1 of the full duration) and as playback time (seconds, and the
// derived minute/second pair for convenience).
- (FSStreamPosition)currentTimePlayed
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.currentTimePlayed needs to be called in the main thread");
    FSStreamPosition pos;
    pos.position = 0;
    pos.playbackTimeInSeconds = [_private playbackPosition].timePlayed;
    pos.minute = 0;
    pos.second = 0;
    const float durationInSeconds = [_private durationInSeconds];
    if (durationInSeconds > 0) {
        // Use the duration fetched above; no need to query it twice.
        pos.position = pos.playbackTimeInSeconds / durationInSeconds;
    }
    // Extract the minutes and seconds for convenience
    if (pos.playbackTimeInSeconds > 0) {
        unsigned u = pos.playbackTimeInSeconds;
        unsigned s,m;
        s = u % 60;
        u /= 60;
        m = u;
        pos.minute = m;
        pos.second = s;
    }
    return pos;
}

// The total duration of the stream. All fields are 0 for a continuous
// stream, whose duration is unknown.
- (FSStreamPosition)duration
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.duration needs to be called in the main thread");
    FSStreamPosition pos;
    pos.minute = 0;
    pos.second = 0;
    pos.playbackTimeInSeconds = 0;
    pos.position = 0;
    const float durationInSeconds = [_private durationInSeconds];
    if (durationInSeconds > 0) {
        unsigned u = durationInSeconds;
        unsigned s,m;
        s = u % 60;
        u /= 60;
        m = u;
        pos.minute = m;
        pos.second = s;
    }
    pos.playbackTimeInSeconds = durationInSeconds;
    return pos;
}

- (FSSeekByteOffset)currentSeekByteOffset
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.currentSeekByteOffset needs to be called in the main thread");
    return _private.currentSeekByteOffset;
}

- (float)bitRate
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.bitRate needs to be called in the main thread");
    return _private.bitRate;
}

// A stream is considered continuous (a live stream) exactly when it does
// not report a positive duration.
- (BOOL)continuous
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.continuous needs to be called in the main thread");
    return !([_private durationInSeconds] > 0);
}

- (BOOL)cached
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.cached needs to be called in the main thread");
    return _private.cached;
}

- (size_t)prebufferedByteCount
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.prebufferedByteCount needs to be called in the main thread");
    return _private.prebufferedByteCount;
}

- (float)volume
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.volume needs to be called in the main thread");
    return [_private currentVolume];
}

- (unsigned long long)totalCachedObjectsSize
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.totalCachedObjectsSize needs to be called in the main thread");
    return [_private totalCachedObjectsSize];
}

- (void)setVolume:(float)volume
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setVolume needs to be called in the main thread");
    [_private setVolume:volume];
}

#pragma mark - Callbacks and delegation

- (void (^)())onCompletion
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.onCompletion needs to be called in the main thread");
    return _private.onCompletion;
}

- (void)setOnCompletion:(void (^)())onCompletion
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setOnCompletion needs to be called in the main thread");
    _private.onCompletion = onCompletion;
}

- (void (^)(FSAudioStreamState state))onStateChange
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.onStateChange needs to be called in the main thread");
    return _private.onStateChange;
}

- (void (^)(NSDictionary *metaData))onMetaDataAvailable
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.onMetaDataAvailable needs to be called in the main thread");
    return _private.onMetaDataAvailable;
}

- (void (^)(FSAudioStreamError error, NSString *errorDescription))onFailure
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.onFailure needs to be called in the main thread");
    return _private.onFailure;
}

- (void)setOnStateChange:(void (^)(FSAudioStreamState))onStateChange
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setOnStateChange needs to be called in the main thread");
    _private.onStateChange = onStateChange;
}

- (void)setOnMetaDataAvailable:(void (^)(NSDictionary *))onMetaDataAvailable
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setOnMetaDataAvailable needs to be called in the main thread");
    _private.onMetaDataAvailable = onMetaDataAvailable;
}

- (void)setOnFailure:(void (^)(FSAudioStreamError error, NSString *errorDescription))onFailure
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setOnFailure needs to be called in the main thread");
    _private.onFailure = onFailure;
}

- (FSStreamConfiguration *)configuration
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.configuration needs to be called in the main thread");
    return _private.configuration;
}

- (void)setDelegate:(id<FSPCMAudioStreamDelegate>)delegate
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setDelegate needs to be called in the main thread");
    _private.delegate = delegate;
}

- (id<FSPCMAudioStreamDelegate>)delegate
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.delegate needs to be called in the main thread");
    return _private.delegate;
}

-(NSString *)description
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.description needs to be called in the main thread");
    return [_private description];
}

-(NSUInteger)maxRetryCount
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.maxRetryCount needs to be called in the main thread");
    return [_private maxRetryCount];
}

-(void)setMaxRetryCount:(NSUInteger)maxRetryCount
{
    NSAssert([NSThread isMainThread], @"FSAudioStream.setMaxRetryCount needs to be called in the main thread");
    [_private setMaxRetryCount:maxRetryCount];
}

@end
/*
* ===============================================================
* AudioStreamStateObserver: listen to the state from the audio stream.
* ===============================================================
*/
// Maps a low-level stream error code onto the public FSAudioStreamError
// enumeration, invokes the onFailure callback, posts the error
// notification, and for recoverable error classes asks the private
// implementation to attempt a restart (unless only preloading).
void AudioStreamStateObserver::audioStreamErrorOccurred(int errorCode, CFStringRef errorDescription)
{
    FSAudioStreamError error = kFsAudioStreamErrorNone;
    NSString *errorForObjC = @"";
    if (errorDescription) {
        // Copy the CF string and hand ownership to ARC.
        errorForObjC = CFBridgingRelease(CFStringCreateCopy(kCFAllocatorDefault, errorDescription));
    }
    // The incoming codes are the same values as the FSAudioStreamError
    // constants; the switch narrows them to known values and adds
    // debug-only logging per error class.
    switch (errorCode) {
        case kFsAudioStreamErrorOpen:
            error = kFsAudioStreamErrorOpen;
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
            NSLog(@"FSAudioStream: Error opening the stream: %@ %@", errorForObjC, priv);
#endif
            break;
        case kFsAudioStreamErrorStreamParse:
            error = kFsAudioStreamErrorStreamParse;
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
            NSLog(@"FSAudioStream: Error parsing the stream: %@ %@", errorForObjC, priv);
#endif
            break;
        case kFsAudioStreamErrorNetwork:
            error = kFsAudioStreamErrorNetwork;
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
            NSLog(@"FSAudioStream: Network error: %@ %@", errorForObjC, priv);
#endif
            break;
        case kFsAudioStreamErrorUnsupportedFormat:
            error = kFsAudioStreamErrorUnsupportedFormat;
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
            NSLog(@"FSAudioStream: Unsupported format error: %@ %@", errorForObjC, priv);
#endif
            break;
        case kFsAudioStreamErrorStreamBouncing:
            error = kFsAudioStreamErrorStreamBouncing;
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
            NSLog(@"FSAudioStream: Stream bounced: %@ %@", errorForObjC, priv);
#endif
            break;
        case kFsAudioStreamErrorTerminated:
            error = kFsAudioStreamErrorTerminated;
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
            NSLog(@"FSAudioStream: Stream terminated: %@ %@", errorForObjC, priv);
#endif
            break;
        default:
            // Unknown code: leave error as kFsAudioStreamErrorNone.
            break;
    }
    if (priv.onFailure) {
        priv.onFailure(error, errorForObjC);
    }
    NSDictionary *userInfo = @{FSAudioStreamNotificationKey_Error: @(errorCode),
                               FSAudioStreamNotificationKey_ErrorDescription: errorForObjC,
                               FSAudioStreamNotificationKey_Stream: [NSValue valueWithPointer:source]};
    NSNotification *notification = [NSNotification notificationWithName:FSAudioStreamErrorNotification object:priv.stream userInfo:userInfo];
    [[NSNotificationCenter defaultCenter] postNotification:notification];
    // For these error classes a retry may succeed; leave parse/bouncing
    // errors alone, and never restart while merely preloading.
    if (error == kFsAudioStreamErrorNetwork ||
        error == kFsAudioStreamErrorUnsupportedFormat ||
        error == kFsAudioStreamErrorOpen ||
        error == kFsAudioStreamErrorTerminated) {
        if (!source->isPreloading()) {
            [priv attemptRestart];
        }
    }
}
// Translates a C++ stream state into the matching Objective-C notification
// selector on the private implementation and schedules it on the next
// run-loop iteration rather than invoking it synchronously.
void AudioStreamStateObserver::audioStreamStateChanged(astreamer::Audio_Stream::State state)
{
    SEL notificationHandler;
    switch (state) {
        case astreamer::Audio_Stream::STOPPED:
            notificationHandler = @selector(notifyPlaybackStopped);
            break;
        case astreamer::Audio_Stream::BUFFERING:
            notificationHandler = @selector(notifyPlaybackBuffering);
            break;
        case astreamer::Audio_Stream::PLAYING:
            // Playback has started; any background task held to keep the
            // app alive during startup is no longer needed.
            [priv endBackgroundTask];
            notificationHandler = @selector(notifyPlaybackPlaying);
            break;
        case astreamer::Audio_Stream::PAUSED:
            notificationHandler = @selector(notifyPlaybackPaused);
            break;
        case astreamer::Audio_Stream::SEEKING:
            notificationHandler = @selector(notifyPlaybackSeeking);
            break;
        case astreamer::Audio_Stream::END_OF_FILE:
            notificationHandler = @selector(notifyPlaybackEndOfFile);
            break;
        case astreamer::Audio_Stream::FAILED:
            [priv endBackgroundTask];
            notificationHandler = @selector(notifyPlaybackFailed);
            break;
        case astreamer::Audio_Stream::PLAYBACK_COMPLETED:
            notificationHandler = @selector(notifyPlaybackCompletion);
            break;
        default:
            // Unknown state
            notificationHandler = @selector(notifyPlaybackUnknownState);
            break;
    }
    // Detach from the player so that the event loop can complete its cycle.
    // This ensures that the stream gets closed, if needs to be.
    // A zero-interval, non-repeating timer defers the notification to the
    // next run-loop pass; it retains priv only until it fires once.
    [NSTimer scheduledTimerWithTimeInterval:0
                                     target:priv
                                   selector:notificationHandler
                                   userInfo:nil
                                    repeats:NO];
}
// Converts the C++ metadata map into an NSDictionary, invokes the
// onMetaDataAvailable callback, and posts the metadata notification.
void AudioStreamStateObserver::audioStreamMetaDataAvailable(std::map<CFStringRef,CFStringRef> metaData)
{
    NSMutableDictionary *metaDataDictionary = [[NSMutableDictionary alloc] init];
    for (std::map<CFStringRef,CFStringRef>::iterator iter = metaData.begin(); iter != metaData.end(); ++iter) {
        CFStringRef key = iter->first;
        CFStringRef value = iter->second;
        // NOTE(review): CFBridgingRelease transfers ownership of each key
        // and value to ARC — this assumes the stream hands over (+1)
        // references in the map; confirm against the Audio_Stream source.
        metaDataDictionary[CFBridgingRelease(key)] = CFBridgingRelease(value);
    }
    if (priv.onMetaDataAvailable) {
        priv.onMetaDataAvailable(metaDataDictionary);
    }
    NSDictionary *userInfo = @{FSAudioStreamNotificationKey_MetaData: metaDataDictionary,
                               FSAudioStreamNotificationKey_Stream: [NSValue valueWithPointer:source]};
    NSNotification *notification = [NSNotification notificationWithName:FSAudioStreamMetaDataNotification object:priv.stream userInfo:userInfo];
    [[NSNotificationCenter defaultCenter] postNotification:notification];
}
// Forwards decoded PCM samples to the optional FSPCMAudioStreamDelegate,
// guarding the optional protocol method with respondsToSelector:.
void AudioStreamStateObserver::samplesAvailable(AudioBufferList *samples, UInt32 frames, AudioStreamPacketDescription description)
{
    if ([priv.delegate respondsToSelector:@selector(audioStream:samplesAvailable:frames:description:)]) {
        [priv.delegate audioStream:priv.stream samplesAvailable:samples frames:frames description:description];
    }
}
// Notifies the private implementation that the stream's bitrate has
// been determined.
void AudioStreamStateObserver::bitrateAvailable()
{
    [priv bitrateAvailable];
}
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import <Foundation/Foundation.h>
/**
* Content type format.
*/
typedef NS_ENUM(NSInteger, FSFileFormat) {
/**
* Unknown format.
*/
kFSFileFormatUnknown = 0,
/**
* M3U playlist.
*/
kFSFileFormatM3UPlaylist,
/**
* PLS playlist.
*/
kFSFileFormatPLSPlaylist,
/**
* XML file.
*/
kFSFileFormatXML,
/**
* MP3 file.
*/
kFSFileFormatMP3,
/**
* WAVE file.
*/
kFSFileFormatWAVE,
/**
* AIFC file.
*/
kFSFileFormatAIFC,
/**
* AIFF file.
*/
kFSFileFormatAIFF,
/**
* M4A file.
*/
kFSFileFormatM4A,
/**
* MPEG4 file.
*/
kFSFileFormatMPEG4,
/**
* CAF file.
*/
kFSFileFormatCAF,
/**
* AAC_ADTS file.
*/
kFSFileFormatAAC_ADTS,
/**
* Total number of formats.
*/
kFSFileFormatCount
};
/**
* FSCheckContentTypeRequest is a class for checking the content type
* of a URL. It makes an HTTP HEAD request and parses the header information
* from the server. The resulting format is stored in the format property.
*
* To use the class, define the URL for checking the content type using
* the url property. Then, define the onCompletion and onFailure handlers.
* To start the request, use the start method.
*/
@interface FSCheckContentTypeRequest : NSObject <NSURLSessionDelegate> {
    NSURLSessionTask *_task;
    FSFileFormat _format;
    NSString *_contentType;
    BOOL _playlist;
    BOOL _xml;
}
/**
 * The URL of this request.
 */
@property (nonatomic,copy) NSURL *url;
/**
 * Called when the content type determination is completed.
 */
@property (copy) void (^onCompletion)(void);
/**
 * Called if the content type determination failed.
 */
@property (copy) void (^onFailure)(void);
/**
 * Contains the format of the URL upon completion of the request.
 */
@property (nonatomic,readonly) FSFileFormat format;
/**
 * Contains the content type of the URL upon completion of the request.
 */
@property (nonatomic,readonly) NSString *contentType;
/**
 * The property is true if the URL contains a playlist.
 */
@property (nonatomic,readonly) BOOL playlist;
/**
 * The property is true if the URL contains XML data.
 */
@property (nonatomic,readonly) BOOL xml;
/**
 * Starts the request.
 */
- (void)start;
/**
 * Cancels the request.
 */
- (void)cancel;
@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import "FSCheckContentTypeRequest.h"
@interface FSCheckContentTypeRequest ()
- (BOOL)guessContentTypeByUrl:(NSURLResponse *)response;
@end
@implementation FSCheckContentTypeRequest {
    // Set once the HEAD response has been processed and the completion
    // callback fired, so that the -URLSession:task:didCompleteWithError:
    // callback that follows the cancellation does not fire the callbacks
    // a second time or clobber the already-determined format.
    BOOL _handled;
}

- (id)init
{
    self = [super init];
    if (self) {
        _format = kFSFileFormatUnknown;
        _playlist = NO;
        _xml = NO;
        _handled = NO;
    }
    return self;
}

// Issues an HTTP HEAD request for self.url; the content type is resolved
// in the NSURLSession delegate callbacks below. A no-op if a request is
// already in flight.
- (void)start
{
    if (_task) {
        return;
    }
    _format = kFSFileFormatUnknown;
    _playlist = NO;
    _contentType = @"";
    _handled = NO;
    NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:_url
                                                           cachePolicy:NSURLRequestReloadIgnoringCacheData
                                                       timeoutInterval:10.0];
    [request setHTTPMethod:@"HEAD"];
    NSURLSession *session = [NSURLSession sessionWithConfiguration:[NSURLSessionConfiguration defaultSessionConfiguration]
                                                          delegate:self
                                                     delegateQueue:[NSOperationQueue mainQueue]];
    @synchronized (self) {
        _task = [session dataTaskWithRequest:request];
    }
    [_task resume];
    // A delegate-based NSURLSession holds a strong reference to its
    // delegate (self) until invalidated; let the outstanding task finish,
    // then release the session so this object does not leak.
    [session finishTasksAndInvalidate];
    if (!_task) {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
        NSLog(@"FSCheckContentTypeRequest: Unable to open connection for URL: %@", _url);
#endif
        // The block properties are optional; invoking a nil block crashes.
        if (self.onFailure) {
            self.onFailure();
        }
        return;
    }
}

// Cancels an in-flight request, if any.
- (void)cancel
{
    if (!_task) {
        return;
    }
    @synchronized (self) {
        [_task cancel];
        _task = nil;
    }
}

/*
 * =======================================
 * Properties
 * =======================================
 */

- (FSFileFormat)format
{
    return _format;
}

- (NSString *)contentType
{
    return _contentType;
}

- (BOOL)playlist
{
    return _playlist;
}

- (BOOL)xml
{
    return _xml;
}

/*
 * =======================================
 * NSURLSessionDelegate
 * =======================================
 */

// Resolves the format from the Content-Type header of a successful (2xx)
// response; otherwise falls back to guessing from the URL's file suffix.
- (void)URLSession:(NSURLSession *)session dataTask:(NSURLSessionDataTask *)dataTask
didReceiveResponse:(NSURLResponse *)response
 completionHandler:(void (^)(NSURLSessionResponseDisposition disposition))completionHandler {
    _contentType = response.MIMEType;
    _format = kFSFileFormatUnknown;
    _playlist = NO;
    NSInteger statusCode = ((NSHTTPURLResponse *)response).statusCode;
    if (statusCode >= 200 && statusCode <= 299) {
        // Only use the content type if the response indicated success (2xx)
        if ([_contentType isEqualToString:@"audio/mpeg"]) {
            _format = kFSFileFormatMP3;
        } else if ([_contentType isEqualToString:@"audio/x-wav"]) {
            _format = kFSFileFormatWAVE;
        } else if ([_contentType isEqualToString:@"audio/x-aifc"]) {
            _format = kFSFileFormatAIFC;
        } else if ([_contentType isEqualToString:@"audio/x-aiff"]) {
            _format = kFSFileFormatAIFF;
        } else if ([_contentType isEqualToString:@"audio/x-m4a"]) {
            _format = kFSFileFormatM4A;
        } else if ([_contentType isEqualToString:@"audio/mp4"]) {
            _format = kFSFileFormatMPEG4;
        } else if ([_contentType isEqualToString:@"audio/x-caf"]) {
            _format = kFSFileFormatCAF;
        } else if ([_contentType isEqualToString:@"audio/aac"] ||
                   [_contentType isEqualToString:@"audio/aacp"]) {
            _format = kFSFileFormatAAC_ADTS;
        } else if ([_contentType isEqualToString:@"audio/x-mpegurl"] ||
                   [_contentType isEqualToString:@"application/x-mpegurl"]) {
            _format = kFSFileFormatM3UPlaylist;
            _playlist = YES;
        } else if ([_contentType isEqualToString:@"audio/x-scpls"] ||
                   [_contentType isEqualToString:@"application/pls+xml"]) {
            _format = kFSFileFormatPLSPlaylist;
            _playlist = YES;
        } else if ([_contentType isEqualToString:@"text/xml"] ||
                   [_contentType isEqualToString:@"application/xml"]) {
            _format = kFSFileFormatXML;
            _xml = YES;
        } else {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
            NSLog(@"FSCheckContentTypeRequest: Cannot resolve %@, guessing the content type by URL: %@", _contentType, _url);
#endif
            [self guessContentTypeByUrl:response];
        }
    } else {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
        NSLog(@"FSCheckContentTypeRequest: Invalid HTTP status code received %li, guessing the content type by URL: %@", (long)statusCode, _url);
#endif
        [self guessContentTypeByUrl:response];
    }
    _handled = YES;
    _task = nil;
    if (self.onCompletion) {
        self.onCompletion();
    }
    // The headers are all we need. NSURLSession requires this handler to be
    // called exactly once; cancel the task instead of receiving the body.
    completionHandler(NSURLSessionResponseCancel);
}

- (void)URLSession:(NSURLSession *)session task:(NSURLSessionTask *)task
didCompleteWithError:(nullable NSError *)error {
    if (_handled) {
        // The response was already processed and the callbacks fired; this
        // is merely the completion of the cancelled task.
        return;
    }
    @synchronized (self) {
        _task = nil;
        _format = kFSFileFormatUnknown;
        _playlist = NO;
    }
    // Still, try if we could resolve the content type by the URL
    if ([self guessContentTypeByUrl:nil]) {
        if (self.onCompletion) {
            self.onCompletion();
        }
    } else {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
        NSLog(@"FSCheckContentTypeRequest: Unable to determine content-type for the URL: %@, error %@", _url, [error localizedDescription]);
#endif
        if (self.onFailure) {
            self.onFailure();
        }
    }
}

/*
 * =======================================
 * Private
 * =======================================
 */

// Last-resort content-type resolution: inspect the file suffix of the
// response URL (or self.url when no response is available). Returns YES
// if a format could be determined.
- (BOOL)guessContentTypeByUrl:(NSURLResponse *)response
{
    /* The server did not provide meaningful content type;
       last resort: check the file suffix, if there is one */
    NSString *absoluteUrl;
    if (response) {
        absoluteUrl = [response.URL absoluteString];
    } else {
        absoluteUrl = [_url absoluteString];
    }
    if ([absoluteUrl hasSuffix:@".mp3"]) {
        _format = kFSFileFormatMP3;
    } else if ([absoluteUrl hasSuffix:@".mp4"]) {
        _format = kFSFileFormatMPEG4;
    } else if ([absoluteUrl hasSuffix:@".m3u"]) {
        _format = kFSFileFormatM3UPlaylist;
        _playlist = YES;
    } else if ([absoluteUrl hasSuffix:@".pls"]) {
        _format = kFSFileFormatPLSPlaylist;
        _playlist = YES;
    } else if ([absoluteUrl hasSuffix:@".xml"]) {
        _format = kFSFileFormatXML;
        _xml = YES;
    } else {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
        NSLog(@"FSCheckContentTypeRequest: Failed to determine content type from the URL: %@", _url);
#endif
        /*
         * Failed to guess the content type based on the URL.
         */
        return NO;
    }
    /*
     * We have determined a content-type.
     */
    return YES;
}

@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import <Foundation/Foundation.h>
/**
* The playlist format.
*/
typedef NS_ENUM(NSInteger, FSPlaylistFormat) {
/**
* Unknown playlist format.
*/
kFSPlaylistFormatNone,
/**
* M3U playlist.
*/
kFSPlaylistFormatM3U,
/**
* PLS playlist.
*/
kFSPlaylistFormatPLS
};
/**
* FSParsePlaylistRequest is a class for parsing a playlist. It supports
* the M3U and PLS formats.
*
* To use the class, define the URL for retrieving the playlist using
* the url property. Then, define the onCompletion and onFailure handlers.
* To start the request, use the start method.
*/
@interface FSParsePlaylistRequest : NSObject<NSURLSessionDelegate> {
NSURLSessionTask *_task;
NSInteger _httpStatus;
NSMutableData *_receivedData;
NSMutableArray *_playlistItems;
FSPlaylistFormat _format;
}
/**
* The URL of this request.
*/
@property (nonatomic,copy) NSURL *url;
/**
* Called when the playlist parsing is completed.
*/
@property (copy) void (^onCompletion)(void);
/**
* Called if the playlist parsing failed.
*/
@property (copy) void (^onFailure)(void);
/**
* The playlist items stored in the FSPlaylistItem class.
*/
@property (readonly) NSMutableArray *playlistItems;
/**
* Starts the request.
*/
- (void)start;
/**
* Cancels the request.
*/
- (void)cancel;
@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import "FSParsePlaylistRequest.h"
#import "FSPlaylistItem.h"
@interface FSParsePlaylistRequest ()
- (void)parsePlaylistFromData:(NSData *)data;
- (void)parsePlaylistM3U:(NSString *)playlist;
- (void)parsePlaylistPLS:(NSString *)playlist;
- (NSURL *)parseLocalFileUrl:(NSString *)fileUrl;
@property (readonly) FSPlaylistFormat format;
@end
@implementation FSParsePlaylistRequest
// Designated initializer; no state to set up beyond NSObject's.
- (id)init
{
    self = [super init];
    return self;
}
// Fetches the playlist at self.url; parsing happens in the NSURLSession
// delegate callbacks. A no-op if a request is already in flight.
- (void)start
{
    if (_task) {
        return;
    }
    NSURLRequest *request = [NSURLRequest requestWithURL:self.url
                                             cachePolicy:NSURLRequestUseProtocolCachePolicy
                                         timeoutInterval:10.0];
    NSURLSession *session = [NSURLSession sessionWithConfiguration:[NSURLSessionConfiguration defaultSessionConfiguration]
                                                          delegate:self
                                                     delegateQueue:[NSOperationQueue mainQueue]];
    @synchronized (self) {
        _receivedData = [NSMutableData data];
        _task = [session dataTaskWithRequest:request];
        _playlistItems = [[NSMutableArray alloc] init];
        _format = kFSPlaylistFormatNone;
    }
    [_task resume];
    // A delegate-based NSURLSession holds a strong reference to its
    // delegate (self) until invalidated; without this the request object
    // would leak on every -start. Outstanding tasks still complete and
    // their delegate callbacks are still delivered before invalidation.
    [session finishTasksAndInvalidate];
}
// Cancels an in-flight request, if any.
- (void)cancel
{
    if (_task == nil) {
        return;
    }
    @synchronized (self) {
        NSURLSessionTask *pending = _task;
        _task = nil;
        [pending cancel];
    }
}
/*
* =======================================
* Properties
* =======================================
*/
// Returns a defensive copy so callers cannot mutate internal parse state.
// The property is declared NSMutableArray, so the copy must itself be
// mutable: plain -copy would hand back an immutable NSArray and any
// caller mutation would raise at runtime.
- (NSMutableArray *)playlistItems
{
    return [_playlistItems mutableCopy];
}
// The playlist format detected from the HTTP response
// (kFSPlaylistFormatNone until a response has been received).
- (FSPlaylistFormat)format
{
    return _format;
}
/*
* =======================================
* Private
* =======================================
*/
// Decodes the downloaded playlist bytes and dispatches to the parser for
// the detected format, falling back to the other format if no items were
// found (servers sometimes misreport the playlist type). Invokes the
// onFailure callback if no items could be parsed at all.
- (void)parsePlaylistFromData:(NSData *)data
{
    // NOTE(review): non-ASCII bytes make this initializer return nil, in
    // which case both parsers are sent to nil and onFailure fires below.
    NSString *playlistData = [[NSString alloc] initWithData:data encoding:NSASCIIStringEncoding];
    if (_format == kFSPlaylistFormatM3U) {
        [self parsePlaylistM3U:playlistData];
        if ([_playlistItems count] == 0) {
            // If we failed to grab any playlist items, still try
            // to parse it in another format; perhaps the server
            // mistakingly identified the playlist format
            [self parsePlaylistPLS:playlistData];
        }
    } else if (_format == kFSPlaylistFormatPLS) {
        [self parsePlaylistPLS:playlistData];
        if ([_playlistItems count] == 0) {
            // If we failed to grab any playlist items, still try
            // to parse it in another format; perhaps the server
            // mistakingly identified the playlist format
            [self parsePlaylistM3U:playlistData];
        }
    }
    if ([_playlistItems count] == 0) {
        /*
         * Fail if we failed to parse any items from the playlist.
         * The callback is optional; invoking a nil block crashes.
         */
        if (self.onFailure) {
            self.onFailure();
        }
    }
}
// Parses an M3U playlist: every non-comment line carrying an http(s) or
// file URL becomes an FSPlaylistItem. Replaces any previously parsed items.
- (void)parsePlaylistM3U:(NSString *)playlist
{
    [_playlistItems removeAllObjects];
    NSCharacterSet *trimSet = [NSCharacterSet whitespaceAndNewlineCharacterSet];
    for (NSString *entry in [playlist componentsSeparatedByString:@"\n"]) {
        if ([entry hasPrefix:@"#"]) {
            // Extended-info/comment line: carries no stream URL.
            continue;
        }
        BOOL remote = [entry hasPrefix:@"http://"] || [entry hasPrefix:@"https://"];
        if (remote) {
            FSPlaylistItem *item = [[FSPlaylistItem alloc] init];
            item.url = [NSURL URLWithString:[entry stringByTrimmingCharactersInSet:trimSet]];
            [_playlistItems addObject:item];
        } else if ([entry hasPrefix:@"file://"]) {
            // NOTE(review): file URLs are passed through untrimmed — a
            // trailing \r from a CRLF playlist would survive; confirm.
            FSPlaylistItem *item = [[FSPlaylistItem alloc] init];
            item.url = [self parseLocalFileUrl:entry];
            [_playlistItems addObject:item];
        }
    }
}
// Parses a PLS playlist (INI-like "[playlist]" header followed by
// key=value lines) into FSPlaylistItems. Replaces any previously parsed
// items; returns silently (leaving _playlistItems empty) on malformed input.
- (void)parsePlaylistPLS:(NSString *)playlist
{
    [_playlistItems removeAllObjects];
    // First pass: collect all key/value pairs (keys lowercased).
    NSMutableDictionary *props = [[NSMutableDictionary alloc] init];
    size_t i = 0;
    for (NSString *rawLine in [playlist componentsSeparatedByString:@"\n"]) {
        NSString *line = [rawLine stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]];
        if (i == 0) {
            if ([[line lowercaseString] hasPrefix:@"[playlist]"]) {
                i++;
                continue;
            } else {
                // Invalid playlist; the first line should indicate that this is a playlist
                return;
            }
        }
        // Ignore empty lines
        if ([line length] == 0) {
            i++;
            continue;
        }
        // Not an empty line; so expect that this is a key/value pair
        NSRange r = [line rangeOfString:@"="];
        // Invalid format, key/value pair not found
        if (r.length == 0) {
            return;
        }
        NSString *key = [[line substringToIndex:r.location] lowercaseString];
        NSString *value = [line substringFromIndex:r.location + 1];
        props[key] = value;
        i++;
    }
    // Second pass: build one item per numbered File/Title entry
    // (PLS entries are 1-based, hence the (i+1) in the key names).
    NSInteger numItems = [[props valueForKey:@"numberofentries"] integerValue];
    if (numItems == 0) {
        // Invalid playlist; number of playlist items not defined
        return;
    }
    for (i=0; i < numItems; i++) {
        FSPlaylistItem *item = [[FSPlaylistItem alloc] init];
        NSString *title = [props valueForKey:[NSString stringWithFormat:@"title%lu", (i+1)]];
        item.title = title;
        NSString *file = [props valueForKey:[NSString stringWithFormat:@"file%lu", (i+1)]];
        if ([file hasPrefix:@"http://"] ||
            [file hasPrefix:@"https://"]) {
            item.url = [NSURL URLWithString:file];
            [_playlistItems addObject:item];
        } else if ([file hasPrefix:@"file://"]) {
            item.url = [self parseLocalFileUrl:file];
            [_playlistItems addObject:item];
        }
    }
}
/// Resolves a "file://name.ext" reference to a resource in the main bundle.
/// Returns nil when the reference has no suffix or the resource is missing.
- (NSURL *)parseLocalFileUrl:(NSString *)fileUrl
{
    // Resolve the local bundle URL
    NSString *path = [fileUrl substringFromIndex:7]; // strip "file://"
    NSRange range = [path rangeOfString:@"." options:NSBackwardsSearch];
    if (range.location == NSNotFound) {
        // No suffix: substringWithRange: with NSNotFound would throw.
        return nil;
    }
    NSString *fileName = [path substringToIndex:range.location];
    NSString *suffix = [path substringFromIndex:range.location + 1];
    NSString *resourcePath = [[NSBundle mainBundle] pathForResource:fileName ofType:suffix];
    if (!resourcePath) {
        // Resource not in the bundle: fileURLWithPath: raises on a nil path.
        return nil;
    }
    return [NSURL fileURLWithPath:resourcePath];
}
/*
* =======================================
* NSURLSessionDelegate
* =======================================
*/
/// Inspects the HTTP response to decide the playlist format (from the
/// Content-Type header, falling back to the URL suffix for text/plain),
/// then allows or cancels the data task accordingly.
- (void)URLSession:(NSURLSession *)session dataTask:(NSURLSessionDataTask *)dataTask
didReceiveResponse:(NSURLResponse *)response
 completionHandler:(void (^)(NSURLSessionResponseDisposition disposition))completionHandler {
    NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response;
    _httpStatus = [httpResponse statusCode];
    NSString *contentType = response.MIMEType;
    NSString *absoluteUrl = [response.URL absoluteString];
    _format = kFSPlaylistFormatNone;
    if ([contentType isEqualToString:@"audio/x-mpegurl"] ||
        [contentType isEqualToString:@"application/x-mpegurl"]) {
        _format = kFSPlaylistFormatM3U;
    } else if ([contentType isEqualToString:@"audio/x-scpls"] ||
               [contentType isEqualToString:@"application/pls+xml"]) {
        _format = kFSPlaylistFormatPLS;
    } else if ([contentType isEqualToString:@"text/plain"]) {
        /* The server did not provide meaningful content type;
           last resort: check the file suffix, if there is one */
        if ([absoluteUrl hasSuffix:@".m3u"]) {
            _format = kFSPlaylistFormatM3U;
        } else if ([absoluteUrl hasSuffix:@".pls"]) {
            _format = kFSPlaylistFormatPLS;
        }
    }
    if (_format == kFSPlaylistFormatNone) {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
        NSLog(@"FSParsePlaylistRequest: Unable to determine the type of the playlist for URL: %@", _url);
#endif
        self.onFailure();
        // NSURLSession requires the completion handler to be called exactly
        // once for every didReceiveResponse callback; previously the failure
        // path never called it, leaking a suspended task.
        completionHandler(NSURLSessionResponseCancel);
    } else {
        completionHandler(NSURLSessionResponseAllow);
    }
    [_receivedData setLength:0];
}
/// Called when the session converts the data task into a download task
/// (e.g. after a delegate chose NSURLSessionResponseBecomeDownload).
- (void)URLSession:(NSURLSession *)session dataTask:(NSURLSessionDataTask *)dataTask didBecomeDownloadTask:(NSURLSessionDownloadTask *)downloadTask
{
    // Resume the Download Task manually because apparently iOS does not do it automatically?!
    [downloadTask resume];
}
/// Accumulates response bytes; the complete payload is parsed later in
/// -URLSession:task:didCompleteWithError:.
- (void)URLSession:(NSURLSession *)session
          dataTask:(NSURLSessionDataTask *)dataTask
    didReceiveData:(NSData *)data {
    [_receivedData appendData:data];
}
/// Final delegate callback for the playlist request. On transport error or
/// non-200 status it reports failure; otherwise it parses the accumulated
/// bytes and reports completion.
- (void)URLSession:(NSURLSession *)session task:(NSURLSessionTask *)task
didCompleteWithError:(nullable NSError *)error {
    if(error) {
        @synchronized (self) {
            // Drop the task and any partial data so the request can be retried.
            _task = nil;
            _receivedData = nil;
        }
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
        NSLog(@"FSParsePlaylistRequest: Connection failed for URL: %@, error %@", _url, [error localizedDescription]);
#endif
        self.onFailure();
    } else {
        @synchronized (self) {
            _task = nil;
        }
        if (_httpStatus != 200) {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
            NSLog(@"FSParsePlaylistRequest: Unable to receive playlist from URL: %@", _url);
#endif
            self.onFailure();
            return;
        }
        // NOTE(review): parsePlaylistFromData itself calls onFailure when no
        // items could be parsed (see its tail above), yet onCompletion still
        // fires below — confirm callers tolerate both callbacks firing.
        [self parsePlaylistFromData:_receivedData];
        self.onCompletion();
    }
}
@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import "FSXMLHttpRequest.h"
/**
 * Use this request for retrieving the contents for a podcast RSS feed.
 * Upon request completion, the resulting playlist items are
 * in the playlistItems property.
 *
 * See the FSXMLHttpRequest class how to form a request to retrieve
 * the RSS feed.
 */
@interface FSParseRssPodcastFeedRequest : FSXMLHttpRequest {
    NSMutableArray *_playlistItems;
}
/**
 * The playlist items stored in the FSPlaylistItem class.
 * Empty until the request has completed successfully.
 */
@property (readonly) NSMutableArray *playlistItems;
@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import <libxml/parser.h>
#import <libxml/xpath.h>
#import "FSParseRssPodcastFeedRequest.h"
#import "FSPlaylistItem.h"
static NSString *const kXPathQueryItems = @"/rss/channel/item";
/// Private parsing helpers, implemented below.
@interface FSParseRssPodcastFeedRequest ()
- (NSURL *)parseLocalFileUrl:(NSString *)fileUrl;
- (void)parseItems:(xmlNodePtr)node;
@end
@implementation FSParseRssPodcastFeedRequest

/// Resolves a "file://name.ext" reference to a resource in the main bundle.
/// Returns nil when the reference has no suffix or the resource is missing.
- (NSURL *)parseLocalFileUrl:(NSString *)fileUrl
{
    // Resolve the local bundle URL
    NSString *path = [fileUrl substringFromIndex:7]; // strip "file://"
    NSRange range = [path rangeOfString:@"." options:NSBackwardsSearch];
    if (range.location == NSNotFound) {
        // No suffix: substringWithRange: with NSNotFound would throw.
        return nil;
    }
    NSString *fileName = [path substringToIndex:range.location];
    NSString *suffix = [path substringFromIndex:range.location + 1];
    NSString *resourcePath = [[NSBundle mainBundle] pathForResource:fileName ofType:suffix];
    if (!resourcePath) {
        // Resource not in the bundle: fileURLWithPath: raises on a nil path.
        return nil;
    }
    return [NSURL fileURLWithPath:resourcePath];
}

/// Builds one FSPlaylistItem from an RSS <item> node, reading the <title>
/// text, the <enclosure url="..."> attribute, and the <link> text. Items
/// without any URL are skipped.
- (void)parseItems:(xmlNodePtr)node
{
    FSPlaylistItem *item = [[FSPlaylistItem alloc] init];
    for (xmlNodePtr n = node->children; n != NULL; n = n->next) {
        NSString *nodeName = @((const char *)n->name);
        if ([nodeName isEqualToString:@"title"]) {
            item.title = [self contentForNode:n];
        } else if ([nodeName isEqualToString:@"enclosure"]) {
            NSString *url = [self contentForNodeAttribute:n attribute:"url"];
            if ([url hasPrefix:@"file://"]) {
                item.url = [self parseLocalFileUrl:url];
            } else {
                item.url = [NSURL URLWithString:url];
            }
        } else if ([nodeName isEqualToString:@"link"]) {
            NSString *url = [self contentForNode:n];
            if ([url hasPrefix:@"file://"]) {
                item.originatingUrl = [self parseLocalFileUrl:url];
            } else {
                item.originatingUrl = [NSURL URLWithString:url];
            }
        }
    }
    if (nil == item.url &&
        nil == item.originatingUrl) {
        // Not a valid item, as there is no URL. Skip.
        return;
    }
    [_playlistItems addObject:item];
}

/// FSXMLHttpRequest hook: configures the date formatter for RSS pubDates
/// and runs the item XPath query (which calls back into parseXMLNode:).
- (void)parseResponseData
{
    if (!_playlistItems) {
        _playlistItems = [[NSMutableArray alloc] init];
    }
    [_playlistItems removeAllObjects];
    // RSS feed publication date format:
    // Sun, 22 Jul 2012 17:35:05 GMT
    // NOTE(review): the pattern uses "MMMM" (full month name) while the
    // sample above carries an abbreviated month ("Jul") — confirm parsing
    // works for typical feeds; "MMM" may be the intended pattern.
    [_dateFormatter setDateFormat:@"EEE, dd MMMM yyyy HH:mm:ss V"];
    [_dateFormatter setLocale:[[NSLocale alloc] initWithLocaleIdentifier:@"en_GB"]];
    [self performXPathQuery:kXPathQueryItems];
}

/// FSXMLHttpRequest hook: one call per node matched by the XPath query.
- (void)parseXMLNode:(xmlNodePtr)node xPathQuery:(NSString *)xPathQuery
{
    if ([xPathQuery isEqualToString:kXPathQueryItems]) {
        [self parseItems:node];
    }
}

- (NSArray *)playlistItems
{
    return _playlistItems;
}

@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import <Foundation/Foundation.h>
/**
* A playlist item. Each item has a title and url.
*/
/**
 * A playlist item. Each item has a title and url.
 * Two items compare equal when both their title and url match
 * (see -isEqual: in the implementation).
 */
@interface FSPlaylistItem : NSObject {
}
/**
 * The title of the playlist item.
 */
@property (nonatomic,copy) NSString *title;
/**
 * The URL of the playlist item.
 */
@property (nonatomic,copy) NSURL *url;
/**
 * The originating URL of the playlist item.
 */
@property (nonatomic,copy) NSURL *originatingUrl;
/**
 * The number of bytes of audio data. Notice that this may differ
 * from the number of bytes the server returns for the content length!
 * For instance audio file meta data is excluded from the count.
 * Effectively you can use this property for seeking calculations.
 *
 * The property is only available for non-continuous streams which
 * have been in the "playing" state.
 */
@property (nonatomic,assign) UInt64 audioDataByteCount;
@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import "FSPlaylistItem.h"
@implementation FSPlaylistItem

/// Two items are equal when both title and url match (nil matching nil).
/// Previously any object was blindly treated as an FSPlaylistItem and two
/// items with nil titles never compared equal (messaging nil returns NO).
- (BOOL)isEqual:(id)anObject
{
    if (anObject == self) {
        return YES;
    }
    if (![anObject isKindOfClass:[FSPlaylistItem class]]) {
        return NO;
    }
    FSPlaylistItem *otherObject = anObject;
    BOOL sameTitle = (otherObject.title == self.title) || [otherObject.title isEqual:self.title];
    BOOL sameUrl = (otherObject.url == self.url) || [otherObject.url isEqual:self.url];
    return sameTitle && sameUrl;
}

/// isEqual:/hash contract: equal objects must return the same hash,
/// otherwise the items misbehave in NSSet/NSDictionary.
- (NSUInteger)hash
{
    return [self.title hash] ^ [self.url hash];
}

@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import <Foundation/Foundation.h>
// Forward declarations so this header does not pull in libxml.
typedef struct _xmlDoc xmlDoc;
typedef xmlDoc *xmlDocPtr;
typedef struct _xmlNode xmlNode;
typedef xmlNode *xmlNodePtr;
/**
 * XML HTTP request error status.
 */
typedef NS_ENUM(NSInteger, FSXMLHttpRequestError) {
    /**
     * No error.
     */
    FSXMLHttpRequestError_NoError = 0,
    /**
     * Connection failed.
     */
    FSXMLHttpRequestError_Connection_Failed,
    /**
     * Invalid HTTP status.
     */
    FSXMLHttpRequestError_Invalid_Http_Status,
    /**
     * XML parser failed.
     */
    FSXMLHttpRequestError_XML_Parser_Failed
};
/**
 * FSXMLHttpRequest is a class for retrieving data in the XML
 * format over a HTTP or HTTPS connection. It provides
 * the necessary foundation for parsing the retrieved XML data.
 * This class is not meant to be used directly but subclassed
 * to a specific requests.
 *
 * The usage pattern is the following:
 *
 * 1. Specify the URL with the url property.
 * 2. Define the onCompletion and onFailure handlers.
 * 3. Call the start method.
 */
@interface FSXMLHttpRequest : NSObject {
    NSURLSessionTask *_task;
    xmlDocPtr _xmlDocument;
    NSDateFormatter *_dateFormatter;
}
/**
 * The URL of the request.
 */
@property (nonatomic,copy) NSURL *url;
/**
 * Called upon completion of the request. Invoked on the main queue.
 */
@property (copy) void (^onCompletion)(void);
/**
 * Called upon a failure. Invoked on the main queue.
 */
@property (copy) void (^onFailure)(void);
/**
 * If the request fails, contains the latest error status.
 */
@property (readonly) FSXMLHttpRequestError lastError;
/**
 * Starts the request.
 */
- (void)start;
/**
 * Cancels the request.
 */
- (void)cancel;
/**
 * Performs an XPath query on the parsed XML data.
 * Yields a parseXMLNode method call, which must be
 * defined in the subclasses.
 *
 * @param query The XPath query to be performed.
 */
- (NSArray *)performXPathQuery:(NSString *)query;
/**
 * Retrieves content for the given XML node.
 *
 * @param node The node for content retrieval.
 */
- (NSString *)contentForNode:(xmlNodePtr)node;
/**
 * Retrieves content for the given XML node attribute.
 *
 * @param node The node for content retrieval.
 * @param attr The attribute from which the content is retrieved.
 */
- (NSString *)contentForNodeAttribute:(xmlNodePtr)node attribute:(const char *)attr;
/**
 * Retrieves date from the given XML node.
 *
 * @param node The node for retrieving the date.
 */
- (NSDate *)dateFromNode:(xmlNodePtr)node;
@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#import "FSXMLHttpRequest.h"
#import <libxml/parser.h>
#import <libxml/xpath.h>
#define DATE_COMPONENTS (NSYearCalendarUnit| NSMonthCalendarUnit | NSDayCalendarUnit | NSWeekCalendarUnit | NSHourCalendarUnit | NSMinuteCalendarUnit | NSSecondCalendarUnit | NSWeekdayCalendarUnit | NSWeekdayOrdinalCalendarUnit)
#define CURRENT_CALENDAR [NSCalendar currentCalendar]
@interface FSXMLHttpRequest (PrivateMethods)
/// Sniffs the charset from the payload's first bytes. The declaration now
/// matches the -detectEncoding: implementation and call site below; the
/// previous declaration was missing the NSData argument (stale selector).
- (const char *)detectEncoding:(NSData *)receivedData;
- (void)parseResponseData;
- (void)parseXMLNode:(xmlNodePtr)node xPathQuery:(NSString *)xPathQuery;
@end
@implementation FSXMLHttpRequest
/// Designated initializer; prepares the date formatter reused by
/// -dateFromNode:. Returns instancetype (idiomatic; `id` loses type
/// information for subclasses and Swift interop).
- (instancetype)init
{
    self = [super init];
    if (self) {
        _dateFormatter = [[NSDateFormatter alloc] init];
    }
    return self;
}
/// Starts the request: fetches the URL, parses the body with libxml, then
/// calls parseResponseData and the onCompletion/onFailure handlers on the
/// main queue. A no-op while a task is already in flight.
- (void)start
{
    if (_task) {
        return;
    }
    _lastError = FSXMLHttpRequestError_NoError;
    NSURLRequest *request = [NSURLRequest requestWithURL:self.url
                                             cachePolicy:NSURLRequestUseProtocolCachePolicy
                                         timeoutInterval:10.0];
    NSURLSession *session = [NSURLSession sharedSession];
    __weak FSXMLHttpRequest *weakSelf = self;
    @synchronized (self) {
        _task = [session dataTaskWithRequest:request
                           completionHandler:
                 ^(NSData *data, NSURLResponse *response, NSError *error) {
                     FSXMLHttpRequest *strongSelf = weakSelf;
                     if (!strongSelf) {
                         // The request was deallocated while in flight.
                         // Without this guard, strongSelf->_lastError below
                         // dereferenced NULL.
                         return;
                     }
                     NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *) response;
                     if(error) {
                         strongSelf->_lastError = FSXMLHttpRequestError_Connection_Failed;
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
                         NSLog(@"FSXMLHttpRequest: Request failed for URL: %@, error %@", strongSelf.url, [error localizedDescription]);
#endif
                         dispatch_async(dispatch_get_main_queue(), ^(){
                             strongSelf.onFailure();
                         });
                     } else {
                         if (httpResponse.statusCode != 200) {
                             strongSelf->_lastError = FSXMLHttpRequestError_Invalid_Http_Status;
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
                             NSLog(@"FSXMLHttpRequest: Unable to receive content for URL: %@", strongSelf.url);
#endif
                             dispatch_async(dispatch_get_main_queue(), ^(){
                                 strongSelf.onFailure();
                             });
                             return;
                         }
                         // Use strongSelf here: the original captured self
                         // strongly, defeating the weak/strong dance above.
                         const char *encoding = [strongSelf detectEncoding:data];
                         strongSelf->_xmlDocument = xmlReadMemory([data bytes],
                                                                  (int)[data length],
                                                                  "",
                                                                  encoding,
                                                                  0);
                         if (!strongSelf->_xmlDocument) {
                             strongSelf->_lastError = FSXMLHttpRequestError_XML_Parser_Failed;
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
                             NSLog(@"FSXMLHttpRequest: Unable to parse the content for URL: %@", strongSelf.url);
#endif
                             dispatch_async(dispatch_get_main_queue(), ^(){
                                 strongSelf.onFailure();
                             });
                             return;
                         }
                         [strongSelf parseResponseData];
                         xmlFreeDoc(strongSelf->_xmlDocument);
                         strongSelf->_xmlDocument = nil;
                         dispatch_async(dispatch_get_main_queue(), ^(){
                             strongSelf.onCompletion();
                         });
                     }
                 }];
    }
    [_task resume];
}
/// Cancels the in-flight task, if any, and clears the reference so a new
/// request can be started.
- (void)cancel
{
    if (_task == nil) {
        return;
    }
    @synchronized (self) {
        NSURLSessionTask *task = _task;
        _task = nil;
        [task cancel];
    }
}
/*
* =======================================
* XML handling
* =======================================
*/
/// Evaluates an XPath query over the parsed document, invoking
/// parseXMLNode:xPathQuery: (implemented by subclasses) once per match.
/// Note: the returned array is never appended to in this implementation,
/// so callers always receive an empty array; results are delivered via the
/// parseXMLNode callbacks instead.
- (NSArray *)performXPathQuery:(NSString *)query
{
    NSMutableArray *resultNodes = [NSMutableArray array];
    xmlXPathContextPtr xpathCtx = NULL;
    xmlXPathObjectPtr xpathObj = NULL;
    xpathCtx = xmlXPathNewContext(_xmlDocument);
    if (xpathCtx == NULL) {
        goto cleanup;
    }
    xpathObj = xmlXPathEvalExpression((xmlChar *)[query cStringUsingEncoding:NSUTF8StringEncoding], xpathCtx);
    if (xpathObj == NULL) {
        goto cleanup;
    }
    xmlNodeSetPtr nodes = xpathObj->nodesetval;
    if (!nodes) {
        goto cleanup;
    }
    for (size_t i = 0; i < nodes->nodeNr; i++) {
        [self parseXMLNode:nodes->nodeTab[i] xPathQuery:query];
    }
cleanup:
    // goto-based cleanup keeps a single free path for the libxml objects.
    if (xpathObj) {
        xmlXPathFreeObject(xpathObj);
    }
    if (xpathCtx) {
        xmlXPathFreeContext(xpathCtx);
    }
    return resultNodes;
}
/// Returns the text content of a node. A NULL node yields an empty string;
/// a node without content yields nil (ARC zero-initializes the local).
- (NSString *)contentForNode:(xmlNodePtr)node
{
    if (!node) {
        return [[NSString alloc] init];
    }
    xmlChar *content = xmlNodeGetContent(node);
    if (!content) {
        return nil;
    }
    NSString *result = @((const char *)content);
    xmlFree(content);
    return result;
}
/// Returns the value of an attribute on a node. A NULL node yields an empty
/// string; a missing attribute yields nil (ARC zero-initializes the local).
- (NSString *)contentForNodeAttribute:(xmlNodePtr)node attribute:(const char *)attr
{
    if (!node) {
        return [[NSString alloc] init];
    }
    xmlChar *content = xmlGetProp(node, (const xmlChar *)attr);
    if (!content) {
        return nil;
    }
    NSString *result = @((const char *)content);
    xmlFree(content);
    return result;
}
/*
* =======================================
* Helpers
* =======================================
*/
/// Sniffs the charset declaration from the first bytes of the payload.
/// Returns NULL when unknown, letting libxml autodetect the encoding.
- (const char *)detectEncoding:(NSData *)receivedData
{
    const char *encoding = 0;
    const NSUInteger length = [receivedData length];
    if (length == 0) {
        // -bytes may be NULL for empty data; strndup(NULL, n) is undefined.
        return encoding;
    }
    // Cap at the actual data length: NSData is not NUL-terminated, so the
    // previous unconditional strndup(bytes, 60) read out of bounds whenever
    // the payload was shorter than 60 bytes.
    const size_t headerLength = length < 60 ? (size_t)length : 60;
    const char *header = strndup([receivedData bytes], headerLength);
    if (!header) {
        return encoding;
    }
    if (strstr(header, "utf-8") || strstr(header, "UTF-8")) {
        encoding = "UTF-8";
    } else if (strstr(header, "iso-8859-1") || strstr(header, "ISO-8859-1")) {
        encoding = "ISO-8859-1";
    }
    free((void *)header);
    return encoding;
}
/// Parses the node's text content with _dateFormatter (configured by the
/// subclass in parseResponseData). Returns nil when parsing fails.
- (NSDate *)dateFromNode:(xmlNodePtr)node
{
    NSString *dateString = [self contentForNode:node];
    /*
     * For some NSDateFormatter date parsing oddities: http://www.openradar.me/9944011
     *
     * Engineering has determined that this issue behaves as intended based on the following information:
     *
     * This is an intentional change in iOS 5. The issue is this: With the short formats as specified by z (=zzz) or v (=vvv),
     * there can be a lot of ambiguity. For example, "ET" for Eastern Time" could apply to different time zones in many different regions.
     * To improve formatting and parsing reliability, the short forms are only used in a locale if the "cu" (commonly used) flag is set
     * for the locale. Otherwise, only the long forms are used (for both formatting and parsing). This is a change in
     * open-source CLDR 2.0 / ICU 4.8, which is the basis for the ICU in iOS 5, which in turn is the basis of NSDateFormatter behavior.
     *
     * For the "en" locale (= "en_US"), the cu flag is set for metazones such as Alaska, America_Central, America_Eastern, America_Mountain,
     * America_Pacific, Atlantic, Hawaii_Aleutian, and GMT. It is not set for Europe_Central.
     *
     * However, for the "en_GB" locale, the cu flag is set for Europe_Central.
     *
     * So, a formatter set for short timezone style "z" or "zzz" and locale "en" or "en_US" will not parse "CEST" or "CET", but if the
     * locale is instead set to "en_GB" it will parse those. The "GMT" style will be parsed by all.
     *
     * If the formatter is set for the long timezone style "zzzz", and the locale is any of "en", "en_US", or "en_GB", then any of the
     * following will be parsed, because they are unambiguous:
     *
     * "Pacific Daylight Time" "Central European Summer Time" "Central European Time"
     *
     */
    return [_dateFormatter dateFromString:dateString];
}
@end
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#include "audio_queue.h"
#include "stream_configuration.h"
#include <pthread.h>
//#define AQ_DEBUG 1
//#define AQ_DEBUG_LOCKS 1
#if !defined (AQ_DEBUG)
#define AQ_TRACE(...) do {} while (0)
#define AQ_ASSERT(...) do {} while (0)
#else
#include <cassert>
#define AQ_TRACE(...) printf(__VA_ARGS__)
#define AQ_ASSERT(...) assert(__VA_ARGS__)
#endif
#if !defined (AQ_DEBUG_LOCKS)
#define AQ_LOCK_TRACE(...) do {} while (0)
#else
#define AQ_LOCK_TRACE(...) printf(__VA_ARGS__)
#endif
namespace astreamer {
/* public */
// Allocates per-buffer bookkeeping arrays (sized from the shared stream
// configuration) and the mutexes/condition used to hand buffers between
// the filler thread and the AudioQueue callback thread. The AudioQueue
// itself is created later in init().
Audio_Queue::Audio_Queue()
    : m_delegate(0),
      m_state(IDLE),
      m_outAQ(0),
      m_fillBufferIndex(0),
      m_bytesFilled(0),
      m_packetsFilled(0),
      m_buffersUsed(0),
      m_audioQueueStarted(false),
      m_levelMeteringEnabled(false),
      m_lastError(noErr),
      m_initialOutputVolume(1.0)
{
    Stream_Configuration *config = Stream_Configuration::configuration();
    m_audioQueueBuffer = new AudioQueueBufferRef[config->bufferCount];
    m_packetDescs = new AudioStreamPacketDescription[config->maxPacketDescs];
    m_bufferInUse = new bool[config->bufferCount];
    for (size_t i=0; i < config->bufferCount; i++) {
        m_bufferInUse[i] = false;
    }
    // Init failures are only traced; the object stays usable but the
    // synchronization primitives would be broken in that (unlikely) case.
    if (pthread_mutex_init(&m_mutex, NULL) != 0) {
        AQ_TRACE("m_mutex init failed!\n");
    }
    if (pthread_mutex_init(&m_bufferInUseMutex, NULL) != 0) {
        AQ_TRACE("m_bufferInUseMutex init failed!\n");
    }
    if (pthread_cond_init(&m_bufferFreeCondition, NULL) != 0) {
        AQ_TRACE("m_bufferFreeCondition init failed!\n");
    }
}
// Force-stops and disposes the AudioQueue before freeing the buffer arrays
// that its callbacks reference, then tears down the pthread primitives.
Audio_Queue::~Audio_Queue()
{
    stop(true);
    cleanup();
    delete [] m_audioQueueBuffer;
    delete [] m_packetDescs;
    delete [] m_bufferInUse;
    pthread_mutex_destroy(&m_mutex);
    pthread_mutex_destroy(&m_bufferInUseMutex);
    pthread_cond_destroy(&m_bufferFreeCondition);
}
// The queue only exists after a successful init().
bool Audio_Queue::initialized()
{
    return m_outAQ != 0;
}
void Audio_Queue::start()
{
// start the queue if it has not been started already
if (m_audioQueueStarted) {
return;
}
OSStatus err = AudioQueueStart(m_outAQ, NULL);
if (!err) {
m_audioQueueStarted = true;
m_levelMeteringEnabled = false;
m_lastError = noErr;
} else {
AQ_TRACE("%s: AudioQueueStart failed!\n", __PRETTY_FUNCTION__);
m_lastError = err;
}
}
// Toggles between RUNNING and PAUSED; ignored in any other state.
void Audio_Queue::pause()
{
    if (m_state == RUNNING) {
        if (AudioQueuePause(m_outAQ) != 0) {
            AQ_TRACE("%s: AudioQueuePause failed!\n", __PRETTY_FUNCTION__);
        }
        setState(PAUSED);
    } else if (m_state == PAUSED) {
        // Resume. Previously the AudioQueueStart result was silently
        // discarded; trace failures for symmetry with the pause branch.
        if (AudioQueueStart(m_outAQ, NULL) != 0) {
            AQ_TRACE("%s: AudioQueueStart failed!\n", __PRETTY_FUNCTION__);
        }
        setState(RUNNING);
    }
}
// Convenience overload: stop immediately (do not drain queued audio).
void Audio_Queue::stop()
{
    stop(true);
}
float Audio_Queue::volume()
{
if (!m_outAQ) {
return 1.0;
}
float vol;
OSStatus err = AudioQueueGetParameter(m_outAQ, kAudioQueueParam_Volume, &vol);
if (!err) {
return vol;
}
return 1.0;
}
// Sets the output volume; ignored until the queue has been created.
void Audio_Queue::setVolume(float volume)
{
    if (m_outAQ) {
        AudioQueueSetParameter(m_outAQ, kAudioQueueParam_Volume, volume);
    }
}
// Sets the playback rate, clamped to [0.5, 2.0]. Requires
// enableTimeAndPitchConversion in the stream configuration; otherwise
// the call only logs a notice (in debug builds) and returns.
void Audio_Queue::setPlayRate(float playRate)
{
    Stream_Configuration *configuration = Stream_Configuration::configuration();
    if (!configuration->enableTimeAndPitchConversion) {
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
        printf("*** FreeStreamer notification: Trying to set play rate for audio queue but enableTimeAndPitchConversion is disabled from configuration. Play rate settign will not work.\n");
#endif
        return;
    }
    if (!m_outAQ) {
        return;
    }
    // Clamp to the range the AudioQueue supports.
    if (playRate < 0.5) {
        playRate = 0.5;
    } else if (playRate > 2.0) {
        playRate = 2.0;
    }
    AudioQueueSetParameter(m_outAQ, kAudioQueueParam_PlayRate, playRate);
}
// Stops the AudioQueue. When stopImmediately is true, playback stops at
// once, the IsRunning listener is removed (so no stale callback fires) and
// the state moves to IDLE; otherwise the queue drains queued audio first.
void Audio_Queue::stop(bool stopImmediately)
{
    if (!m_audioQueueStarted) {
        AQ_TRACE("%s: audio queue already stopped, return!\n", __PRETTY_FUNCTION__);
        return;
    }
    m_audioQueueStarted = false;
    m_levelMeteringEnabled = false;
    // Wake any thread blocked in enqueueBuffer() waiting for a free buffer,
    // so it can observe the stopped state instead of blocking forever.
    pthread_mutex_lock(&m_bufferInUseMutex);
    pthread_cond_signal(&m_bufferFreeCondition);
    pthread_mutex_unlock(&m_bufferInUseMutex);
    AQ_TRACE("%s: enter\n", __PRETTY_FUNCTION__);
    if (AudioQueueFlush(m_outAQ) != 0) {
        AQ_TRACE("%s: AudioQueueFlush failed!\n", __PRETTY_FUNCTION__);
    }
    if (stopImmediately) {
        AudioQueueRemovePropertyListener(m_outAQ,
                                         kAudioQueueProperty_IsRunning,
                                         audioQueueIsRunningCallback,
                                         this);
    }
    if (AudioQueueStop(m_outAQ, stopImmediately) != 0) {
        AQ_TRACE("%s: AudioQueueStop failed!\n", __PRETTY_FUNCTION__);
    }
    if (stopImmediately) {
        setState(IDLE);
    }
    AQ_TRACE("%s: leave\n", __PRETTY_FUNCTION__);
}
// Returns the queue's current playback timestamp; a zeroed timestamp when
// the time cannot be queried.
AudioTimeStamp Audio_Queue::currentTime()
{
    AudioTimeStamp queueTime;
    memset(&queueTime, 0, sizeof(queueTime));
    Boolean discontinuity;
    if (AudioQueueGetCurrentTime(m_outAQ, NULL, &queueTime, &discontinuity) != noErr) {
        AQ_TRACE("AudioQueueGetCurrentTime failed\n");
    }
    return queueTime;
}
// Returns the current level meter reading, lazily enabling metering on
// first use. Returns a zeroed (silent) state when the queue has not been
// created — previously this method dereferenced m_outAQ without the guard
// the other accessors (volume/setVolume) have, and could also return an
// uninitialized struct when AudioQueueGetProperty failed.
AudioQueueLevelMeterState Audio_Queue::levels()
{
    AudioQueueLevelMeterState levelMeter;
    memset(&levelMeter, 0, sizeof levelMeter);
    if (!m_outAQ) {
        return levelMeter;
    }
    if (!m_levelMeteringEnabled) {
        UInt32 enabledLevelMeter = true;
        AudioQueueSetProperty(m_outAQ,
                              kAudioQueueProperty_EnableLevelMetering,
                              &enabledLevelMeter,
                              sizeof(UInt32));
        m_levelMeteringEnabled = true;
    }
    UInt32 levelMeterSize = sizeof(AudioQueueLevelMeterState);
    AudioQueueGetProperty(m_outAQ, kAudioQueueProperty_CurrentLevelMeterDB, &levelMeter, &levelMeterSize);
    return levelMeter;
}
// Creates the AudioQueue for m_streamDesc: disposes any previous queue,
// allocates the output buffers, installs the IsRunning listener and applies
// optional time/pitch and initial-volume settings. On failure m_outAQ stays
// 0 (so initialized() reports false) and the delegate is notified.
void Audio_Queue::init()
{
    OSStatus err = noErr;
    cleanup();
    // create the audio queue
    err = AudioQueueNewOutput(&m_streamDesc, audioQueueOutputCallback, this, CFRunLoopGetCurrent(), NULL, 0, &m_outAQ);
    if (err) {
        AQ_TRACE("%s: error in AudioQueueNewOutput\n", __PRETTY_FUNCTION__);
        m_lastError = err;
        if (m_delegate) {
            m_delegate->audioQueueInitializationFailed();
        }
        return;
    }
    Stream_Configuration *configuration = Stream_Configuration::configuration();
    // allocate audio queue buffers
    for (unsigned int i = 0; i < configuration->bufferCount; ++i) {
        err = AudioQueueAllocateBuffer(m_outAQ, configuration->bufferSize, &m_audioQueueBuffer[i]);
        if (err) {
            /* If allocating the buffers failed, everything else will fail, too.
             * Dispose the queue so that we can later on detect that this
             * queue in fact has not been initialized.
             */
            AQ_TRACE("%s: error in AudioQueueAllocateBuffer\n", __PRETTY_FUNCTION__);
            (void)AudioQueueDispose(m_outAQ, true);
            m_outAQ = 0;
            m_lastError = err;
            if (m_delegate) {
                m_delegate->audioQueueInitializationFailed();
            }
            return;
        }
    }
    // listen for kAudioQueueProperty_IsRunning
    err = AudioQueueAddPropertyListener(m_outAQ, kAudioQueueProperty_IsRunning, audioQueueIsRunningCallback, this);
    if (err) {
        AQ_TRACE("%s: error in AudioQueueAddPropertyListener\n", __PRETTY_FUNCTION__);
        m_lastError = err;
        return;
    }
    if (configuration->enableTimeAndPitchConversion) {
        UInt32 enableTimePitchConversion = 1;
        err = AudioQueueSetProperty (m_outAQ, kAudioQueueProperty_EnableTimePitch, &enableTimePitchConversion, sizeof(enableTimePitchConversion));
        if (err != noErr) {
            AQ_TRACE("Failed to enable time and pitch conversion. Play rate setting will fail\n");
        }
    }
    // Apply a volume that was requested before the queue existed.
    if (m_initialOutputVolume != 1.0) {
        setVolume(m_initialOutputVolume);
    }
}
// Copies parsed audio packets into the current fill buffer and enqueues the
// buffer when it is full (by size or by packet-description count).
// NOTE: may block inside enqueueBuffer() until a buffer becomes free.
void Audio_Queue::handleAudioPackets(UInt32 inNumberBytes, UInt32 inNumberPackets, const void *inInputData, AudioStreamPacketDescription *inPacketDescriptions)
{
    if (!initialized()) {
        AQ_TRACE("%s: warning: attempt to handle audio packets with uninitialized audio queue. return.\n", __PRETTY_FUNCTION__);
        return;
    }
    // this is called by audio file stream when it finds packets of audio
    AQ_TRACE("got data. bytes: %u packets: %u\n", inNumberBytes, (unsigned int)inNumberPackets);
    /* Place each packet into a buffer and then send each buffer into the audio
     queue */
    UInt32 i;
    for (i = 0; i < inNumberPackets; i++) {
        AudioStreamPacketDescription *desc = &inPacketDescriptions[i];
        const void *data = (const char*)inInputData + desc->mStartOffset;
        // Re-check per packet: enqueueBuffer() below can block, during which
        // the queue may have been torn down.
        if (!initialized()) {
            AQ_TRACE("%s: warning: attempt to handle audio packets with uninitialized audio queue. return.\n", __PRETTY_FUNCTION__);
            return;
        }
        Stream_Configuration *config = Stream_Configuration::configuration();
        AQ_TRACE("%s: enter\n", __PRETTY_FUNCTION__);
        UInt32 packetSize = desc->mDataByteSize;
        /* This shouldn't happen because most of the time we read the packet buffer
         size from the file stream, but if we restored to guessing it we could
         come up too small here */
        if (packetSize > config->bufferSize) {
            AQ_TRACE("%s: packetSize %u > AQ_BUFSIZ %li\n", __PRETTY_FUNCTION__, (unsigned int)packetSize, config->bufferSize);
            return;
        }
        // if the space remaining in the buffer is not enough for this packet, then
        // enqueue the buffer and wait for another to become available.
        if (config->bufferSize - m_bytesFilled < packetSize) {
            enqueueBuffer();
            // enqueueBuffer() clears m_audioQueueStarted on failure; stop
            // feeding packets in that case.
            if (!m_audioQueueStarted) {
                return;
            }
        } else {
            AQ_TRACE("%s: skipped enqueueBuffer AQ_BUFSIZ - m_bytesFilled %lu, packetSize %u\n", __PRETTY_FUNCTION__, (config->bufferSize - m_bytesFilled), (unsigned int)packetSize);
        }
        // copy data to the audio queue buffer
        AudioQueueBufferRef buf = m_audioQueueBuffer[m_fillBufferIndex];
        memcpy((char*)buf->mAudioData, data, packetSize);
        // fill out packet description to pass to enqueue() later on
        m_packetDescs[m_packetsFilled] = *desc;
        // Make sure the offset is relative to the start of the audio buffer
        m_packetDescs[m_packetsFilled].mStartOffset = m_bytesFilled;
        // keep track of bytes filled and packets filled
        m_bytesFilled += packetSize;
        m_packetsFilled++;
        /* If filled our buffer with packets, then commit it to the system */
        if (m_packetsFilled >= config->maxPacketDescs) {
            enqueueBuffer();
        }
    }
}
/* private */
// Disposes the AudioQueue (force-stopping it first if still playing) and
// resets all buffer bookkeeping so init() can be called again.
void Audio_Queue::cleanup()
{
    if (!initialized()) {
        AQ_TRACE("%s: warning: attempt to cleanup an uninitialized audio queue. return.\n", __PRETTY_FUNCTION__);
        return;
    }
    Stream_Configuration *config = Stream_Configuration::configuration();
    if (m_state != IDLE) {
        AQ_TRACE("%s: attemping to cleanup the audio queue when it is still playing, force stopping\n",
                 __PRETTY_FUNCTION__);
        // Remove the listener first so the stop does not trigger a state
        // callback into a half-torn-down object.
        AudioQueueRemovePropertyListener(m_outAQ,
                                         kAudioQueueProperty_IsRunning,
                                         audioQueueIsRunningCallback,
                                         this);
        AudioQueueStop(m_outAQ, true);
        setState(IDLE);
    }
    if (AudioQueueDispose(m_outAQ, true) != 0) {
        AQ_TRACE("%s: AudioQueueDispose failed!\n", __PRETTY_FUNCTION__);
    }
    m_outAQ = 0;
    m_fillBufferIndex = m_bytesFilled = m_packetsFilled = m_buffersUsed = 0;
    for (size_t i=0; i < config->bufferCount; i++) {
        m_bufferInUse[i] = false;
    }
    m_lastError = noErr;
}
// Records the new state and notifies the delegate on actual transitions.
void Audio_Queue::setState(State state)
{
    if (state == m_state) {
        // No transition; don't notify.
        return;
    }
    m_state = state;
    if (m_delegate) {
        m_delegate->audioQueueStateChanged(state);
    }
}
// Marks the current fill buffer in use, hands it to the AudioQueue, then
// advances to the next buffer — blocking on m_bufferFreeCondition until the
// output callback releases it (stop() also signals to unblock this wait).
void Audio_Queue::enqueueBuffer()
{
    AQ_ASSERT(!m_bufferInUse[m_fillBufferIndex]);
    Stream_Configuration *config = Stream_Configuration::configuration();
    AQ_TRACE("%s: enter\n", __PRETTY_FUNCTION__);
    pthread_mutex_lock(&m_bufferInUseMutex);
    m_bufferInUse[m_fillBufferIndex] = true;
    m_buffersUsed++;
    // enqueue buffer
    AudioQueueBufferRef fillBuf = m_audioQueueBuffer[m_fillBufferIndex];
    fillBuf->mAudioDataByteSize = m_bytesFilled;
    pthread_mutex_unlock(&m_bufferInUseMutex);
    AQ_ASSERT(m_packetsFilled > 0);
    OSStatus err = AudioQueueEnqueueBuffer(m_outAQ, fillBuf, m_packetsFilled, m_packetDescs);
    if (!err) {
        m_lastError = noErr;
        // Lazily (re)start the queue once data is flowing.
        start();
    } else {
        /* If we get an error here, it very likely means that the audio queue is no longer
         running */
        AQ_TRACE("%s: error in AudioQueueEnqueueBuffer\n", __PRETTY_FUNCTION__);
        m_lastError = err;
        return;
    }
    pthread_mutex_lock(&m_bufferInUseMutex);
    // go to next buffer
    if (++m_fillBufferIndex >= config->bufferCount) {
        m_fillBufferIndex = 0;
    }
    // reset bytes filled
    m_bytesFilled = 0;
    // reset packets filled
    m_packetsFilled = 0;
    // wait until next buffer is not in use
    while (m_bufferInUse[m_fillBufferIndex]) {
        AQ_TRACE("waiting for buffer %u\n", (unsigned int)m_fillBufferIndex);
        pthread_cond_wait(&m_bufferFreeCondition, &m_bufferInUseMutex);
    }
    pthread_mutex_unlock(&m_bufferInUseMutex);
}
// this is called by the audio queue when it has finished decoding our data.
// The buffer is now free to be reused.
// Marks the buffer free, wakes any filler thread blocked in enqueueBuffer(),
// and notifies the delegate (buffers-empty vs. packet-finished).
void Audio_Queue::audioQueueOutputCallback(void *inClientData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer)
{
    Audio_Queue *audioQueue = static_cast<Audio_Queue*>(inClientData);
    Stream_Configuration *config = Stream_Configuration::configuration();
    // Map the returned buffer back to our bookkeeping index.
    int bufIndex = -1;
    for (unsigned int i = 0; i < config->bufferCount; ++i) {
        if (inBuffer == audioQueue->m_audioQueueBuffer[i]) {
            AQ_TRACE("findQueueBuffer %i\n", i);
            bufIndex = i;
            break;
        }
    }
    if (bufIndex == -1) {
        // Not one of our buffers; ignore.
        return;
    }
    pthread_mutex_lock(&audioQueue->m_bufferInUseMutex);
    AQ_ASSERT(audioQueue->m_bufferInUse[bufIndex]);
    audioQueue->m_bufferInUse[bufIndex] = false;
    audioQueue->m_buffersUsed--;
    AQ_TRACE("signaling buffer free for inuse %i....\n", bufIndex);
    pthread_cond_signal(&audioQueue->m_bufferFreeCondition);
    AQ_TRACE("signal sent!\n");
    // The mutex is released before the delegate call in both branches so the
    // delegate can safely call back into the queue.
    if (audioQueue->m_buffersUsed == 0 && audioQueue->m_delegate) {
        AQ_LOCK_TRACE("audioQueueOutputCallback: unlock 2\n");
        pthread_mutex_unlock(&audioQueue->m_bufferInUseMutex);
        if (audioQueue->m_delegate) {
            audioQueue->m_delegate->audioQueueBuffersEmpty();
        }
    } else {
        pthread_mutex_unlock(&audioQueue->m_bufferInUseMutex);
        if (audioQueue->m_delegate) {
            audioQueue->m_delegate->audioQueueFinishedPlayingPacket();
        }
    }
    AQ_LOCK_TRACE("audioQueueOutputCallback: unlock\n");
}
void Audio_Queue::audioQueueIsRunningCallback(void *inClientData, AudioQueueRef inAQ, AudioQueuePropertyID inID)
{
Audio_Queue *audioQueue = static_cast<Audio_Queue*>(inClientData);
AQ_TRACE("%s: enter\n", __PRETTY_FUNCTION__);
UInt32 running;
UInt32 output = sizeof(running);
OSStatus err = AudioQueueGetProperty(inAQ, kAudioQueueProperty_IsRunning, &running, &output);
if (err) {
AQ_TRACE("%s: error in kAudioQueueProperty_IsRunning\n", __PRETTY_FUNCTION__);
return;
}
if (running) {
AQ_TRACE("audio queue running!\n");
audioQueue->setState(RUNNING);
} else {
audioQueue->setState(IDLE);
}
}
} // namespace astreamer
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#ifndef ASTREAMER_AUDIO_QUEUE_H
#define ASTREAMER_AUDIO_QUEUE_H
#include <AudioToolbox/AudioToolbox.h> /* AudioFileStreamID */
namespace astreamer {
class Audio_Queue_Delegate;
struct queued_packet;
/*
 * Wrapper around an AudioToolbox output AudioQueue. Owns the queue buffers,
 * tracks which buffers are in use, and reports state changes and buffer
 * exhaustion to its Audio_Queue_Delegate.
 */
class Audio_Queue {
public:
Audio_Queue_Delegate *m_delegate;
// Coarse playback state, reported through audioQueueStateChanged()
enum State {
IDLE,
RUNNING,
PAUSED
};
Audio_Queue();
virtual ~Audio_Queue();
// NOTE(review): presumably reports whether init() created the queue — confirm in audio_queue.cpp
bool initialized();
void init();
// Notice: the queue blocks if it has no free buffers
void handleAudioPackets(UInt32 inNumberBytes, UInt32 inNumberPackets, const void *inInputData, AudioStreamPacketDescription *inPacketDescriptions);
void start();
void pause();
void stop(bool stopImmediately);
void stop();
float volume();
void setVolume(float volume);
void setPlayRate(float playRate);
// Current playback time in the queue's sample-time domain
AudioTimeStamp currentTime();
AudioQueueLevelMeterState levels();
private:
Audio_Queue(const Audio_Queue&); // non-copyable
Audio_Queue& operator=(const Audio_Queue&); // non-assignable
State m_state;
AudioQueueRef m_outAQ; // the audio queue
AudioQueueBufferRef *m_audioQueueBuffer; // audio queue buffers
AudioStreamPacketDescription *m_packetDescs; // packet descriptions for enqueuing audio
UInt32 m_fillBufferIndex; // the index of the audioQueueBuffer that is being filled
UInt32 m_bytesFilled; // how many bytes have been filled
UInt32 m_packetsFilled; // how many packets have been filled
UInt32 m_buffersUsed; // how many buffers are used
bool m_audioQueueStarted; // flag to indicate that the queue has been started
bool *m_bufferInUse; // flags to indicate that a buffer is still in use
bool m_levelMeteringEnabled;
// m_bufferInUseMutex/m_bufferFreeCondition guard m_bufferInUse/m_buffersUsed
// (see audioQueueOutputCallback, which signals when a buffer frees up)
pthread_mutex_t m_mutex;
pthread_mutex_t m_bufferInUseMutex;
pthread_cond_t m_bufferFreeCondition;
public:
OSStatus m_lastError;
AudioStreamBasicDescription m_streamDesc;
float m_initialOutputVolume;
private:
void cleanup();
void setCookiesForStream(AudioFileStreamID inAudioFileStream);
void setState(State state);
void enqueueBuffer();
static void audioQueueOutputCallback(void *inClientData, AudioQueueRef inAQ, AudioQueueBufferRef inBuffer);
static void audioQueueIsRunningCallback(void *inClientData, AudioQueueRef inAQ, AudioQueuePropertyID inID);
};
/*
 * Callback interface for Audio_Queue events (implemented by Audio_Stream).
 */
class Audio_Queue_Delegate {
public:
// Queue moved between IDLE/RUNNING/PAUSED
virtual void audioQueueStateChanged(Audio_Queue::State state) = 0;
// All enqueued buffers have been consumed (playback starved or finished)
virtual void audioQueueBuffersEmpty() = 0;
virtual void audioQueueInitializationFailed() = 0;
// One queue buffer finished playing (more remain in use)
virtual void audioQueueFinishedPlayingPacket() = 0;
};
} // namespace astreamer
#endif // ASTREAMER_AUDIO_QUEUE_H
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#include "audio_stream.h"
#include "file_output.h"
#include "stream_configuration.h"
#include "http_stream.h"
#include "file_stream.h"
#include "caching_stream.h"
#include <CommonCrypto/CommonDigest.h>
#include <pthread.h>
#include <cctype>
#include <cstdio>
#include <cstring>
/*
* Some servers may send an incorrect MIME type for the audio stream.
* By uncommenting the following line, relaxed checks will be
* performed for the MIME type. This allows playing more
* streams:
*/
//#define AS_RELAX_CONTENT_TYPE_CHECK 1
//#define AS_DEBUG 1
//#define AS_LOCK_DEBUG 1
#if !defined (AS_DEBUG)
#define AS_TRACE(...) do {} while (0)
#else
#define AS_TRACE(...) printf("[audio_stream.cpp:%i thread %x] ", __LINE__, pthread_mach_thread_np(pthread_self())); printf(__VA_ARGS__)
#endif
#if !defined (AS_LOCK_DEBUG)
#define AS_LOCK_TRACE(...) do {} while (0)
#else
#define AS_LOCK_TRACE(...) printf("[audio_stream.cpp:%i thread %x] ", __LINE__, pthread_mach_thread_np(pthread_self())); printf(__VA_ARGS__)
#endif
#if defined(DEBUG) || (TARGET_IPHONE_SIMULATOR)
#define AS_WARN(...) printf("[audio_stream.cpp:%i thread %x] ", __LINE__, pthread_mach_thread_np(pthread_self())); printf(__VA_ARGS__)
#else
#define AS_WARN(...) do {} while (0)
#endif
namespace astreamer {
/*
 * Keep-alive timer callback for the decoder thread's run loop: a run loop
 * with no sources or timers would exit, so this timer exists solely to keep
 * the loop alive. Intentionally does nothing.
 * (The redundant forward declaration that immediately preceded this
 * definition has been removed.)
 */
static void fsTick(CFRunLoopTimerRef timer, void *info)
{
    // Dummy function just to keep the decoder runloop running
}
/*
 * Formats a Core Audio OSStatus into a human-readable CFString.
 * Printable four-char codes render as 'abcd'; anything else as a decimal
 * number. The caller owns the returned string (Create rule) and must
 * CFRelease it.
 */
static CFStringRef coreAudioErrorToCFString(CFStringRef basicErrorDescription, OSStatus error)
{
    char str[20] = {0};
    // memcpy instead of the old *(UInt32 *)(str + 1) cast: that store was
    // unaligned and violated strict aliasing (undefined behavior).
    const UInt32 bigEndianCode = CFSwapInt32HostToBig(error);
    memcpy(str + 1, &bigEndianCode, sizeof bigEndianCode);
    // Cast to unsigned char before isprint(): passing a negative char is UB.
    if (isprint((unsigned char)str[1]) && isprint((unsigned char)str[2]) &&
        isprint((unsigned char)str[3]) && isprint((unsigned char)str[4])) {
        // Printable four-char code: wrap it in quotes, e.g. 'fmt?'
        str[0] = str[5] = '\'';
        str[6] = '\0';
    } else {
        // Not printable: fall back to the numeric code (bounded write).
        snprintf(str, sizeof str, "%d", (int)error);
    }
    CFStringRef formattedError = CFStringCreateWithFormat(NULL,
                                                          NULL,
                                                          CFSTR("%@: error code %s"),
                                                          basicErrorDescription,
                                                          str);
    return formattedError;
}
/* Create HTTP stream as Audio_Stream (this) as the delegate */
/*
 * Constructs a stream in the STOPPED state: zeroes all counters, sets up
 * the linear PCM output format (16-bit signed stereo at the configured
 * sample rate), initializes the state/packet-queue mutexes, and spawns the
 * decoder thread (which idles until setDecoderRunState() installs the
 * decode timer on its run loop).
 */
Audio_Stream::Audio_Stream() :
m_delegate(0),
m_inputStreamRunning(false),
m_audioStreamParserRunning(false),
m_initialBufferingCompleted(false),
m_discontinuity(false),
m_preloading(false),
m_audioQueueConsumedPackets(false),
m_contentLength(0),
m_defaultContentLength(0),
m_bytesReceived(0),
m_state(STOPPED),
m_inputStream(0),
m_audioQueue(0),
m_watchdogTimer(0),
m_seekTimer(0),
m_inputStreamTimer(0),
m_stateSetTimer(0),
m_decodeTimer(0),
m_audioFileStream(0),
m_audioConverter(0),
m_initializationError(noErr),
m_outputBufferSize(Stream_Configuration::configuration()->bufferSize),
m_outputBuffer(new UInt8[m_outputBufferSize]),
m_packetIdentifier(0),
m_playingPacketIdentifier(0),
m_dataOffset(0),
m_seekOffset(0),
m_bounceCount(0),
m_firstBufferingTime(0),
m_strictContentTypeChecking(Stream_Configuration::configuration()->requireStrictContentTypeChecking),
m_defaultContentType(CFSTR("audio/mpeg")),
m_contentType(NULL),
m_fileOutput(0),
m_outputFile(NULL),
m_queuedHead(0),
m_queuedTail(0),
m_playPacket(0),
m_cachedDataSize(0),
m_numPacketsToRewind(0),
m_audioDataByteCount(0),
m_audioDataPacketCount(0),
m_bitRate(0),
m_metaDataSizeInBytes(0),
m_packetDuration(0),
m_bitrateBufferIndex(0),
m_outputVolume(1.0),
m_converterRunOutOfData(false),
m_decoderShouldRun(false),
m_decoderFailed(false),
m_decoderThreadCreated(false),
// Run loop of the thread that created the stream; timers are scheduled here
m_mainRunLoop(CFRunLoopGetCurrent()),
m_decodeRunLoop(NULL)
{
memset(&m_srcFormat, 0, sizeof m_srcFormat);
memset(&m_dstFormat, 0, sizeof m_dstFormat);
Stream_Configuration *config = Stream_Configuration::configuration();
// Decoded output format: packed native-endian signed 16-bit stereo PCM
// (2 channels * 2 bytes = 4 bytes per frame, 1 frame per packet)
m_dstFormat.mSampleRate = config->outputSampleRate;
m_dstFormat.mFormatID = kAudioFormatLinearPCM;
m_dstFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
m_dstFormat.mBytesPerPacket = 4;
m_dstFormat.mFramesPerPacket = 1;
m_dstFormat.mBytesPerFrame = 4;
m_dstFormat.mChannelsPerFrame = 2;
m_dstFormat.mBitsPerChannel = 16;
if (pthread_mutex_init(&m_packetQueueMutex, NULL) != 0) {
AS_TRACE("m_packetQueueMutex init failed!\n");
}
if (pthread_mutex_init(&m_streamStateMutex, NULL) != 0) {
AS_TRACE("m_streamStateMutex init failed!\n");
}
// The decoder thread is created once per stream and lives until destruction
m_decoderThreadCreated = (pthread_create(&m_decodeThread, NULL, decodeLoop, this) == 0);
}
/*
 * Destructor: stops the decoder thread (spin-waits until its run loop is
 * actually waiting, then stops the loop and joins), releases content-type
 * strings, closes the stream and parser, and frees buffers, streams and
 * mutexes.
 */
Audio_Stream::~Audio_Stream()
{
setDecoderRunState(false);
if (m_decoderThreadCreated) {
// Wait until the decoder run loop exists and is waiting: CFRunLoopStop()
// would be a no-op if the loop were not running yet.
// NOTE(review): usleep(0) makes this a near-pure spin — presumably the
// window is very short; confirm before changing the sleep interval.
while (m_decodeRunLoop == NULL || !CFRunLoopIsWaiting(m_decodeRunLoop)) {
usleep(0);
}
CFRunLoopStop(m_decodeRunLoop);
pthread_join(m_decodeThread, NULL);
m_decodeRunLoop = NULL;
m_decoderThreadCreated = false;
}
if (m_defaultContentType) {
CFRelease(m_defaultContentType);
m_defaultContentType = NULL;
}
if (m_contentType) {
CFRelease(m_contentType);
m_contentType = NULL;
}
// Close the input stream, the parser and the audio queue
close(true);
delete [] m_outputBuffer;
m_outputBuffer = 0;
if (m_inputStream) {
m_inputStream->m_delegate = 0;
delete m_inputStream;
m_inputStream = 0;
}
if (m_fileOutput) {
delete m_fileOutput;
m_fileOutput = 0;
}
pthread_mutex_destroy(&m_packetQueueMutex);
pthread_mutex_destroy(&m_streamStateMutex);
}
// Convenience overload: open the stream from the beginning (no seek position).
void Audio_Stream::open()
{
open(0);
}
/*
 * Opens the input stream, optionally from a byte-range position (used when
 * seeking). Resets all per-connection counters first, enters BUFFERING on
 * success and — unless preloading — arms the startup watchdog timer.
 * Signals AS_ERR_OPEN when the input stream cannot be opened.
 */
void Audio_Stream::open(Input_Stream_Position *position)
{
if (m_inputStreamRunning || m_audioStreamParserRunning) {
AS_TRACE("%s: already running: return\n", __PRETTY_FUNCTION__);
return;
}
// Reset per-connection counters and flags
m_contentLength = 0;
m_bytesReceived = 0;
m_seekOffset = 0;
m_bounceCount = 0;
m_firstBufferingTime = 0;
m_bitrateBufferIndex = 0;
m_initializationError = noErr;
m_converterRunOutOfData = false;
m_audioDataPacketCount = 0;
m_bitRate = 0;
m_metaDataSizeInBytes = 0;
// Tell the parser the next bytes do not continue previously parsed data
m_discontinuity = true;
setDecoderRunState(false);
pthread_mutex_lock(&m_streamStateMutex);
m_audioQueueConsumedPackets = false;
m_decoderFailed = false;
pthread_mutex_unlock(&m_streamStateMutex);
pthread_mutex_lock(&m_packetQueueMutex);
m_numPacketsToRewind = 0;
pthread_mutex_unlock(&m_packetQueueMutex);
invalidateWatchdogTimer();
Stream_Configuration *config = Stream_Configuration::configuration();
if (m_contentType) {
CFRelease(m_contentType);
m_contentType = NULL;
}
bool success = false;
if (position) {
// Seek-style open: continue from the given byte range
m_initialBufferingCompleted = false;
if (m_inputStream) {
success = m_inputStream->open(*position);
}
} else {
// Fresh open from the beginning of the stream
m_initialBufferingCompleted = false;
m_packetIdentifier = 0;
if (m_inputStream) {
success = m_inputStream->open();
}
}
if (success) {
AS_TRACE("%s: HTTP stream opened, buffering...\n", __PRETTY_FUNCTION__);
m_inputStreamRunning = true;
setState(BUFFERING);
pthread_mutex_lock(&m_streamStateMutex);
if (!m_preloading && config->startupWatchdogPeriod > 0) {
pthread_mutex_unlock(&m_streamStateMutex);
// Fail the stream if playback doesn't start within the watchdog period
createWatchdogTimer();
} else {
pthread_mutex_unlock(&m_streamStateMutex);
}
} else {
closeAndSignalError(AS_ERR_OPEN, CFSTR("Input stream open error"));
}
}
/*
 * Stops the stream: invalidates all timers, closes the input stream first
 * (so the parser gets no more data), optionally closes the parser, stops
 * the decoder, tears down the audio queue and frees every queued packet.
 * FAILED and SEEKING states are preserved; anything else becomes STOPPED.
 */
void Audio_Stream::close(bool closeParser)
{
AS_TRACE("%s: enter\n", __PRETTY_FUNCTION__);
invalidateWatchdogTimer();
if (m_seekTimer) {
CFRunLoopTimerInvalidate(m_seekTimer);
CFRelease(m_seekTimer);
m_seekTimer = 0;
}
if (m_inputStreamTimer) {
CFRunLoopTimerInvalidate(m_inputStreamTimer);
CFRelease(m_inputStreamTimer);
m_inputStreamTimer = 0;
}
// m_stateSetTimer is shared with other threads, so guard it with the lock
pthread_mutex_lock(&m_streamStateMutex);
if (m_stateSetTimer) {
CFRunLoopTimerInvalidate(m_stateSetTimer);
CFRelease(m_stateSetTimer);
m_stateSetTimer = 0;
}
pthread_mutex_unlock(&m_streamStateMutex);
/* Close the HTTP stream first so that the audio stream parser
isn't fed with more data to parse */
if (m_inputStreamRunning) {
if (m_inputStream) {
m_inputStream->close();
}
m_inputStreamRunning = false;
}
if (closeParser && m_audioStreamParserRunning) {
if (m_audioFileStream) {
if (AudioFileStreamClose(m_audioFileStream) != 0) {
AS_TRACE("%s: AudioFileStreamClose failed\n", __PRETTY_FUNCTION__);
}
m_audioFileStream = 0;
}
m_audioStreamParserRunning = false;
}
setDecoderRunState(false);
pthread_mutex_lock(&m_packetQueueMutex);
m_playPacket = 0;
pthread_mutex_unlock(&m_packetQueueMutex);
closeAudioQueue();
const State currentState = state();
if (FAILED != currentState && SEEKING != currentState) {
/*
* Set the stream state to stopped if the stream was stopped successfully.
* We don't want to cause a spurious stopped state as the fail state should
* be the final state in case the stream failed.
*/
setState(STOPPED);
}
if (m_audioConverter) {
AudioConverterDispose(m_audioConverter);
m_audioConverter = 0;
}
/*
* Free any remaining queued packets for encoding.
*/
pthread_mutex_lock(&m_packetQueueMutex);
queued_packet_t *cur = m_queuedHead;
while (cur) {
queued_packet_t *tmp = cur->next;
free(cur);
cur = tmp;
}
m_queuedHead = 0;
m_queuedTail = 0;
m_cachedDataSize = 0;
m_numPacketsToRewind = 0;
m_processedPackets.clear();
pthread_mutex_unlock(&m_packetQueueMutex);
AS_TRACE("%s: leave\n", __PRETTY_FUNCTION__);
}
// Delegates pause to the audio queue (created lazily if needed).
// NOTE(review): Audio_Queue::pause() is not visible here — presumably it
// toggles between paused and running; confirm in audio_queue.cpp.
void Audio_Stream::pause()
{
audioQueue()->pause();
}
/*
 * Rewinds a continuous (live) stream by roughly the given number of seconds
 * by scheduling the decoder to step back over already-cached packets.
 * No-op for non-continuous streams (known content length), when nothing is
 * cached, or when rewinding would leave too little buffered data.
 */
void Audio_Stream::rewind(unsigned seconds)
{
    const bool continuous = (!(contentLength() > 0));
    if (!continuous) {
        // Rewinding is only meaningful for continuous streams
        return;
    }
    const int packetCount = cachedDataCount();
    if (packetCount == 0) {
        return;
    }
    const Float64 averagePacketSize = (Float64)cachedDataSize() / (Float64)packetCount;
    if (!(averagePacketSize > 0)) {
        // Guard the division below: a zero average packet size would yield
        // inf, and converting that to int is undefined behavior.
        return;
    }
    const Float64 bufferSizeForSecond = bitrate() / 8.0;
    const Float64 totalAudioRequiredInBytes = seconds * bufferSizeForSecond;
    const int packetsToRewind = totalAudioRequiredInBytes / averagePacketSize;
    if (packetCount - packetsToRewind >= 16) {
        // Leave some safety margin so that the stream doesn't immediately start buffering
        pthread_mutex_lock(&m_packetQueueMutex);
        m_numPacketsToRewind = packetsToRewind;
        pthread_mutex_unlock(&m_packetQueueMutex);
    }
}
/*
 * Ends the preload phase and starts playing the data buffered so far.
 * If the input already reached EOF the stream is reopened from the start.
 */
void Audio_Stream::startCachedDataPlayback()
{
    pthread_mutex_lock(&m_streamStateMutex);
    m_preloading = false;
    pthread_mutex_unlock(&m_streamStateMutex);
    if (m_inputStreamRunning) {
        // Input still flowing: just re-evaluate the buffering thresholds
        determineBufferingLimits();
    } else {
        // Already reached EOF, restart
        open();
    }
}
/*
 * Returns the current playback position: seconds played (seek offset plus
 * the audio queue's sample time) and, when the duration is known, the
 * relative offset in [0, 1]. Both are 0 while the parser is not running.
 */
AS_Playback_Position Audio_Stream::playbackPosition()
{
    AS_Playback_Position playbackPosition;
    playbackPosition.offset = 0;
    playbackPosition.timePlayed = 0;
    if (m_audioStreamParserRunning) {
        const AudioTimeStamp queueTime = audioQueue()->currentTime();
        // Compute the duration once. The old code called durationInSeconds()
        // up to three times — wasted work (it locks and estimates), and the
        // offset could be inconsistent if the estimate changed between calls.
        const float duration = durationInSeconds();
        playbackPosition.timePlayed = (duration * m_seekOffset) +
            (queueTime.mSampleTime / m_dstFormat.mSampleRate);
        if (duration > 0) {
            playbackPosition.offset = playbackPosition.timePlayed / duration;
        }
    }
    return playbackPosition;
}
/*
 * Returns the number of audio payload bytes in the stream: the count
 * reported by the parser when available, otherwise the content length
 * minus the metadata bytes. Returns 0 when unknown.
 */
UInt64 Audio_Stream::audioDataByteCount()
{
    if (m_audioDataByteCount > 0) {
        return m_audioDataByteCount;
    }
    const UInt64 length = contentLength();
    // Guard the subtraction: contentLength() is 0 for continuous streams,
    // and unsigned arithmetic would otherwise wrap to a huge value.
    if (length > m_metaDataSizeInBytes) {
        return length - m_metaDataSizeInBytes;
    }
    return 0;
}
/*
 * Best-effort stream duration in seconds: exact when the parser reported
 * the packet count, frames per packet and a valid sample rate; otherwise
 * estimated from the byte count and bit rate. Returns 0 when unknown.
 */
float Audio_Stream::durationInSeconds()
{
    // Guard mSampleRate as well: a zero sample rate would make the division
    // below produce inf instead of falling through to the estimation.
    if (m_audioDataPacketCount > 0 && m_srcFormat.mFramesPerPacket > 0 &&
        m_srcFormat.mSampleRate > 0) {
        return m_audioDataPacketCount * m_srcFormat.mFramesPerPacket / m_srcFormat.mSampleRate;
    }
    // Not enough data provided by the format, use bit rate based estimation
    UInt64 audioDataBytes = audioDataByteCount();
    if (audioDataBytes > 0) {
        float bitrate = this->bitrate();
        if (bitrate > 0) {
            // bytes / (bits-per-second / 8) = seconds
            return audioDataBytes / (bitrate * 0.125);
        }
    }
    // No file length available, cannot calculate the duration
    return 0;
}
/*
 * Begins an asynchronous seek to a relative offset in [0, 1]. Only allowed
 * while PLAYING or at END_OF_FILE, which also prevents overlapping seeks.
 * Stops the decoder, detaches the input stream from the run loop, records
 * the target offset and schedules a repeating 50 ms timer
 * (seekTimerCallback) that performs the actual seek steps.
 */
void Audio_Stream::seekToOffset(float offset)
{
const State currentState = this->state();
if (!(currentState == PLAYING || currentState == END_OF_FILE)) {
// Do not allow seeking if we are not currently playing the stream
// This allows a previous seek to be completed
return;
}
setState(SEEKING);
// Remember the content length at the time of the seek
// NOTE(review): m_originalContentLength consumers are not visible here — confirm use
m_originalContentLength = contentLength();
setDecoderRunState(false);
pthread_mutex_lock(&m_packetQueueMutex);
m_numPacketsToRewind = 0;
pthread_mutex_unlock(&m_packetQueueMutex);
// Stop receiving data while the seek is in progress
m_inputStream->setScheduledInRunLoop(false);
setSeekOffset(offset);
if (m_seekTimer) {
CFRunLoopTimerInvalidate(m_seekTimer);
CFRelease(m_seekTimer);
m_seekTimer = 0;
}
CFRunLoopTimerContext ctx = {0, this, NULL, NULL, NULL};
m_seekTimer = CFRunLoopTimerCreate(NULL,
CFAbsoluteTimeGetCurrent(),
0.050, // 50 ms
0,
0,
seekTimerCallback,
&ctx);
CFRunLoopAddTimer(CFRunLoopGetCurrent(), m_seekTimer, kCFRunLoopCommonModes);
}
/*
 * Maps a relative offset in [0, 1] to a byte range in the input stream,
 * skipping the non-audio bytes before m_dataOffset. Returns a zero range
 * when the duration is unknown.
 */
Input_Stream_Position Audio_Stream::streamPositionForOffset(float offset)
{
    Input_Stream_Position position;
    position.start = 0;
    position.end = 0;
    if (durationInSeconds() > 0) {
        const UInt64 length = contentLength();
        // Seek within the audio payload only
        position.start = m_dataOffset + offset * (length - m_dataOffset);
        position.end = length;
    }
    return position;
}
// Returns the last volume set via setVolume() (not queried from the queue).
float Audio_Stream::currentVolume()
{
return m_outputVolume;
}
/*
 * Enables or disables decoding by installing/removing a repeating 20 ms
 * decode timer (decodeSinglePacket) on the decoder thread's run loop,
 * then records the desired state in m_decoderShouldRun.
 */
void Audio_Stream::setDecoderRunState(bool decoderShouldRun)
{
pthread_mutex_lock(&m_streamStateMutex);
if (decoderShouldRun && m_decoderThreadCreated) {
if (m_decodeTimer == NULL) {
CFRunLoopTimerContext ctx = {0, this, NULL, NULL, NULL};
m_decodeTimer = CFRunLoopTimerCreate (NULL, CFAbsoluteTimeGetCurrent() + 0.02, 0.02, 0, 0, decodeSinglePacket, &ctx);
// Drop the lock while spin-waiting so the decoder thread can progress
pthread_mutex_unlock(&m_streamStateMutex);
// Wait until the decoder run loop exists and is actually waiting
// NOTE(review): usleep(0) makes this a near-pure spin; confirm acceptable
while (m_decodeRunLoop == NULL || !CFRunLoopIsWaiting(m_decodeRunLoop)) {
usleep(0);
}
pthread_mutex_lock(&m_streamStateMutex);
CFRunLoopAddTimer(m_decodeRunLoop, m_decodeTimer, kCFRunLoopCommonModes);
AS_TRACE("decoder timer added!!!\n");
}
} else {
if (m_decodeTimer != NULL) {
CFRunLoopRemoveTimer(m_decodeRunLoop, m_decodeTimer, kCFRunLoopCommonModes);
CFRelease(m_decodeTimer);
m_decodeTimer = 0;
AS_TRACE("decoder timer removed!!!\n");
}
}
m_decoderShouldRun = decoderShouldRun;
pthread_mutex_unlock(&m_streamStateMutex);
}
/*
 * Sets the output volume, clamped to [0, 1]. The value is stored so it is
 * also applied to any audio queue created later.
 */
void Audio_Stream::setVolume(float volume)
{
    const float clamped = (volume < 0) ? 0 : ((volume > 1.0) ? 1.0 : volume);
    // Store the volume so it will be used consequently when the queue plays
    m_outputVolume = clamped;
    if (m_audioQueue) {
        m_audioQueue->setVolume(clamped);
    }
}
// Forwards the playback rate to the audio queue, if one exists.
// Note: unlike setVolume(), the rate is not remembered for later queues.
void Audio_Stream::setPlayRate(float playRate)
{
if (m_audioQueue) {
m_audioQueue->setPlayRate(playRate);
}
}
/*
 * Replaces the input stream with one capable of handling the given URL:
 * an HTTP stream (optionally wrapped in a caching stream keyed by a hash
 * of the URL) or a local file stream. URLs neither can handle leave the
 * stream without an input.
 */
void Audio_Stream::setUrl(CFURLRef url)
{
    // Drop any previous input stream
    if (m_inputStream) {
        delete m_inputStream;
        m_inputStream = 0;
    }
    if (HTTP_Stream::canHandleUrl(url)) {
        Stream_Configuration *configuration = Stream_Configuration::configuration();
        if (configuration->cacheEnabled) {
            // Wrap the HTTP stream in a cache keyed by a hash of the URL
            Caching_Stream *cachingStream = new Caching_Stream(new HTTP_Stream());
            CFStringRef identifier = createCacheIdentifierForURL(url);
            cachingStream->setCacheIdentifier(identifier);
            CFRelease(identifier);
            m_inputStream = cachingStream;
        } else {
            m_inputStream = new HTTP_Stream();
        }
        m_inputStream->m_delegate = this;
    } else if (File_Stream::canHandleUrl(url)) {
        m_inputStream = new File_Stream();
        m_inputStream->m_delegate = this;
    }
    if (m_inputStream) {
        m_inputStream->setUrl(url);
    }
}
// Enables/disables rejecting streams whose Content-Type is not audio/video
// (enforced in streamIsReadyRead()).
void Audio_Stream::setStrictContentTypeChecking(bool strictChecking)
{
m_strictContentTypeChecking = strictChecking;
}
/*
 * Sets the content type assumed when the server provides none; pass NULL
 * to clear. The string is copied and any previous value is released.
 */
void Audio_Stream::setDefaultContentType(CFStringRef defaultContentType)
{
if (m_defaultContentType) {
CFRelease(m_defaultContentType);
m_defaultContentType = 0;
}
if (defaultContentType) {
m_defaultContentType = CFStringCreateCopy(kCFAllocatorDefault, defaultContentType);
}
}
// Stores the relative seek offset [0, 1]; used by playbackPosition() math.
void Audio_Stream::setSeekOffset(float offset)
{
m_seekOffset = offset;
}
// Fallback content length (bytes) used when the input stream reports none.
void Audio_Stream::setDefaultContentLength(UInt64 defaultContentLength)
{
m_defaultContentLength = defaultContentLength;
}
// Thread-safe setter for the stream's content length in bytes.
void Audio_Stream::setContentLength(UInt64 contentLength)
{
pthread_mutex_lock(&m_streamStateMutex);
m_contentLength = contentLength;
pthread_mutex_unlock(&m_streamStateMutex);
}
// Thread-safe setter for the preloading flag (see startCachedDataPlayback()
// and open(), which skips the startup watchdog while preloading).
void Audio_Stream::setPreloading(bool preloading)
{
pthread_mutex_lock(&m_streamStateMutex);
m_preloading = preloading;
pthread_mutex_unlock(&m_streamStateMutex);
}
// Thread-safe getter for the preloading flag.
bool Audio_Stream::isPreloading()
{
pthread_mutex_lock(&m_streamStateMutex);
bool preloading = m_preloading;
pthread_mutex_unlock(&m_streamStateMutex);
return preloading;
}
/*
 * Starts (or, with NULL, stops) mirroring the raw received bytes to a file.
 * NOTE(review): m_outputFile stores the CFURLRef without CFRetain — the
 * caller apparently must keep the URL alive; confirm against callers.
 */
void Audio_Stream::setOutputFile(CFURLRef url)
{
if (m_fileOutput) {
delete m_fileOutput;
m_fileOutput = 0;
}
if (url) {
m_fileOutput = new File_Output(url);
}
m_outputFile = url;
}
// Returns the URL set via setOutputFile(), or NULL. Not owned by the caller.
CFURLRef Audio_Stream::outputFile()
{
return m_outputFile;
}
// Thread-safe getter for the current stream state.
Audio_Stream::State Audio_Stream::state()
{
pthread_mutex_lock(&m_streamStateMutex);
const State currentState = m_state;
pthread_mutex_unlock(&m_streamStateMutex);
return currentState;
}
/*
 * Builds a human-readable description of the source audio format
 * ("formatID: xxxx, sample rate: ..."). The format ID is a big-endian
 * four-char code. Caller owns the returned string (Create rule) and must
 * CFRelease it.
 */
CFStringRef Audio_Stream::sourceFormatDescription()
{
    unsigned char formatID[5];
    // memcpy instead of the old *(UInt32 *)formatID cast: that store was
    // potentially unaligned and violated strict aliasing (undefined behavior).
    const UInt32 bigEndianFormatID = OSSwapHostToBigInt32(m_srcFormat.mFormatID);
    memcpy(formatID, &bigEndianFormatID, sizeof bigEndianFormatID);
    formatID[4] = '\0';
    CFStringRef formatDescription = CFStringCreateWithFormat(NULL,
                                                             NULL,
                                                             CFSTR("formatID: %s, sample rate: %f"),
                                                             formatID,
                                                             m_srcFormat.mSampleRate);
    return formatDescription;
}
// Content type captured from the input stream (may be NULL). The returned
// string is owned by the stream; the caller must not release it.
CFStringRef Audio_Stream::contentType()
{
return m_contentType;
}
/*
 * Derives a cache identifier for a URL: "FSCache-" followed by the SHA-1
 * hash of the URL string. Caller must CFRelease the returned string.
 */
CFStringRef Audio_Stream::createCacheIdentifierForURL(CFURLRef url)
{
    CFStringRef hash = createHashForString(CFURLGetString(url));
    CFStringRef identifier = CFStringCreateWithFormat(NULL, NULL, CFSTR("FSCache-%@"), hash);
    CFRelease(hash);
    return identifier;
}
// Thread-safe getter: number of bytes currently held in the packet queue.
size_t Audio_Stream::cachedDataSize()
{
size_t dataSize = 0;
pthread_mutex_lock(&m_packetQueueMutex);
dataSize = m_cachedDataSize;
pthread_mutex_unlock(&m_packetQueueMutex);
return dataSize;
}
// Returns whether non-audio content types are rejected (see streamIsReadyRead()).
bool Audio_Stream::strictContentTypeChecking()
{
return m_strictContentTypeChecking;
}
/*
 * Maps an HTTP Content-Type string to an AudioFileTypeID hint for the
 * AudioFileStream parser. Missing or unknown types fall back to MP3.
 * (Early returns replace the original goto-based flow; behavior identical.)
 */
AudioFileTypeID Audio_Stream::audioStreamTypeFromContentType(CFStringRef contentType)
{
    if (!contentType) {
        AS_TRACE("***** Unable to detect the audio stream type: missing content-type! *****\n");
        return kAudioFileMP3Type;
    }
    if (CFStringCompare(contentType, CFSTR("audio/mpeg"), 0) == kCFCompareEqualTo) {
        AS_TRACE("kAudioFileMP3Type detected\n");
        return kAudioFileMP3Type;
    }
    if (CFStringCompare(contentType, CFSTR("audio/x-wav"), 0) == kCFCompareEqualTo) {
        AS_TRACE("kAudioFileWAVEType detected\n");
        return kAudioFileWAVEType;
    }
    if (CFStringCompare(contentType, CFSTR("audio/x-aifc"), 0) == kCFCompareEqualTo) {
        AS_TRACE("kAudioFileAIFCType detected\n");
        return kAudioFileAIFCType;
    }
    if (CFStringCompare(contentType, CFSTR("audio/x-aiff"), 0) == kCFCompareEqualTo) {
        AS_TRACE("kAudioFileAIFFType detected\n");
        return kAudioFileAIFFType;
    }
    if (CFStringCompare(contentType, CFSTR("audio/x-m4a"), 0) == kCFCompareEqualTo) {
        AS_TRACE("kAudioFileM4AType detected\n");
        return kAudioFileM4AType;
    }
    if (CFStringCompare(contentType, CFSTR("audio/mp4"), 0) == kCFCompareEqualTo ||
        CFStringCompare(contentType, CFSTR("video/mp4"), 0) == kCFCompareEqualTo) {
        AS_TRACE("kAudioFileMPEG4Type detected\n");
        return kAudioFileMPEG4Type;
    }
    if (CFStringCompare(contentType, CFSTR("audio/x-caf"), 0) == kCFCompareEqualTo) {
        AS_TRACE("kAudioFileCAFType detected\n");
        return kAudioFileCAFType;
    }
    if (CFStringCompare(contentType, CFSTR("audio/aac"), 0) == kCFCompareEqualTo ||
        CFStringCompare(contentType, CFSTR("audio/aacp"), 0) == kCFCompareEqualTo) {
        AS_TRACE("kAudioFileAAC_ADTSType detected\n");
        return kAudioFileAAC_ADTSType;
    }
    AS_TRACE("***** Unable to detect the audio stream type *****\n");
    return kAudioFileMP3Type;
}
/*
 * Audio_Queue delegate: mirrors queue state transitions into stream state.
 * RUNNING is ignored while a seek is in progress; otherwise it cancels the
 * watchdog, enters PLAYING and re-applies the user-set volume if needed.
 */
void Audio_Stream::audioQueueStateChanged(Audio_Queue::State aqState)
{
    switch (aqState) {
        case Audio_Queue::RUNNING:
            if (SEEKING != state()) {
                invalidateWatchdogTimer();
                setState(PLAYING);
                // Re-apply the stored volume if the queue drifted from it
                if (m_audioQueue->volume() != m_outputVolume) {
                    m_audioQueue->setVolume(m_outputVolume);
                }
            }
            break;
        case Audio_Queue::IDLE:
            setState(STOPPED);
            break;
        case Audio_Queue::PAUSED:
            setState(PAUSED);
            break;
    }
}
/*
 * Audio_Queue delegate: the queue ran out of data to play.
 * Either re-enter BUFFERING (when the input stream is still expected to
 * deliver data) with bounce-loop detection, or finish/close the stream.
 */
void Audio_Stream::audioQueueBuffersEmpty()
{
    AS_TRACE("%s: enter\n", __PRETTY_FUNCTION__);
    /*
     * Entering here means that the audio queue has run out of data to play.
     */
    const int count = playbackDataCount();
    /*
     * If we don't have any cached data to play and we are still supposed to
     * feed the audio queue with data, enter the buffering state.
     */
    if (count == 0 && m_inputStreamRunning && FAILED != state()) {
        Stream_Configuration *config = Stream_Configuration::configuration();
        pthread_mutex_lock(&m_packetQueueMutex);
        m_playPacket = m_queuedHead;
        if (m_processedPackets.size() > 0) {
            /*
             * We have audio packets in memory (only case with a non-continuous stream),
             * so figure out the correct location to set the playback pointer so that we don't
             * start decoding the packets from the beginning when
             * buffering resumes.
             */
            queued_packet_t *firstPacket = m_processedPackets.front();
            queued_packet_t *cur = m_queuedHead;
            while (cur) {
                if (cur->identifier == firstPacket->identifier) {
                    break;
                }
                cur = cur->next;
            }
            if (cur) {
                m_playPacket = cur;
            }
        }
        pthread_mutex_unlock(&m_packetQueueMutex);
        // Always make sure we are scheduled to receive data if we start buffering
        m_inputStream->setScheduledInRunLoop(true);
        AS_WARN("Audio queue run out data, starting buffering\n");
        setState(BUFFERING);
        // Bounce detection: count how often we re-enter buffering within
        // config->bounceInterval seconds; too many bounces fails the stream.
        if (m_firstBufferingTime == 0) {
            // Never buffered, just increase the counter
            m_firstBufferingTime = CFAbsoluteTimeGetCurrent();
            m_bounceCount++;
            AS_TRACE("stream buffered, increasing bounce count %zu, interval %i\n", m_bounceCount, config->bounceInterval);
        } else {
            // Buffered before, calculate the difference
            CFAbsoluteTime cur = CFAbsoluteTimeGetCurrent();
            int diff = cur - m_firstBufferingTime;
            if (diff >= config->bounceInterval) {
                // More than bounceInterval seconds passed from the last
                // buffering. So not a continuous bouncing. Reset the
                // counters.
                m_bounceCount = 0;
                m_firstBufferingTime = 0;
                AS_TRACE("%i seconds passed from last buffering, resetting counters, interval %i\n", diff, config->bounceInterval);
            } else {
                m_bounceCount++;
                AS_TRACE("%i seconds passed from last buffering, increasing bounce count to %zu, interval %i\n", diff, m_bounceCount, config->bounceInterval);
            }
        }
        // Check if we have reached the bounce state
        if (m_bounceCount >= config->maxBounceCount) {
            // BUG FIX: the message's time window is bounceInterval (seconds);
            // the old code passed maxBounceCount (a count) as the seconds value.
            CFStringRef errorDescription = CFStringCreateWithFormat(NULL, NULL, CFSTR("Buffered %zu times in the last %i seconds"), m_bounceCount, config->bounceInterval);
            closeAndSignalError(AS_ERR_BOUNCING, errorDescription);
            if (errorDescription) {
                CFRelease(errorDescription);
            }
        }
        // Create the watchdog in case the input stream gets stuck
        createWatchdogTimer();
        return;
    }
    AS_TRACE("%i cached packets, enqueuing\n", count);
    // Keep enqueuing the packets in the queue until we have them
    pthread_mutex_lock(&m_packetQueueMutex);
    if (m_playPacket && count > 0) {
        pthread_mutex_unlock(&m_packetQueueMutex);
        determineBufferingLimits();
    } else {
        // Nothing left to play and no more input: playback is complete
        pthread_mutex_unlock(&m_packetQueueMutex);
        AS_TRACE("%s: closing the audio queue\n", __PRETTY_FUNCTION__);
        setState(PLAYBACK_COMPLETED);
        close(true);
    }
}
/*
 * Audio_Queue delegate: the queue could not be created or started.
 * Closes the input stream, enters FAILED and reports a descriptive error
 * (unsupported format vs. generic queue failure) to the delegate.
 */
void Audio_Stream::audioQueueInitializationFailed()
{
if (m_inputStreamRunning) {
if (m_inputStream) {
m_inputStream->close();
}
m_inputStreamRunning = false;
}
setState(FAILED);
if (m_delegate) {
if (audioQueue()->m_lastError == kAudioFormatUnsupportedDataFormatError) {
m_delegate->audioStreamErrorOccurred(AS_ERR_UNSUPPORTED_FORMAT, CFSTR("Audio queue failed, unsupported format"));
} else {
CFStringRef errorDescription = coreAudioErrorToCFString(CFSTR("Audio queue failed"), audioQueue()->m_lastError);
m_delegate->audioStreamErrorOccurred(AS_ERR_STREAM_PARSE, errorDescription);
if (errorDescription) {
CFRelease(errorDescription);
}
}
}
}
// Audio_Queue delegate: a single queue buffer finished playing.
// NOTE(review): intentional no-op as written — confirm nothing needs to hook in here.
void Audio_Stream::audioQueueFinishedPlayingPacket()
{
}
/*
 * Input stream delegate: the connection is open and headers are available.
 * Captures the server's content type, enforces strict type checking when
 * enabled (accepting audio/* and video/*), and opens the AudioFileStream
 * parser with a type hint derived from the (or the default) content type.
 */
void Audio_Stream::streamIsReadyRead()
{
if (m_audioStreamParserRunning) {
AS_TRACE("%s: parser already running!\n", __PRETTY_FUNCTION__);
return;
}
CFStringRef audioContentType = CFSTR("audio/");
CFStringRef videoContentType = CFSTR("video/");
const CFIndex audioContentTypeLen = CFStringGetLength(audioContentType);
const CFIndex videoContentTypeLen = CFStringGetLength(videoContentType);
bool matchesAudioContentType = false;
CFStringRef contentType = 0;
if (m_inputStream) {
contentType = m_inputStream->contentType();
}
// Replace any previously captured content type with the fresh one
if (m_contentType) {
CFRelease(m_contentType);
m_contentType = 0;
}
if (contentType) {
m_contentType = CFStringCreateCopy(kCFAllocatorDefault, contentType);
// Accept both audio/* and video/* (e.g. video/mp4 containers carry audio)
const CFIndex contentTypeLen = CFStringGetLength(contentType);
if (contentTypeLen >= audioContentTypeLen &&
(kCFCompareEqualTo == CFStringCompareWithOptions(contentType, audioContentType, CFRangeMake(0, audioContentTypeLen),0))) {
matchesAudioContentType = true;
} else if (contentTypeLen >= videoContentTypeLen &&
(kCFCompareEqualTo == CFStringCompareWithOptions(contentType, videoContentType, CFRangeMake(0, videoContentTypeLen),0))) {
matchesAudioContentType = true;
}
}
if (m_strictContentTypeChecking && !matchesAudioContentType) {
CFStringRef errorDescription = NULL;
if (m_contentType) {
errorDescription = CFStringCreateWithFormat(NULL, NULL, CFSTR("Strict content type checking active, %@ is not an audio content type"), m_contentType);
} else {
errorDescription = CFStringCreateCopy(kCFAllocatorDefault, CFSTR("Strict content type checking active, no content type provided by the server"));
}
closeAndSignalError(AS_ERR_OPEN, errorDescription);
if (errorDescription) {
CFRelease(errorDescription);
}
return;
}
m_audioDataByteCount = 0;
/* OK, it should be an audio stream, let's try to open it */
OSStatus result = AudioFileStreamOpen(this,
propertyValueCallback,
streamDataCallback,
audioStreamTypeFromContentType((contentType ? contentType : m_defaultContentType)),
&m_audioFileStream);
if (result == 0) {
AS_TRACE("%s: audio file stream opened.\n", __PRETTY_FUNCTION__);
m_audioStreamParserRunning = true;
} else {
closeAndSignalError(AS_ERR_OPEN, CFSTR("Audio file stream parser open error"));
}
}
/*
 * Input stream delegate: raw bytes arrived from the network or file.
 * Throttles the input stream when the packet cache is full (re-enabled by
 * a 100 ms timer), mirrors the bytes to the optional file output, and
 * feeds them to the AudioFileStream parser, converting parser and
 * initialization errors into stream failures.
 */
void Audio_Stream::streamHasBytesAvailable(UInt8 *data, UInt32 numBytes)
{
    AS_TRACE("%s: %u bytes\n", __FUNCTION__, (unsigned int)numBytes);
    if (!m_inputStreamRunning) {
        AS_TRACE("%s: stray callback detected!\n", __PRETTY_FUNCTION__);
        return;
    }
    pthread_mutex_lock(&m_packetQueueMutex);
    Stream_Configuration *config = Stream_Configuration::configuration();
    if (m_cachedDataSize >= config->maxPrebufferedByteCount) {
        pthread_mutex_unlock(&m_packetQueueMutex);
        // If we got a cache overflow, disable the input stream so that we don't get more data
        m_inputStream->setScheduledInRunLoop(false);
        // Schedule a timer to watch when we can enable the input stream again
        if (m_inputStreamTimer) {
            CFRunLoopTimerInvalidate(m_inputStreamTimer);
            CFRelease(m_inputStreamTimer);
            m_inputStreamTimer = 0;
        }
        CFRunLoopTimerContext ctx = {0, this, NULL, NULL, NULL};
        m_inputStreamTimer = CFRunLoopTimerCreate(NULL,
                                                  CFAbsoluteTimeGetCurrent(),
                                                  0.1, // 100 ms
                                                  0,
                                                  0,
                                                  inputStreamTimerCallback,
                                                  &ctx);
        CFRunLoopAddTimer(CFRunLoopGetCurrent(), m_inputStreamTimer, kCFRunLoopCommonModes);
    } else {
        pthread_mutex_unlock(&m_packetQueueMutex);
    }
    bool decoderFailed = false;
    pthread_mutex_lock(&m_streamStateMutex);
    decoderFailed = m_decoderFailed;
    pthread_mutex_unlock(&m_streamStateMutex);
    if (decoderFailed) {
        // BUG FIX: corrected the typo "abrubtly" in the user-visible message
        closeAndSignalError(AS_ERR_TERMINATED, CFSTR("Stream terminated abruptly"));
        return;
    }
    m_bytesReceived += numBytes;
    if (m_fileOutput) {
        // Mirror the raw bytes to disk when an output file is configured
        m_fileOutput->write(data, numBytes);
    }
    if (m_audioStreamParserRunning) {
        OSStatus result = AudioFileStreamParseBytes(m_audioFileStream, numBytes, data, (m_discontinuity ? kAudioFileStreamParseFlag_Discontinuity : 0));
        if (result != 0) {
            AS_TRACE("%s: AudioFileStreamParseBytes error %d\n", __PRETTY_FUNCTION__, (int)result);
            if (result == kAudioFileStreamError_NotOptimized) {
                closeAndSignalError(AS_ERR_UNSUPPORTED_FORMAT, CFSTR("Non-optimized formats not supported for streaming"));
            } else {
                CFStringRef errorDescription = coreAudioErrorToCFString(CFSTR("Audio file stream parse bytes error"), result);
                closeAndSignalError(AS_ERR_STREAM_PARSE, errorDescription);
                if (errorDescription) {
                    CFRelease(errorDescription);
                }
            }
        } else if (m_initializationError == kAudioConverterErr_FormatNotSupported) {
            // The parser's property callbacks flag converter setup failures here
            CFStringRef sourceFormat = sourceFormatDescription();
            CFStringRef errorDescription = CFStringCreateWithFormat(NULL, NULL, CFSTR("%@ not supported for streaming"), sourceFormat);
            closeAndSignalError(AS_ERR_UNSUPPORTED_FORMAT, errorDescription);
            if (errorDescription) {
                CFRelease(errorDescription);
            }
            if (sourceFormat) {
                CFRelease(sourceFormat);
            }
        } else if (m_initializationError != noErr) {
            CFStringRef errorDescription = coreAudioErrorToCFString(CFSTR("Error in audio stream initialization"), m_initializationError);
            closeAndSignalError(AS_ERR_OPEN, errorDescription);
            if (errorDescription) {
                CFRelease(errorDescription);
            }
        } else {
            // Parsed cleanly: subsequent bytes are continuous
            m_discontinuity = false;
        }
    }
}
/*
 * Input stream delegate: the stream reached its end. A continuous stream
 * (unknown content length) is never supposed to end, so that case is a
 * network error; otherwise enter END_OF_FILE and close the input.
 */
void Audio_Stream::streamEndEncountered()
{
    AS_TRACE("%s\n", __PRETTY_FUNCTION__);
    if (!m_inputStreamRunning) {
        AS_TRACE("%s: stray callback detected!\n", __PRETTY_FUNCTION__);
        return;
    }
    const bool continuous = !(contentLength() > 0);
    if (continuous) {
        /* Continuous streams are not supposed to end */
        closeAndSignalError(AS_ERR_NETWORK, CFSTR("Stream ended abruptly"));
        return;
    }
    setState(END_OF_FILE);
    if (m_inputStream) {
        m_inputStream->close();
    }
    m_inputStreamRunning = false;
}
/*
 * Input stream delegate: a stream/network error occurred. Ignores stray
 * callbacks after the stream was stopped; otherwise fails with AS_ERR_NETWORK.
 */
void Audio_Stream::streamErrorOccurred(CFStringRef errorDesc)
{
AS_TRACE("%s\n", __PRETTY_FUNCTION__);
if (!m_inputStreamRunning) {
AS_TRACE("%s: stray callback detected!\n", __PRETTY_FUNCTION__);
return;
}
closeAndSignalError(AS_ERR_NETWORK, errorDesc);
}
// Input stream delegate: forwards stream metadata key/value pairs to the
// stream's delegate, if one is set.
void Audio_Stream::streamMetaDataAvailable(std::map<CFStringRef,CFStringRef> metaData)
{
if (m_delegate) {
m_delegate->audioStreamMetaDataAvailable(metaData);
}
}
// Input stream delegate: records how many bytes of the stream are metadata
// (subtracted from the content length in audioDataByteCount()).
void Audio_Stream::streamMetaDataByteSizeAvailable(UInt32 sizeInBytes)
{
m_metaDataSizeInBytes = sizeInBytes;
AS_TRACE("metadata size received %i\n", m_metaDataSizeInBytes);
}
/* private */
/*
 * Returns the lowercase hex SHA-1 of (at most the first 4096 UTF-8 bytes
 * of) the given string. Caller must CFRelease the result.
 * NOTE(review): input longer than 4096 bytes is silently truncated before
 * hashing — presumably acceptable for cache identifiers; changing this
 * would change existing cache identifiers, so it is preserved.
 */
CFStringRef Audio_Stream::createHashForString(CFStringRef str)
{
    UInt8 utf8Bytes[4096];
    CFIndex numBytes = 0;
    CFStringGetBytes(str,
                     CFRangeMake(0, CFStringGetLength(str)),
                     kCFStringEncodingUTF8,
                     '?',
                     false,
                     utf8Bytes,
                     sizeof(utf8Bytes),
                     &numBytes);
    unsigned char digest[CC_SHA1_DIGEST_LENGTH];
    CC_SHA1_CTX sha1;
    CC_SHA1_Init(&sha1);
    CC_SHA1_Update(&sha1,
                   (const void *)utf8Bytes,
                   (CC_LONG)numBytes);
    CC_SHA1_Final(digest, &sha1);
    // Render the digest as two hex characters per byte
    char hex[2 * sizeof(digest) + 1];
    for (size_t i = 0; i < sizeof(digest); ++i) {
        snprintf(hex + (2 * i), 3, "%02x", (int)(digest[i]));
    }
    return CFStringCreateWithCString(kCFAllocatorDefault,
                                     (const char *)hex,
                                     kCFStringEncodingUTF8);
}
// Returns the output audio queue, creating it lazily on first use with the
// current destination format and output volume.
Audio_Queue* Audio_Stream::audioQueue()
{
    if (m_audioQueue) {
        return m_audioQueue;
    }
    AS_TRACE("No audio queue, creating\n");
    m_audioQueue = new Audio_Queue();
    m_audioQueue->m_delegate = this;
    m_audioQueue->m_streamDesc = m_dstFormat;
    m_audioQueue->m_initialOutputVolume = m_outputVolume;
    return m_audioQueue;
}
// Destroys the output audio queue (no-op when it does not exist). Resets the
// "queue has consumed packets" flag under the state mutex, and detaches the
// delegate before deletion so no callback can fire into a dying object.
void Audio_Stream::closeAudioQueue()
{
if (!m_audioQueue) {
return;
}
AS_TRACE("Releasing audio queue\n");
pthread_mutex_lock(&m_streamStateMutex);
m_audioQueueConsumedPackets = false;
pthread_mutex_unlock(&m_streamStateMutex);
// Detach before delete so queue callbacks cannot reach us mid-destruction.
m_audioQueue->m_delegate = 0;
delete m_audioQueue;
m_audioQueue = 0;
}
// The caller-provided fallback content length, used by contentLength() when
// the input stream reports no length of its own.
UInt64 Audio_Stream::defaultContentLength()
{
return m_defaultContentLength;
}
// Returns the stream content length in bytes; 0 means unknown/continuous.
// Lazily resolved under m_streamStateMutex: first asks the input stream,
// then falls back to the user-supplied default.
UInt64 Audio_Stream::contentLength()
{
pthread_mutex_lock(&m_streamStateMutex);
if (m_contentLength == 0) {
if (m_inputStream) {
m_contentLength = m_inputStream->contentLength();
if (m_contentLength == 0) {
m_contentLength = defaultContentLength();
}
}
}
pthread_mutex_unlock(&m_streamStateMutex);
// NOTE(review): m_contentLength is read here after unlock — assumed benign
// because the value only settles once; confirm against other writers.
return m_contentLength;
}
// Fails the stream: enters the FAILED state, closes everything including the
// parser, then notifies the delegate (last, once the stream is fully closed).
void Audio_Stream::closeAndSignalError(int errorCode, CFStringRef errorDescription)
{
    AS_TRACE("%s: error %i\n", __PRETTY_FUNCTION__, errorCode);

    setState(FAILED);
    close(true);

    if (m_delegate) {
        m_delegate->audioStreamErrorOccurred(errorCode, errorDescription);
    }
}
// Transitions the state machine to `state` and notifies the delegate.
// No-op when the state is unchanged. The state field is guarded by
// m_streamStateMutex; the delegate is called only after the mutex is
// released, so delegate code may call back into this object safely.
void Audio_Stream::setState(State state)
{
pthread_mutex_lock(&m_streamStateMutex);
if (m_state == state) {
pthread_mutex_unlock(&m_streamStateMutex);
return;
}
#if defined (AS_DEBUG)
// Debug builds only: trace the readable name of the new state.
switch (state) {
case BUFFERING:
AS_TRACE("state set: BUFFERING\n");
break;
case PLAYING:
AS_TRACE("state set: PLAYING\n");
break;
case PAUSED:
AS_TRACE("state set: PAUSED\n");
break;
case SEEKING:
AS_TRACE("state set: SEEKING\n");
break;
case FAILED:
AS_TRACE("state set: FAILED\n");
break;
case END_OF_FILE:
AS_TRACE("state set: END_OF_FILE\n");
break;
case PLAYBACK_COMPLETED:
AS_TRACE("state set: PLAYBACK_COMPLETED\n");
break;
default:
AS_TRACE("unknown state\n");
break;
}
#endif
m_state = state;
pthread_mutex_unlock(&m_streamStateMutex);
if (m_delegate) {
m_delegate->audioStreamStateChanged(state);
}
}
// Copies the "magic cookie" (codec configuration data) from the file stream
// parser to the audio converter, when both the cookie and the converter exist.
void Audio_Stream::setCookiesForStream(AudioFileStreamID inAudioFileStream)
{
    // Ask the parser for the cookie size; an error means there is no cookie.
    UInt32 cookieSize;
    Boolean writable;
    OSStatus err = AudioFileStreamGetPropertyInfo(inAudioFileStream,
                                                  kAudioFileStreamProperty_MagicCookieData,
                                                  &cookieSize,
                                                  &writable);
    if (err) {
        return;
    }

    // Fetch the cookie data and hand it to the decoder.
    void *cookieData = calloc(1, cookieSize);
    err = AudioFileStreamGetProperty(inAudioFileStream,
                                     kAudioFileStreamProperty_MagicCookieData,
                                     &cookieSize,
                                     cookieData);
    if (!err && m_audioConverter) {
        AudioConverterSetProperty(m_audioConverter,
                                  kAudioConverterDecompressionMagicCookie,
                                  cookieSize,
                                  cookieData);
    }
    free(cookieData);
}
float Audio_Stream::bitrate()
{
// Use the stream provided bit rate, if available
if (m_bitRate > 0) {
return m_bitRate;
}
// Stream didn't provide a bit rate, so let's calculate it
if (m_bitrateBufferIndex < kAudioStreamBitrateBufferSize) {
return 0;
}
double sum = 0;
for (size_t i=0; i < kAudioStreamBitrateBufferSize; i++) {
sum += m_bitrateBuffer[i];
}
return sum / (float)kAudioStreamBitrateBufferSize;
}
// One-shot startup watchdog (armed by createWatchdogTimer). If the audio
// queue has not consumed a single packet by the time this fires, the stream
// never started playing: fail with AS_ERR_OPEN.
void Audio_Stream::watchdogTimerCallback(CFRunLoopTimerRef timer, void *info)
{
Audio_Stream *THIS = (Audio_Stream *)info;
pthread_mutex_lock(&THIS->m_streamStateMutex);
if (!THIS->m_audioQueueConsumedPackets) {
// Unlock before failing: closeAndSignalError -> setState takes this mutex.
pthread_mutex_unlock(&THIS->m_streamStateMutex);
Stream_Configuration *config = Stream_Configuration::configuration();
CFStringRef errorDescription = CFStringCreateWithFormat(NULL, NULL, CFSTR("The stream startup watchdog activated: stream didn't start to play in %d seconds"), config->startupWatchdogPeriod);
THIS->closeAndSignalError(AS_ERR_OPEN, errorDescription);
if (errorDescription) {
CFRelease(errorDescription);
}
} else {
pthread_mutex_unlock(&THIS->m_streamStateMutex);
}
}
// Performs the actual seek once the stream is in the SEEKING state.
// Strategy: compute the packet-aligned byte position for the requested
// relative offset, then either serve the seek from the in-memory packet
// cache (if enabled and the packet is still cached) or reopen the input
// stream at the new byte position.
void Audio_Stream::seekTimerCallback(CFRunLoopTimerRef timer, void *info)
{
Audio_Stream *THIS = (Audio_Stream *)info;
if (THIS->state() != SEEKING) {
return;
}
// The seek timer fired; it is one logical shot, so dispose of it now.
pthread_mutex_lock(&THIS->m_streamStateMutex);
AS_TRACE("decoder free, seeking\n");
if (THIS->m_seekTimer) {
CFRunLoopTimerInvalidate(THIS->m_seekTimer);
CFRelease(THIS->m_seekTimer);
THIS->m_seekTimer = 0;
}
pthread_mutex_unlock(&THIS->m_streamStateMutex);
// Close the audio queue so that it won't ask any more data
THIS->closeAudioQueue();
// Translate the relative seek offset (m_seekOffset) into a byte range.
Input_Stream_Position position = THIS->streamPositionForOffset(THIS->m_seekOffset);
if (position.start == 0 && position.end == 0) {
THIS->closeAndSignalError(AS_ERR_NETWORK, CFSTR("Failed to retrieve seeking position"));
return;
}
// Align the byte position to a packet boundary via the file stream parser,
// so decoding can restart cleanly at the target packet.
const float duration = THIS->durationInSeconds();
const double packetDuration = THIS->m_srcFormat.mFramesPerPacket / THIS->m_srcFormat.mSampleRate;
if (packetDuration > 0) {
UInt32 ioFlags = 0;
SInt64 packetAlignedByteOffset;
SInt64 seekPacket = floor((duration * THIS->m_seekOffset) / packetDuration);
THIS->m_playingPacketIdentifier = seekPacket;
OSStatus err = AudioFileStreamSeek(THIS->m_audioFileStream, seekPacket, &packetAlignedByteOffset, &ioFlags);
if (!err) {
// The parser returns an offset relative to the audio data start.
position.start = packetAlignedByteOffset + THIS->m_dataOffset;
} else {
THIS->closeAndSignalError(AS_ERR_NETWORK, CFSTR("Failed to calculate seeking position"));
return;
}
} else {
THIS->closeAndSignalError(AS_ERR_NETWORK, CFSTR("Failed to calculate seeking position"));
return;
}
Stream_Configuration *config = Stream_Configuration::configuration();
// Do a cache lookup if we can find the seeked packet from the cache and no need to
// open the stream from the new position
bool foundCachedPacket = false;
queued_packet_t *seekPacket = 0;
if (config->seekingFromCacheEnabled) {
AS_LOCK_TRACE("lock: seekToOffset\n");
pthread_mutex_lock(&THIS->m_packetQueueMutex);
// Linear scan of the packet queue for the target packet identifier.
queued_packet_t *cur = THIS->m_queuedHead;
while (cur) {
if (cur->identifier == THIS->m_playingPacketIdentifier) {
foundCachedPacket = true;
seekPacket = cur;
break;
}
queued_packet_t *tmp = cur->next;
cur = tmp;
}
AS_LOCK_TRACE("unlock: seekToOffset\n");
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
} else {
AS_TRACE("Seeking from cache disabled\n");
}
if (!foundCachedPacket) {
AS_TRACE("Seeked packet not found from cache, reopening the input stream\n");
// Close but keep the stream parser running
THIS->close(false);
// Reset per-connection counters before reopening at the new position.
THIS->m_bytesReceived = 0;
THIS->m_bounceCount = 0;
THIS->m_firstBufferingTime = 0;
THIS->m_bitrateBufferIndex = 0;
THIS->m_initializationError = noErr;
THIS->m_converterRunOutOfData = false;
THIS->m_discontinuity = true;
bool success = THIS->m_inputStream->open(position);
if (success) {
// Restore the full-length content length (the ranged reopen would
// otherwise report only the remaining bytes).
THIS->setContentLength(THIS->m_originalContentLength);
pthread_mutex_lock(&THIS->m_streamStateMutex);
// Recreate the converter so the decoder starts from a clean state.
if (THIS->m_audioConverter) {
AudioConverterDispose(THIS->m_audioConverter);
}
OSStatus err = AudioConverterNew(&(THIS->m_srcFormat),
&(THIS->m_dstFormat),
&(THIS->m_audioConverter));
if (err) {
THIS->closeAndSignalError(AS_ERR_OPEN, CFSTR("Error in creating an audio converter"));
pthread_mutex_unlock(&THIS->m_streamStateMutex);
return;
}
pthread_mutex_unlock(&THIS->m_streamStateMutex);
THIS->setState(BUFFERING);
THIS->m_inputStreamRunning = true;
} else {
THIS->closeAndSignalError(AS_ERR_OPEN, CFSTR("Input stream open error"));
return;
}
} else {
AS_TRACE("Seeked packet found from cache!\n");
// Found the packet from the cache, let's use the cache directly.
pthread_mutex_lock(&THIS->m_packetQueueMutex);
THIS->m_playPacket = seekPacket;
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
THIS->m_discontinuity = true;
THIS->setState(PLAYING);
}
// Restart the output side: fresh queue, scheduled input, running decoder.
THIS->audioQueue()->init();
THIS->m_inputStream->setScheduledInRunLoop(true);
THIS->setDecoderRunState(true);
}
// Periodic timer: while the input stream runs, re-schedules it in the run
// loop whenever the prebuffer has room below maxPrebufferedByteCount
// (back-pressure: the stream is presumably unscheduled elsewhere when the
// buffer fills — that counterpart is outside this chunk). Self-invalidates
// once the input stream stops running.
void Audio_Stream::inputStreamTimerCallback(CFRunLoopTimerRef timer, void *info)
{
Audio_Stream *THIS = (Audio_Stream *)info;
if (!THIS->m_inputStreamRunning) {
if (THIS->m_inputStreamTimer) {
CFRunLoopTimerInvalidate(THIS->m_inputStreamTimer);
CFRelease(THIS->m_inputStreamTimer);
THIS->m_inputStreamTimer = 0;
}
return;
}
pthread_mutex_lock(&THIS->m_packetQueueMutex);
Stream_Configuration *config = Stream_Configuration::configuration();
if (THIS->m_cachedDataSize < config->maxPrebufferedByteCount) {
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
THIS->m_inputStream->setScheduledInRunLoop(true);
} else {
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
}
}
// Fired on the main run loop (scheduled by the decoder thread) to move the
// stream into the PLAYING state from the correct thread.
void Audio_Stream::stateSetTimerCallback(CFRunLoopTimerRef timer, void *info)
{
    Audio_Stream *THIS = (Audio_Stream *)info;

    // This is a one-shot timer: firing invalidates it automatically, so only
    // our reference needs to be released (under the state mutex).
    pthread_mutex_lock(&THIS->m_streamStateMutex);
    if (THIS->m_stateSetTimer) {
        CFRelease(THIS->m_stateSetTimer);
        THIS->m_stateSetTimer = 0;
    }
    pthread_mutex_unlock(&THIS->m_streamStateMutex);

    THIS->setState(PLAYING);
}
bool Audio_Stream::decoderShouldRun()
{
const Audio_Stream::State state = this->state();
pthread_mutex_lock(&m_streamStateMutex);
if (m_preloading ||
!m_decoderShouldRun ||
m_converterRunOutOfData ||
m_decoderFailed ||
state == PAUSED ||
state == STOPPED ||
state == SEEKING ||
state == FAILED ||
state == PLAYBACK_COMPLETED ||
m_dstFormat.mBytesPerPacket == 0) {
pthread_mutex_unlock(&m_streamStateMutex);
return false;
} else {
pthread_mutex_unlock(&m_streamStateMutex);
return true;
}
}
// Decoder-thread tick: converts one source packet through the AudioConverter
// and pushes the PCM result to the audio queue. Also restarts the converter
// when it previously ran out of data and new packets have since arrived, and
// applies any pending packet rewind before converting.
void Audio_Stream::decodeSinglePacket(CFRunLoopTimerRef timer, void *info)
{
Audio_Stream *THIS = (Audio_Stream *)info;
pthread_mutex_lock(&THIS->m_streamStateMutex);
if (THIS->m_decoderShouldRun && THIS->m_converterRunOutOfData) {
pthread_mutex_unlock(&THIS->m_streamStateMutex);
// Check if we got more data so we can run the decoder again
pthread_mutex_lock(&THIS->m_packetQueueMutex);
if (THIS->m_playPacket) {
// Yes, got data again
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
AS_TRACE("Converter run out of data: more data available. Restarting the audio converter\n");
pthread_mutex_lock(&THIS->m_streamStateMutex);
// Recreate the converter: once it has been signalled end-of-data it
// must not be reused.
if (THIS->m_audioConverter) {
AudioConverterDispose(THIS->m_audioConverter);
}
OSStatus err = AudioConverterNew(&(THIS->m_srcFormat),
&(THIS->m_dstFormat),
&(THIS->m_audioConverter));
if (err) {
AS_TRACE("Error in creating an audio converter, error %i\n", err);
THIS->m_decoderFailed = true;
}
THIS->m_converterRunOutOfData = false;
pthread_mutex_unlock(&THIS->m_streamStateMutex);
} else {
AS_TRACE("decoder: converter run out data: bailing out\n");
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
}
} else {
pthread_mutex_unlock(&THIS->m_streamStateMutex);
}
if (!THIS->decoderShouldRun()) {
return;
}
// Describe the output buffer the converter should fill.
AudioBufferList outputBufferList;
outputBufferList.mNumberBuffers = 1;
outputBufferList.mBuffers[0].mNumberChannels = THIS->m_dstFormat.mChannelsPerFrame;
outputBufferList.mBuffers[0].mDataByteSize = THIS->m_outputBufferSize;
outputBufferList.mBuffers[0].mData = THIS->m_outputBuffer;
AudioStreamPacketDescription description;
description.mStartOffset = 0;
description.mDataByteSize = THIS->m_outputBufferSize;
description.mVariableFramesInPacket = 0;
UInt32 ioOutputDataPackets = THIS->m_outputBufferSize / THIS->m_dstFormat.mBytesPerPacket;
AS_TRACE("calling AudioConverterFillComplexBuffer\n");
// Apply a pending rewind: skip the play pointer forward in the cached list
// (packets are consumed front-to-back, so this re-positions playback).
pthread_mutex_lock(&THIS->m_packetQueueMutex);
if (THIS->m_numPacketsToRewind > 0) {
AS_TRACE("Rewinding %i packets\n", THIS->m_numPacketsToRewind);
queued_packet_t *front = THIS->m_playPacket;
while (front && THIS->m_numPacketsToRewind-- > 0) {
queued_packet_t *tmp = front->next;
front = tmp;
}
THIS->m_playPacket = front;
THIS->m_numPacketsToRewind = 0;
}
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
// Pulls source packets via encoderDataCallback and produces PCM output.
OSStatus err = AudioConverterFillComplexBuffer(THIS->m_audioConverter,
&encoderDataCallback,
THIS,
&ioOutputDataPackets,
&outputBufferList,
NULL);
pthread_mutex_lock(&THIS->m_streamStateMutex);
if (err == noErr && THIS->m_decoderShouldRun) {
THIS->m_audioQueueConsumedPackets = true;
if (THIS->m_state != PLAYING && !THIS->m_stateSetTimer) {
// Set the playing state in the main thread
CFRunLoopTimerContext ctx = {0, THIS, NULL, NULL, NULL};
THIS->m_stateSetTimer = CFRunLoopTimerCreate(NULL, 0, 0, 0, 0,
stateSetTimerCallback,
&ctx);
CFRunLoopAddTimer(THIS->m_mainRunLoop, THIS->m_stateSetTimer, kCFRunLoopCommonModes);
}
pthread_mutex_unlock(&THIS->m_streamStateMutex);
// This blocks until the queue has been able to consume the packets
THIS->audioQueue()->handleAudioPackets(outputBufferList.mBuffers[0].mDataByteSize,
outputBufferList.mNumberBuffers,
outputBufferList.mBuffers[0].mData,
&description);
// Give the raw PCM samples to the delegate (e.g. for analysis/visualization).
const UInt32 nFrames = outputBufferList.mBuffers[0].mDataByteSize / THIS->m_dstFormat.mBytesPerFrame;
if (THIS->m_delegate) {
THIS->m_delegate->samplesAvailable(&outputBufferList, nFrames, description);
}
Stream_Configuration *config = Stream_Configuration::configuration();
const bool continuous = (!(THIS->contentLength() > 0));
pthread_mutex_lock(&THIS->m_packetQueueMutex);
/* The only reason we keep the already converted packets in memory
* is seeking from the cache. If in-memory seeking is disabled we
* can just cleanup the cache immediately. The same applies for
* continuous streams. They are never seeked backwards.
*/
if (!config->seekingFromCacheEnabled ||
continuous ||
THIS->m_cachedDataSize >= config->maxPrebufferedByteCount) {
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
THIS->cleanupCachedData();
} else {
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
}
} else if (err == kAudio_ParamError) {
AS_TRACE("decoder: converter param error\n");
/*
* This means that iOS terminated background audio. Stream must be restarted.
* Signal an error so that the app can handle it.
*/
THIS->m_decoderFailed = true;
pthread_mutex_unlock(&THIS->m_streamStateMutex);
} else {
pthread_mutex_unlock(&THIS->m_streamStateMutex);
}
}
// Entry point of the decoder thread. Publishes the thread's run loop in
// m_decodeRunLoop (under the state mutex), then spins the run loop with a
// 5 ms tick timer (fsTick) until the run loop is stopped from outside.
void *Audio_Stream::decodeLoop(void *data)
{
Audio_Stream *THIS = (Audio_Stream *)data;
pthread_mutex_lock(&THIS->m_streamStateMutex);
THIS->m_decodeRunLoop = CFRunLoopGetCurrent();
pthread_mutex_unlock(&THIS->m_streamStateMutex);
/*
* Silly timer to make the runloop to wake up every 5ms
*/
CFRunLoopTimerContext ctx = {0, NULL, NULL, NULL, NULL};
CFRunLoopTimerRef tickTimer = CFRunLoopTimerCreate (NULL, CFAbsoluteTimeGetCurrent() + 0.005, 0.005, 0, 0, fsTick, &ctx);
CFRunLoopAddTimer(THIS->m_decodeRunLoop, tickTimer, kCFRunLoopCommonModes);
// Blocks here until CFRunLoopStop is called on this run loop.
CFRunLoopRun();
CFRunLoopRemoveTimer(THIS->m_decodeRunLoop, tickTimer, kCFRunLoopCommonModes);
CFRelease(tickTimer);
AS_TRACE("returning from decodeLoop, bye\n");
return 0;
}
// Arms the one-shot startup watchdog on the current run loop. If the stream
// has not started playing within config->startupWatchdogPeriod seconds,
// watchdogTimerCallback fails the stream. Disabled when the configured
// period is zero or negative.
void Audio_Stream::createWatchdogTimer()
{
    Stream_Configuration *config = Stream_Configuration::configuration();
    if (!(config->startupWatchdogPeriod > 0)) {
        return;
    }

    // Never keep two watchdogs alive at the same time.
    invalidateWatchdogTimer();

    /*
     * Start the WD if we have one requested. In this way we can track
     * that the stream doesn't stuck forever on the buffering state
     * (for instance some network error condition)
     */
    CFRunLoopTimerContext ctx = {0, this, NULL, NULL, NULL};
    m_watchdogTimer = CFRunLoopTimerCreate(NULL,
                                           CFAbsoluteTimeGetCurrent() + config->startupWatchdogPeriod,
                                           0,
                                           0,
                                           0,
                                           watchdogTimerCallback,
                                           &ctx);
    AS_TRACE("Starting the startup watchdog, period %i seconds\n", config->startupWatchdogPeriod);
    CFRunLoopAddTimer(CFRunLoopGetCurrent(), m_watchdogTimer, kCFRunLoopCommonModes);
}
// Cancels and releases the startup watchdog, if one is armed.
void Audio_Stream::invalidateWatchdogTimer()
{
    if (!m_watchdogTimer) {
        return;
    }
    CFRunLoopTimerInvalidate(m_watchdogTimer);
    CFRelease(m_watchdogTimer);
    m_watchdogTimer = 0;
    AS_TRACE("Watchdog invalidated\n");
}
int Audio_Stream::cachedDataCount()
{
AS_LOCK_TRACE("lock: cachedDataCount\n");
pthread_mutex_lock(&m_packetQueueMutex);
int count = 0;
queued_packet_t *cur = m_queuedHead;
while (cur) {
cur = cur->next;
count++;
}
AS_LOCK_TRACE("unlock: cachedDataCount\n");
pthread_mutex_unlock(&m_packetQueueMutex);
return count;
}
int Audio_Stream::playbackDataCount()
{
AS_LOCK_TRACE("lock: playbackDataCount\n");
pthread_mutex_lock(&m_packetQueueMutex);
int count = 0;
queued_packet_t *cur = m_playPacket;
while (cur) {
cur = cur->next;
count++;
}
AS_LOCK_TRACE("unlock: playbackDataCount\n");
pthread_mutex_unlock(&m_packetQueueMutex);
return count;
}
// Current output level metering state, delegated to the audio queue
// (the queue is created lazily if it does not exist yet).
AudioQueueLevelMeterState Audio_Stream::levels()
{
return audioQueue()->levels();
}
// Decides whether enough data has been buffered to start (or restart) the
// decoder. Checks, in order: a packet-count threshold, a byte-count threshold
// (different limits for continuous vs. finite streams), and finally an
// override: if playback never started but ~90% of the remaining stream has
// been received, start anyway.
void Audio_Stream::determineBufferingLimits()
{
if (state() == PAUSED || state() == SEEKING) {
return;
}
Stream_Configuration *config = Stream_Configuration::configuration();
const bool continuous = (!(contentLength() > 0));
if (!m_initialBufferingCompleted) {
// Check if we have enough prebuffered data to start playback
AS_TRACE("initial buffering not completed, checking if enough data\n");
if (config->usePrebufferSizeCalculationInPackets) {
const int packetCount = cachedDataCount();
if (packetCount >= config->requiredInitialPrebufferedPacketCount) {
AS_TRACE("More than %i packets prebuffered, required %i packets. Playback can be started\n",
packetCount,
config->requiredInitialPrebufferedPacketCount);
m_initialBufferingCompleted = true;
setDecoderRunState(true);
return;
}
}
// Byte-count fallback: the required amount differs for continuous and
// finite streams.
int lim;
if (continuous) {
// Continuous stream
lim = config->requiredInitialPrebufferedByteCountForContinuousStream;
AS_TRACE("continuous stream, %i bytes must be cached to start the playback\n", lim);
} else {
// Non-continuous
lim = config->requiredInitialPrebufferedByteCountForNonContinuousStream;
AS_TRACE("non-continuous stream, %i bytes must be cached to start the playback\n", lim);
}
pthread_mutex_lock(&m_packetQueueMutex);
if (m_cachedDataSize > lim) {
pthread_mutex_unlock(&m_packetQueueMutex);
AS_TRACE("buffered %zu bytes, required for playback %i, starting playback\n", m_cachedDataSize, lim);
m_initialBufferingCompleted = true;
setDecoderRunState(true);
} else {
pthread_mutex_unlock(&m_packetQueueMutex);
AS_TRACE("not enough cached data to start playback\n");
}
}
// If the stream has never started playing and we have received 90% of the data of the stream,
// let's override the limits
bool audioQueueConsumedPackets = false;
pthread_mutex_lock(&m_streamStateMutex);
audioQueueConsumedPackets = m_audioQueueConsumedPackets;
pthread_mutex_unlock(&m_streamStateMutex);
if (!audioQueueConsumedPackets && contentLength() > 0) {
// NOTE: this `config` shadows the outer one; both point to the same
// singleton configuration.
Stream_Configuration *config = Stream_Configuration::configuration();
const UInt64 seekLength = contentLength() * m_seekOffset;
AS_TRACE("seek length %llu\n", seekLength);
const UInt64 numBytesRequiredToBeBuffered = (contentLength() - seekLength) * 0.9;
AS_TRACE("audio queue not consumed packets, content length %llu, required bytes to be buffered %llu\n", contentLength(), numBytesRequiredToBeBuffered);
if (m_bytesReceived >= numBytesRequiredToBeBuffered ||
m_bytesReceived >= config->maxPrebufferedByteCount * 0.9) {
m_initialBufferingCompleted = true;
setDecoderRunState(true);
AS_TRACE("%llu bytes received, overriding buffering limits\n", m_bytesReceived);
}
}
}
// Frees already-decoded packets from the front of the cache. Packets are
// released only while they match the back of m_processedPackets (i.e. they
// have definitely been consumed), and never past the current play pointer.
// Bails out entirely if the decoder is not supposed to run.
void Audio_Stream::cleanupCachedData()
{
pthread_mutex_lock(&m_streamStateMutex);
if (!m_decoderShouldRun) {
AS_TRACE("cleanupCachedData: decoder should not run, bailing out!\n");
pthread_mutex_unlock(&m_streamStateMutex);
return;
} else {
pthread_mutex_unlock(&m_streamStateMutex);
}
AS_LOCK_TRACE("cleanupCachedData: lock\n");
pthread_mutex_lock(&m_packetQueueMutex);
queued_packet_t *cur = m_queuedHead;
/* Incoming (not yet processed) packets are added at the end (tail)
of the queue. Hence the processed packets reside in the front
of the queue. Clean the packets until we found the last packet
which is still needed.
*/
for (;;) {
// Stop at the packet currently being played.
if (cur && m_playPacket && cur->identifier == m_playPacket->identifier) {
break;
}
// Free the front packet only if it is the oldest processed packet;
// otherwise it has not been consumed yet and must stay cached.
if (cur && !m_processedPackets.empty() &&
cur->identifier == m_processedPackets.back()->identifier) {
queued_packet_t *nextPacket = cur->next;
m_cachedDataSize -= cur->desc.mDataByteSize;
free(cur);
cur = nextPacket;
m_processedPackets.pop_back();
} else {
break;
}
}
m_queuedHead = cur;
AS_LOCK_TRACE("cleanupCachedData: unlock\n");
pthread_mutex_unlock(&m_packetQueueMutex);
}
// AudioConverter input callback: supplies exactly one cached source packet
// per invocation. When the play pointer is exhausted, signals end-of-data to
// the converter (zero packets, zero bytes, noErr) and flags
// m_converterRunOutOfData so decodeSinglePacket can later rebuild the
// converter when more data arrives.
OSStatus Audio_Stream::encoderDataCallback(AudioConverterRef inAudioConverter, UInt32 *ioNumberDataPackets, AudioBufferList *ioData, AudioStreamPacketDescription **outDataPacketDescription, void *inUserData)
{
Audio_Stream *THIS = (Audio_Stream *)inUserData;
AS_TRACE("encoderDataCallback called\n");
AS_LOCK_TRACE("encoderDataCallback 1: lock\n");
pthread_mutex_lock(&THIS->m_packetQueueMutex);
// Dequeue one packet per time for the decoder
queued_packet_t *front = THIS->m_playPacket;
if (!front) {
/* Don't deadlock */
AS_LOCK_TRACE("encoderDataCallback 2: unlock\n");
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
/*
* End of stream - Inside your input procedure, you must set the total amount of packets read and the sizes of the data in the AudioBufferList to zero. The input procedure should also return noErr. This will signal the AudioConverter that you are out of data. More specifically, set ioNumberDataPackets and ioBufferList->mDataByteSize to zero in your input proc and return noErr. Where ioNumberDataPackets is the amount of data converted and ioBufferList->mDataByteSize is the size of the amount of data converted in each AudioBuffer within your input procedure callback. Your input procedure may be called a few more times; you should just keep returning zero and noErr.
*/
pthread_mutex_lock(&THIS->m_streamStateMutex);
THIS->m_converterRunOutOfData = true;
pthread_mutex_unlock(&THIS->m_streamStateMutex);
*ioNumberDataPackets = 0;
ioData->mBuffers[0].mDataByteSize = 0;
return noErr;
}
// Hand the packet's payload directly to the converter (no copy); the packet
// stays alive in the cache until cleanupCachedData frees it.
*ioNumberDataPackets = 1;
ioData->mBuffers[0].mData = front->data;
ioData->mBuffers[0].mDataByteSize = front->desc.mDataByteSize;
ioData->mBuffers[0].mNumberChannels = THIS->m_srcFormat.mChannelsPerFrame;
if (outDataPacketDescription) {
*outDataPacketDescription = &front->desc;
}
// Advance the play pointer and record the packet as processed.
THIS->m_playPacket = front->next;
THIS->m_processedPackets.push_front(front);
AS_LOCK_TRACE("encoderDataCallback 5: unlock\n");
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
return noErr;
}
/* This is called by audio file stream parser when it finds property values */
void Audio_Stream::propertyValueCallback(void *inClientData, AudioFileStreamID inAudioFileStream, AudioFileStreamPropertyID inPropertyID, UInt32 *ioFlags)
{
AS_TRACE("%s\n", __PRETTY_FUNCTION__);
Audio_Stream *THIS = static_cast<Audio_Stream*>(inClientData);
if (!THIS->m_audioStreamParserRunning) {
AS_TRACE("%s: stray callback detected!\n", __PRETTY_FUNCTION__);
return;
}
switch (inPropertyID) {
case kAudioFileStreamProperty_BitRate: {
bool sizeReceivedForFirstTime = (THIS->m_bitRate == 0);
UInt32 bitRateSize = sizeof(THIS->m_bitRate);
OSStatus err = AudioFileStreamGetProperty(inAudioFileStream,
kAudioFileStreamProperty_BitRate,
&bitRateSize, &THIS->m_bitRate);
if (err) {
THIS->m_bitRate = 0;
} else {
if (THIS->m_delegate && sizeReceivedForFirstTime) {
THIS->m_delegate->bitrateAvailable();
}
}
break;
}
case kAudioFileStreamProperty_DataOffset: {
SInt64 offset;
UInt32 offsetSize = sizeof(offset);
OSStatus result = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataOffset, &offsetSize, &offset);
if (result == 0) {
THIS->m_dataOffset = offset;
} else {
AS_TRACE("%s: reading kAudioFileStreamProperty_DataOffset property failed\n", __PRETTY_FUNCTION__);
}
break;
}
case kAudioFileStreamProperty_AudioDataByteCount: {
UInt32 byteCountSize = sizeof(THIS->m_audioDataByteCount);
OSStatus err = AudioFileStreamGetProperty(inAudioFileStream,
kAudioFileStreamProperty_AudioDataByteCount,
&byteCountSize, &THIS->m_audioDataByteCount);
if (err) {
THIS->m_audioDataByteCount = 0;
}
break;
}
case kAudioFileStreamProperty_AudioDataPacketCount: {
UInt32 packetCountSize = sizeof(THIS->m_audioDataPacketCount);
OSStatus err = AudioFileStreamGetProperty(inAudioFileStream,
kAudioFileStreamProperty_AudioDataPacketCount,
&packetCountSize, &THIS->m_audioDataPacketCount);
if (err) {
THIS->m_audioDataPacketCount = 0;
}
break;
}
case kAudioFileStreamProperty_ReadyToProducePackets: {
memset(&(THIS->m_srcFormat), 0, sizeof THIS->m_srcFormat);
UInt32 asbdSize = sizeof(THIS->m_srcFormat);
UInt32 formatListSize = 0;
Boolean writable;
OSStatus err = AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_DataFormat, &asbdSize, &(THIS->m_srcFormat));
if (err) {
AS_TRACE("Unable to set the src format\n");
break;
}
if (!AudioFileStreamGetPropertyInfo(inAudioFileStream, kAudioFileStreamProperty_FormatList, &formatListSize, &writable)) {
void *formatListData = calloc(1, formatListSize);
if (!AudioFileStreamGetProperty(inAudioFileStream, kAudioFileStreamProperty_FormatList, &formatListSize, formatListData)) {
for (int i=0; i < formatListSize; i += sizeof(AudioFormatListItem)) {
AudioStreamBasicDescription *pasbd = (AudioStreamBasicDescription *)formatListData + i;
if (pasbd->mFormatID == kAudioFormatMPEG4AAC_HE ||
pasbd->mFormatID == kAudioFormatMPEG4AAC_HE_V2) {
THIS->m_srcFormat = *pasbd;
break;
}
}
}
free(formatListData);
}
THIS->m_packetDuration = THIS->m_srcFormat.mFramesPerPacket / THIS->m_srcFormat.mSampleRate;
AS_TRACE("srcFormat, bytes per packet %i\n", (unsigned int)THIS->m_srcFormat.mBytesPerPacket);
if (THIS->m_audioConverter) {
AudioConverterDispose(THIS->m_audioConverter);
}
err = AudioConverterNew(&(THIS->m_srcFormat),
&(THIS->m_dstFormat),
&(THIS->m_audioConverter));
if (err) {
AS_WARN("Error in creating an audio converter, error %i\n", (int)err);
THIS->m_initializationError = err;
}
THIS->setCookiesForStream(inAudioFileStream);
THIS->audioQueue()->init();
break;
}
default: {
break;
}
}
}
/* This is called by audio file stream parser when it finds packets of audio */
// AudioFileStream packet callback: copies each parsed packet into a
// heap-allocated queued_packet_t, appends it to the cache (tail), assigns it
// a monotonically increasing identifier, samples the bitrate when the stream
// declared none, and finally re-evaluates the buffering limits.
void Audio_Stream::streamDataCallback(void *inClientData, UInt32 inNumberBytes, UInt32 inNumberPackets, const void *inInputData, AudioStreamPacketDescription *inPacketDescriptions)
{
AS_TRACE("%s: inNumberBytes %u, inNumberPackets %u\n", __FUNCTION__, inNumberBytes, inNumberPackets);
Audio_Stream *THIS = static_cast<Audio_Stream*>(inClientData);
if (!THIS->m_audioStreamParserRunning) {
AS_TRACE("%s: stray callback detected!\n", __PRETTY_FUNCTION__);
return;
}
for (int i = 0; i < inNumberPackets; i++) {
/* Allocate the packet */
// The payload is allocated inline after the struct header (flexible
// array member). NOTE(review): malloc result is not checked for NULL.
UInt32 size = inPacketDescriptions[i].mDataByteSize;
queued_packet_t *packet = (queued_packet_t *)malloc(sizeof(queued_packet_t) + size);
packet->identifier = THIS->m_packetIdentifier;
// If the stream didn't provide bitRate (m_bitRate == 0), then let's calculate it
if (THIS->m_bitRate == 0 && THIS->m_bitrateBufferIndex < kAudioStreamBitrateBufferSize) {
// Only keep sampling for one buffer cycle; this is to keep the counters (for instance) duration
// stable.
THIS->m_bitrateBuffer[THIS->m_bitrateBufferIndex++] = 8 * inPacketDescriptions[i].mDataByteSize / THIS->m_packetDuration;
if (THIS->m_bitrateBufferIndex == kAudioStreamBitrateBufferSize) {
if (THIS->m_delegate) {
THIS->m_delegate->bitrateAvailable();
}
}
}
AS_LOCK_TRACE("streamDataCallback: lock\n");
pthread_mutex_lock(&THIS->m_packetQueueMutex);
/* Prepare the packet */
packet->next = NULL;
packet->desc = inPacketDescriptions[i];
// The copied payload starts at offset 0 within the packet's own buffer.
packet->desc.mStartOffset = 0;
memcpy(packet->data, (const char *)inInputData + inPacketDescriptions[i].mStartOffset,
size);
if (THIS->m_queuedHead == NULL) {
THIS->m_queuedHead = THIS->m_queuedTail = THIS->m_playPacket = packet;
} else {
THIS->m_queuedTail->next = packet;
THIS->m_queuedTail = packet;
}
THIS->m_cachedDataSize += size;
THIS->m_packetIdentifier++;
AS_LOCK_TRACE("streamDataCallback: unlock\n");
pthread_mutex_unlock(&THIS->m_packetQueueMutex);
}
// New data may now satisfy the prebuffering thresholds.
THIS->determineBufferingLimits();
}
} // namespace astreamer
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#ifndef ASTREAMER_AUDIO_STREAM_H
#define ASTREAMER_AUDIO_STREAM_H
#import "input_stream.h"
#include "audio_queue.h"
#include <AudioToolbox/AudioToolbox.h>
#include <list>
namespace astreamer {
// One parsed audio packet, kept in a singly-linked cache queue. The payload
// is allocated inline after the header (flexible array member `data`).
typedef struct queued_packet {
UInt64 identifier; // monotonically increasing id assigned by the parser callback
AudioStreamPacketDescription desc; // packet description; mStartOffset rewritten to 0 on enqueue
struct queued_packet *next;
char data[];
} queued_packet_t;
// Playback position snapshot.
// NOTE(review): `offset` appears to be a relative position (0..1) like
// m_seekOffset, `timePlayed` in seconds — confirm against the implementation.
typedef struct {
float offset;
float timePlayed;
} AS_Playback_Position;
// Error codes reported via Audio_Stream_Delegate::audioStreamErrorOccurred().
enum Audio_Stream_Error {
AS_ERR_OPEN = 1, // Cannot open the audio stream
AS_ERR_STREAM_PARSE = 2, // Parse error
AS_ERR_NETWORK = 3, // Network error
AS_ERR_UNSUPPORTED_FORMAT = 4, // Source format not supported for streaming
AS_ERR_BOUNCING = 5, // NOTE(review): presumably repeated re-buffering (cf. m_bounceCount) — confirm
AS_ERR_TERMINATED = 6 // NOTE(review): presumably the stream was terminated externally — confirm
};
class Audio_Stream_Delegate;
class File_Output;
#define kAudioStreamBitrateBufferSize 50
/*
 * Audio_Stream drives the playback pipeline: it owns the input stream, the
 * AudioFileStream parser, the software decoder (AudioConverter, running on a
 * dedicated thread) and the output Audio_Queue. It receives raw bytes as an
 * Input_Stream_Delegate and feeds decoded PCM to the queue as an
 * Audio_Queue_Delegate.
 */
class Audio_Stream : public Input_Stream_Delegate, public Audio_Queue_Delegate {
public:
// Receiver of state changes, errors, metadata and decoded samples.
Audio_Stream_Delegate *m_delegate;
// Stream lifecycle states (see setState()).
enum State {
STOPPED,
BUFFERING,
PLAYING,
PAUSED,
SEEKING,
FAILED,
END_OF_FILE,
PLAYBACK_COMPLETED
};
Audio_Stream();
virtual ~Audio_Stream();
// --- Playback control ---
void open();
void open(Input_Stream_Position *position);
void close(bool closeParser);
void pause();
void rewind(unsigned seconds);
void startCachedDataPlayback();
// --- Position / duration queries ---
AS_Playback_Position playbackPosition();
UInt64 audioDataByteCount();
float durationInSeconds();
void seekToOffset(float offset);
Input_Stream_Position streamPositionForOffset(float offset);
float currentVolume();
// --- Configuration setters ---
void setDecoderRunState(bool decoderShouldRun);
void setVolume(float volume);
void setPlayRate(float playRate);
void setUrl(CFURLRef url);
void setStrictContentTypeChecking(bool strictChecking);
void setDefaultContentType(CFStringRef defaultContentType);
void setSeekOffset(float offset);
void setDefaultContentLength(UInt64 defaultContentLength);
void setContentLength(UInt64 contentLength);
void setPreloading(bool preloading);
bool isPreloading();
void setOutputFile(CFURLRef url);
CFURLRef outputFile();
// --- State queries ---
State state();
CFStringRef sourceFormatDescription();
CFStringRef contentType();
CFStringRef createCacheIdentifierForURL(CFURLRef url);
size_t cachedDataSize();
bool strictContentTypeChecking();
float bitrate();
UInt64 defaultContentLength();
UInt64 contentLength();
int playbackDataCount();
AudioQueueLevelMeterState levels();
/* Audio_Queue_Delegate */
void audioQueueStateChanged(Audio_Queue::State state);
void audioQueueBuffersEmpty();
void audioQueueInitializationFailed();
void audioQueueFinishedPlayingPacket();
/* Input_Stream_Delegate */
void streamIsReadyRead();
void streamHasBytesAvailable(UInt8 *data, UInt32 numBytes);
void streamEndEncountered();
void streamErrorOccurred(CFStringRef errorDesc);
void streamMetaDataAvailable(std::map<CFStringRef,CFStringRef> metaData);
void streamMetaDataByteSizeAvailable(UInt32 sizeInBytes);
private:
// Non-copyable (declared, not defined).
Audio_Stream(const Audio_Stream&);
Audio_Stream& operator=(const Audio_Stream&);
// --- Run flags ---
bool m_inputStreamRunning;
bool m_audioStreamParserRunning;
bool m_initialBufferingCompleted;
bool m_discontinuity;
bool m_preloading;
bool m_audioQueueConsumedPackets; // true once the queue played anything (watchdog check)
// --- Content length bookkeeping (bytes; 0 = continuous stream) ---
UInt64 m_defaultContentLength;
UInt64 m_contentLength;
UInt64 m_originalContentLength; // restored after ranged (seek) reopens
UInt64 m_bytesReceived;
// Guarded by m_streamStateMutex; always change via setState().
State m_state;
Input_Stream *m_inputStream;
Audio_Queue *m_audioQueue;
// --- Run loop timers ---
CFRunLoopTimerRef m_watchdogTimer;
CFRunLoopTimerRef m_seekTimer;
CFRunLoopTimerRef m_inputStreamTimer;
CFRunLoopTimerRef m_stateSetTimer; // one-shot, fired on the main run loop
CFRunLoopTimerRef m_decodeTimer;
AudioFileStreamID m_audioFileStream; // the audio file stream parser
AudioConverterRef m_audioConverter; // decoder: m_srcFormat -> m_dstFormat
AudioStreamBasicDescription m_srcFormat;
AudioStreamBasicDescription m_dstFormat;
OSStatus m_initializationError;
// Decoder output scratch buffer.
UInt32 m_outputBufferSize;
UInt8 *m_outputBuffer;
// Packet identifiers: next id to assign, and the id playback is at.
UInt64 m_packetIdentifier;
UInt64 m_playingPacketIdentifier;
UInt64 m_dataOffset; // byte offset of the audio data within the stream
float m_seekOffset; // requested relative seek position (0..1)
size_t m_bounceCount;
CFAbsoluteTime m_firstBufferingTime;
bool m_strictContentTypeChecking;
CFStringRef m_defaultContentType;
CFStringRef m_contentType;
File_Output *m_fileOutput;
CFURLRef m_outputFile;
// --- Packet cache (guarded by m_packetQueueMutex) ---
queued_packet_t *m_queuedHead;
queued_packet_t *m_queuedTail;
queued_packet_t *m_playPacket; // next packet to decode
std::list <queued_packet_t*> m_processedPackets; // consumed packets, newest first
unsigned m_numPacketsToRewind;
size_t m_cachedDataSize;
// --- Parser-reported stream properties ---
UInt64 m_audioDataByteCount;
UInt64 m_audioDataPacketCount;
UInt32 m_bitRate;
UInt32 m_metaDataSizeInBytes;
double m_packetDuration; // seconds per packet (framesPerPacket / sampleRate)
// Bitrate sampling used when the stream declares no bit rate.
double m_bitrateBuffer[kAudioStreamBitrateBufferSize];
size_t m_bitrateBufferIndex;
float m_outputVolume;
// --- Decoder state (guarded by m_streamStateMutex) ---
bool m_converterRunOutOfData;
bool m_decoderShouldRun;
bool m_decoderFailed;
bool m_decoderThreadCreated;
pthread_mutex_t m_packetQueueMutex;
pthread_mutex_t m_streamStateMutex;
pthread_t m_decodeThread;
CFRunLoopRef m_decodeRunLoop;
CFRunLoopRef m_mainRunLoop;
// --- Internal helpers (see implementation for details) ---
CFStringRef createHashForString(CFStringRef str);
Audio_Queue *audioQueue();
void closeAudioQueue();
void closeAndSignalError(int error, CFStringRef errorDescription);
void setState(State state);
void setCookiesForStream(AudioFileStreamID inAudioFileStream);
void createWatchdogTimer();
void invalidateWatchdogTimer();
int cachedDataCount();
void determineBufferingLimits();
void cleanupCachedData();
// --- Run loop timer callbacks (info == the Audio_Stream instance) ---
static void watchdogTimerCallback(CFRunLoopTimerRef timer, void *info);
static void seekTimerCallback(CFRunLoopTimerRef timer, void *info);
static void inputStreamTimerCallback(CFRunLoopTimerRef timer, void *info);
static void stateSetTimerCallback(CFRunLoopTimerRef timer, void *info);
bool decoderShouldRun();
static void decodeSinglePacket(CFRunLoopTimerRef timer, void *info);
// Decoder thread entry point.
static void *decodeLoop(void *arg);
// --- CoreAudio callbacks ---
static OSStatus encoderDataCallback(AudioConverterRef inAudioConverter, UInt32 *ioNumberDataPackets, AudioBufferList *ioData, AudioStreamPacketDescription **outDataPacketDescription, void *inUserData);
static void propertyValueCallback(void *inClientData, AudioFileStreamID inAudioFileStream, AudioFileStreamPropertyID inPropertyID, UInt32 *ioFlags);
static void streamDataCallback(void *inClientData, UInt32 inNumberBytes, UInt32 inNumberPackets, const void *inInputData, AudioStreamPacketDescription *inPacketDescriptions);
AudioFileTypeID audioStreamTypeFromContentType(CFStringRef contentType);
};
// Observer interface for Audio_Stream events. Implementors receive state
// transitions, errors, metadata and decoded samples from the stream.
class Audio_Stream_Delegate {
public:
    // Called whenever the stream moves to a new playback state.
    virtual void audioStreamStateChanged(Audio_Stream::State state) = 0;
    // Called on a stream failure; errorDescription may describe the cause.
    virtual void audioStreamErrorOccurred(int errorCode, CFStringRef errorDescription) = 0;
    // Called when metadata (e.g. ID3/ICY tags) has been parsed from the stream.
    virtual void audioStreamMetaDataAvailable(std::map<CFStringRef,CFStringRef> metaData) = 0;
    // Called when decoded PCM samples are ready for consumption.
    virtual void samplesAvailable(AudioBufferList *samples, UInt32 frames, AudioStreamPacketDescription description) = 0;
    // Called when a bitrate estimate has been computed.
    virtual void bitrateAvailable() = 0;
};
} // namespace astreamer
#endif // ASTREAMER_AUDIO_STREAM_H
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#include "caching_stream.h"
#include "file_output.h"
#include "stream_configuration.h"
#include "file_stream.h"
//#define CS_DEBUG 1
#if !defined (CS_DEBUG)
#define CS_TRACE(...) do {} while (0)
#define CS_TRACE_CFSTRING(X) do {} while (0)
#define CS_TRACE_CFURL(X) do {} while (0)
#else
#define CS_TRACE(...) printf(__VA_ARGS__)
#define CS_TRACE_CFSTRING(X) CS_TRACE("%s\n", CFStringGetCStringPtr(X, kCFStringEncodingMacRoman))
#define CS_TRACE_CFURL(X) CS_TRACE_CFSTRING(CFURLGetString(X))
#endif
namespace astreamer {
// Wraps `target` (typically an HTTP stream) with transparent disk caching.
// Takes ownership of `target` (released in the destructor) and registers
// itself as the delegate of both the wrapped stream and the local file
// stream so it can observe and forward all stream events.
Caching_Stream::Caching_Stream(Input_Stream *target) :
    m_target(target),
    m_fileOutput(0),
    m_fileStream(new File_Stream()),
    m_cacheable(false),
    m_writable(false),
    m_useCache(false),
    m_cacheMetaDataWritten(false),
    m_cacheIdentifier(0),
    m_fileUrl(0),
    m_metaDataUrl(0)
{
    m_target->m_delegate = this;
    m_fileStream->m_delegate = this;
}
// Releases the owned streams, the file output and all CF objects.
Caching_Stream::~Caching_Stream()
{
    if (m_target) {
        delete m_target;
        m_target = 0;
    }
    if (m_fileOutput) {
        delete m_fileOutput;
        m_fileOutput = 0;
    }
    if (m_fileStream) {
        delete m_fileStream;
        m_fileStream = 0;
    }
    if (m_cacheIdentifier) {
        CFRelease(m_cacheIdentifier);
        m_cacheIdentifier = 0;
    }
    if (m_fileUrl) {
        CFRelease(m_fileUrl);
        m_fileUrl = 0;
    }
    if (m_metaDataUrl) {
        CFRelease(m_metaDataUrl);
        // Bug fix: the original reset m_fileUrl here (copy-paste error),
        // leaving m_metaDataUrl dangling after release.
        m_metaDataUrl = 0;
    }
}
// Converts a path string (e.g. "file:///dir/id") into a file-path CFURL.
// Returns a +1 retained URL the caller must release, or NULL on failure.
CFURLRef Caching_Stream::createFileURLWithPath(CFStringRef path)
{
    CFURLRef fileUrl = NULL;
    if (!path) {
        return fileUrl;
    }
    // Percent-escape the path so characters such as spaces form a valid URL.
    // NOTE(review): CFURLCreateStringByAddingPercentEscapes is deprecated on
    // modern SDKs — consider CFURLCreateWithFileSystemPath; confirm before changing.
    CFStringRef escapedPath = CFURLCreateStringByAddingPercentEscapes(kCFAllocatorDefault, path, NULL, NULL, kCFStringEncodingUTF8);
    // Fall back to the raw path if escaping failed.
    CFURLRef regularUrl = CFURLCreateWithString(kCFAllocatorDefault, (escapedPath ? escapedPath : path), NULL);
    if (regularUrl) {
        // Convert to a file-path URL, required by the CF file stream APIs.
        fileUrl = CFURLCreateFilePathURL(kCFAllocatorDefault, regularUrl, NULL);
        CFRelease(regularUrl);
    }
    if (escapedPath) {
        CFRelease(escapedPath);
    }
    return fileUrl;
}
// Reads the cached meta data file (which stores the original stream's
// content type as UTF-8 text) and applies it to the local file stream so
// the parser knows the audio format when playing from cache.
void Caching_Stream::readMetaData()
{
    if (!m_metaDataUrl) {
        return;
    }
    CFReadStreamRef readStream = CFReadStreamCreateWithFile(kCFAllocatorDefault, m_metaDataUrl);
    if (readStream) {
        if (CFReadStreamOpen(readStream)) {
            // The meta data file only holds a short content-type string,
            // so a single 1 KB read is sufficient.
            UInt8 buf[1024];
            CFIndex bytesRead = CFReadStreamRead(readStream, buf, 1024);
            if (bytesRead > 0) {
                CFStringRef contentType = CFStringCreateWithBytes(kCFAllocatorDefault, buf, bytesRead, kCFStringEncodingUTF8, false);
                if (contentType) {
                    if (m_fileStream) {
                        CS_TRACE("Setting the content type of the file stream based on the meta data\n");
                        CS_TRACE_CFSTRING(contentType);
                        m_fileStream->setContentType(contentType);
                    }
                    CFRelease(contentType);
                }
            }
            CFReadStreamClose(readStream);
        }
        CFRelease(readStream);
    }
}
// Reports the position of whichever stream is currently feeding playback:
// the local cache file when cached, otherwise the wrapped remote stream.
Input_Stream_Position Caching_Stream::position()
{
    return m_useCache ? m_fileStream->position() : m_target->position();
}
// Content type of the active stream (cache file or remote target).
CFStringRef Caching_Stream::contentType()
{
    return m_useCache ? m_fileStream->contentType() : m_target->contentType();
}
// Content length of the active stream (cache file or remote target).
size_t Caching_Stream::contentLength()
{
    return m_useCache ? m_fileStream->contentLength() : m_target->contentLength();
}
// Opens the stream from the beginning. A cache entry is considered complete
// only when BOTH the data file and the meta data file exist on disk; in
// that case playback runs from the local file. Otherwise the remote stream
// is opened and marked as a caching candidate.
bool Caching_Stream::open()
{
    const bool haveCompleteCacheEntry =
        CFURLResourceIsReachable(m_metaDataUrl, NULL) &&
        CFURLResourceIsReachable(m_fileUrl, NULL);
    m_writable = false;
    m_cacheMetaDataWritten = false;
    if (haveCompleteCacheEntry) {
        m_cacheable = false;
        m_useCache = true;
        readMetaData();
        CS_TRACE("Playing file from cache\n");
        CS_TRACE_CFURL(m_fileUrl);
        return m_fileStream->open();
    }
    m_cacheable = true;
    m_useCache = false;
    CS_TRACE("File not cached\n");
    return m_target->open();
}
// Opens the stream from an explicit byte position (a seek). A seeked
// remote stream is never cacheable — we would only capture a partial
// file — so m_cacheable stays false in both branches.
bool Caching_Stream::open(const Input_Stream_Position& position)
{
    const bool haveCompleteCacheEntry =
        CFURLResourceIsReachable(m_metaDataUrl, NULL) &&
        CFURLResourceIsReachable(m_fileUrl, NULL);
    m_cacheable = false;
    m_writable = false;
    m_cacheMetaDataWritten = false;
    if (haveCompleteCacheEntry) {
        m_useCache = true;
        readMetaData();
        CS_TRACE("Playing file from cache\n");
        CS_TRACE_CFURL(m_fileUrl);
        return m_fileStream->open(position);
    }
    m_useCache = false;
    CS_TRACE("File not cached\n");
    return m_target->open(position);
}
// Closes both underlying streams. Closing an unopened stream is a no-op
// in the underlying implementations, so no state checks are needed here.
void Caching_Stream::close()
{
    m_fileStream->close();
    m_target->close();
}
void Caching_Stream::setScheduledInRunLoop(bool scheduledInRunLoop)
{
if (m_useCache) {
m_fileStream->setScheduledInRunLoop(scheduledInRunLoop);
} else {
m_target->setScheduledInRunLoop(scheduledInRunLoop);
}
}
// Sets the remote URL on the wrapped target stream. The local cache file
// URL is derived separately from the cache identifier (see setCacheIdentifier).
void Caching_Stream::setUrl(CFURLRef url)
{
    m_target->setUrl(url);
}
// Sets the identifier used to name the cache files. Derives the data-file
// and meta-data-file URLs inside the configured cache directory and points
// the local file stream at the data file.
void Caching_Stream::setCacheIdentifier(CFStringRef cacheIdentifier)
{
    // Bug fix: release any previously set identifier so repeated calls
    // (e.g. when the stream is reused for another track) do not leak.
    if (m_cacheIdentifier) {
        CFRelease(m_cacheIdentifier);
        m_cacheIdentifier = 0;
    }
    m_cacheIdentifier = CFStringCreateCopy(kCFAllocatorDefault, cacheIdentifier);
    // A new identifier invalidates any file output opened for the old one.
    if (m_fileOutput) {
        delete m_fileOutput;
        m_fileOutput = 0;
    }
    Stream_Configuration *config = Stream_Configuration::configuration();
    CFStringRef filePath = CFStringCreateWithFormat(NULL, NULL, CFSTR("file://%@/%@"), config->cacheDirectory, m_cacheIdentifier);
    CFStringRef metaDataPath = CFStringCreateWithFormat(NULL, NULL, CFSTR("file://%@/%@.metadata"), config->cacheDirectory, m_cacheIdentifier);
    if (m_fileUrl) {
        CFRelease(m_fileUrl);
        m_fileUrl = 0;
    }
    if (m_metaDataUrl) {
        CFRelease(m_metaDataUrl);
        m_metaDataUrl = 0;
    }
    m_fileUrl = createFileURLWithPath(filePath);
    m_metaDataUrl = createFileURLWithPath(metaDataPath);
    m_fileStream->setUrl(m_fileUrl);
    CFRelease(filePath);
    CFRelease(metaDataPath);
}
// A URL is cacheable only when it uses the plain "http" scheme; caching
// makes no sense for local files, and other schemes are not supported.
bool Caching_Stream::canHandleUrl(CFURLRef url)
{
    if (!url) {
        return false;
    }
    bool isHttp = false;
    CFStringRef scheme = CFURLCopyScheme(url);
    if (scheme) {
        isHttp = (CFStringCompare(scheme, CFSTR("http"), 0) == kCFCompareEqualTo);
        CFRelease(scheme);
    }
    return isHttp;
}
/* ID3_Parser_Delegate */
// ID3_Parser_Delegate: forward parsed ID3 metadata to our own delegate.
void Caching_Stream::id3metaDataAvailable(std::map<CFStringRef,CFStringRef> metaData)
{
    if (!m_delegate) {
        return;
    }
    m_delegate->streamMetaDataAvailable(metaData);
}
// ID3_Parser_Delegate: forward the discovered ID3 tag size to our delegate.
void Caching_Stream::id3tagSizeAvailable(UInt32 tagSize)
{
    if (!m_delegate) {
        return;
    }
    m_delegate->streamMetaDataByteSizeAvailable(tagSize);
}
/* Input_Stream_Delegate */
// Input_Stream_Delegate: the wrapped stream is ready. Re-evaluate
// cacheability — a stream without a known, finite length is continuous
// (e.g. internet radio) and therefore cannot be cached — then notify.
void Caching_Stream::streamIsReadyRead()
{
    // Short-circuits: contentLength() is only consulted when the stream
    // was still considered a caching candidate.
    m_cacheable = m_cacheable && (m_target->contentLength() > 0);
#if CS_DEBUG
    if (m_cacheable) {
        CS_TRACE("Stream can be cached!\n");
    } else {
        CS_TRACE("Stream cannot be cached\n");
    }
#endif
    if (m_delegate) {
        m_delegate->streamIsReadyRead();
    }
}
// Input_Stream_Delegate: data arrived from the wrapped stream. When the
// stream is cacheable, tee the bytes into the cache file, then forward
// the data to our own delegate for playback.
void Caching_Stream::streamHasBytesAvailable(UInt8 *data, UInt32 numBytes)
{
    if (m_cacheable && numBytes > 0) {
        // Lazily open the cache file on the first received byte.
        if (!m_fileOutput && m_fileUrl) {
            CS_TRACE("Caching started for stream\n");
            m_fileOutput = new File_Output(m_fileUrl);
            m_writable = true;
        }
        if (m_writable && m_fileOutput) {
            // One failed write permanently disables caching for this stream,
            // so a truncated file is never promoted to a valid cache entry.
            m_writable &= (m_fileOutput->write(data, numBytes) > 0);
        }
    }
    if (m_delegate) {
        m_delegate->streamHasBytesAvailable(data, numBytes);
    }
}
// Input_Stream_Delegate: the wrapped stream finished. Close the cache file
// and, if the entire stream was written without a failed write, persist
// the meta data file (the content type). The meta data file's existence
// is what marks a cache entry as complete and playable (see open()).
void Caching_Stream::streamEndEncountered()
{
    // Close the cache data file before judging whether caching succeeded.
    if (m_fileOutput) {
        delete m_fileOutput;
        m_fileOutput = 0;
    }
    if (m_cacheable) {
        if (m_writable) {
            CS_TRACE("Successfully cached the stream\n");
            CS_TRACE_CFURL(m_fileUrl);
            // We only write the meta data if the stream was successfully streamed.
            // In that way we can use the meta data as an indicator that there is a file to stream.
            if (!m_cacheMetaDataWritten) {
                CFWriteStreamRef writeStream = CFWriteStreamCreateWithFile(kCFAllocatorDefault, m_metaDataUrl);
                if (writeStream) {
                    if (CFWriteStreamOpen(writeStream)) {
                        CFStringRef contentType = m_target->contentType();
                        UInt8 buf[1024];
                        CFIndex usedBytes = 0;
                        if (contentType) {
                            // It is possible that some streams don't provide a content type
                            CFStringGetBytes(contentType,
                                             CFRangeMake(0, CFStringGetLength(contentType)),
                                             kCFStringEncodingUTF8,
                                             '?',
                                             false,
                                             buf,
                                             1024,
                                             &usedBytes);
                        }
                        if (usedBytes > 0) {
                            CS_TRACE("Writing the meta data\n");
                            CS_TRACE_CFSTRING(contentType);
                            CFWriteStreamWrite(writeStream, buf, usedBytes);
                        }
                        CFWriteStreamClose(writeStream);
                    }
                    CFRelease(writeStream);
                }
                // Switch future playback over to the completed cache entry.
                m_cacheable = false;
                m_writable = false;
                m_useCache = true;
                m_cacheMetaDataWritten = true;
            }
        }
    }
    if (m_delegate) {
        m_delegate->streamEndEncountered();
    }
}
// Input_Stream_Delegate: forward a stream error to our own delegate.
void Caching_Stream::streamErrorOccurred(CFStringRef errorDesc)
{
    if (!m_delegate) {
        return;
    }
    m_delegate->streamErrorOccurred(errorDesc);
}
// Input_Stream_Delegate: forward stream metadata to our own delegate.
void Caching_Stream::streamMetaDataAvailable(std::map<CFStringRef,CFStringRef> metaData)
{
    if (!m_delegate) {
        return;
    }
    m_delegate->streamMetaDataAvailable(metaData);
}
// Input_Stream_Delegate: forward the metadata byte size to our delegate.
void Caching_Stream::streamMetaDataByteSizeAvailable(UInt32 sizeInBytes)
{
    if (!m_delegate) {
        return;
    }
    m_delegate->streamMetaDataByteSizeAvailable(sizeInBytes);
}
} // namespace astreamer
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#ifndef ASTREAMER_CACHING_STREAM_H
#define ASTREAMER_CACHING_STREAM_H
#include "input_stream.h"
namespace astreamer {
class File_Output;
class File_Stream;
// Transparent disk-caching decorator for another Input_Stream. On first
// playback it tees the remote stream's bytes into a cache file; on later
// playbacks of the same cache identifier it serves the local file instead.
// Acts as the delegate of both wrapped streams and forwards their events.
class Caching_Stream : public Input_Stream, public Input_Stream_Delegate {
private:
    Input_Stream *m_target;        // wrapped (remote) stream; owned
    File_Output *m_fileOutput;     // cache file writer, created lazily; owned
    File_Stream *m_fileStream;     // local playback stream for the cache file; owned
    bool m_cacheable;              // current stream may be written to the cache
    bool m_writable;               // no write to the cache file has failed yet
    bool m_useCache;               // playback is served from the local file
    bool m_cacheMetaDataWritten;   // meta data file persisted for this entry
    CFStringRef m_cacheIdentifier; // names the cache files; owned (+1)
    CFURLRef m_fileUrl;            // cache data file URL; owned (+1)
    CFURLRef m_metaDataUrl;        // cache meta data file URL; owned (+1)
private:
    // Builds a file-path CFURL from a path string; caller releases.
    CFURLRef createFileURLWithPath(CFStringRef path);
    // Loads the cached content type and applies it to m_fileStream.
    void readMetaData();
public:
    // Takes ownership of target.
    Caching_Stream(Input_Stream *target);
    virtual ~Caching_Stream();
    Input_Stream_Position position();
    CFStringRef contentType();
    size_t contentLength();
    bool open();
    bool open(const Input_Stream_Position& position);
    void close();
    void setScheduledInRunLoop(bool scheduledInRunLoop);
    void setUrl(CFURLRef url);
    // Derives the cache file URLs from the identifier.
    void setCacheIdentifier(CFStringRef cacheIdentifier);
    // True only for plain "http" URLs — the only scheme worth caching.
    static bool canHandleUrl(CFURLRef url);
    /* ID3_Parser_Delegate */
    void id3metaDataAvailable(std::map<CFStringRef,CFStringRef> metaData);
    void id3tagSizeAvailable(UInt32 tagSize);
    /* Input_Stream_Delegate (events from the wrapped streams) */
    void streamIsReadyRead();
    void streamHasBytesAvailable(UInt8 *data, UInt32 numBytes);
    void streamEndEncountered();
    void streamErrorOccurred(CFStringRef errorDesc);
    void streamMetaDataAvailable(std::map<CFStringRef,CFStringRef> metaData);
    void streamMetaDataByteSizeAvailable(UInt32 sizeInBytes);
};
} // namespace astreamer
#endif /* ASTREAMER_CACHING_STREAM_H */
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#include "file_output.h"
namespace astreamer {
// Creates and opens a write stream for the given file URL.
File_Output::File_Output(CFURLRef fileURL) :
    m_writeStream(CFWriteStreamCreateWithFile(kCFAllocatorDefault, fileURL))
{
    // Robustness: stream creation can fail (e.g. invalid URL). Guard so we
    // never pass NULL to CFWriteStreamOpen, which would crash.
    if (m_writeStream) {
        CFWriteStreamOpen(m_writeStream);
    }
}
// Closes and releases the write stream.
File_Output::~File_Output()
{
    // Robustness: m_writeStream is NULL when creation failed in the
    // constructor; CFWriteStreamClose/CFRelease crash on NULL.
    if (m_writeStream) {
        CFWriteStreamClose(m_writeStream);
        CFRelease(m_writeStream);
    }
}
// Writes bufferLength bytes to the file. Returns the number of bytes
// written, or -1 on failure (mirroring CFWriteStreamWrite's error value).
CFIndex File_Output::write(const UInt8 *buffer, CFIndex bufferLength)
{
    // Robustness: report failure instead of crashing when the stream
    // could not be created; callers already treat <= 0 as a failed write.
    if (!m_writeStream) {
        return -1;
    }
    return CFWriteStreamWrite(m_writeStream, buffer, bufferLength);
}
} // namespace astreamer
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#ifndef ASTREAMER_FILE_OUTPUT_H
#define ASTREAMER_FILE_OUTPUT_H
#import <CoreFoundation/CoreFoundation.h>
namespace astreamer {
// Thin RAII wrapper over a CFWriteStream: opens the file on construction,
// closes and releases it on destruction. Non-copyable.
class File_Output {
private:
    File_Output(const File_Output&);            // not copyable
    File_Output& operator=(const File_Output&); // not assignable
    CFWriteStreamRef m_writeStream;             // owned (+1); opened in ctor
public:
    File_Output(CFURLRef fileURL);
    ~File_Output();
    // Writes bufferLength bytes; returns bytes written (<= 0 on failure).
    CFIndex write(const UInt8 *buffer, CFIndex bufferLength);
};
} // namespace astreamer
#endif // ASTREAMER_FILE_OUTPUT_H
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#include "file_stream.h"
#include "stream_configuration.h"
namespace astreamer {
// Local-file input stream. Owns an ID3 parser so that tags embedded in
// local files are reported through the same delegate path as for network
// streams.
File_Stream::File_Stream() :
    m_url(0),
    m_readStream(0),
    m_scheduledInRunLoop(false),
    m_readPending(false),
    m_fileReadBuffer(0),
    m_id3Parser(new ID3_Parser()),
    m_contentType(0)
{
    m_id3Parser->m_delegate = this;
}
// Closes the stream and releases the read buffer, URL, parser and any
// explicitly set content type.
File_Stream::~File_Stream()
{
    close();
    if (m_fileReadBuffer) {
        delete [] m_fileReadBuffer;
        m_fileReadBuffer = 0;
    }
    if (m_url) {
        CFRelease(m_url);
        m_url = 0;
    }
    delete m_id3Parser;
    m_id3Parser = 0;
    if (m_contentType) {
        CFRelease(m_contentType);
    }
}
// Returns the position the stream was opened at (set in open()).
Input_Stream_Position File_Stream::position()
{
    return m_position;
}
// Returns the stream's content type. An explicitly set type (from cached
// meta data) wins; otherwise the type is guessed from the file name
// extension. Returns an empty string when nothing can be determined.
CFStringRef File_Stream::contentType()
{
    if (m_contentType) {
        // Use the provided content type
        return m_contentType;
    }
    // Try to resolve the content type from the file
    CFStringRef contentType = CFSTR("");
    if (!m_url) {
        return contentType;
    }
    CFStringRef pathComponent = CFURLCopyLastPathComponent(m_url);
    if (!pathComponent) {
        return contentType;
    }
    CFIndex len = CFStringGetLength(pathComponent);
    // Bug fix: the original required len > 5, which rejected minimal valid
    // names such as "a.mp3" (length 5). A name needs at least one character
    // plus a four-character suffix, i.e. len >= 5.
    if (len >= 5) {
        CFRange range;
        range.length = 4;
        range.location = len - 4;
        CFStringRef suffix = CFStringCreateWithSubstring(kCFAllocatorDefault,
                                                         pathComponent,
                                                         range);
        if (suffix) {
            // TODO: we should do the content-type resolvation in a better way.
            // Compare case-insensitively so ".MP3" etc. are also recognized.
            if (CFStringCompare(suffix, CFSTR(".mp3"), kCFCompareCaseInsensitive) == kCFCompareEqualTo) {
                contentType = CFSTR("audio/mpeg");
            } else if (CFStringCompare(suffix, CFSTR(".m4a"), kCFCompareCaseInsensitive) == kCFCompareEqualTo) {
                contentType = CFSTR("audio/x-m4a");
            } else if (CFStringCompare(suffix, CFSTR(".mp4"), kCFCompareCaseInsensitive) == kCFCompareEqualTo) {
                contentType = CFSTR("audio/mp4");
            } else if (CFStringCompare(suffix, CFSTR(".aac"), kCFCompareCaseInsensitive) == kCFCompareEqualTo) {
                contentType = CFSTR("audio/aac");
            }
            CFRelease(suffix);
        }
    }
    CFRelease(pathComponent);
    return contentType;
}
// Overrides the content type guessed from the file name. Passing NULL
// clears the override. The string is copied, not retained.
void File_Stream::setContentType(CFStringRef contentType)
{
    if (m_contentType) {
        CFRelease(m_contentType);
    }
    m_contentType = contentType
        ? CFStringCreateCopy(kCFAllocatorDefault, contentType)
        : 0;
}
// Returns the file size in bytes, or 0 if it cannot be determined.
size_t File_Stream::contentLength()
{
    size_t result = 0;
    CFNumberRef length = NULL;
    CFErrorRef err = NULL;
    if (CFURLCopyResourcePropertyForKey(m_url, kCFURLFileSizeKey, &length, &err)) {
        CFIndex fileLength;
        if (length && CFNumberGetValue(length, kCFNumberCFIndexType, &fileLength)) {
            result = fileLength;
        }
    }
    // Bug fix: the original leaked `length` when CFNumberGetValue failed
    // and leaked `err` whenever the property copy failed; both are +1
    // references we own per the CF Create/Copy rule.
    if (length) {
        CFRelease(length);
    }
    if (err) {
        CFRelease(err);
    }
    return result;
}
bool File_Stream::open()
{
Input_Stream_Position position;
position.start = 0;
position.end = 0;
m_id3Parser->reset();
return open(position);
}
// Opens the file for reading starting at position.start. Creates the CF
// read stream, seeks via the file-offset property, installs the event
// callback and schedules the stream on the current run loop. Returns true
// on success (and notifies the delegate); false otherwise.
bool File_Stream::open(const Input_Stream_Position& position)
{
    bool success = false;
    // Context passes `this` so the static readCallBack can recover the instance.
    CFStreamClientContext CTX = { 0, this, NULL, NULL, NULL };
    /* Already opened a read stream, return */
    if (m_readStream) {
        goto out;
    }
    if (!m_url) {
        goto out;
    }
    /* Reset state */
    m_position = position;
    m_readPending = false;
    /* Failed to create a stream */
    if (!(m_readStream = CFReadStreamCreateWithFile(kCFAllocatorDefault, m_url))) {
        goto out;
    }
    // Seek: set the initial file offset before opening.
    if (m_position.start > 0) {
        CFNumberRef position = CFNumberCreate(0, kCFNumberLongLongType, &m_position.start);
        CFReadStreamSetProperty(m_readStream, kCFStreamPropertyFileCurrentOffset, position);
        CFRelease(position);
    }
    if (!CFReadStreamSetClient(m_readStream, kCFStreamEventHasBytesAvailable |
                               kCFStreamEventEndEncountered |
                               kCFStreamEventErrorOccurred, readCallBack, &CTX)) {
        CFRelease(m_readStream);
        m_readStream = 0;
        goto out;
    }
    setScheduledInRunLoop(true);
    if (!CFReadStreamOpen(m_readStream)) {
        /* Open failed: clean */
        CFReadStreamSetClient(m_readStream, 0, NULL, NULL);
        setScheduledInRunLoop(false);
        if (m_readStream) {
            CFRelease(m_readStream);
            m_readStream = 0;
        }
        goto out;
    }
    success = true;
out:
    // Local files are ready immediately — unlike HTTP, no headers to wait for.
    if (success) {
        if (m_delegate) {
            m_delegate->streamIsReadyRead();
        }
    }
    return success;
}
// Closes the read stream: detaches the client callback, unschedules from
// the run loop, then closes and releases the stream. Safe to call when
// the stream is not open.
void File_Stream::close()
{
    /* The stream has been already closed */
    if (!m_readStream) {
        return;
    }
    CFReadStreamSetClient(m_readStream, 0, NULL, NULL);
    setScheduledInRunLoop(false);
    CFReadStreamClose(m_readStream);
    CFRelease(m_readStream);
    m_readStream = 0;
}
// Back-pressure control: the audio queue unschedules the stream when its
// buffers are full and reschedules it when it wants more data. When
// rescheduling, a read that was deferred while unscheduled (m_readPending)
// is replayed first so no available data is lost.
void File_Stream::setScheduledInRunLoop(bool scheduledInRunLoop)
{
    /* The stream has not been opened, or it has been already closed */
    if (!m_readStream) {
        return;
    }
    /* The state doesn't change */
    if (m_scheduledInRunLoop == scheduledInRunLoop) {
        return;
    }
    if (m_scheduledInRunLoop) {
        CFReadStreamUnscheduleFromRunLoop(m_readStream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes);
    } else {
        // Replay the deferred read before scheduling resumes.
        if (m_readPending) {
            m_readPending = false;
            readCallBack(m_readStream, kCFStreamEventHasBytesAvailable, this);
        }
        CFReadStreamScheduleWithRunLoop(m_readStream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes);
    }
    m_scheduledInRunLoop = scheduledInRunLoop;
}
// Replaces the stream's URL. NULL clears it.
void File_Stream::setUrl(CFURLRef url)
{
    // Bug fix: retain the incoming URL *before* releasing the old one.
    // The original released first; passing the currently held URL object
    // with a retain count of 1 would deallocate it and then retain a
    // freed object.
    CFURLRef newUrl = url ? (CFURLRef)CFRetain(url) : NULL;
    if (m_url) {
        CFRelease(m_url);
    }
    m_url = newUrl;
}
// File_Stream claims exactly one scheme: local "file" URLs.
bool File_Stream::canHandleUrl(CFURLRef url)
{
    if (!url) {
        return false;
    }
    bool isLocalFile = false;
    CFStringRef scheme = CFURLCopyScheme(url);
    if (scheme) {
        isLocalFile = (CFStringCompare(scheme, CFSTR("file"), 0) == kCFCompareEqualTo);
        CFRelease(scheme);
    }
    return isLocalFile;
}
/* ID3_Parser_Delegate */
// ID3_Parser_Delegate: forward parsed ID3 metadata to the stream delegate.
void File_Stream::id3metaDataAvailable(std::map<CFStringRef,CFStringRef> metaData)
{
    if (!m_delegate) {
        return;
    }
    m_delegate->streamMetaDataAvailable(metaData);
}
// ID3_Parser_Delegate: forward the ID3 tag size to the stream delegate.
void File_Stream::id3tagSizeAvailable(UInt32 tagSize)
{
    if (!m_delegate) {
        return;
    }
    m_delegate->streamMetaDataByteSizeAvailable(tagSize);
}
// CFReadStream event callback. clientCallBackInfo carries the File_Stream
// instance (set via the client context in open()). Drains available bytes
// into the shared read buffer and forwards them to the delegate and the
// ID3 parser; reports end-of-stream and errors to the delegate.
void File_Stream::readCallBack(CFReadStreamRef stream, CFStreamEventType eventType, void *clientCallBackInfo)
{
    File_Stream *THIS = static_cast<File_Stream*>(clientCallBackInfo);
    Stream_Configuration *config = Stream_Configuration::configuration();
    switch (eventType) {
        case kCFStreamEventHasBytesAvailable: {
            // Allocate the read buffer lazily on the first data event.
            if (!THIS->m_fileReadBuffer) {
                THIS->m_fileReadBuffer = new UInt8[config->httpConnectionBufferSize];
            }
            while (CFReadStreamHasBytesAvailable(stream)) {
                if (!THIS->m_scheduledInRunLoop) {
                    /*
                     * This is critical - though the stream has data available,
                     * do not try to feed the audio queue with data, if it has
                     * indicated that it doesn't want more data due to buffers
                     * full.
                     */
                    THIS->m_readPending = true;
                    break;
                }
                CFIndex bytesRead = CFReadStreamRead(stream, THIS->m_fileReadBuffer, config->httpConnectionBufferSize);
                // A negative read or an errored stream: report and stop draining.
                if (CFReadStreamGetStatus(stream) == kCFStreamStatusError ||
                    bytesRead < 0) {
                    if (THIS->m_delegate) {
                        // Copy the error description out of the CFError so we
                        // own what we hand to the delegate.
                        CFStringRef reportedNetworkError = NULL;
                        CFErrorRef streamError = CFReadStreamCopyError(stream);
                        if (streamError) {
                            CFStringRef errorDesc = CFErrorCopyDescription(streamError);
                            if (errorDesc) {
                                reportedNetworkError = CFStringCreateCopy(kCFAllocatorDefault, errorDesc);
                                CFRelease(errorDesc);
                            }
                            CFRelease(streamError);
                        }
                        THIS->m_delegate->streamErrorOccurred(reportedNetworkError);
                        if (reportedNetworkError) {
                            CFRelease(reportedNetworkError);
                        }
                    }
                    break;
                }
                if (bytesRead > 0) {
                    // Feed the delegate first, then the ID3 parser (which only
                    // consumes data while it still wants tag bytes).
                    if (THIS->m_delegate) {
                        THIS->m_delegate->streamHasBytesAvailable(THIS->m_fileReadBuffer, (UInt32)bytesRead);
                    }
                    if (THIS->m_id3Parser->wantData()) {
                        THIS->m_id3Parser->feedData(THIS->m_fileReadBuffer, (UInt32)bytesRead);
                    }
                }
            }
            break;
        }
        case kCFStreamEventEndEncountered: {
            if (THIS->m_delegate) {
                THIS->m_delegate->streamEndEncountered();
            }
            break;
        }
        case kCFStreamEventErrorOccurred: {
            if (THIS->m_delegate) {
                CFStringRef reportedNetworkError = NULL;
                CFErrorRef streamError = CFReadStreamCopyError(stream);
                if (streamError) {
                    CFStringRef errorDesc = CFErrorCopyDescription(streamError);
                    if (errorDesc) {
                        reportedNetworkError = CFStringCreateCopy(kCFAllocatorDefault, errorDesc);
                        CFRelease(errorDesc);
                    }
                    CFRelease(streamError);
                }
                THIS->m_delegate->streamErrorOccurred(reportedNetworkError);
                if (reportedNetworkError) {
                    CFRelease(reportedNetworkError);
                }
            }
            break;
        }
    }
}
} // namespace astreamer
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#ifndef ASTREAMER_FILE_STREAM_H
#define ASTREAMER_FILE_STREAM_H
#import "input_stream.h"
#import "id3_parser.h"
namespace astreamer {
// Input_Stream implementation that reads a local file via CFReadStream,
// feeding the bytes through the same delegate interface as the network
// streams, including ID3 tag parsing. Non-copyable.
class File_Stream : public Input_Stream {
private:
    File_Stream(const File_Stream&);            // not copyable
    File_Stream& operator=(const File_Stream&); // not assignable
    CFURLRef m_url;                  // file URL; owned (+1)
    CFReadStreamRef m_readStream;    // open read stream; owned (+1), NULL when closed
    bool m_scheduledInRunLoop;       // currently scheduled on the run loop
    bool m_readPending;              // a data event was deferred while unscheduled
    Input_Stream_Position m_position; // position the stream was opened at
    UInt8 *m_fileReadBuffer;         // lazily allocated read buffer; owned
    ID3_Parser *m_id3Parser;         // parses ID3 tags from the file; owned
    CFStringRef m_contentType;       // explicit content-type override; owned (+1)
    // CFReadStream event callback; clientCallBackInfo is the instance.
    static void readCallBack(CFReadStreamRef stream, CFStreamEventType eventType, void *clientCallBackInfo);
public:
    File_Stream();
    virtual ~File_Stream();
    Input_Stream_Position position();
    // Explicit override, or a guess from the file extension.
    CFStringRef contentType();
    void setContentType(CFStringRef contentType);
    size_t contentLength();
    bool open();
    bool open(const Input_Stream_Position& position);
    void close();
    // Back-pressure: unschedule to pause reads, reschedule to resume.
    void setScheduledInRunLoop(bool scheduledInRunLoop);
    void setUrl(CFURLRef url);
    // True only for "file" URLs.
    static bool canHandleUrl(CFURLRef url);
    /* ID3_Parser_Delegate */
    void id3metaDataAvailable(std::map<CFStringRef,CFStringRef> metaData);
    void id3tagSizeAvailable(UInt32 tagSize);
};
} // namespace astreamer
#endif // ASTREAMER_FILE_STREAM_H
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#include "http_stream.h"
#include "audio_queue.h"
#include "id3_parser.h"
#include "stream_configuration.h"
//#define HS_DEBUG 1
#if !defined (HS_DEBUG)
#define HS_TRACE(...) do {} while (0)
#define HS_TRACE_CFSTRING(X) do {} while (0)
#else
#define HS_TRACE(...) printf(__VA_ARGS__)
#define HS_TRACE_CFSTRING(X) HS_TRACE("%s\n", CFStringGetCStringPtr(X, kCFStringEncodingMacRoman))
#endif
/*
* Comment the following line to disable ID3 tag support:
*/
#define INCLUDE_ID3TAG_SUPPORT 1
namespace astreamer {
// Constant header names/values used when building the HTTP request.
CFStringRef HTTP_Stream::httpRequestMethod = CFSTR("GET");
CFStringRef HTTP_Stream::httpUserAgentHeader = CFSTR("User-Agent");
CFStringRef HTTP_Stream::httpRangeHeader = CFSTR("Range");
CFStringRef HTTP_Stream::icyMetaDataHeader = CFSTR("Icy-MetaData");
CFStringRef HTTP_Stream::icyMetaDataValue = CFSTR("1"); /* always request ICY metadata, if available */
/* HTTP_Stream: public */
// HTTP/ShoutCast input stream. All state starts cleared; the ICY fields
// track ShoutCast in-band metadata parsing, and the ID3 parser handles
// tags embedded in the audio payload.
HTTP_Stream::HTTP_Stream() :
    m_readStream(0),
    m_scheduledInRunLoop(false),
    m_readPending(false),
    m_url(0),
    m_httpHeadersParsed(false),
    m_contentType(0),
    m_contentLength(0),
    m_bytesRead(0),
    m_icyStream(false),
    m_icyHeaderCR(false),
    m_icyHeadersRead(false),
    m_icyHeadersParsed(false),
    m_icyName(0),
    m_icyMetaDataInterval(0),
    m_dataByteReadCount(0),
    m_metaDataBytesRemaining(0),
    m_httpReadBuffer(0),
    m_icyReadBuffer(0),
    m_id3Parser(new ID3_Parser())
{
    m_id3Parser->m_delegate = this;
}
// Closes the connection and releases all owned CF objects and buffers.
HTTP_Stream::~HTTP_Stream()
{
    close();
    // Each stored ICY header line is an owned (+1) CFString.
    for (std::vector<CFStringRef>::iterator h = m_icyHeaderLines.begin(); h != m_icyHeaderLines.end(); ++h) {
        CFRelease(*h);
    }
    m_icyHeaderLines.clear();
    if (m_contentType) {
        CFRelease(m_contentType);
        m_contentType = 0;
    }
    if (m_icyName) {
        CFRelease(m_icyName);
        m_icyName = 0;
    }
    if (m_httpReadBuffer) {
        delete [] m_httpReadBuffer;
        m_httpReadBuffer = 0;
    }
    if (m_icyReadBuffer) {
        delete [] m_icyReadBuffer;
        m_icyReadBuffer = 0;
    }
    if (m_url) {
        CFRelease(m_url);
        m_url = 0;
    }
    delete m_id3Parser;
    m_id3Parser = 0;
}
// Returns the byte range the stream was opened with (set in open()).
Input_Stream_Position HTTP_Stream::position()
{
    return m_position;
}
// Content type from the HTTP response headers; NULL until headers parsed.
CFStringRef HTTP_Stream::contentType()
{
    return m_contentType;
}
// Content-Length from the response headers; 0 for continuous streams.
size_t HTTP_Stream::contentLength()
{
    return m_contentLength;
}
bool HTTP_Stream::open()
{
Input_Stream_Position position;
position.start = 0;
position.end = 0;
m_contentLength = 0;
#ifdef INCLUDE_ID3TAG_SUPPORT
m_id3Parser->reset();
#endif
return open(position);
}
// Opens the HTTP connection for the given byte range: resets all parsing
// state, creates the request read stream, installs the event callback and
// schedules it on the current run loop. Returns true on success; unlike
// File_Stream, readiness is signalled later when the headers arrive.
bool HTTP_Stream::open(const Input_Stream_Position& position)
{
    bool success = false;
    // Context passes `this` so the static readCallBack can recover the instance.
    CFStreamClientContext CTX = { 0, this, NULL, NULL, NULL };
    /* Already opened a read stream, return */
    if (m_readStream) {
        goto out;
    }
    /* Reset state */
    m_position = position;
    m_readPending = false;
    m_httpHeadersParsed = false;
    if (m_contentType) {
        CFRelease(m_contentType);
        m_contentType = NULL;
    }
    // Clear all ShoutCast/ICY parsing state from any previous connection.
    m_icyStream = false;
    m_icyHeaderCR = false;
    m_icyHeadersRead = false;
    m_icyHeadersParsed = false;
    if (m_icyName) {
        CFRelease(m_icyName);
        m_icyName = 0;
    }
    for (std::vector<CFStringRef>::iterator h = m_icyHeaderLines.begin(); h != m_icyHeaderLines.end(); ++h) {
        CFRelease(*h);
    }
    m_icyHeaderLines.clear();
    m_icyMetaDataInterval = 0;
    m_dataByteReadCount = 0;
    m_metaDataBytesRemaining = 0;
    m_bytesRead = 0;
    if (!m_url) {
        goto out;
    }
    /* Failed to create a stream */
    if (!(m_readStream = createReadStream(m_url))) {
        goto out;
    }
    if (!CFReadStreamSetClient(m_readStream, kCFStreamEventHasBytesAvailable |
                               kCFStreamEventEndEncountered |
                               kCFStreamEventErrorOccurred, readCallBack, &CTX)) {
        CFRelease(m_readStream);
        m_readStream = 0;
        goto out;
    }
    setScheduledInRunLoop(true);
    if (!CFReadStreamOpen(m_readStream)) {
        /* Open failed: clean */
        CFReadStreamSetClient(m_readStream, 0, NULL, NULL);
        setScheduledInRunLoop(false);
        if (m_readStream) {
            CFRelease(m_readStream);
            m_readStream = 0;
        }
        goto out;
    }
    success = true;
out:
    return success;
}
// Closes the connection: detaches the client callback, unschedules from
// the run loop, then closes and releases the stream. Safe to call when
// the stream is not open.
void HTTP_Stream::close()
{
    /* The stream has been already closed */
    if (!m_readStream) {
        return;
    }
    CFReadStreamSetClient(m_readStream, 0, NULL, NULL);
    setScheduledInRunLoop(false);
    CFReadStreamClose(m_readStream);
    CFRelease(m_readStream);
    m_readStream = 0;
}
// Back-pressure control: the audio queue unschedules the stream when its
// buffers are full and reschedules it when it wants more data. When
// rescheduling, a data event deferred while unscheduled (m_readPending)
// is replayed first so no received bytes are lost.
void HTTP_Stream::setScheduledInRunLoop(bool scheduledInRunLoop)
{
    /* The stream has not been opened, or it has been already closed */
    if (!m_readStream) {
        return;
    }
    /* The state doesn't change */
    if (m_scheduledInRunLoop == scheduledInRunLoop) {
        return;
    }
    if (m_scheduledInRunLoop) {
        CFReadStreamUnscheduleFromRunLoop(m_readStream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes);
    } else {
        // Replay the deferred read before scheduling resumes.
        if (m_readPending) {
            m_readPending = false;
            readCallBack(m_readStream, kCFStreamEventHasBytesAvailable, this);
        }
        CFReadStreamScheduleWithRunLoop(m_readStream, CFRunLoopGetCurrent(), kCFRunLoopCommonModes);
    }
    m_scheduledInRunLoop = scheduledInRunLoop;
}
// Replaces the stream's URL. NULL clears it.
void HTTP_Stream::setUrl(CFURLRef url)
{
    // Bug fix: retain the incoming URL *before* releasing the old one.
    // The original released first; passing the currently held URL object
    // with a retain count of 1 would deallocate it and then retain a
    // freed object.
    CFURLRef newUrl = url ? (CFURLRef)CFRetain(url) : NULL;
    if (m_url) {
        CFRelease(m_url);
    }
    m_url = newUrl;
}
// HTTP_Stream claims every URL except local "file" URLs, which are
// File_Stream's responsibility.
bool HTTP_Stream::canHandleUrl(CFURLRef url)
{
    if (!url) {
        return false;
    }
    bool isLocalFile = false;
    CFStringRef scheme = CFURLCopyScheme(url);
    if (scheme) {
        isLocalFile = (CFStringCompare(scheme, CFSTR("file"), 0) == kCFCompareEqualTo);
        CFRelease(scheme);
    }
    return !isLocalFile;
}
// ID3_Parser_Delegate: forward parsed ID3 metadata to the stream delegate.
void HTTP_Stream::id3metaDataAvailable(std::map<CFStringRef,CFStringRef> metaData)
{
    if (!m_delegate) {
        return;
    }
    m_delegate->streamMetaDataAvailable(metaData);
}
// ID3_Parser_Delegate: forward the ID3 tag size to the stream delegate.
void HTTP_Stream::id3tagSizeAvailable(UInt32 tagSize)
{
    if (!m_delegate) {
        return;
    }
    m_delegate->streamMetaDataByteSizeAvailable(tagSize);
}
/* private */
// Builds the CFReadStream for an HTTP GET of `url`: sets the configured
// User-Agent, requests ICY metadata, adds a Range header when resuming or
// seeking, applies any configured extra headers, enables background
// networking and auto-redirects, and applies system proxy settings.
// Returns a +1 retained stream the caller owns, or NULL on failure.
CFReadStreamRef HTTP_Stream::createReadStream(CFURLRef url)
{
    CFReadStreamRef readStream = 0;
    CFHTTPMessageRef request = 0;
    CFDictionaryRef proxySettings = 0;
    Stream_Configuration *config = Stream_Configuration::configuration();
    if (!(request = CFHTTPMessageCreateRequest(kCFAllocatorDefault, httpRequestMethod, url, kCFHTTPVersion1_1))) {
        goto out;
    }
    if (config->userAgent) {
        CFHTTPMessageSetHeaderFieldValue(request, httpUserAgentHeader, config->userAgent);
    }
    // Always ask for ShoutCast in-band metadata; servers that don't support
    // it simply ignore the header.
    CFHTTPMessageSetHeaderFieldValue(request, icyMetaDataHeader, icyMetaDataValue);
    // Bounded range: "bytes=start-end"; open-ended (end < start): "bytes=start-".
    if (m_position.start > 0 && m_position.end > m_position.start) {
        CFStringRef rangeHeaderValue = CFStringCreateWithFormat(NULL,
                                                                NULL,
                                                                CFSTR("bytes=%llu-%llu"),
                                                                m_position.start,
                                                                m_position.end);
        CFHTTPMessageSetHeaderFieldValue(request, httpRangeHeader, rangeHeaderValue);
        CFRelease(rangeHeaderValue);
    } else if (m_position.start > 0 && m_position.end < m_position.start) {
        CFStringRef rangeHeaderValue = CFStringCreateWithFormat(NULL,
                                                                NULL,
                                                                CFSTR("bytes=%llu-"),
                                                                m_position.start);
        CFHTTPMessageSetHeaderFieldValue(request, httpRangeHeader, rangeHeaderValue);
        CFRelease(rangeHeaderValue);
    }
    // Copy any application-configured headers into the request; only
    // string keys with string values are applied.
    if (config->predefinedHttpHeaderValues) {
        const CFIndex numKeys = CFDictionaryGetCount(config->predefinedHttpHeaderValues);
        if (numKeys > 0) {
            CFTypeRef *keys = (CFTypeRef *) malloc(numKeys * sizeof(CFTypeRef));
            if (keys) {
                CFDictionaryGetKeysAndValues(config->predefinedHttpHeaderValues, (const void **) keys, NULL);
                for (CFIndex i=0; i < numKeys; i++) {
                    CFTypeRef key = keys[i];
                    if (CFGetTypeID(key) == CFStringGetTypeID()) {
                        const void *value = CFDictionaryGetValue(config->predefinedHttpHeaderValues, (const void *) key);
                        if (value) {
                            CFStringRef headerKey = (CFStringRef) key;
                            CFTypeRef valueRef = (CFTypeRef) value;
                            if (CFGetTypeID(valueRef) == CFStringGetTypeID()) {
                                CFStringRef headerValue = (CFStringRef) valueRef;
                                HS_TRACE("Setting predefined HTTP header ");
                                HS_TRACE_CFSTRING(headerKey);
                                HS_TRACE_CFSTRING(headerValue);
                                CFHTTPMessageSetHeaderFieldValue(request, headerKey, headerValue);
                            }
                        }
                    }
                }
                free(keys);
            }
        }
    }
    if (!(readStream = CFReadStreamCreateForHTTPRequest(kCFAllocatorDefault, request))) {
        goto out;
    }
    // Keep the stream alive while the app is in the background (audio).
    CFReadStreamSetProperty(readStream,
                            kCFStreamNetworkServiceType,
                            kCFStreamNetworkServiceTypeBackground);
    CFReadStreamSetProperty(readStream,
                            kCFStreamPropertyHTTPShouldAutoredirect,
                            kCFBooleanTrue);
    proxySettings = CFNetworkCopySystemProxySettings();
    if (proxySettings) {
        CFReadStreamSetProperty(readStream, kCFStreamPropertyHTTPProxy, proxySettings);
        CFRelease(proxySettings);
    }
out:
    // The request message is no longer needed once the stream exists.
    if (request) {
        CFRelease(request);
    }
    return readStream;
}
/*
 * Inspects the first chunk of response data and parses the response
 * headers, exactly once per connection (guarded by m_httpHeadersParsed).
 *
 * Two cases are handled:
 *  - A raw ShoutCast ("ICY 200 OK") response: no HTTP headers exist,
 *    so the method only flags the stream as ICY and returns; header
 *    parsing then happens in parseICYStream().
 *  - A regular HTTP response: headers are read from the CFReadStream,
 *    including the icy-metaint header some IceCast servers send over
 *    plain HTTP.
 *
 * On a 200/206 status the delegate is told the stream is ready;
 * any other status is reported as a stream error.
 */
void HTTP_Stream::parseHttpHeadersIfNeeded(const UInt8 *buf, const CFIndex bufSize)
{
    if (m_httpHeadersParsed) {
        return;
    }
    m_httpHeadersParsed = true;
    /* If the response has the "ICY 200 OK" string,
     * we are dealing with the ShoutCast protocol.
     * The HTTP headers won't be available.
     */
    // Byte-wise comparison against the ASCII string "ICY 200 OK".
    if (bufSize >= 10 &&
        buf[0] == 0x49 && buf[1] == 0x43 && buf[2] == 0x59 &&
        buf[3] == 0x20 && buf[4] == 0x32 && buf[5] == 0x30 &&
        buf[6] == 0x30 && buf[7] == 0x20 && buf[8] == 0x4F &&
        buf[9] == 0x4B) {
        m_icyStream = true;
        HS_TRACE("Detected an IceCast stream\n");
        // This is an ICY stream, don't try to parse the HTTP headers
        return;
    }
    HS_TRACE("A regular HTTP stream\n");
    // Copy rule: the response message is owned here and released below.
    CFHTTPMessageRef response = (CFHTTPMessageRef)CFReadStreamCopyProperty(m_readStream, kCFStreamPropertyHTTPResponseHeader);
    CFIndex statusCode = 0;
    if (response) {
        /*
         * If the server responded with the icy-metaint header, the response
         * body will be encoded in the ShoutCast protocol.
         */
        CFStringRef icyMetaIntString = CFHTTPMessageCopyHeaderFieldValue(response, CFSTR("icy-metaint"));
        if (icyMetaIntString) {
            // ICY-over-HTTP: headers are complete, only the in-band
            // metadata framing remains to be decoded per chunk.
            m_icyStream = true;
            m_icyHeadersParsed = true;
            m_icyHeadersRead = true;
            m_icyMetaDataInterval = CFStringGetIntValue(icyMetaIntString);
            CFRelease(icyMetaIntString);
        }
        HS_TRACE("icy-metaint: %zu\n", m_icyMetaDataInterval);
        statusCode = CFHTTPMessageGetResponseStatusCode(response);
        HS_TRACE("HTTP response code %zu", statusCode);
        CFStringRef icyNameString = CFHTTPMessageCopyHeaderFieldValue(response, CFSTR("icy-name"));
        if (icyNameString) {
            // Replace any previously stored station name; ownership of the
            // copied header value transfers to m_icyName.
            if (m_icyName) {
                CFRelease(m_icyName);
            }
            m_icyName = icyNameString;
            if (m_delegate) {
                // NOTE(review): the map holds a fresh copy; the delegate is
                // presumably expected to release it — confirm against the
                // delegate implementation.
                std::map<CFStringRef,CFStringRef> metadataMap;
                metadataMap[CFSTR("IcecastStationName")] = CFStringCreateCopy(kCFAllocatorDefault, m_icyName);
                m_delegate->streamMetaDataAvailable(metadataMap);
            }
        }
        if (m_contentType) {
            CFRelease(m_contentType);
        }
        m_contentType = CFHTTPMessageCopyHeaderFieldValue(response, CFSTR("Content-Type"));
        HS_TRACE("Content-type: ");
        HS_TRACE_CFSTRING(m_contentType);
        CFStringRef contentLengthString = CFHTTPMessageCopyHeaderFieldValue(response, CFSTR("Content-Length"));
        if (contentLengthString) {
            m_contentLength = CFStringGetIntValue(contentLengthString);
            CFRelease(contentLengthString);
        }
        CFRelease(response);
    }
    // 200 (OK) and 206 (Partial Content, used when seeking with Range
    // requests) are the only statuses treated as success.
    if (m_delegate &&
        (statusCode == 200 || statusCode == 206)) {
        m_delegate->streamIsReadyRead();
    } else {
        if (m_delegate) {
            CFStringRef statusCodeString = CFStringCreateWithFormat(NULL,
                                                                    NULL,
                                                                    CFSTR("HTTP response code %d"),
                                                                    (unsigned int)statusCode);
            m_delegate->streamErrorOccurred(statusCodeString);
            if (statusCodeString) {
                CFRelease(statusCodeString);
            }
        }
    }
}
/*
 * Consumes one chunk of a ShoutCast/IceCast (ICY) stream.
 *
 * ICY framing, as handled here:
 *  1. Optional in-band response headers, one per CRLF-terminated line,
 *     ending at an empty line (read incrementally across chunks).
 *  2. Header lines are parsed once for content-type, icy-metaint and
 *     icy-name.
 *  3. Audio data, with a metadata block injected after every
 *     m_icyMetaDataInterval data bytes. The first byte of a metadata
 *     block is its length divided by 16; the block contains
 *     key='value' pairs separated by ';'.
 *
 * Audio bytes are copied into m_icyReadBuffer and forwarded to the
 * delegate; complete metadata blocks are forwarded as a string map.
 */
void HTTP_Stream::parseICYStream(const UInt8 *buf, const CFIndex bufSize)
{
    HS_TRACE("Parsing an IceCast stream, received %li bytes\n", bufSize);
    CFIndex offset = 0;
    CFIndex bytesFound = 0;
    if (!m_icyHeadersRead) {
        // Phase 1: collect raw header lines until the terminating empty line.
        HS_TRACE("ICY headers not read, reading\n");
        for (; offset < bufSize; offset++) {
            // CR state carries across bytes; a CR followed by LF ends a line.
            if (m_icyHeaderCR && buf[offset] == '\n') {
                if (bytesFound > 0) {
                    // NOTE(review): the line start is taken as
                    // offset - bytesFound - 1, and bytesFound does not carry
                    // across chunk boundaries — verify behavior when a header
                    // line straddles two read callbacks.
                    m_icyHeaderLines.push_back(createMetaDataStringWithMostReasonableEncoding(&buf[offset-bytesFound-1], bytesFound));
                    bytesFound = 0;
                    HS_TRACE_CFSTRING(m_icyHeaderLines[m_icyHeaderLines.size()-1]);
                    continue;
                }
                // CRLF with no bytes in between: the empty line that ends
                // the header section.
                HS_TRACE("End of ICY headers\n");
                m_icyHeadersRead = true;
                break;
            }
            if (buf[offset] == '\r') {
                m_icyHeaderCR = true;
                continue;
            } else {
                m_icyHeaderCR = false;
            }
            bytesFound++;
        }
    } else if (!m_icyHeadersParsed) {
        // Phase 2: extract the known headers from the collected lines.
        HS_TRACE("ICY headers not parsed, parsing\n");
        const CFStringRef icyContentTypeHeader = CFSTR("content-type:");
        const CFStringRef icyMetaDataHeader = CFSTR("icy-metaint:");
        const CFStringRef icyNameHeader = CFSTR("icy-name:");
        const CFIndex icyContenTypeHeaderLength = CFStringGetLength(icyContentTypeHeader);
        const CFIndex icyMetaDataHeaderLength = CFStringGetLength(icyMetaDataHeader);
        const CFIndex icyNameHeaderLength = CFStringGetLength(icyNameHeader);
        for (std::vector<CFStringRef>::iterator h = m_icyHeaderLines.begin(); h != m_icyHeaderLines.end(); ++h) {
            CFStringRef line = *h;
            const CFIndex lineLength = CFStringGetLength(line);
            if (lineLength == 0) {
                continue;
            }
            HS_TRACE_CFSTRING(line);
            // Each comparison checks only the header-name prefix of the line;
            // the value is the substring after the prefix.
            if (CFStringCompareWithOptions(line,
                                           icyContentTypeHeader,
                                           CFRangeMake(0, icyContenTypeHeaderLength),
                                           0) == kCFCompareEqualTo) {
                if (m_contentType) {
                    CFRelease(m_contentType);
                    m_contentType = 0;
                }
                m_contentType = CFStringCreateWithSubstring(kCFAllocatorDefault,
                                                            line,
                                                            CFRangeMake(icyContenTypeHeaderLength, lineLength - icyContenTypeHeaderLength));
            }
            if (CFStringCompareWithOptions(line,
                                           icyMetaDataHeader,
                                           CFRangeMake(0, icyMetaDataHeaderLength),
                                           0) == kCFCompareEqualTo) {
                CFStringRef metadataInterval = CFStringCreateWithSubstring(kCFAllocatorDefault,
                                                                           line,
                                                                           CFRangeMake(icyMetaDataHeaderLength, lineLength - icyMetaDataHeaderLength));
                if (metadataInterval) {
                    m_icyMetaDataInterval = CFStringGetIntValue(metadataInterval);
                    CFRelease(metadataInterval);
                } else {
                    m_icyMetaDataInterval = 0;
                }
            }
            if (CFStringCompareWithOptions(line,
                                           icyNameHeader,
                                           CFRangeMake(0, icyNameHeaderLength),
                                           0) == kCFCompareEqualTo) {
                if (m_icyName) {
                    CFRelease(m_icyName);
                }
                m_icyName = CFStringCreateWithSubstring(kCFAllocatorDefault,
                                                        line,
                                                        CFRangeMake(icyNameHeaderLength, lineLength - icyNameHeaderLength));
            }
        }
        m_icyHeadersParsed = true;
        // Skip the LF that terminated the header section before reading data.
        offset++;
        if (m_delegate) {
            m_delegate->streamIsReadyRead();
        }
    }
    Stream_Configuration *config = Stream_Configuration::configuration();
    if (!m_icyReadBuffer) {
        // Lazily allocated; sized to the HTTP read buffer, so it can hold
        // any chunk handed to this method by the read callback.
        m_icyReadBuffer = new UInt8[config->httpConnectionBufferSize];
    }
    HS_TRACE("Reading ICY stream for playback\n");
    // Phase 3: demultiplex audio bytes and in-band metadata.
    UInt32 i=0;
    for (; offset < bufSize; offset++) {
        // is this a metadata byte?
        if (m_metaDataBytesRemaining > 0) {
            m_metaDataBytesRemaining--;
            if (m_metaDataBytesRemaining == 0) {
                // Metadata block complete: reset the data-byte counter and
                // parse the accumulated "key='value';" pairs.
                m_dataByteReadCount = 0;
                if (m_delegate && !m_icyMetaData.empty()) {
                    std::map<CFStringRef,CFStringRef> metadataMap;
                    CFStringRef metaData = createMetaDataStringWithMostReasonableEncoding(&m_icyMetaData[0],
                                                                                          m_icyMetaData.size());
                    if (!metaData) {
                        // Metadata encoding failed, cannot parse.
                        m_icyMetaData.clear();
                        continue;
                    }
                    CFArrayRef tokens = CFStringCreateArrayBySeparatingStrings(kCFAllocatorDefault,
                                                                               metaData,
                                                                               CFSTR(";"));
                    // NOTE(review): this inner loop's CFIndex i shadows the
                    // outer UInt32 i used as the audio-buffer write index.
                    for (CFIndex i=0, max=CFArrayGetCount(tokens); i < max; i++) {
                        CFStringRef token = (CFStringRef) CFArrayGetValueAtIndex(tokens, i);
                        CFRange foundRange;
                        // Split each token at "='"; the value is assumed to
                        // end with a trailing single quote (hence the -3).
                        if (CFStringFindWithOptions(token,
                                                    CFSTR("='"),
                                                    CFRangeMake(0, CFStringGetLength(token)),
                                                    NULL,
                                                    &foundRange) == true) {
                            CFRange keyRange = CFRangeMake(0, foundRange.location);
                            CFStringRef metadaKey = CFStringCreateWithSubstring(kCFAllocatorDefault,
                                                                                token,
                                                                                keyRange);
                            CFRange valueRange = CFRangeMake(foundRange.location + 2, CFStringGetLength(token) - keyRange.length - 3);
                            CFStringRef metadaValue = CFStringCreateWithSubstring(kCFAllocatorDefault,
                                                                                  token,
                                                                                  valueRange);
                            metadataMap[metadaKey] = metadaValue;
                        }
                    }
                    CFRelease(tokens);
                    CFRelease(metaData);
                    if (m_icyName) {
                        metadataMap[CFSTR("IcecastStationName")] = CFStringCreateCopy(kCFAllocatorDefault, m_icyName);
                    }
                    m_delegate->streamMetaDataAvailable(metadataMap);
                }
                m_icyMetaData.clear();
                continue;
            }
            m_icyMetaData.push_back(buf[offset]);
            continue;
        }
        // is this the interval byte?
        if (m_icyMetaDataInterval > 0 && m_dataByteReadCount == m_icyMetaDataInterval) {
            // The interval byte encodes metadata length / 16; zero means
            // "no metadata this time".
            m_metaDataBytesRemaining = buf[offset] * 16;
            if (m_metaDataBytesRemaining == 0) {
                m_dataByteReadCount = 0;
            }
            continue;
        }
        // a data byte
        m_dataByteReadCount++;
        m_icyReadBuffer[i++] = buf[offset];
    }
    // Hand any demultiplexed audio bytes to the delegate in one call.
    if (m_delegate && i > 0) {
        m_delegate->streamHasBytesAvailable(m_icyReadBuffer, i);
    }
}
/*
 * Attempts to decode raw ICY metadata/header bytes into a CFString.
 *
 * The ICY protocol does not declare a text encoding, so a list of
 * candidate encodings is tried in a fixed order of likelihood and the
 * first successful conversion wins. Returns NULL if every candidate
 * fails; callers must handle a NULL result.
 */
CFStringRef HTTP_Stream::createMetaDataStringWithMostReasonableEncoding(const UInt8 *bytes, const CFIndex numBytes)
{
    // Candidate encodings, most likely first. The order matters and matches
    // the original try-in-sequence behavior exactly.
    static const CFStringEncoding candidateEncodings[] = {
        kCFStringEncodingUTF8,
        kCFStringEncodingISOLatin1,
        kCFStringEncodingWindowsLatin1,
        kCFStringEncodingNextStepLatin,
        kCFStringEncodingISOLatin2,
        kCFStringEncodingISOLatin3,
        kCFStringEncodingISOLatin4,
        kCFStringEncodingISOLatinCyrillic,
        kCFStringEncodingISOLatinArabic,
        kCFStringEncodingISOLatinGreek,
        kCFStringEncodingISOLatinHebrew,
        kCFStringEncodingISOLatin5,
        kCFStringEncodingISOLatin6,
        kCFStringEncodingISOLatinThai,
        kCFStringEncodingISOLatin7,
        kCFStringEncodingISOLatin8,
        kCFStringEncodingISOLatin9,
        kCFStringEncodingWindowsLatin2,
        kCFStringEncodingWindowsCyrillic,
        kCFStringEncodingWindowsGreek,
        kCFStringEncodingWindowsLatin5,
        kCFStringEncodingWindowsHebrew,
        kCFStringEncodingWindowsArabic,
        kCFStringEncodingKOI8_R,
        kCFStringEncodingBig5,
        kCFStringEncodingASCII
    };
    const size_t candidateCount = sizeof(candidateEncodings) / sizeof(candidateEncodings[0]);
    CFStringRef decoded = NULL;
    for (size_t idx = 0; idx < candidateCount; idx++) {
        decoded = CFStringCreateWithBytes(kCFAllocatorDefault,
                                          bytes,
                                          numBytes,
                                          candidateEncodings[idx],
                                          false);
        if (decoded != NULL) {
            break;
        }
    }
    return decoded;
}
/*
 * CFReadStream event callback (static). clientCallBackInfo carries the
 * owning HTTP_Stream instance.
 *
 * Handles three events:
 *  - kCFStreamEventHasBytesAvailable: drains the stream into
 *    m_httpReadBuffer, parses headers/ID3/ICY framing as needed, and
 *    forwards audio bytes to the delegate. Reading pauses (m_readPending)
 *    while the stream is unscheduled from the run loop, and a read error
 *    on a non-continuous stream triggers a Range-based reconnect.
 *  - kCFStreamEventEndEncountered: reopens at the missing byte position
 *    if the download ended short of Content-Length, otherwise notifies
 *    the delegate of a clean end.
 *  - kCFStreamEventErrorOccurred: reports the stream error description
 *    to the delegate.
 */
void HTTP_Stream::readCallBack(CFReadStreamRef stream, CFStreamEventType eventType, void *clientCallBackInfo)
{
    HTTP_Stream *THIS = static_cast<HTTP_Stream*>(clientCallBackInfo);
    Stream_Configuration *config = Stream_Configuration::configuration();
    CFStringRef reportedNetworkError = NULL;
    switch (eventType) {
        case kCFStreamEventHasBytesAvailable: {
            if (!THIS->m_httpReadBuffer) {
                THIS->m_httpReadBuffer = new UInt8[config->httpConnectionBufferSize];
            }
            while (CFReadStreamHasBytesAvailable(stream)) {
                if (!THIS->m_scheduledInRunLoop) {
                    /*
                     * This is critical - though the stream has data available,
                     * do not try to feed the audio queue with data, if it has
                     * indicated that it doesn't want more data due to buffers
                     * full.
                     */
                    THIS->m_readPending = true;
                    break;
                }
                CFIndex bytesRead = CFReadStreamRead(stream, THIS->m_httpReadBuffer, config->httpConnectionBufferSize);
                if (CFReadStreamGetStatus(stream) == kCFStreamStatusError ||
                    bytesRead < 0) {
                    if (THIS->contentLength() > 0) {
                        /*
                         * Try to recover gracefully if we have a non-continuous stream
                         */
                        // Reopen with a Range starting at the first unread byte.
                        Input_Stream_Position currentPosition = THIS->position();
                        Input_Stream_Position recoveryPosition;
                        recoveryPosition.start = currentPosition.start + THIS->m_bytesRead;
                        recoveryPosition.end = THIS->contentLength();
                        HS_TRACE("Recovering HTTP stream, start %llu\n", recoveryPosition.start);
                        THIS->open(recoveryPosition);
                        break;
                    }
                    // Continuous stream: no recovery possible, report the error.
                    CFErrorRef streamError = CFReadStreamCopyError(stream);
                    if (streamError) {
                        CFStringRef errorDesc = CFErrorCopyDescription(streamError);
                        if (errorDesc) {
                            reportedNetworkError = CFStringCreateCopy(kCFAllocatorDefault, errorDesc);
                            CFRelease(errorDesc);
                        }
                        CFRelease(streamError);
                    }
                    if (THIS->m_delegate) {
                        THIS->m_delegate->streamErrorOccurred(reportedNetworkError);
                        if (reportedNetworkError) {
                            CFRelease(reportedNetworkError);
                            reportedNetworkError = NULL;
                        }
                    }
                    break;
                }
                if (bytesRead > 0) {
                    THIS->m_bytesRead += bytesRead;
                    HS_TRACE("Read %li bytes, total %llu\n", bytesRead, THIS->m_bytesRead);
                    // Header parsing is a no-op after the first chunk.
                    THIS->parseHttpHeadersIfNeeded(THIS->m_httpReadBuffer, bytesRead);
#ifdef INCLUDE_ID3TAG_SUPPORT
                    // Feed the ID3 parser only while it still wants data and
                    // the stream is not ICY-framed.
                    if (!THIS->m_icyStream && THIS->m_id3Parser->wantData()) {
                        THIS->m_id3Parser->feedData(THIS->m_httpReadBuffer, (UInt32)bytesRead);
                    }
#endif
                    if (THIS->m_icyStream) {
                        HS_TRACE("Parsing ICY stream\n");
                        THIS->parseICYStream(THIS->m_httpReadBuffer, bytesRead);
                    } else {
                        if (THIS->m_delegate) {
                            HS_TRACE("Not an ICY stream; calling the delegate back\n");
                            THIS->m_delegate->streamHasBytesAvailable(THIS->m_httpReadBuffer, (UInt32)bytesRead);
                        }
                    }
                }
            }
            // Safety net: release the error string if it was created but the
            // delegate branch above did not consume it.
            if (reportedNetworkError) {
                CFRelease(reportedNetworkError);
                reportedNetworkError = NULL;
            }
            break;
        }
        case kCFStreamEventEndEncountered: {
            // This should concern only non-continuous streams
            if (THIS->m_bytesRead < THIS->contentLength()) {
                HS_TRACE("End of stream, but we have read only %llu bytes on a total of %li. Missing: %llu\n", THIS->m_bytesRead, THIS->contentLength(), (THIS->contentLength() - THIS->m_bytesRead));
                // Short read: reconnect with a Range request for the rest.
                Input_Stream_Position currentPosition = THIS->position();
                Input_Stream_Position recoveryPosition;
                recoveryPosition.start = currentPosition.start + THIS->m_bytesRead;
                recoveryPosition.end = THIS->contentLength();
                HS_TRACE("Reopen for the end of the file from byte position: %llu\n", recoveryPosition.start);
                THIS->close();
                THIS->open(recoveryPosition);
                break;
            }
            if (THIS->m_delegate) {
                THIS->m_delegate->streamEndEncountered();
            }
            break;
        }
        case kCFStreamEventErrorOccurred: {
            if (THIS->m_delegate) {
                // Shadows the outer reportedNetworkError intentionally; this
                // branch owns and releases its own copy.
                CFStringRef reportedNetworkError = NULL;
                CFErrorRef streamError = CFReadStreamCopyError(stream);
                if (streamError) {
                    CFStringRef errorDesc = CFErrorCopyDescription(streamError);
                    if (errorDesc) {
                        reportedNetworkError = CFStringCreateCopy(kCFAllocatorDefault, errorDesc);
                        CFRelease(errorDesc);
                    }
                    CFRelease(streamError);
                }
                THIS->m_delegate->streamErrorOccurred(reportedNetworkError);
                if (reportedNetworkError) {
                    CFRelease(reportedNetworkError);
                }
            }
            break;
        }
    }
}
} // namespace astreamer
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#ifndef ASTREAMER_HTTP_STREAM_H
#define ASTREAMER_HTTP_STREAM_H
#import <CFNetwork/CFNetwork.h>
#import <vector>
#import <map>
#import "input_stream.h"
#import "id3_parser.h"
namespace astreamer {
/*
 * HTTP/ShoutCast input stream built on CFNetwork's CFReadStream.
 *
 * Implements the Input_Stream interface: opens a URL (optionally with a
 * byte-range position), feeds received bytes to the Input_Stream_Delegate,
 * and transparently handles ICY in-band metadata and ID3 tags.
 */
class HTTP_Stream : public Input_Stream {
private:
    // Non-copyable: the instance owns CF objects and raw buffers.
    HTTP_Stream(const HTTP_Stream&);
    HTTP_Stream& operator=(const HTTP_Stream&);
    // Shared header/request string constants (defined in the .cpp).
    static CFStringRef httpRequestMethod;
    static CFStringRef httpUserAgentHeader;
    static CFStringRef httpRangeHeader;
    static CFStringRef icyMetaDataHeader;
    static CFStringRef icyMetaDataValue;
    CFURLRef m_url;
    CFReadStreamRef m_readStream;
    // True while the read stream is scheduled on the run loop; reads are
    // deferred (m_readPending) while unscheduled.
    bool m_scheduledInRunLoop;
    bool m_readPending;
    Input_Stream_Position m_position;
    /* HTTP headers */
    bool m_httpHeadersParsed;
    CFStringRef m_contentType;
    size_t m_contentLength;
    UInt64 m_bytesRead;
    /* ICY protocol */
    bool m_icyStream;
    bool m_icyHeaderCR;          // CR seen, awaiting LF while reading headers
    bool m_icyHeadersRead;       // raw header lines collected
    bool m_icyHeadersParsed;     // header lines interpreted
    CFStringRef m_icyName;
    std::vector<CFStringRef> m_icyHeaderLines;
    size_t m_icyMetaDataInterval;   // data bytes between metadata blocks
    size_t m_dataByteReadCount;
    size_t m_metaDataBytesRemaining;
    std::vector<UInt8> m_icyMetaData;
    /* Read buffers */
    UInt8 *m_httpReadBuffer;
    UInt8 *m_icyReadBuffer;
    ID3_Parser *m_id3Parser;
    CFReadStreamRef createReadStream(CFURLRef url);
    void parseHttpHeadersIfNeeded(const UInt8 *buf, const CFIndex bufSize);
    void parseICYStream(const UInt8 *buf, const CFIndex bufSize);
    CFStringRef createMetaDataStringWithMostReasonableEncoding(const UInt8 *bytes, const CFIndex numBytes);
    static void readCallBack(CFReadStreamRef stream, CFStreamEventType eventType, void *clientCallBackInfo);
public:
    HTTP_Stream();
    virtual ~HTTP_Stream();
    Input_Stream_Position position();
    CFStringRef contentType();
    size_t contentLength();
    bool open();
    bool open(const Input_Stream_Position& position);
    void close();
    void setScheduledInRunLoop(bool scheduledInRunLoop);
    void setUrl(CFURLRef url);
    // True if the URL scheme is one this stream implementation supports.
    static bool canHandleUrl(CFURLRef url);
    /* ID3_Parser_Delegate */
    void id3metaDataAvailable(std::map<CFStringRef,CFStringRef> metaData);
    void id3tagSizeAvailable(UInt32 tagSize);
};
} // namespace astreamer
#endif // ASTREAMER_HTTP_STREAM_H
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#include "id3_parser.h"
#include <vector>
//#define ID3_DEBUG 1
#if !defined ( ID3_DEBUG)
#define ID3_TRACE(...) do {} while (0)
#else
#define ID3_TRACE(...) printf(__VA_ARGS__)
#endif
namespace astreamer {
// Code from:
// http://www.opensource.apple.com/source/libsecurity_manifest/libsecurity_manifest-29384/lib/SecureDownloadInternal.c
// Returns a CFString containing the base64 representation of the data.
// boolean argument for whether to line wrap at 64 columns or not.
// Code from:
// http://www.opensource.apple.com/source/libsecurity_manifest/libsecurity_manifest-29384/lib/SecureDownloadInternal.c
// Returns a CFString containing the base64 representation of the data.
// boolean argument for whether to line wrap at 64 columns or not.
//
// Fix over the original: the main loop reads ptr[i] before any length
// check, so an empty input (len == 0, or a NULL pointer) read out of
// bounds. Empty input now returns an empty string immediately.
CFStringRef createBase64EncodedString(const UInt8* ptr, size_t len, int wrap) {
    const char* alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    "abcdefghijklmnopqrstuvwxyz"
    "0123456789+/=";
    // base64 encoded data uses 4 ASCII characters to represent 3 octets.
    // There can be up to two == at the end of the base64 data for padding.
    // If we are line wrapping then we need space for one newline character
    // every 64 characters of output.
    // Rounded 4/3 up to 2 to avoid floating point math.
    //CFIndex max_len = (2*len) + 2;
    //if (wrap) len = len + ((2*len) / 64) + 1;
    CFMutableStringRef string = CFStringCreateMutable(NULL, 0);
    if (!string) return NULL;
    // Empty input encodes to the empty string; bail out before the loop
    // below dereferences ptr[0].
    if (len == 0 || ptr == NULL) return string;
    /*
     http://www.faqs.org/rfcs/rfc3548.html
     +--first octet--+-second octet--+--third octet--+
     |7 6 5 4 3 2 1 0|7 6 5 4 3 2 1 0|7 6 5 4 3 2 1 0|
     +-----------+---+-------+-------+---+-----------+
     |5 4 3 2 1 0|5 4 3 2 1 0|5 4 3 2 1 0|5 4 3 2 1 0|
     +--1.index--+--2.index--+--3.index--+--4.index--+
     */
    size_t i = 0;    // octet offset into input data
    int column = 0;  // output column number (used for line wrapping)
    for (;;) {
        UniChar c[16]; // buffer of characters to add to output
        int j = 0;     // offset to place next character in buffer
        int index;     // index into output alphabet
#define ADDCHAR(_X_) do { c[j++] = _X_; if (wrap && (++column == 64)) { column = 0; c[j++] = '\n'; } } while (0);
        // 1.index
        index = (ptr[i] >> 2) & 0x3F;
        ADDCHAR(alphabet[index]);
        // 2.index
        index = (ptr[i] << 4) & 0x30;
        if ((i+1) < len) {
            index = index | ((ptr[i+1] >> 4) & 0x0F);
            ADDCHAR(alphabet[index]);
        } else { // end of input, pad as necessary
            ADDCHAR(alphabet[index]);
            ADDCHAR('=');
            ADDCHAR('=');
        }
        // 3.index
        if ((i+1) < len) {
            index = (ptr[i+1] << 2) & 0x3C;
            if ((i+2) < len) {
                index = index | ((ptr[i+2] >> 6) & 0x03);
                ADDCHAR(alphabet[index]);
            } else { // end of input, pad as necessary
                ADDCHAR(alphabet[index]);
                ADDCHAR('=');
            }
        }
        // 4.index
        if ((i+2) < len) {
            index = (ptr[i+2]) & 0x3F;
            ADDCHAR(alphabet[index]);
        }
        CFStringAppendCharacters(string, c, j);
        i += 3; // we processed 3 bytes of input
        if (i >= len) {
            // end of data, append newline if we haven't already
            if (wrap && c[j-1] != '\n') {
                c[0] = '\n';
                CFStringAppendCharacters(string, c, 1);
            }
            break;
        }
    }
    return string;
}
// Parser state machine for incremental ID3v2 tag parsing.
enum ID3_Parser_State {
    ID3_Parser_State_Initial = 0,    // waiting for enough bytes to identify a tag header
    ID3_Parser_State_Parse_Frames,   // header validated; accumulating/parsing frames
    ID3_Parser_State_Tag_Parsed,     // done; no more input wanted
    ID3_Parser_State_Not_Valid_Tag   // data is not a supported ID3v2 tag; input ignored
};
/*
* =======================================
* Private class
* =======================================
*/
/*
 * Private implementation of ID3_Parser (pimpl).
 *
 * Accumulates stream bytes in m_tagData until the whole tag is present,
 * then parses the ID3v2.2/2.3 frames it understands (title, performer,
 * cover art) and pushes them to the public parser's delegate.
 */
class ID3_Parser_Private {
public:
    ID3_Parser_Private();
    ~ID3_Parser_Private();
    bool wantData();
    void feedData(UInt8 *data, UInt32 numBytes);
    void setState(ID3_Parser_State state);
    void reset();
    // Decodes the textual payload of a text frame starting at pos.
    CFStringRef parseContent(UInt32 framesize, UInt32 pos, CFStringEncoding encoding, bool byteOrderMark);
    ID3_Parser *m_parser;          // back-reference for delegate callbacks (not owned)
    ID3_Parser_State m_state;
    UInt32 m_bytesReceived;        // total bytes fed so far
    UInt32 m_tagSize;              // full tag size incl. header (and footer if present)
    UInt8 m_majorVersion;          // ID3v2.x major version byte
    bool m_hasFooter;
    bool m_usesUnsynchronisation;
    bool m_usesExtendedHeader;
    // Parsed metadata; owned CFStrings, released in reset()/destructor.
    CFStringRef m_title;
    CFStringRef m_performer;
    CFStringRef m_coverArt;        // base64-encoded image data
    std::vector<UInt8> m_tagData;  // raw tag bytes accumulated across feedData calls
};
/*
* =======================================
* Private class implementation
* =======================================
*/
// Initializes the parser in the Initial state with all metadata empty.
ID3_Parser_Private::ID3_Parser_Private() :
    m_parser(0),
    m_state(ID3_Parser_State_Initial),
    m_bytesReceived(0),
    m_tagSize(0),
    m_majorVersion(0),
    m_hasFooter(false),
    m_usesUnsynchronisation(false),
    m_usesExtendedHeader(false),
    m_title(NULL),
    m_performer(NULL),
    m_coverArt(NULL)
{
}
// Releases the CFString metadata members still owned by the parser.
ID3_Parser_Private::~ID3_Parser_Private()
{
    CFStringRef *ownedStrings[] = { &m_performer, &m_title, &m_coverArt };
    const size_t stringCount = sizeof(ownedStrings) / sizeof(ownedStrings[0]);
    for (size_t n = 0; n < stringCount; n++) {
        if (*ownedStrings[n]) {
            CFRelease(*ownedStrings[n]);
            *ownedStrings[n] = NULL;
        }
    }
}
/*
 * Returns true while more input could still be useful: parsing is over
 * once the tag has been fully parsed, or once the data has been
 * determined not to be a valid/supported ID3 tag.
 */
bool ID3_Parser_Private::wantData()
{
    const bool finished = (m_state == ID3_Parser_State_Tag_Parsed ||
                           m_state == ID3_Parser_State_Not_Valid_Tag);
    return !finished;
}
/*
 * Feeds a chunk of stream bytes into the incremental ID3v2 parser.
 *
 * Bytes are appended to m_tagData and the state machine is advanced as
 * far as the accumulated data allows:
 *  - Initial: validate the 10-byte "ID3" header, read version, flags
 *    and the synchsafe tag size, and report the size to the delegate.
 *  - Parse_Frames: once the whole tag has arrived, walk the frames and
 *    extract title (TIT2/TT2), performer (TPE1/TP1) and cover art
 *    (APIC, base64-encoded), then push the metadata to the delegate.
 * Invalid or unsupported data moves to Not_Valid_Tag and all further
 * input is ignored (see wantData()).
 */
void ID3_Parser_Private::feedData(UInt8 *data, UInt32 numBytes)
{
    if (!wantData()) {
        return;
    }
    m_bytesReceived += numBytes;
    ID3_TRACE("received %i bytes, total bytes %i\n", numBytes, m_bytesReceived);
    for (CFIndex i=0; i < numBytes; i++) {
        m_tagData.push_back(data[i]);
    }
    // Loop so a single chunk can drive multiple state transitions.
    bool enoughBytesToParse = true;
    while (enoughBytesToParse) {
        switch (m_state) {
            case ID3_Parser_State_Initial: {
                // Do we have enough bytes to determine if this is an ID3 tag or not?
                if (m_bytesReceived <= 9) {
                    enoughBytesToParse = false;
                    break;
                }
                if (!(m_tagData[0] == 'I' &&
                      m_tagData[1] == 'D' &&
                      m_tagData[2] == '3')) {
                    ID3_TRACE("Not an ID3 tag, bailing out\n");
                    // Does not begin with the tag header; not an ID3 tag
                    setState(ID3_Parser_State_Not_Valid_Tag);
                    enoughBytesToParse = false;
                    break;
                }
                m_majorVersion = m_tagData[3];
                // Currently support only id3v2.2 and 2.3
                if (m_majorVersion != 2 && m_majorVersion != 3) {
                    ID3_TRACE("ID3v2.%i not supported by the parser\n", m_majorVersion);
                    setState(ID3_Parser_State_Not_Valid_Tag);
                    enoughBytesToParse = false;
                    break;
                }
                // Ignore the revision
                // Parse the flags
                // NOTE(review): else-if means only the highest-priority flag
                // is recorded even if several flag bits are set — verify
                // against the ID3v2.3 spec (flags are independent bits).
                if ((m_tagData[5] & 0x80) != 0) {
                    m_usesUnsynchronisation = true;
                } else if ((m_tagData[5] & 0x40) != 0 && m_majorVersion >= 3) {
                    m_usesExtendedHeader = true;
                } else if ((m_tagData[5] & 0x10) != 0 && m_majorVersion >= 3) {
                    m_hasFooter = true;
                }
                // Tag size is a 28-bit synchsafe integer (7 bits per byte).
                m_tagSize = ((m_tagData[6] & 0x7F) << 21) | ((m_tagData[7] & 0x7F) << 14) |
                ((m_tagData[8] & 0x7F) << 7) | (m_tagData[9] & 0x7F);
                if (m_tagSize > 0) {
                    // Add header (and footer) sizes so m_tagSize covers the
                    // full byte span of the tag in the stream.
                    if (m_hasFooter) {
                        m_tagSize += 10;
                    }
                    m_tagSize += 10;
                    ID3_TRACE("tag size: %i\n", m_tagSize);
                    if (m_parser->m_delegate) {
                        m_parser->m_delegate->id3tagSizeAvailable(m_tagSize);
                    }
                    setState(ID3_Parser_State_Parse_Frames);
                    break;
                }
                setState(ID3_Parser_State_Not_Valid_Tag);
                enoughBytesToParse = false;
                break;
            }
            case ID3_Parser_State_Parse_Frames: {
                // Do we have enough data to parse the frames?
                if (m_tagData.size() < m_tagSize) {
                    ID3_TRACE("Not enough data received for parsing, have %lu bytes, need %i bytes\n",
                              m_tagData.size(),
                              m_tagSize);
                    enoughBytesToParse = false;
                    break;
                }
                // Frames start right after the 10-byte tag header.
                UInt32 pos = 10;
                // Do we have an extended header? If we do, skip it
                if (m_usesExtendedHeader) {
                    UInt32 extendedHeaderSize = ((m_tagData[pos] << 21) |
                                                 (m_tagData[pos+1] << 14) |
                                                 (m_tagData[pos+2] << 7) |
                                                 m_tagData[pos+3]);
                    if (pos + extendedHeaderSize >= m_tagSize) {
                        setState(ID3_Parser_State_Not_Valid_Tag);
                        enoughBytesToParse = false;
                        break;
                    }
                    ID3_TRACE("Skipping extended header, size %i\n", extendedHeaderSize);
                    pos += extendedHeaderSize;
                }
                while (pos < m_tagSize) {
                    // Frame IDs are 4 chars in v2.3+, 3 chars in v2.2.
                    char frameName[5];
                    frameName[0] = m_tagData[pos];
                    frameName[1] = m_tagData[pos+1];
                    frameName[2] = m_tagData[pos+2];
                    if (m_majorVersion >= 3) {
                        frameName[3] = m_tagData[pos+3];
                    } else {
                        frameName[3] = 0;
                    }
                    frameName[4] = 0;
                    UInt32 framesize = 0;
                    if (m_majorVersion >= 3) {
                        pos += 4;
                        // NOTE(review): v2.3 frame sizes are plain 32-bit
                        // big-endian, not synchsafe — this 7-bit decode
                        // matches v2.4; verify against real v2.3 tags.
                        framesize = ((m_tagData[pos] << 21) |
                                     (m_tagData[pos+1] << 14) |
                                     (m_tagData[pos+2] << 7) |
                                     m_tagData[pos+3]);
                    } else {
                        pos += 3;
                        framesize = ((m_tagData[pos] << 16) |
                                     (m_tagData[pos+1] << 8) |
                                     m_tagData[pos+2]);
                    }
                    if (framesize == 0) {
                        // A zero-size frame cannot advance pos; treat as invalid
                        // to avoid an infinite loop.
                        setState(ID3_Parser_State_Not_Valid_Tag);
                        enoughBytesToParse = false;
                        // Break from the loop and then out of the case context
                        goto ParseFramesExit;
                    }
                    // Skip size bytes (v2.2) or size+flags bytes (v2.3).
                    if (m_majorVersion >= 3) {
                        pos += 6;
                    } else {
                        pos += 3;
                    }
                    // First payload byte of a text frame is the encoding marker.
                    CFStringEncoding encoding;
                    bool byteOrderMark = false;
                    if (m_tagData[pos] == 3) {
                        encoding = kCFStringEncodingUTF8;
                    } else if (m_tagData[pos] == 2) {
                        encoding = kCFStringEncodingUTF16BE;
                    } else if (m_tagData[pos] == 1) {
                        encoding = kCFStringEncodingUTF16;
                        byteOrderMark = true;
                    } else {
                        // ISO-8859-1 is the default encoding
                        encoding = kCFStringEncodingISOLatin1;
                    }
                    if (!strcmp(frameName, "TIT2") || !strcmp(frameName, "TT2")) {
                        // Title frame (v2.3 / v2.2 id).
                        if (m_title) {
                            CFRelease(m_title);
                        }
                        m_title = parseContent(framesize, pos + 1, encoding, byteOrderMark);
                        ID3_TRACE("ID3 title parsed: '%s'\n", CFStringGetCStringPtr(m_title, CFStringGetSystemEncoding()));
                    } else if (!strcmp(frameName, "TPE1") || !strcmp(frameName, "TP1")) {
                        // Lead performer frame (v2.3 / v2.2 id).
                        if (m_performer) {
                            CFRelease(m_performer);
                        }
                        m_performer = parseContent(framesize, pos + 1, encoding, byteOrderMark);
                        ID3_TRACE("ID3 performer parsed: '%s'\n", CFStringGetCStringPtr(m_performer, CFStringGetSystemEncoding()));
                    } else if (!strcmp(frameName, "APIC")) {
                        // Attached picture: NUL-terminated MIME type, picture
                        // type byte, NUL-terminated description, then image data.
                        char imageType[65] = {0};
                        size_t dataPos = pos+1;
                        for (int i=0; m_tagData[dataPos]; i++,dataPos++) {
                            imageType[i] = m_tagData[dataPos];
                        }
                        dataPos++;
                        if (!strcmp(imageType, "image/jpeg") ||
                            !strcmp(imageType, "image/png")) {
                            ID3_TRACE("Image type %s, parsing, dataPos %zu\n", imageType, dataPos);
                            // Skip the image description
                            while (!m_tagData[++dataPos]);
                            const size_t coverArtSize = framesize - ((dataPos - pos) + 5);
                            UInt8 *bytes = new UInt8[coverArtSize];
                            for (int i=0; i < coverArtSize; i++) {
                                bytes[i] = m_tagData[dataPos+i];
                            }
                            if (m_coverArt) {
                                CFRelease(m_coverArt);
                            }
                            // Stored as base64 so it travels in a CFString map.
                            m_coverArt = createBase64EncodedString(bytes, coverArtSize, 0);
                            delete [] bytes;
                        } else {
                            ID3_TRACE("%s is an unknown type for image data, skipping\n", imageType);
                        }
                    } else {
                        // Unknown/unhandled frame
                        ID3_TRACE("Unknown/unhandled frame: %s, size %i\n", frameName, framesize);
                    }
                    pos += framesize;
                }
                // Push out the metadata
                if (m_parser->m_delegate) {
                    std::map<CFStringRef,CFStringRef> metadataMap;
                    if (m_performer && CFStringGetLength(m_performer) > 0) {
                        metadataMap[CFSTR("MPMediaItemPropertyArtist")] =
                        CFStringCreateCopy(kCFAllocatorDefault, m_performer);
                    }
                    if (m_title && CFStringGetLength(m_title) > 0) {
                        metadataMap[CFSTR("MPMediaItemPropertyTitle")] =
                        CFStringCreateCopy(kCFAllocatorDefault, m_title);
                    }
                    if (m_coverArt && CFStringGetLength(m_coverArt) > 0) {
                        metadataMap[CFSTR("CoverArt")] =
                        CFStringCreateCopy(kCFAllocatorDefault, m_coverArt);
                    }
                    m_parser->m_delegate->id3metaDataAvailable(metadataMap);
                }
                setState(ID3_Parser_State_Tag_Parsed);
                enoughBytesToParse = false;
            ParseFramesExit:
                break;
            }
            default:
                // Terminal states: nothing further to do with the input.
                enoughBytesToParse = false;
                break;
        }
    }
}
// Transitions the parser state machine (no validation of transitions).
void ID3_Parser_Private::setState(astreamer::ID3_Parser_State state)
{
    m_state = state;
}
void ID3_Parser_Private::reset()
{
m_state = ID3_Parser_State_Initial;
m_bytesReceived = 0;
m_tagSize = 0;
m_majorVersion = 0;
m_hasFooter = false;
m_usesUnsynchronisation = false;
m_usesExtendedHeader = false;
if (m_title) {
CFRelease(m_title);
m_title = NULL;
}
if (m_performer) {
CFRelease(m_performer);
m_performer = NULL;
}
if (m_coverArt) {
CFRelease(m_coverArt);
m_coverArt = NULL;
}
m_tagData.clear();
}
/*
 * Decodes the textual payload of an ID3 text frame into a new CFString.
 * pos points just past the frame's encoding-marker byte (the caller
 * passes pos + 1), so the payload spans framesize - 1 bytes.
 * Returns NULL if the bytes cannot be decoded in the given encoding.
 */
CFStringRef ID3_Parser_Private::parseContent(UInt32 framesize, UInt32 pos, CFStringEncoding encoding, bool byteOrderMark)
{
    const UInt8 *payload = &m_tagData[pos];
    const CFIndex payloadLength = framesize - 1;
    return CFStringCreateWithBytes(kCFAllocatorDefault,
                                   payload,
                                   payloadLength,
                                   encoding,
                                   byteOrderMark);
}
/*
* =======================================
* ID3_Parser implementation
* =======================================
*/
// Creates the parser with no delegate and wires the private
// implementation's back-reference used for delegate callbacks.
ID3_Parser::ID3_Parser()
{
    m_delegate = 0;
    m_private = new ID3_Parser_Private();
    m_private->m_parser = this;
}
// Destroys the private implementation and clears the pointer so a stale
// reference cannot be dereferenced accidentally.
ID3_Parser::~ID3_Parser()
{
    delete m_private;
    m_private = 0;
}
// Forwards to the private implementation; see ID3_Parser_Private::reset().
void ID3_Parser::reset()
{
    m_private->reset();
}
// Forwards to the private implementation; see ID3_Parser_Private::wantData().
bool ID3_Parser::wantData()
{
    return m_private->wantData();
}
// Forwards to the private implementation; see ID3_Parser_Private::feedData().
void ID3_Parser::feedData(UInt8 *data, UInt32 numBytes)
{
    m_private->feedData(data, numBytes);
}
}
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#ifndef ASTREAMER_ID3_PARSER_H
#define ASTREAMER_ID3_PARSER_H
#include <map>
#import <CFNetwork/CFNetwork.h>
namespace astreamer {
class ID3_Parser_Delegate;
class ID3_Parser_Private;
/*
 * Incremental ID3v2 tag parser (public facade over a pimpl).
 * Feed stream bytes with feedData() while wantData() returns true;
 * parsed metadata and the tag size are reported through m_delegate.
 */
class ID3_Parser {
public:
    ID3_Parser();
    ~ID3_Parser();
    // Discards all accumulated state so a new stream can be parsed.
    void reset();
    // False once the tag is fully parsed or determined invalid.
    bool wantData();
    void feedData(UInt8 *data, UInt32 numBytes);
    // Callback receiver; not owned by the parser.
    ID3_Parser_Delegate *m_delegate;
private:
    ID3_Parser_Private *m_private;
};
/*
 * Callback interface for ID3_Parser results.
 */
class ID3_Parser_Delegate {
public:
    virtual void id3metaDataAvailable(std::map<CFStringRef,CFStringRef> metaData) = 0;
    virtual void id3tagSizeAvailable(UInt32 tagSize) = 0;
};
} // namespace astreamer
#endif // ASTREAMER_ID3_PARSER_H
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#include "input_stream.h"
namespace astreamer {
// Base-class constructor: starts with no delegate attached.
Input_Stream::Input_Stream() : m_delegate(0)
{
}
// Virtual destructor so subclasses destruct correctly via base pointers.
Input_Stream::~Input_Stream()
{
}
}
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#ifndef ASTREAMER_INPUT_STREAM_H
#define ASTREAMER_INPUT_STREAM_H
#import "id3_parser.h"
namespace astreamer {
class Input_Stream_Delegate;
// Byte range within a stream, used for Range requests and seeking.
struct Input_Stream_Position {
    UInt64 start;
    UInt64 end;
};
/*
 * Abstract input stream (HTTP, file cache, ...). Subclasses deliver
 * bytes, metadata and errors to the attached Input_Stream_Delegate.
 */
class Input_Stream : public ID3_Parser_Delegate {
public:
    Input_Stream();
    virtual ~Input_Stream();
    // Callback receiver; not owned by the stream.
    Input_Stream_Delegate* m_delegate;
    virtual Input_Stream_Position position() = 0;
    virtual CFStringRef contentType() = 0;
    virtual size_t contentLength() = 0;
    virtual bool open() = 0;
    // Opens starting at the given byte position (seek/resume).
    virtual bool open(const Input_Stream_Position& position) = 0;
    virtual void close() = 0;
    // Controls whether the stream delivers callbacks on the run loop;
    // used to pause delivery when downstream buffers are full.
    virtual void setScheduledInRunLoop(bool scheduledInRunLoop) = 0;
    virtual void setUrl(CFURLRef url) = 0;
};
/*
 * Callback interface for stream events and data delivery.
 */
class Input_Stream_Delegate {
public:
    virtual void streamIsReadyRead() = 0;
    virtual void streamHasBytesAvailable(UInt8 *data, UInt32 numBytes) = 0;
    virtual void streamEndEncountered() = 0;
    virtual void streamErrorOccurred(CFStringRef errorDesc) = 0;
    virtual void streamMetaDataAvailable(std::map<CFStringRef,CFStringRef> metaData) = 0;
    virtual void streamMetaDataByteSizeAvailable(UInt32 sizeInBytes) = 0;
};
} // namespace astreamer
#endif // ASTREAMER_INPUT_STREAM_H
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#include "stream_configuration.h"
namespace astreamer {
// All CF members start NULL; scalar members are set by the ObjC-side
// configuration code before streaming begins.
Stream_Configuration::Stream_Configuration() :
    userAgent(NULL),
    cacheDirectory(NULL),
    predefinedHttpHeaderValues(NULL)
{
}
Stream_Configuration::~Stream_Configuration()
{
    // Release every CF object held by the configuration. The original code
    // released only userAgent, leaking cacheDirectory and
    // predefinedHttpHeaderValues. NOTE(review): this assumes the
    // configuration owns the references its setters store — confirm the
    // ObjC wrapper retains/copies when assigning these fields.
    if (userAgent) {
        CFRelease(userAgent);
        userAgent = NULL;
    }
    if (cacheDirectory) {
        CFRelease(cacheDirectory);
        cacheDirectory = NULL;
    }
    if (predefinedHttpHeaderValues) {
        CFRelease(predefinedHttpHeaderValues);
        predefinedHttpHeaderValues = NULL;
    }
}
// Process-wide singleton; constructed on first use, destroyed at exit.
Stream_Configuration* Stream_Configuration::configuration()
{
    static Stream_Configuration config;
    return &config;
}
}
/*
* This file is part of the FreeStreamer project,
* (C)Copyright 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
* See the file ''LICENSE'' for using the code.
*
* https://github.com/muhku/FreeStreamer
*/
#ifndef ASTREAMER_STREAM_CONFIGURATION_H
#define ASTREAMER_STREAM_CONFIGURATION_H
#import <CoreFoundation/CoreFoundation.h>
namespace astreamer {
/*
 * Process-wide streaming configuration singleton shared by the audio
 * stream, HTTP stream and cache code. Values are populated by the
 * Objective-C wrapper before playback starts.
 */
struct Stream_Configuration {
    unsigned bufferCount;                // number of audio queue buffers
    unsigned bufferSize;                 // size of each audio queue buffer
    unsigned maxPacketDescs;
    unsigned httpConnectionBufferSize;   // sizing for the HTTP/ICY read buffers
    double outputSampleRate;
    long outputNumChannels;
    int bounceInterval;
    int maxBounceCount;
    int startupWatchdogPeriod;
    int maxPrebufferedByteCount;
    // Exactly one prebuffer-size strategy is expected to be active.
    bool usePrebufferSizeCalculationInSeconds;
    bool usePrebufferSizeCalculationInPackets;
    int requiredInitialPrebufferedByteCountForContinuousStream;
    int requiredInitialPrebufferedByteCountForNonContinuousStream;
    int requiredPrebufferSizeInSeconds;
    int requiredInitialPrebufferedPacketCount;
    CFStringRef userAgent;               // User-Agent header value
    CFStringRef cacheDirectory;
    CFDictionaryRef predefinedHttpHeaderValues; // extra request headers (string keys/values)
    bool cacheEnabled;
    bool seekingFromCacheEnabled;
    bool automaticAudioSessionHandlingEnabled;
    bool enableTimeAndPitchConversion;
    bool requireStrictContentTypeChecking;
    int maxDiskCacheSize;
    // Accessor for the process-wide singleton instance.
    static Stream_Configuration *configuration();
private:
    // Construction restricted to the singleton; non-copyable.
    Stream_Configuration();
    ~Stream_Configuration();
    Stream_Configuration(const Stream_Configuration&);
    Stream_Configuration& operator=(const Stream_Configuration&);
};
} // namespace astreamer
#endif // ASTREAMER_STREAM_CONFIGURATION_H
Copyright (c) 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The FreeStreamer framework bundles Reachability which is licensed under the following
license:
Copyright (c) 2011, Tony Million.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
FreeStreamer
====================
A streaming audio player for iOS and OS X.
Features
====================
- **CPU-friendly** design (uses 1% of CPU on average when streaming)
- **Multiple protocols supported**: ShoutCast, standard HTTP, local files
- **Prepared for tough network conditions**: adjustable buffer sizes, stream pre-buffering and restart on failures
- **Metadata support**: ShoutCast metadata, ID3v2 tags
- **Local disk caching**: user only needs to stream a file once and after that it can be played from a local cache
- **Preloading**: playback can start immediately without needing to wait for buffering
- **Record**: support recording the stream contents to a file
- **Access the PCM audio samples**: as an example, a visualizer is included
Documentation
====================
See the [FAQ](https://github.com/muhku/FreeStreamer/wiki/FreeStreamer-FAQ) (Frequently Asked Questions) in the wiki. We also have an [API documentation](http://muhku.github.io/api/) available. The [usage instructions](https://github.com/muhku/FreeStreamer/wiki/Using-the-player-in-your-own-project) are also covered in the wiki.
Is somebody using this in real life?
====================
The short answer is yes! Check out our [website](http://muhku.github.io/) for the reference applications.
Reporting bugs and contributing
====================
For code contributions and other questions, it is preferable to create a GitHub pull request. I don't have time for private email support, so usually the best way to get help is to interact with GitHub issues.
License
====================
See [LICENSE.txt](https://github.com/muhku/FreeStreamer/blob/master/LICENSE.txt) for the license.
Donations
====================
It is possible to use [PayPal](http://muhku.github.io/donate.html) for donations.
......@@ -20,10 +20,13 @@ PODS:
- DKNightVersion/UIKit (2.4.3):
- DKNightVersion/Core
- DOUAudioStreamer (0.2.16)
- FreeStreamer (4.0.0):
- Reachability (~> 3.0)
- lottie-ios (2.5.3)
- Masonry (1.1.0)
- MBProgressHUD (1.2.0)
- MJRefresh (3.7.5)
- Reachability (3.2)
- YTKNetwork (3.0.6):
- AFNetworking/NSURLSession (~> 4.0)
- YYCache (1.0.4)
......@@ -40,6 +43,7 @@ PODS:
DEPENDENCIES:
- DKNightVersion (~> 2.4.3)
- DOUAudioStreamer (~> 0.2.16)
- FreeStreamer (~> 4.0.0)
- lottie-ios (~> 2.5.3)
- Masonry (~> 1.1.0)
- MBProgressHUD (~> 1.2.0)
......@@ -54,10 +58,12 @@ SPEC REPOS:
- AFNetworking
- DKNightVersion
- DOUAudioStreamer
- FreeStreamer
- lottie-ios
- Masonry
- MBProgressHUD
- MJRefresh
- Reachability
- YTKNetwork
- YYCache
- YYImage
......@@ -68,16 +74,18 @@ SPEC CHECKSUMS:
AFNetworking: 7864c38297c79aaca1500c33288e429c3451fdce
DKNightVersion: eaa80cc4014b4bae7d4b535fd87ecc6a3c2767b3
DOUAudioStreamer: c503ba2ecb9a54ff7bda0eff66963ad224f3c7dc
FreeStreamer: 7e9c976045701ac2f7e9c14c17245203c37bf2ea
lottie-ios: a50d5c0160425cd4b01b852bb9578963e6d92d31
Masonry: 678fab65091a9290e40e2832a55e7ab731aad201
MBProgressHUD: 3ee5efcc380f6a79a7cc9b363dd669c5e1ae7406
MJRefresh: fdf5e979eb406a0341468932d1dfc8b7f9fce961
Reachability: 33e18b67625424e47b6cde6d202dce689ad7af96
YTKNetwork: c16be90b06be003de9e9cd0d3b187cc8eaf35c04
YYCache: 8105b6638f5e849296c71f331ff83891a4942952
YYImage: 1e1b62a9997399593e4b9c4ecfbbabbf1d3f3b54
YYModel: 2a7fdd96aaa4b86a824e26d0c517de8928c04b30
YYWebImage: 5f7f36aee2ae293f016d418c7d6ba05c4863e928
PODFILE CHECKSUM: 5f273d0f03f58db41d7f0a6d3d96a8bd054ab744
PODFILE CHECKSUM: d78d9f7fd55a2a7be3fae24d212bdd5eab78666c
COCOAPODS: 1.11.3
Copyright (c) 2011-2013, Tony Million.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
[![Reference Status](https://www.versioneye.com/objective-c/reachability/reference_badge.svg?style=flat)](https://www.versioneye.com/objective-c/reachability/references)
# Reachability
This is a drop-in replacement for Apple's `Reachability` class. It is ARC-compatible, and it uses the new GCD methods to notify of network interface changes.
In addition to the standard `NSNotification`, it supports the use of blocks for when the network becomes reachable and unreachable.
Finally, you can specify whether a WWAN connection is considered "reachable".
*DO NOT OPEN BUGS UNTIL YOU HAVE TESTED ON DEVICE*
## Requirements
Once you have added the `.h/m` files to your project, simply:
* Go to the `Project->TARGETS->Build Phases->Link Binary With Libraries`.
* Press the plus in the lower left of the list.
* Add `SystemConfiguration.framework`.
Boom, you're done.
## Examples
### Block Example
This sample uses blocks to notify when the interface state has changed. The blocks will be called on a **BACKGROUND THREAD**, so you need to dispatch UI updates onto the main thread.
// Allocate a reachability object
Reachability* reach = [Reachability reachabilityWithHostname:@"www.google.com"];
// Set the blocks
reach.reachableBlock = ^(Reachability*reach)
{
// keep in mind this is called on a background thread
// and if you are updating the UI it needs to happen
// on the main thread, like this:
dispatch_async(dispatch_get_main_queue(), ^{
NSLog(@"REACHABLE!");
});
};
reach.unreachableBlock = ^(Reachability*reach)
{
NSLog(@"UNREACHABLE!");
};
// Start the notifier, which will cause the reachability object to retain itself!
[reach startNotifier];
### `NSNotification` Example
This sample will use `NSNotification`s to notify when the interface has changed. They will be delivered on the **MAIN THREAD**, so you *can* do UI updates from within the function.
In addition, it asks the `Reachability` object to consider the WWAN (3G/EDGE/CDMA) as a non-reachable connection (you might use this if you are writing a video streaming app, for example, to save the user's data plan).
// Allocate a reachability object
Reachability* reach = [Reachability reachabilityWithHostname:@"www.google.com"];
// Tell the reachability that we DON'T want to be reachable on 3G/EDGE/CDMA
reach.reachableOnWWAN = NO;
// Here we set up a NSNotification observer. The Reachability that caused the notification
// is passed in the object parameter
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(reachabilityChanged:)
name:kReachabilityChangedNotification
object:nil];
[reach startNotifier];
## Tell the world
Head over to [Projects using Reachability](https://github.com/tonymillion/Reachability/wiki/Projects-using-Reachability) and add your project for "Maximum Wins!".
/*
Copyright (c) 2011, Tony Million.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#import <Foundation/Foundation.h>
#import <SystemConfiguration/SystemConfiguration.h>
/**
* Create NS_ENUM macro if it does not exist on the targeted version of iOS or OS X.
*
* @see http://nshipster.com/ns_enum-ns_options/
**/
#ifndef NS_ENUM
#define NS_ENUM(_type, _name) enum _name : _type _name; enum _name : _type
#endif
// Posted on the main thread whenever the reachability state changes
// (see -reachabilityChanged: in the .m). The Reachability instance is
// the notification object.
extern NSString *const kReachabilityChangedNotification;
typedef NS_ENUM(NSInteger, NetworkStatus) {
// Apple NetworkStatus Compatible Names.
NotReachable = 0,
ReachableViaWiFi = 2,
ReachableViaWWAN = 1
};
@class Reachability;
// Blocks invoked from the notifier's serial GCD queue (NOT the main
// thread) when the interface becomes reachable/unreachable.
typedef void (^NetworkReachable)(Reachability * reachability);
typedef void (^NetworkUnreachable)(Reachability * reachability);
// Drop-in replacement for Apple's Reachability sample class, using GCD
// for change notifications.
@interface Reachability : NSObject
@property (nonatomic, copy) NetworkReachable reachableBlock;
@property (nonatomic, copy) NetworkUnreachable unreachableBlock;
// When NO, a cellular (WWAN) connection is reported as not reachable.
@property (nonatomic, assign) BOOL reachableOnWWAN;
+(Reachability*)reachabilityWithHostname:(NSString*)hostname;
// This is identical to the method above, but is here to maintain
// compatibility with Apple's original code. (see .m)
+(Reachability*)reachabilityWithHostName:(NSString*)hostname;
+(Reachability*)reachabilityForInternetConnection;
// hostAddress points to a struct sockaddr (passed as void* for API
// compatibility).
+(Reachability*)reachabilityWithAddress:(void *)hostAddress;
+(Reachability*)reachabilityForLocalWiFi;
-(Reachability *)initWithReachabilityRef:(SCNetworkReachabilityRef)ref;
// Starts/stops asynchronous change notifications. startNotifier causes
// the object to retain itself until stopNotifier is called.
-(BOOL)startNotifier;
-(void)stopNotifier;
// Synchronous queries of the current state.
-(BOOL)isReachable;
-(BOOL)isReachableViaWWAN;
-(BOOL)isReachableViaWiFi;
// WWAN may be available, but not active until a connection has been established.
// WiFi may require a connection for VPN on Demand.
-(BOOL)isConnectionRequired; // Identical DDG variant.
-(BOOL)connectionRequired; // Apple's routine.
// Dynamic, on demand connection?
-(BOOL)isConnectionOnDemand;
// Is user intervention required?
-(BOOL)isInterventionRequired;
-(NetworkStatus)currentReachabilityStatus;
-(SCNetworkReachabilityFlags)reachabilityFlags;
// Localized status string ("Cellular"/"WiFi"/"No Connection").
-(NSString*)currentReachabilityString;
// Compact debug string of the raw flags.
-(NSString*)currentReachabilityFlags;
@end
/*
Copyright (c) 2011, Tony Million.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#import "Reachability.h"
#import <sys/socket.h>
#import <netinet/in.h>
#import <netinet6/in6.h>
#import <arpa/inet.h>
#import <ifaddrs.h>
#import <netdb.h>
NSString *const kReachabilityChangedNotification = @"kReachabilityChangedNotification";
@interface Reachability ()
@property (nonatomic, assign) SCNetworkReachabilityRef reachabilityRef;
@property (nonatomic, strong) dispatch_queue_t reachabilitySerialQueue;
@property (nonatomic, strong) id reachabilityObject;
-(void)reachabilityChanged:(SCNetworkReachabilityFlags)flags;
-(BOOL)isReachableWithFlags:(SCNetworkReachabilityFlags)flags;
@end
// Render an SCNetworkReachabilityFlags bitmask as a short debug string,
// one character per flag (letter = set, '-' = clear). Format:
// "<wwan><reachable> <required><transient><intervention><onTraffic><onDemand><local><direct>"
static NSString *reachabilityFlags(SCNetworkReachabilityFlags flags)
{
#if TARGET_OS_IPHONE
    char wwanChar = (flags & kSCNetworkReachabilityFlagsIsWWAN) ? 'W' : '-';
#else
    char wwanChar = 'X'; // the WWAN flag does not exist on this platform
#endif
    char reachableChar    = (flags & kSCNetworkReachabilityFlagsReachable)            ? 'R' : '-';
    char requiredChar     = (flags & kSCNetworkReachabilityFlagsConnectionRequired)   ? 'c' : '-';
    char transientChar    = (flags & kSCNetworkReachabilityFlagsTransientConnection)  ? 't' : '-';
    char interventionChar = (flags & kSCNetworkReachabilityFlagsInterventionRequired) ? 'i' : '-';
    char onTrafficChar    = (flags & kSCNetworkReachabilityFlagsConnectionOnTraffic)  ? 'C' : '-';
    char onDemandChar     = (flags & kSCNetworkReachabilityFlagsConnectionOnDemand)   ? 'D' : '-';
    char localChar        = (flags & kSCNetworkReachabilityFlagsIsLocalAddress)       ? 'l' : '-';
    char directChar       = (flags & kSCNetworkReachabilityFlagsIsDirect)             ? 'd' : '-';

    return [NSString stringWithFormat:@"%c%c %c%c%c%c%c%c%c",
            wwanChar, reachableChar,
            requiredChar, transientChar, interventionChar,
            onTrafficChar, onDemandChar, localChar, directChar];
}
// Start listening for reachability notifications on the current run loop
// SCNetworkReachability callback, invoked on the instance's serial GCD
// queue whenever the flags change. `info` is the Reachability instance,
// passed through the SCNetworkReachabilityContext set in -startNotifier.
static void TMReachabilityCallback(SCNetworkReachabilityRef target, SCNetworkReachabilityFlags flags, void* info)
{
#pragma unused (target)
// __bridge: no ownership transfer — the object is kept alive by the
// self-retain taken in -startNotifier.
Reachability *reachability = ((__bridge Reachability*)info);
// We probably don't need an autoreleasepool here, as GCD docs state each queue has its own autorelease pool,
// but what the heck eh?
@autoreleasepool
{
[reachability reachabilityChanged:flags];
}
}
@implementation Reachability
#pragma mark - Class Constructor Methods
// Kept for API compatibility with Apple's original Reachability class;
// simply forwards to +reachabilityWithHostname:.
+(Reachability*)reachabilityWithHostName:(NSString*)hostname
{
    return [Reachability reachabilityWithHostname:hostname];
}

// Creates a Reachability object that tracks reachability of the given
// host name. Returns nil when the underlying SCNetworkReachability ref
// cannot be created.
+(Reachability*)reachabilityWithHostname:(NSString*)hostname
{
    SCNetworkReachabilityRef ref = SCNetworkReachabilityCreateWithName(NULL, [hostname UTF8String]);
    if (!ref)
    {
        return nil;
    }
    return [[self alloc] initWithReachabilityRef:ref];
}
// Creates a Reachability object for a specific socket address. The void*
// parameter must point to a struct sockaddr. Returns nil on failure.
+(Reachability *)reachabilityWithAddress:(void *)hostAddress
{
    SCNetworkReachabilityRef ref = SCNetworkReachabilityCreateWithAddress(kCFAllocatorDefault, (const struct sockaddr*)hostAddress);
    if (!ref)
    {
        return nil;
    }
    return [[self alloc] initWithReachabilityRef:ref];
}
// Tracks reachability of the zero address (0.0.0.0), i.e. whether any
// default route to the internet exists.
+(Reachability *)reachabilityForInternetConnection
{
    struct sockaddr_in zeroAddress = {
        .sin_len = sizeof(struct sockaddr_in),
        .sin_family = AF_INET,
    };
    return [self reachabilityWithAddress:&zeroAddress];
}

// Tracks reachability of the link-local address range only.
+(Reachability*)reachabilityForLocalWiFi
{
    struct sockaddr_in localWifiAddress = {
        .sin_len = sizeof(struct sockaddr_in),
        .sin_family = AF_INET,
    };
    // IN_LINKLOCALNETNUM is defined in <netinet/in.h> as 169.254.0.0
    localWifiAddress.sin_addr.s_addr = htonl(IN_LINKLOCALNETNUM);
    return [self reachabilityWithAddress:&localWifiAddress];
}
// Initialization methods
// Designated initializer. Takes ownership of the SCNetworkReachabilityRef
// (released in -dealloc). WWAN is considered reachable by default.
-(Reachability *)initWithReachabilityRef:(SCNetworkReachabilityRef)ref
{
    if (!(self = [super init]))
    {
        return nil;
    }
    self.reachableOnWWAN = YES;
    self.reachabilityRef = ref;
    // One serial queue for the lifetime of the notifier; the reachability
    // callback is delivered on this queue.
    self.reachabilitySerialQueue = dispatch_queue_create("com.tonymillion.reachability", NULL);
    return self;
}
// Tear down notification delivery, then release the CF reachability ref.
// Order matters: stop callbacks/queue delivery before releasing the ref
// they target.
-(void)dealloc
{
[self stopNotifier];
if(self.reachabilityRef)
{
CFRelease(self.reachabilityRef);
self.reachabilityRef = nil;
}
self.reachableBlock = nil;
self.unreachableBlock = nil;
self.reachabilitySerialQueue = nil;
}
#pragma mark - Notifier Methods
// Notifier
// NOTE: This uses GCD to trigger the blocks - they *WILL NOT* be called on THE MAIN THREAD
// - In other words DO NOT DO ANY UI UPDATES IN THE BLOCKS.
// INSTEAD USE dispatch_async(dispatch_get_main_queue(), ^{UISTUFF}) (or dispatch_sync if you want)
// Begin asynchronous reachability notifications. On success the object
// retains itself (via reachabilityObject) until -stopNotifier is called,
// so callers may safely let their own reference go. Returns NO if the
// callback or dispatch queue could not be installed; any partially
// installed callback is rolled back.
-(BOOL)startNotifier
{
// allow start notifier to be called multiple times
if(self.reachabilityObject && (self.reachabilityObject == self))
{
return YES;
}
SCNetworkReachabilityContext context = { 0, NULL, NULL, NULL, NULL };
// Pass self unretained; lifetime is guaranteed by the self-retain below.
context.info = (__bridge void *)self;
if(SCNetworkReachabilitySetCallback(self.reachabilityRef, TMReachabilityCallback, &context))
{
// Set it as our reachability queue, which will retain the queue
if(SCNetworkReachabilitySetDispatchQueue(self.reachabilityRef, self.reachabilitySerialQueue))
{
// this should do a retain on ourself, so as long as we're in notifier mode we shouldn't disappear out from under ourselves
// woah
self.reachabilityObject = self;
return YES;
}
else
{
#ifdef DEBUG
NSLog(@"SCNetworkReachabilitySetDispatchQueue() failed: %s", SCErrorString(SCError()));
#endif
// UH OH - FAILURE - stop any callbacks!
SCNetworkReachabilitySetCallback(self.reachabilityRef, NULL, NULL);
}
}
else
{
#ifdef DEBUG
NSLog(@"SCNetworkReachabilitySetCallback() failed: %s", SCErrorString(SCError()));
#endif
}
// if we get here we fail at the internet
self.reachabilityObject = nil;
return NO;
}
// Stop asynchronous notifications and drop the self-retain taken by
// -startNotifier (after which the object may be deallocated normally).
-(void)stopNotifier
{
// First stop, any callbacks!
SCNetworkReachabilitySetCallback(self.reachabilityRef, NULL, NULL);
// Unregister target from the GCD serial dispatch queue.
SCNetworkReachabilitySetDispatchQueue(self.reachabilityRef, NULL);
self.reachabilityObject = nil;
}
#pragma mark - reachability tests
// This is for the case where you flick the airplane mode;
// you end up getting something like this:
//Reachability: WR ct-----
//Reachability: -- -------
//Reachability: WR ct-----
//Reachability: -- -------
// We treat this as 4 UNREACHABLE triggers - really apple should do better than this
#define testcase (kSCNetworkReachabilityFlagsConnectionRequired | kSCNetworkReachabilityFlagsTransientConnection)
// Interpret a set of reachability flags as a single reachable/unreachable
// verdict, honoring the reachableOnWWAN preference on iOS.
-(BOOL)isReachableWithFlags:(SCNetworkReachabilityFlags)flags
{
    if (!(flags & kSCNetworkReachabilityFlagsReachable))
    {
        return NO;
    }
    // "Connection required" + "transient" together is what the system
    // reports while airplane mode is being toggled; treat the combination
    // as unreachable (see the comment above the testcase macro).
    if ((flags & testcase) == testcase)
    {
        return NO;
    }
#if TARGET_OS_IPHONE
    if ((flags & kSCNetworkReachabilityFlagsIsWWAN) && !self.reachableOnWWAN)
    {
        // Cellular is up, but the client opted out of WWAN connectivity.
        return NO;
    }
#endif
    return YES;
}
// Synchronous reachability check based on a live snapshot of the flags.
// Returns NO when the flags cannot be read.
-(BOOL)isReachable
{
    SCNetworkReachabilityFlags currentFlags = 0;
    if (!SCNetworkReachabilityGetFlags(self.reachabilityRef, &currentFlags))
    {
        return NO;
    }
    return [self isReachableWithFlags:currentFlags];
}
// YES only when the target is reachable specifically over cellular.
// Always NO on platforms without a WWAN flag.
-(BOOL)isReachableViaWWAN
{
#if TARGET_OS_IPHONE
    SCNetworkReachabilityFlags currentFlags = 0;
    if (SCNetworkReachabilityGetFlags(self.reachabilityRef, &currentFlags) &&
        (currentFlags & kSCNetworkReachabilityFlagsReachable) &&
        (currentFlags & kSCNetworkReachabilityFlagsIsWWAN))
    {
        return YES;
    }
#endif
    return NO;
}

// YES when the target is reachable and (on iOS) not over cellular.
-(BOOL)isReachableViaWiFi
{
    SCNetworkReachabilityFlags currentFlags = 0;
    if (!SCNetworkReachabilityGetFlags(self.reachabilityRef, &currentFlags))
    {
        return NO;
    }
    if (!(currentFlags & kSCNetworkReachabilityFlagsReachable))
    {
        return NO;
    }
#if TARGET_OS_IPHONE
    // Reachable, but over WWAN — not WiFi.
    if (currentFlags & kSCNetworkReachabilityFlagsIsWWAN)
    {
        return NO;
    }
#endif
    return YES;
}
// WWAN may be available, but not active until a connection has been established.
// WiFi may require a connection for VPN on Demand.
// Identical to -connectionRequired; kept for the DDG-style name.
-(BOOL)isConnectionRequired
{
    return [self connectionRequired];
}

// Apple's routine: does reaching the target require a connection to be
// established first (e.g. dial-on-demand, VPN on demand)?
-(BOOL)connectionRequired
{
    SCNetworkReachabilityFlags flags;
    if(SCNetworkReachabilityGetFlags(self.reachabilityRef, &flags))
    {
        // Normalize to YES/NO: the original returned the raw masked flag
        // value truncated into BOOL, which is fragile (any caller that
        // compares against YES would misbehave).
        return (flags & kSCNetworkReachabilityFlagsConnectionRequired) != 0;
    }
    return NO;
}
// Dynamic, on demand connection?
// YES when a connection is required AND the system would establish it
// automatically (on-traffic or on-demand dialing).
-(BOOL)isConnectionOnDemand
{
    SCNetworkReachabilityFlags currentFlags = 0;
    if (!SCNetworkReachabilityGetFlags(self.reachabilityRef, &currentFlags))
    {
        return NO;
    }
    BOOL needsConnection = (currentFlags & kSCNetworkReachabilityFlagsConnectionRequired) != 0;
    BOOL dialsAutomatically = (currentFlags & (kSCNetworkReachabilityFlagsConnectionOnTraffic | kSCNetworkReachabilityFlagsConnectionOnDemand)) != 0;
    return needsConnection && dialsAutomatically;
}

// YES when a connection is required AND the user would have to take
// action (e.g. enter credentials) to establish it.
-(BOOL)isInterventionRequired
{
    SCNetworkReachabilityFlags currentFlags = 0;
    if (!SCNetworkReachabilityGetFlags(self.reachabilityRef, &currentFlags))
    {
        return NO;
    }
    BOOL needsConnection = (currentFlags & kSCNetworkReachabilityFlagsConnectionRequired) != 0;
    BOOL needsUserAction = (currentFlags & kSCNetworkReachabilityFlagsInterventionRequired) != 0;
    return needsConnection && needsUserAction;
}
#pragma mark - reachability status stuff
// Map the current state onto Apple's three-value NetworkStatus.
// NOTE(review): on non-iOS builds a target that is reachable but not via
// WiFi falls through to NotReachable (there is no WWAN on those
// platforms) — this mirrors the upstream behavior.
-(NetworkStatus)currentReachabilityStatus
{
if([self isReachable])
{
if([self isReachableViaWiFi])
return ReachableViaWiFi;
#if TARGET_OS_IPHONE
// Reachable but not WiFi: on iOS that means cellular.
return ReachableViaWWAN;
#endif
}
return NotReachable;
}
// Raw SCNetworkReachabilityFlags snapshot; 0 when the flags cannot be
// read (note 0 is also a legitimate "no flags set" value).
-(SCNetworkReachabilityFlags)reachabilityFlags
{
    SCNetworkReachabilityFlags currentFlags = 0;
    if (!SCNetworkReachabilityGetFlags(self.reachabilityRef, &currentFlags))
    {
        return 0;
    }
    return currentFlags;
}

// Localized human-readable description of the current status.
-(NSString*)currentReachabilityString
{
    switch ([self currentReachabilityStatus])
    {
        case ReachableViaWWAN:
            // Updated for the fact that we have CDMA phones now!
            return NSLocalizedString(@"Cellular", @"");
        case ReachableViaWiFi:
            return NSLocalizedString(@"WiFi", @"");
        default:
            return NSLocalizedString(@"No Connection", @"");
    }
}

// Compact debug string of the current flags.
-(NSString*)currentReachabilityFlags
{
    return reachabilityFlags([self reachabilityFlags]);
}
#pragma mark - Callback function calls this method
// Invoked (via TMReachabilityCallback) on the instance's serial GCD
// queue whenever the flags change. Runs the reachable/unreachable block
// on that background queue, then posts the change notification on the
// main thread.
-(void)reachabilityChanged:(SCNetworkReachabilityFlags)flags
{
if([self isReachableWithFlags:flags])
{
if(self.reachableBlock)
{
self.reachableBlock(self);
}
}
else
{
if(self.unreachableBlock)
{
self.unreachableBlock(self);
}
}
// this makes sure the change notification happens on the MAIN THREAD
dispatch_async(dispatch_get_main_queue(), ^{
[[NSNotificationCenter defaultCenter] postNotificationName:kReachabilityChangedNotification
object:self];
});
}
#pragma mark - Debug Description
// Debug description, e.g. "<Reachability: 0x7fd4... (WR ct-----)>".
- (NSString *) description
{
    // Use %p for the instance pointer: the original formatted
    // (unsigned int)self with %#x, which truncates the pointer on
    // 64-bit builds (and trips -Wpointer-to-int-cast).
    NSString *description = [NSString stringWithFormat:@"<%@: %p (%@)>",
                             NSStringFromClass([self class]), self, [self currentReachabilityFlags]];
    return description;
}
@end
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
<string>${PRODUCT_BUNDLE_IDENTIFIER}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>4.0.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>${CURRENT_PROJECT_VERSION}</string>
<key>NSPrincipalClass</key>
<string></string>
</dict>
</plist>
#import <Foundation/Foundation.h>
// Empty class generated by CocoaPods so the FreeStreamer pod target
// always produces at least one compiled Objective-C symbol.
@interface PodsDummy_FreeStreamer : NSObject
@end
@implementation PodsDummy_FreeStreamer
@end
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
#import "FSAudioController.h"
#import "FSAudioStream.h"
#import "FSCheckContentTypeRequest.h"
#import "FSParsePlaylistRequest.h"
#import "FSParseRssPodcastFeedRequest.h"
#import "FSPlaylistItem.h"
#import "FSXMLHttpRequest.h"
FOUNDATION_EXPORT double FreeStreamerVersionNumber;
FOUNDATION_EXPORT const unsigned char FreeStreamerVersionString[];
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/FreeStreamer
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/Reachability"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
HEADER_SEARCH_PATHS = $(inherited) $(SDKROOT)/usr/include/libxml2
OTHER_LDFLAGS = $(inherited) -l"c++" -l"xml2" -framework "AVFoundation" -framework "AudioToolbox" -framework "CFNetwork" -framework "MediaPlayer" -framework "Reachability" -framework "SystemConfiguration"
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_ROOT = ${SRCROOT}
PODS_TARGET_SRCROOT = ${PODS_ROOT}/FreeStreamer
PODS_XCFRAMEWORKS_BUILD_DIR = $(PODS_CONFIGURATION_BUILD_DIR)/XCFrameworkIntermediates
PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
SKIP_INSTALL = YES
USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
framework module FreeStreamer {
umbrella header "FreeStreamer-umbrella.h"
export *
module * { export * }
}
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/FreeStreamer
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/Reachability"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
HEADER_SEARCH_PATHS = $(inherited) $(SDKROOT)/usr/include/libxml2
OTHER_LDFLAGS = $(inherited) -l"c++" -l"xml2" -framework "AVFoundation" -framework "AudioToolbox" -framework "CFNetwork" -framework "MediaPlayer" -framework "Reachability" -framework "SystemConfiguration"
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_ROOT = ${SRCROOT}
PODS_TARGET_SRCROOT = ${PODS_ROOT}/FreeStreamer
PODS_XCFRAMEWORKS_BUILD_DIR = $(PODS_CONFIGURATION_BUILD_DIR)/XCFrameworkIntermediates
PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
SKIP_INSTALL = YES
USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
......@@ -84,6 +84,63 @@ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
## FreeStreamer
Copyright (c) 2011-2018 Matias Muhonen <mmu@iki.fi> 穆马帝
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The FreeStreamer framework bundles Reachability which is licensed under the following
license:
Copyright (c) 2011, Tony Million.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
## MBProgressHUD
Copyright © 2009-2020 Matej Bukovinski
......@@ -151,6 +208,20 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
## Reachability
Copyright (c) 2011, Tony Million.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
## YTKNetwork
Copyright (c) 2012-2016 YTKNetwork https://github.com/yuantiku
......
......@@ -115,6 +115,69 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
</dict>
<dict>
<key>FooterText</key>
<string>Copyright (c) 2011-2018 Matias Muhonen &lt;mmu@iki.fi&gt; 穆马帝
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The FreeStreamer framework bundles Reachability which is licensed under the following
license:
Copyright (c) 2011, Tony Million.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
</string>
<key>License</key>
<string>BSD</string>
<key>Title</key>
<string>FreeStreamer</string>
<key>Type</key>
<string>PSGroupSpecifier</string>
</dict>
<dict>
<key>FooterText</key>
<string>Copyright © 2009-2020 Matej Bukovinski
Permission is hereby granted, free of charge, to any person obtaining a copy
......@@ -200,6 +263,26 @@ THE SOFTWARE.</string>
</dict>
<dict>
<key>FooterText</key>
<string>Copyright (c) 2011, Tony Million.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
</string>
<key>License</key>
<string>BSD</string>
<key>Title</key>
<string>Reachability</string>
<key>Type</key>
<string>PSGroupSpecifier</string>
</dict>
<dict>
<key>FooterText</key>
<string>Copyright (c) 2012-2016 YTKNetwork https://github.com/yuantiku
Permission is hereby granted, free of charge, to any person obtaining a copy
......
......@@ -2,9 +2,11 @@ ${PODS_ROOT}/Target Support Files/Pods-DreamSleep/Pods-DreamSleep-frameworks.sh
${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework
${BUILT_PRODUCTS_DIR}/DKNightVersion/DKNightVersion.framework
${BUILT_PRODUCTS_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework
${BUILT_PRODUCTS_DIR}/FreeStreamer/FreeStreamer.framework
${BUILT_PRODUCTS_DIR}/MBProgressHUD/MBProgressHUD.framework
${BUILT_PRODUCTS_DIR}/MJRefresh/MJRefresh.framework
${BUILT_PRODUCTS_DIR}/Masonry/Masonry.framework
${BUILT_PRODUCTS_DIR}/Reachability/Reachability.framework
${BUILT_PRODUCTS_DIR}/YTKNetwork/YTKNetwork.framework
${BUILT_PRODUCTS_DIR}/YYCache/YYCache.framework
${BUILT_PRODUCTS_DIR}/YYImage/YYImage.framework
......
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/AFNetworking.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/DKNightVersion.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/DOUAudioStreamer.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/FreeStreamer.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/MBProgressHUD.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/MJRefresh.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Masonry.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Reachability.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/YTKNetwork.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/YYCache.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/YYImage.framework
......
......@@ -2,9 +2,11 @@ ${PODS_ROOT}/Target Support Files/Pods-DreamSleep/Pods-DreamSleep-frameworks.sh
${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework
${BUILT_PRODUCTS_DIR}/DKNightVersion/DKNightVersion.framework
${BUILT_PRODUCTS_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework
${BUILT_PRODUCTS_DIR}/FreeStreamer/FreeStreamer.framework
${BUILT_PRODUCTS_DIR}/MBProgressHUD/MBProgressHUD.framework
${BUILT_PRODUCTS_DIR}/MJRefresh/MJRefresh.framework
${BUILT_PRODUCTS_DIR}/Masonry/Masonry.framework
${BUILT_PRODUCTS_DIR}/Reachability/Reachability.framework
${BUILT_PRODUCTS_DIR}/YTKNetwork/YTKNetwork.framework
${BUILT_PRODUCTS_DIR}/YYCache/YYCache.framework
${BUILT_PRODUCTS_DIR}/YYImage/YYImage.framework
......
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/AFNetworking.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/DKNightVersion.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/DOUAudioStreamer.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/FreeStreamer.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/MBProgressHUD.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/MJRefresh.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Masonry.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Reachability.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/YTKNetwork.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/YYCache.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/YYImage.framework
......
......@@ -2,9 +2,11 @@ ${PODS_ROOT}/Target Support Files/Pods-DreamSleep/Pods-DreamSleep-frameworks.sh
${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework
${BUILT_PRODUCTS_DIR}/DKNightVersion/DKNightVersion.framework
${BUILT_PRODUCTS_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework
${BUILT_PRODUCTS_DIR}/FreeStreamer/FreeStreamer.framework
${BUILT_PRODUCTS_DIR}/MBProgressHUD/MBProgressHUD.framework
${BUILT_PRODUCTS_DIR}/MJRefresh/MJRefresh.framework
${BUILT_PRODUCTS_DIR}/Masonry/Masonry.framework
${BUILT_PRODUCTS_DIR}/Reachability/Reachability.framework
${BUILT_PRODUCTS_DIR}/YTKNetwork/YTKNetwork.framework
${BUILT_PRODUCTS_DIR}/YYCache/YYCache.framework
${BUILT_PRODUCTS_DIR}/YYImage/YYImage.framework
......
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/AFNetworking.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/DKNightVersion.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/DOUAudioStreamer.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/FreeStreamer.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/MBProgressHUD.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/MJRefresh.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Masonry.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Reachability.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/YTKNetwork.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/YYCache.framework
${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/YYImage.framework
......
......@@ -179,9 +179,11 @@ if [[ "$CONFIGURATION" == "Beta" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
install_framework "${BUILT_PRODUCTS_DIR}/DKNightVersion/DKNightVersion.framework"
install_framework "${BUILT_PRODUCTS_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework"
install_framework "${BUILT_PRODUCTS_DIR}/FreeStreamer/FreeStreamer.framework"
install_framework "${BUILT_PRODUCTS_DIR}/MBProgressHUD/MBProgressHUD.framework"
install_framework "${BUILT_PRODUCTS_DIR}/MJRefresh/MJRefresh.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Masonry/Masonry.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Reachability/Reachability.framework"
install_framework "${BUILT_PRODUCTS_DIR}/YTKNetwork/YTKNetwork.framework"
install_framework "${BUILT_PRODUCTS_DIR}/YYCache/YYCache.framework"
install_framework "${BUILT_PRODUCTS_DIR}/YYImage/YYImage.framework"
......@@ -193,9 +195,11 @@ if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
install_framework "${BUILT_PRODUCTS_DIR}/DKNightVersion/DKNightVersion.framework"
install_framework "${BUILT_PRODUCTS_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework"
install_framework "${BUILT_PRODUCTS_DIR}/FreeStreamer/FreeStreamer.framework"
install_framework "${BUILT_PRODUCTS_DIR}/MBProgressHUD/MBProgressHUD.framework"
install_framework "${BUILT_PRODUCTS_DIR}/MJRefresh/MJRefresh.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Masonry/Masonry.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Reachability/Reachability.framework"
install_framework "${BUILT_PRODUCTS_DIR}/YTKNetwork/YTKNetwork.framework"
install_framework "${BUILT_PRODUCTS_DIR}/YYCache/YYCache.framework"
install_framework "${BUILT_PRODUCTS_DIR}/YYImage/YYImage.framework"
......@@ -207,9 +211,11 @@ if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
install_framework "${BUILT_PRODUCTS_DIR}/DKNightVersion/DKNightVersion.framework"
install_framework "${BUILT_PRODUCTS_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework"
install_framework "${BUILT_PRODUCTS_DIR}/FreeStreamer/FreeStreamer.framework"
install_framework "${BUILT_PRODUCTS_DIR}/MBProgressHUD/MBProgressHUD.framework"
install_framework "${BUILT_PRODUCTS_DIR}/MJRefresh/MJRefresh.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Masonry/Masonry.framework"
install_framework "${BUILT_PRODUCTS_DIR}/Reachability/Reachability.framework"
install_framework "${BUILT_PRODUCTS_DIR}/YTKNetwork/YTKNetwork.framework"
install_framework "${BUILT_PRODUCTS_DIR}/YYCache/YYCache.framework"
install_framework "${BUILT_PRODUCTS_DIR}/YYImage/YYImage.framework"
......
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios" "${PODS_ROOT}/YYImage/Vendor"
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer" "${PODS_CONFIGURATION_BUILD_DIR}/FreeStreamer" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry" "${PODS_CONFIGURATION_BUILD_DIR}/Reachability" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios" "${PODS_ROOT}/YYImage/Vendor"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking/AFNetworking.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion/DKNightVersion.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD/MBProgressHUD.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh/MJRefresh.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry/Masonry.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork/YTKNetwork.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache/YYCache.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage/YYImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel/YYModel.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage/YYWebImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios/Lottie.framework/Headers"
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking/AFNetworking.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion/DKNightVersion.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/FreeStreamer/FreeStreamer.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD/MBProgressHUD.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh/MJRefresh.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry/Masonry.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/Reachability/Reachability.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork/YTKNetwork.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache/YYCache.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage/YYImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel/YYModel.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage/YYWebImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios/Lottie.framework/Headers" $(SDKROOT)/usr/include/libxml2
LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks'
OTHER_LDFLAGS = $(inherited) -ObjC -l"sqlite3" -l"z" -framework "AFNetworking" -framework "AVFoundation" -framework "Accelerate" -framework "AssetsLibrary" -framework "AudioToolbox" -framework "CFNetwork" -framework "CoreAudio" -framework "CoreFoundation" -framework "CoreGraphics" -framework "DKNightVersion" -framework "DOUAudioStreamer" -framework "Foundation" -framework "ImageIO" -framework "Lottie" -framework "MBProgressHUD" -framework "MJRefresh" -framework "Masonry" -framework "MediaPlayer" -framework "MobileCoreServices" -framework "OpenGLES" -framework "QuartzCore" -framework "UIKit" -framework "YTKNetwork" -framework "YYCache" -framework "YYImage" -framework "YYModel" -framework "YYWebImage"
OTHER_LDFLAGS = $(inherited) -ObjC -l"c++" -l"sqlite3" -l"xml2" -l"z" -framework "AFNetworking" -framework "AVFoundation" -framework "Accelerate" -framework "AssetsLibrary" -framework "AudioToolbox" -framework "CFNetwork" -framework "CoreAudio" -framework "CoreFoundation" -framework "CoreGraphics" -framework "DKNightVersion" -framework "DOUAudioStreamer" -framework "Foundation" -framework "FreeStreamer" -framework "ImageIO" -framework "Lottie" -framework "MBProgressHUD" -framework "MJRefresh" -framework "Masonry" -framework "MediaPlayer" -framework "MobileCoreServices" -framework "OpenGLES" -framework "QuartzCore" -framework "Reachability" -framework "SystemConfiguration" -framework "UIKit" -framework "YTKNetwork" -framework "YYCache" -framework "YYImage" -framework "YYModel" -framework "YYWebImage"
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_PODFILE_DIR_PATH = ${SRCROOT}/.
......
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios" "${PODS_ROOT}/YYImage/Vendor"
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer" "${PODS_CONFIGURATION_BUILD_DIR}/FreeStreamer" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry" "${PODS_CONFIGURATION_BUILD_DIR}/Reachability" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios" "${PODS_ROOT}/YYImage/Vendor"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking/AFNetworking.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion/DKNightVersion.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD/MBProgressHUD.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh/MJRefresh.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry/Masonry.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork/YTKNetwork.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache/YYCache.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage/YYImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel/YYModel.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage/YYWebImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios/Lottie.framework/Headers"
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking/AFNetworking.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion/DKNightVersion.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/FreeStreamer/FreeStreamer.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD/MBProgressHUD.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh/MJRefresh.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry/Masonry.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/Reachability/Reachability.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork/YTKNetwork.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache/YYCache.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage/YYImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel/YYModel.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage/YYWebImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios/Lottie.framework/Headers" $(SDKROOT)/usr/include/libxml2
LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks'
OTHER_LDFLAGS = $(inherited) -ObjC -l"sqlite3" -l"z" -framework "AFNetworking" -framework "AVFoundation" -framework "Accelerate" -framework "AssetsLibrary" -framework "AudioToolbox" -framework "CFNetwork" -framework "CoreAudio" -framework "CoreFoundation" -framework "CoreGraphics" -framework "DKNightVersion" -framework "DOUAudioStreamer" -framework "Foundation" -framework "ImageIO" -framework "Lottie" -framework "MBProgressHUD" -framework "MJRefresh" -framework "Masonry" -framework "MediaPlayer" -framework "MobileCoreServices" -framework "OpenGLES" -framework "QuartzCore" -framework "UIKit" -framework "YTKNetwork" -framework "YYCache" -framework "YYImage" -framework "YYModel" -framework "YYWebImage"
OTHER_LDFLAGS = $(inherited) -ObjC -l"c++" -l"sqlite3" -l"xml2" -l"z" -framework "AFNetworking" -framework "AVFoundation" -framework "Accelerate" -framework "AssetsLibrary" -framework "AudioToolbox" -framework "CFNetwork" -framework "CoreAudio" -framework "CoreFoundation" -framework "CoreGraphics" -framework "DKNightVersion" -framework "DOUAudioStreamer" -framework "Foundation" -framework "FreeStreamer" -framework "ImageIO" -framework "Lottie" -framework "MBProgressHUD" -framework "MJRefresh" -framework "Masonry" -framework "MediaPlayer" -framework "MobileCoreServices" -framework "OpenGLES" -framework "QuartzCore" -framework "Reachability" -framework "SystemConfiguration" -framework "UIKit" -framework "YTKNetwork" -framework "YYCache" -framework "YYImage" -framework "YYModel" -framework "YYWebImage"
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_PODFILE_DIR_PATH = ${SRCROOT}/.
......
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios" "${PODS_ROOT}/YYImage/Vendor"
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer" "${PODS_CONFIGURATION_BUILD_DIR}/FreeStreamer" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry" "${PODS_CONFIGURATION_BUILD_DIR}/Reachability" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios" "${PODS_ROOT}/YYImage/Vendor"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking/AFNetworking.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion/DKNightVersion.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD/MBProgressHUD.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh/MJRefresh.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry/Masonry.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork/YTKNetwork.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache/YYCache.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage/YYImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel/YYModel.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage/YYWebImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios/Lottie.framework/Headers"
HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/AFNetworking/AFNetworking.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DKNightVersion/DKNightVersion.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/DOUAudioStreamer/DOUAudioStreamer.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/FreeStreamer/FreeStreamer.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MBProgressHUD/MBProgressHUD.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/MJRefresh/MJRefresh.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/Masonry/Masonry.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/Reachability/Reachability.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YTKNetwork/YTKNetwork.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYCache/YYCache.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYImage/YYImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYModel/YYModel.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/YYWebImage/YYWebImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/lottie-ios/Lottie.framework/Headers" $(SDKROOT)/usr/include/libxml2
LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks'
OTHER_LDFLAGS = $(inherited) -ObjC -l"sqlite3" -l"z" -framework "AFNetworking" -framework "AVFoundation" -framework "Accelerate" -framework "AssetsLibrary" -framework "AudioToolbox" -framework "CFNetwork" -framework "CoreAudio" -framework "CoreFoundation" -framework "CoreGraphics" -framework "DKNightVersion" -framework "DOUAudioStreamer" -framework "Foundation" -framework "ImageIO" -framework "Lottie" -framework "MBProgressHUD" -framework "MJRefresh" -framework "Masonry" -framework "MediaPlayer" -framework "MobileCoreServices" -framework "OpenGLES" -framework "QuartzCore" -framework "UIKit" -framework "YTKNetwork" -framework "YYCache" -framework "YYImage" -framework "YYModel" -framework "YYWebImage"
OTHER_LDFLAGS = $(inherited) -ObjC -l"c++" -l"sqlite3" -l"xml2" -l"z" -framework "AFNetworking" -framework "AVFoundation" -framework "Accelerate" -framework "AssetsLibrary" -framework "AudioToolbox" -framework "CFNetwork" -framework "CoreAudio" -framework "CoreFoundation" -framework "CoreGraphics" -framework "DKNightVersion" -framework "DOUAudioStreamer" -framework "Foundation" -framework "FreeStreamer" -framework "ImageIO" -framework "Lottie" -framework "MBProgressHUD" -framework "MJRefresh" -framework "Masonry" -framework "MediaPlayer" -framework "MobileCoreServices" -framework "OpenGLES" -framework "QuartzCore" -framework "Reachability" -framework "SystemConfiguration" -framework "UIKit" -framework "YTKNetwork" -framework "YYCache" -framework "YYImage" -framework "YYModel" -framework "YYWebImage"
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_PODFILE_DIR_PATH = ${SRCROOT}/.
......
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
<string>${PRODUCT_BUNDLE_IDENTIFIER}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>3.2.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>${CURRENT_PROJECT_VERSION}</string>
<key>NSPrincipalClass</key>
<string></string>
</dict>
</plist>
#import <Foundation/Foundation.h>
// CocoaPods-generated "dummy" source for the Reachability pod target.
// The empty class guarantees the built product contains at least one
// Objective-C symbol, so the linker does not drop the library/framework
// when no other compiled sources are present. Do not edit by hand —
// this file is regenerated on every `pod install`.
@interface PodsDummy_Reachability : NSObject
@end
@implementation PodsDummy_Reachability
@end
// CocoaPods-generated prefix header for the Reachability pod.
// Regenerated on every `pod install`; do not edit by hand.
#ifdef __OBJC__
// Objective-C translation units get the full UIKit umbrella pre-included.
#import <UIKit/UIKit.h>
#else
// Plain C/C++ translation units: provide a fallback definition of
// FOUNDATION_EXPORT (normally supplied by Foundation's NSObjCRuntime.h)
// so exported symbol declarations still compile.
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
// CocoaPods-generated umbrella header for the Reachability framework target.
// It re-exports every public header of the pod (here just Reachability.h)
// and declares the framework version symbols referenced by the module map.
// Regenerated on every `pod install`; do not edit by hand.
#ifdef __OBJC__
// Objective-C translation units get the UIKit umbrella pre-included.
#import <UIKit/UIKit.h>
#else
// Plain C/C++ translation units: fallback definition of FOUNDATION_EXPORT
// (normally supplied by Foundation's NSObjCRuntime.h).
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif

// Public headers of the pod, exposed through the framework module.
#import "Reachability.h"

// Standard framework version symbols (CFBundleShortVersionString mirror),
// defined by the build system for the Reachability framework.
FOUNDATION_EXPORT double ReachabilityVersionNumber;
FOUNDATION_EXPORT const unsigned char ReachabilityVersionString[];
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/Reachability
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
OTHER_LDFLAGS = $(inherited) -framework "SystemConfiguration"
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_ROOT = ${SRCROOT}
PODS_TARGET_SRCROOT = ${PODS_ROOT}/Reachability
PODS_XCFRAMEWORKS_BUILD_DIR = $(PODS_CONFIGURATION_BUILD_DIR)/XCFrameworkIntermediates
PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
SKIP_INSTALL = YES
USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
// Clang module map generated by CocoaPods for the Reachability framework.
// The umbrella header pulls in all public headers; `export *` plus the
// wildcard submodule rule re-export everything to importing modules.
// Regenerated on every `pod install`; do not edit by hand.
framework module Reachability {
  umbrella header "Reachability-umbrella.h"
  export *
  module * { export * }
}
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/Reachability
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
OTHER_LDFLAGS = $(inherited) -framework "SystemConfiguration"
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_ROOT = ${SRCROOT}
PODS_TARGET_SRCROOT = ${PODS_ROOT}/Reachability
PODS_XCFRAMEWORKS_BUILD_DIR = $(PODS_CONFIGURATION_BUILD_DIR)/XCFrameworkIntermediates
PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
SKIP_INSTALL = YES
USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
支持 Markdown 格式
你添加了 0 到此讨论。请谨慎行事。
Finish editing this message first!