Simple Audio Playback
2021-02-01
Sh1mmer
AVAudioView.h
#import <UIKit/UIKit.h>
#import "microphone.h"
NS_ASSUME_NONNULL_BEGIN
@interface AVAudioView : UIView
- (void)createPlayView;
@property (nonatomic) NSString *urlStr;
- (void)createVoiceView;
@end
NS_ASSUME_NONNULL_END
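For orientation, here is a minimal usage sketch from a hosting view controller. The controller name, frames, and URLs are hypothetical; createPlayView, createVoiceView, and urlStr are the public API declared in the header above.
#import "AVAudioView.h"
// Hypothetical host controller, for illustration only.
@interface DemoViewController : UIViewController
@end
@implementation DemoViewController
- (void)viewDidLoad{
[super viewDidLoad];
// Full player bar: play/pause button, progress slider and time label.
AVAudioView *playerView = [[AVAudioView alloc] initWithFrame:CGRectMake(0, 100, self.view.bounds.size.width, 60)];
[playerView createPlayView];
playerView.urlStr = @"https://example.com/audio/demo.mp3"; // placeholder URL
[self.view addSubview:playerView];
// Compact voice-message bubble: tap to play or stop, with an animated speaker icon.
AVAudioView *voiceView = [[AVAudioView alloc] initWithFrame:CGRectMake(20, 180, 120, 40)];
[voiceView createVoiceView];
voiceView.urlStr = @"https://example.com/audio/voice.mp3"; // placeholder URL
[self.view addSubview:voiceView];
}
@end
Note that createPlayView or createVoiceView should be called before setting urlStr, because the urlStr setter checks whether the slider exists to decide how to format the duration label.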
AVAudioView.m
#import "AVAudioView.h"
#import "MediaProgressBar.h"
@interface AVAudioView()
/**
Scrubbing progress slider
*/
@property MediaProgressBar *slider;
@property UIButton *playBtn;
@property UILabel *durationTimeLabel;
@property BOOL hasDrag;
@property CMTime inTime;
@property UIImageView *voiceImageView;
@property microphone *voiceControll;
@end
@implementation AVAudioView
- (instancetype)init{
self = [super init];
if (self) {
self.voiceControll = [[microphone alloc] init];
}
return self;
}
- (instancetype)initWithFrame:(CGRect)frame{
self = [super initWithFrame:frame];
if (self) {
self.voiceControll = [[microphone alloc] init];
}
return self;
}
- (void)setUrlStr:(NSString *)urlStr{
_urlStr = urlStr;
dispatch_async(dispatch_get_global_queue(0, 0), ^{
self.voiceControll.urlStr = urlStr;
dispatch_async(dispatch_get_main_queue(), ^{
if (self.slider) {
self.durationTimeLabel.text = [NSString stringWithFormat:@"%02ld:%02ld",(NSInteger)self.voiceControll.durationTime/60,(NSInteger)self.voiceControll.durationTime%60];
}else{
self.durationTimeLabel.text = [NSString stringWithFormat:@"%02ld″",(NSInteger)self.voiceControll.durationTime%60];
}
});
});
}
- (void)createPlayView{
self.userInteractionEnabled = YES;
self.playBtn = [UIButton buttonWithType:UIButtonTypeCustom];
self.playBtn.layer.cornerRadius = 20;
self.playBtn.frame = CGRectMake(20, self.frame.size.height/2-20, 40, 40);
[self.playBtn addTarget:self action:@selector(playOrPauseVoice:) forControlEvents:UIControlEventTouchUpInside];
self.playBtn.selected = YES;
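// selected == YES means the button is in the "ready to play" state; playOrPauseVoice: flips it on each tap.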
self.playBtn.backgroundColor = [UIColor blueColor];
// [self.playBtn setBackgroundImage:[self OriginImageToSize:CGSizeMake(30, 30) color:[UIColor yellowColor] count:3] forState:UIControlStateNormal];
[self addSubview:self.playBtn];
self.slider = [[MediaProgressBar alloc] initWithFrame:CGRectMake(70, self.frame.size.height/2-10, self.frame.size.width-150, 20)];
[self.slider setThumbSize:CGSizeMake(15, 15) andColor:[UIColor redColor]];
__weak typeof(self)WeakSelf = self;
self.voiceControll.playProgressBlock = ^(CGFloat progress,CGFloat nowTime) {
if (WeakSelf.hasDrag == NO) {
[UIView animateWithDuration:0.2 animations:^{
[WeakSelf.slider setValue:progress animated:YES];
}];
}
if (progress >=1||isnan(progress)) {
[WeakSelf.slider setValue:0 animated:YES];
WeakSelf.playBtn.backgroundColor = [UIColor blueColor];
WeakSelf.playBtn.selected = !WeakSelf.playBtn.selected;
WeakSelf.durationTimeLabel.text = [NSString stringWithFormat:@"%02ld:%02ld",(NSInteger)WeakSelf.voiceControll.durationTime/60,(NSInteger)WeakSelf.voiceControll.durationTime%60];
}else{
CGFloat leftTime = WeakSelf.voiceControll.durationTime-nowTime;
WeakSelf.durationTimeLabel.text = [NSString stringWithFormat:@"%02ld:%02ld",(NSInteger)leftTime/60,(NSInteger)leftTime%60];
}
};
[self.slider addTarget:self action:@selector(changedTime:forEvent:) forControlEvents: UIControlEventValueChanged];
[self addSubview:self.slider];
self.durationTimeLabel = [[UILabel alloc] initWithFrame:CGRectMake(self.frame.size.width-120, self.frame.size.height/2-20, 120, 40)];
self.durationTimeLabel.textAlignment = NSTextAlignmentCenter;
self.durationTimeLabel.font = [UIFont systemFontOfSize:12];
[self addSubview:self.durationTimeLabel];
}
- (void)changedTime:(UISlider *)slider forEvent:(UIEvent*)event{
UITouch*touchEvent = [[event allTouches]anyObject];
switch(touchEvent.phase) {
case UITouchPhaseBegan:
NSLog(@"开始拖动");
self.hasDrag = YES;
break;
case UITouchPhaseMoved:
NSLog(@"正在拖动");
break;
case UITouchPhaseEnded: {
NSLog(@"Drag ended");
NSLog(@"%f",slider.value);
Float64 seconds = self.voiceControll.durationTime*slider.value;
int32_t preferredTimeScale = 600;
self.inTime = CMTimeMakeWithSeconds(seconds, preferredTimeScale);
[self.voiceControll seekToTime:self.inTime];
if (!self.playBtn.selected) {
self.inTime = kCMTimeInvalid;
}
self.hasDrag = NO;
break;
}
default:
break;
}
}
- (void)changedTimeEnd:(UISlider *)slider forEvent:(UIEvent*)event{
NSLog(@"离开");
}
- (void)playOrPauseVoice:(UIButton *)btn{
if (btn.selected) {
self.playBtn.backgroundColor = [UIColor redColor];
[self.voiceControll playVoice:self.urlStr];
if (CMTIME_IS_VALID(self.inTime)) {
[self.voiceControll seekToTime:self.inTime];
self.inTime = kCMTimeInvalid;
}
}else{
self.playBtn.backgroundColor = [UIColor blueColor];
[self.voiceControll pauseVoice];
}
btn.selected = !btn.selected;
}
- (void)createVoiceView{
self.voiceImageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, self.frame.size.width, self.frame.size.height)];
self.voiceImageView.userInteractionEnabled = YES;
self.voiceImageView.animationImages = @[[UIImage imageNamed:@"play_circle01@3x"],[UIImage imageNamed:@"play_circle02@3x"],[UIImage imageNamed:@"play_circle03@3x"]];
self.voiceImageView.image = [UIImage imageNamed:@"play_circle03"];
self.voiceImageView.animationDuration = 1;
self.voiceImageView.animationRepeatCount = 0;
[self addSubview:self.voiceImageView];
UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapAction:)];
[self.voiceImageView addGestureRecognizer:tap];
self.durationTimeLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, self.frame.size.width-40, self.frame.size.height)];
self.durationTimeLabel.textAlignment = NSTextAlignmentCenter;
self.durationTimeLabel.font = [UIFont systemFontOfSize:12];
[self.voiceImageView addSubview:self.durationTimeLabel];
__weak typeof(self)WeakSelf = self;
self.voiceControll.playProgressBlock = ^(CGFloat progress, CGFloat nowTime) {
NSLog(@"%f",progress);
if (progress >=1) {
[WeakSelf.voiceImageView stopAnimating];
}
};
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(changeVoice:) name:@"changeVoice" object:nil];
}
- (void)changeVoice:(NSNotification *)notificaiton{
[self.voiceImageView stopAnimating];
}
- (void)tapAction:(UITapGestureRecognizer *)tap{
NSLog(@"人生大");
if ([self.voiceImageView isAnimating]) {
[self.voiceImageView stopAnimating];
[self.voiceControll pauseVoice];
Float64 seconds = 0;
int32_t preferredTimeScale = 600;
[self.voiceControll seekToTime:CMTimeMakeWithSeconds(seconds, preferredTimeScale)];
}else{
[self.voiceControll playVoice:self.urlStr];
[self.voiceImageView startAnimating];
}
[[self getControllerFromView:self] viewWillDisappear:YES];
}
- (void)dealloc
{
[[NSNotificationCenter defaultCenter] removeObserver:@"changeVoice"];
[self.voiceControll pauseVoice];
}
- (UIViewController *)getControllerFromView:(UIView *)view {
// Walk the responder chain and return the first view controller found
UIResponder *responder = view;
while ((responder = [responder nextResponder])){
if ([responder isKindOfClass: [UIViewController class]]){
return (UIViewController *)responder;
}
}
// Return nil if no view controller was found
return nil;
}
@end
microphone.h
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import "lame.h"
#import "PlayerShare.h"
NS_ASSUME_NONNULL_BEGIN
typedef void(^PlayProgressBlock)(CGFloat progress,CGFloat nowTime);
@interface microphone : NSObject
//+(microphone *)shareMicrophone;
/** Audio recorder */
@property (nonatomic,strong) AVAudioRecorder * recoder;
/** Recording file URL */
@property (nonatomic,strong) NSURL * url;
/** Audio player */
@property (nonatomic,strong) AVAudioPlayer * audioPlayer;
/** Path of the recording file */
@property (nonatomic,strong) NSString * recordeFilePath;
/**
URL of the converted MP3 file
*/
@property (nonatomic,strong) NSURL * mp3Url;
- (void)stardRecord;
- (NSURL *)stopRecord;
- (void)cancelRecord;
- (void)playSound:(NSURL *)fileURL;
- (void)pauseVoice;
@property PlayProgressBlock playProgressBlock;
/**
Remote audio URL
*/
@property (nonatomic) NSString *urlStr;
/**
Total audio duration, in seconds
*/
@property CGFloat durationTime;
/**
Play audio
@param fileURL the audio URL string
*/
- (void)playVoice:(NSString *)fileURL;
/**
Seek to the given time
@param time the target CMTime
*/
- (void)seekToTime:(CMTime)time;
@end
NS_ASSUME_NONNULL_END
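The recording side works the same way in reverse. A minimal sketch of the record-then-convert flow, assuming microphone permission has already been granted (the methods are those declared above; the surrounding code is illustrative only):
microphone *recorder = [[microphone alloc] init];
[recorder stardRecord];                 // starts writing temp.caf into Documents
// ... later, when the user finishes speaking ...
NSURL *mp3Url = [recorder stopRecord];  // stops recording and runs the LAME conversion
if (mp3Url) {
[recorder playSound:mp3Url];            // play back the converted MP3 locally
} else {
[recorder cancelRecord];                // conversion failed: discard the recording
}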
microphone.m
#import "microphone.h"
@interface microphone()
@end
@implementation microphone
//+ (microphone *)shareMicrophone{
// static microphone *single = nil;
// static dispatch_once_t takeOnce;
// dispatch_once(&takeOnce, ^{
// single = [[microphone alloc] init];
// });
// return single;
//}
- (void)setUrlStr:(NSString *)urlStr{
_urlStr = urlStr;
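// Note: reading asset.duration below loads it synchronously and can block on a network URL,
// which is why AVAudioView sets urlStr from a background queue.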
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL URLWithString:urlStr] options:nil];
AVPlayerItem *songitem = [[AVPlayerItem alloc] initWithAsset:asset];
CMTime duration = songitem.asset.duration;
float seconds = CMTimeGetSeconds(duration);
NSLog(@"duration: %.2f", seconds);
self.durationTime = seconds;
}
- (void)playVoice:(NSString *)fileURL{
if ([fileURL isEqualToString: [((AVURLAsset *)[PlayerShare sharePlayerShare].player.currentItem.asset).URL absoluteString]]) {
[[PlayerShare sharePlayerShare].player play];
}else{
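// A different URL is about to play: post "changeVoice" so any other AVAudioView
// (which observes this name in createVoiceView) can stop its speaker animation.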
[[NSNotificationCenter defaultCenter] postNotificationName:@"changeVoice" object:nil userInfo:nil];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL URLWithString:fileURL] options:nil];
AVPlayerItem *songitem = [[AVPlayerItem alloc] initWithAsset:asset];
[PlayerShare sharePlayerShare].player = [[AVPlayer alloc] initWithPlayerItem:songitem];
[PlayerShare sharePlayerShare].player.automaticallyWaitsToMinimizeStalling = NO;
CMTime duration = [PlayerShare sharePlayerShare].player.currentItem.asset.duration;
float seconds = CMTimeGetSeconds(duration);
NSLog(@"duration: %.2f", seconds);
[songitem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
}
}
- (void)pauseVoice{
[[PlayerShare sharePlayerShare].player pause];
}
- (void)seekToTime:(CMTime)time{
[[PlayerShare sharePlayerShare].player seekToTime:time];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary<NSKeyValueChangeKey,id> *)change context:(void *)context {
if ([keyPath isEqualToString:@"status"]) {
AVPlayerItem *item = (AVPlayerItem *)object;
//AVPlayerItemStatus *status = item.status;
NSLog(@"hahahha%@",[PlayerShare sharePlayerShare].player.error);
if (item.status == AVPlayerItemStatusReadyToPlay) {
[[PlayerShare sharePlayerShare].player play];
//Update the playback UI here: time, progress, etc.
__weak typeof(self)WeakSelf = self;
[[PlayerShare sharePlayerShare].player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(0.2,NSEC_PER_SEC) queue:NULL usingBlock:^(CMTime time) {
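//Passing NULL as the queue delivers this callback on the main queue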
CGFloat progress = (CMTimeGetSeconds([PlayerShare sharePlayerShare].player.currentItem.currentTime) + 0.2) / CMTimeGetSeconds([PlayerShare sharePlayerShare].player.currentItem.duration);
//Read the playback progress here and report it to the caller
if (WeakSelf.playProgressBlock) {
WeakSelf.playProgressBlock(progress,CMTimeGetSeconds([PlayerShare sharePlayerShare].player.currentItem.currentTime));
}
if (progress >= 1.0f) {
[PlayerShare sharePlayerShare].player = [[AVPlayer alloc] init];
}
}];
}
}
}
- (void)playSound:(NSURL *)fileURL {
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryPlayback error:nil];
[audioSession setActive:YES error:nil];
NSError * error = nil;
self.audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:fileURL error:&error];
[self.audioPlayer play];
}
- (BOOL)convertToMp3
{
NSString *fileName = [NSString stringWithFormat:@"/%@.mp3", @"test"];
NSString *filePath = [[NSHomeDirectory() stringByAppendingFormat:@"/Documents/"] stringByAppendingPathComponent:fileName];
NSLog(@"%@",filePath);
_mp3Url = [NSURL fileURLWithPath:filePath];
@try {
int read,write;
//Open the source PCM file read-only
FILE *pcm = fopen([self.recordeFilePath UTF8String], "rb");
fseek(pcm, 4 * 1024, SEEK_CUR);//Skip the file header, otherwise the first second of output contains noise
//Open the generated MP3 file write-only
FILE *mp3 = fopen([filePath UTF8String], "wb");
const int PCM_SIZE = 8192;
const int MP3_SIZE = 8192;
short int pcm_buffer[PCM_SIZE * 2];
unsigned char mp3_buffer[MP3_SIZE];
//Note: the LAME settings must match the AVAudioRecorder settings, otherwise the conversion will fail
lame_t lame = lame_init();
lame_set_in_samplerate(lame, 11025);//sample rate
lame_set_VBR(lame, vbr_default);
lame_init_params(lame);
do {
//Read raw PCM data from the file
read = (int)fread(pcm_buffer, 2 * sizeof(short int), PCM_SIZE, pcm);
if (read == 0)
write = lame_encode_flush(lame, mp3_buffer, MP3_SIZE);
else
write = lame_encode_buffer_interleaved(lame, pcm_buffer, read, mp3_buffer, MP3_SIZE);
//Write the encoded data to the MP3 file. mp3_buffer: output buffer, write: bytes in this block, 1: number of blocks, mp3: file pointer
fwrite(mp3_buffer, write, 1, mp3);
} while (read != 0);
lame_close(lame);
fclose(mp3);
fclose(pcm);
} @catch (NSException *exception) {
NSLog(@"%@",[exception description]);
return NO;
}
NSLog(@"MP3 conversion finished");
return YES;
}
- (AVAudioRecorder *)recoder{
if (!_recoder) {
//Location of the recording file
NSString * path = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
NSString * filePath = [path stringByAppendingPathComponent:@"temp.caf"];
NSLog(@"%@",filePath);
self.recordeFilePath = filePath;
NSURL * url = [NSURL fileURLWithPath:filePath];
self.url = url;
//Recording settings
NSMutableDictionary *recordSettings = [[NSMutableDictionary alloc] init];
//Recording format
[recordSettings setValue :[NSNumber numberWithInt:kAudioFormatLinearPCM] forKey: AVFormatIDKey];
//Sample rate: must be 11025 or the audio will be distorted after the MP3 conversion
[recordSettings setValue :[NSNumber numberWithFloat:11025.0] forKey: AVSampleRateKey];//44100.0
//Channel count: must be 2 (stereo) for the MP3 conversion
[recordSettings setValue :[NSNumber numberWithInt:2] forKey: AVNumberOfChannelsKey];
//Audio/sampling quality
[recordSettings setValue:[NSNumber numberWithInt:AVAudioQualityMin] forKey:AVEncoderAudioQualityKey];
//Create the recorder
_recoder = [[AVAudioRecorder alloc] initWithURL:url settings:recordSettings error:nil];
[_recoder prepareToRecord];
}
return _recoder;
}
- (void)stardRecord{
[self.recoder record];
}
- (NSURL *)stopRecord{
[self.recoder stop];
if ([self convertToMp3]) {
return self.mp3Url;
}else{
return nil;
}
}
- (void)cancelRecord{
[self.recoder deleteRecording];
}
@end
MediaProgressBar.h
#import <UIKit/UIKit.h>
@interface MediaProgressBar : UISlider
- (void)setThumbSize:(CGSize)size andColor:(UIColor *)color;
@end
MediaProgressBar.m
#import "MediaProgressBar.h"
@implementation MediaProgressBar
- (CGRect)trackRectForBounds:(CGRect)bounds{
// [self setThumbImage:[self OriginImageToSize:CGSizeMake(bounds.size.height/2, bounds.size.height/2)] forState:UIControlStateNormal];
return CGRectMake(0, bounds.size.height/2-1.5, bounds.size.width, 3);
}
-(UIImage*) OriginImageToSize:(CGSize)size color:(UIColor *)color{
UIGraphicsBeginImageContextWithOptions(size, NO, 0);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSetFillColorWithColor(context, color.CGColor);//fill color
CGContextAddArc(context, size.width/2, size.height/2, MIN(size.width, size.height)/2, 0, 2*M_PI, 0); //add a full circle, no border
CGContextDrawPath(context, kCGPathFill);//fill the path
//grab the image from the graphics context
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
//close the graphics context
UIGraphicsEndImageContext();
return newImage;
}
- (void)setThumbSize:(CGSize)size andColor:(UIColor *)color{
[self setThumbImage:[self OriginImageToSize:size color:color] forState:UIControlStateNormal];
}
@end
The PlayerShare file is simply a singleton that holds the shared AVPlayer.
PlayerShare.h
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
NS_ASSUME_NONNULL_BEGIN
@interface PlayerShare : NSObject
+ (PlayerShare *)sharePlayerShare;
@property AVPlayer *player;
@end
NS_ASSUME_NONNULL_END
PlayerShare.m
#import "PlayerShare.h"
@implementation PlayerShare
+ (PlayerShare *)sharePlayerShare{
static PlayerShare *single = nil;
static dispatch_once_t takeOnce;
dispatch_once(&takeOnce,^{
single = [[PlayerShare alloc]init];
});
return single;
}
@end