iOS: Changing Volume, Brightness, and Playback Progress with Pan Gestures (bilibili-style player)
Well, this whole week went into a thankless outsourcing project. It was my first time building a reasonably complete video player, based on Baidu's cloud playback SDK, and I implemented most of the playback controls myself. Here I want to write up one piece of it on its own: using pan gestures to control volume, brightness, and playback progress.
First, define the properties needed for the on-screen indicators.
#import <MediaPlayer/MediaPlayer.h> // MPVolumeView / MPMusicPlayerController

@interface PlayerControlVC () <UIGestureRecognizerDelegate>
{
    CGPoint _currentPoint;
    float _systemVolume;            // current system volume
    BOOL _isComplate;               // playback finished
    BOOL _isGes;                    // makes sure the horizontal-pan setup runs only once per swipe
    MPVolumeView *sysVolumeView;    // off-screen view that hides the system volume HUD
    UIImageView *blightView;        // brightness overlay
    UIImageView *voiceView;         // volume overlay
    UIImageView *seekView;          // fast-forward / rewind overlay
    UIProgressView *blightPtogress; // brightness gauge
    UIProgressView *volumeProgress; // volume gauge
    float _origional;               // slider value recorded when the pan starts
}
// Whether the progress bar is currently being dragged
@property (nonatomic, assign) BOOL progressDragging;
@property (strong, nonatomic) MPMusicPlayerController *mpc;
Here,
UIProgressView *blightPtogress; // brightness gauge
UIProgressView *volumeProgress; // volume gauge
are the gauges that show the current brightness and volume levels.
Next, create all of these elements in viewDidLoad, and center the views that need to sit in the middle of the screen in viewDidLayoutSubviews.
- (void)viewDidLoad {
    [super viewDidLoad];
    [self optimizationView].progressView = self.slider;

    UIImage *blightImage = [UIImage imageNamed:@"blight"];
    UIImage *voiceImage = [UIImage imageNamed:@"volume"];

    // Brightness overlay
    blightView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 150, 150)];
    blightView.image = blightImage;
    blightView.alpha = 0.0;
    blightView.backgroundColor = [UIColor clearColor];
    [self.view addSubview:blightView];

    // Volume overlay
    voiceView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 150, 150)];
    voiceView.image = voiceImage;
    voiceView.alpha = 0.0;
    voiceView.backgroundColor = [UIColor clearColor];
    [self.view addSubview:voiceView];

    // Seek (fast-forward / rewind) overlay
    seekView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 50, 50)];
    seekView.image = [UIImage imageNamed:@"icon_backoff"];
    seekView.alpha = 0.0;
    [self.view addSubview:seekView];

    // Brightness gauge
    blightPtogress = [[UIProgressView alloc] initWithFrame:CGRectMake(20, blightView.frame.size.height - 20, blightView.frame.size.width - 40, 20)];
    blightPtogress.backgroundColor = [UIColor clearColor];
    blightPtogress.trackTintColor = [UIColor blackColor];
    blightPtogress.progressTintColor = [UIColor whiteColor];
    blightPtogress.progress = 0.5f;
    // Scale the progress bar vertically to make it thicker
    blightPtogress.transform = CGAffineTransformMakeScale(1.0f, 2.0f);
    blightPtogress.progressViewStyle = UIProgressViewStyleBar;
    [blightView addSubview:blightPtogress];

    // Volume gauge
    volumeProgress = [[UIProgressView alloc] initWithFrame:CGRectMake(20, blightView.frame.size.height - 20, blightView.frame.size.width - 40, 20)];
    volumeProgress.backgroundColor = [UIColor clearColor];
    volumeProgress.trackTintColor = [UIColor blackColor];
    volumeProgress.progress = 0.5f;
    volumeProgress.transform = CGAffineTransformMakeScale(1.0f, 2.0f);
    volumeProgress.progressViewStyle = UIProgressViewStyleBar;
    volumeProgress.progressTintColor = [UIColor whiteColor];
    [voiceView addSubview:volumeProgress];

    // Add the pan gesture recognizer
    UIPanGestureRecognizer *panGesture = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(panGestureDown:)];
    panGesture.delegate = self;
    [self.view addGestureRecognizer:panGesture];

    self.mpc = [MPMusicPlayerController applicationMusicPlayer];

    // Keep the system volume HUD from showing by adding an off-screen MPVolumeView
    sysVolumeView = [MPVolumeView new];
    sysVolumeView.frame = CGRectMake(-1000, -1000, 0, 0);
    [self.view addSubview:sysVolumeView];
}
- (void)viewDidLayoutSubviews {
    [super viewDidLayoutSubviews];
    blightView.center = self.view.center;
    voiceView.center = self.view.center;
    seekView.center = self.view.center;
}
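One detail the original setup leaves implicit: _systemVolume starts out at 0, so the very first swipe would adjust the volume up from zero rather than from the current level. A minimal sketch of seeding it from the actual system volume, assuming AVAudioSession is acceptable in your project (this is my addition, not part of the original code):
// In viewDidLoad, after creating self.mpc (requires #import <AVFoundation/AVFoundation.h>)
_systemVolume = [AVAudioSession sharedInstance].outputVolume; // current output volume, 0.0 - 1.0
volumeProgress.progress = _systemVolume;                      // keep the gauge in sync from the start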
// Add the pan gesture recognizer
UIPanGestureRecognizer *panGesture = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(panGestureDown:)];
panGesture.delegate = self;
[self.view addGestureRecognizer:panGesture];
UIPanGestureRecognizer tracks a finger being dragged across the screen. UIKit also offers several related gesture recognizers:
- tap: a light touch
- long press: pressing and holding on a single point
- pinch: two fingers pinching in or spreading apart
- pan: dragging a finger across the screen
- swipe: a quick flick across the screen
- rotation: two fingers rotating around each other
I won't go into more detail on these here.
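For reference, a minimal sketch of how a few of these recognizers are created and attached; the handler selectors (handleTap:, handleLongPress:, handleSwipe:) are placeholders of mine, not part of the original project:
// Tap: fires on a light touch
UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleTap:)];
[self.view addGestureRecognizer:tap];
// Long press: fires after pressing and holding on one point
UILongPressGestureRecognizer *longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleLongPress:)];
longPress.minimumPressDuration = 0.8; // seconds to hold before it fires
[self.view addGestureRecognizer:longPress];
// Swipe: fires on a quick flick in a fixed direction
UISwipeGestureRecognizer *swipe = [[UISwipeGestureRecognizer alloc] initWithTarget:self action:@selector(handleSwipe:)];
swipe.direction = UISwipeGestureRecognizerDirectionLeft;
[self.view addGestureRecognizer:swipe];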
Finally, the implementation of panGestureDown: itself, which is the key to the whole approach; let's go through it step by step.
To interpret the user's movement, you first need the point where the touch began, and you also have to make sure the pan does not conflict with the existing UISlider. The _currentPoint below is that first point.
#pragma mark - Gesture delegate: resolves the conflict with the slider (don't forget to set the recognizer's delegate)
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldReceiveTouch:(UITouch *)touch {
    // NSLog(@"touch.view=====%@", touch.view);
    if ([touch.view isKindOfClass:[UISlider class]]) {
        return NO; // let the slider handle its own touches
    } else {
        return YES;
    }
}
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    _currentPoint = [[touches anyObject] locationInView:self.view];
}
All of the code below lives inside panGestureDown:; I'll describe it in sections.
- (void)panGestureDown:(UIPanGestureRecognizer *)sender {
    CGFloat sliberWith;
    if (FullScreen) {
        sliberWith = SCREEN_WIDTH;
    } else {
        sliberWith = SCREEN_HEIGHT;
    }
    CGPoint point = [sender locationInView:self.view];        // touch location, used for the vertical (volume/brightness) controls
    CGPoint tranPoint = [sender translationInView:self.view]; // translation, used for the playback progress

    // Pan directions; this enum could just as well be declared at file scope
    typedef NS_ENUM(NSUInteger, UIPanGestureRecognizerDirection) {
        UIPanGestureRecognizerDirectionUndefined,
        UIPanGestureRecognizerDirectionUp,
        UIPanGestureRecognizerDirectionDown,
        UIPanGestureRecognizerDirectionLeft,
        UIPanGestureRecognizerDirectionRight
    };
sliberWith decides whether to use the device's width or its height as the reference length, based on FullScreen; how FullScreen itself is obtained depends on your project, so add it however fits your situation.
Then the control point point and the translation tranPoint are read from the recognizer.
Next, define five states of your own to represent the four pan directions you need (plus an undefined state).
Finally come the four direction cases plus UIGestureRecognizerStateEnded. UIGestureRecognizerStateEnded fires when the user lifts their finger off the screen, and that is the moment to carry out the final action for the gesture.
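Since the post never defines them, here is one possible set of definitions for FullScreen and the screen macros, purely as an assumption so the snippet is self-contained:
// Assumed helper macros -- not part of the original post
#define SCREEN_WIDTH  ([UIScreen mainScreen].bounds.size.width)
#define SCREEN_HEIGHT ([UIScreen mainScreen].bounds.size.height)
// One way to treat "fullscreen" as "the player is currently in landscape"
#define FullScreen    (UIInterfaceOrientationIsLandscape([UIApplication sharedApplication].statusBarOrientation))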
    static UIPanGestureRecognizerDirection direction = UIPanGestureRecognizerDirectionUndefined;
    switch (sender.state) {
        case UIGestureRecognizerStateBegan: {
            _origional = self.slider.value; // remember the slider value when the pan starts
            if (direction == UIPanGestureRecognizerDirectionUndefined) {
                CGPoint velocity = [sender velocityInView:self.view];
                BOOL isVerticalGesture = fabs(velocity.y) > fabs(velocity.x);
                if (isVerticalGesture) {
                    if (!FullScreen) {
                        return; // volume and brightness can only be adjusted in fullscreen
                    }
                    if (velocity.y > 0) {
                        direction = UIPanGestureRecognizerDirectionDown;
                    } else {
                        direction = UIPanGestureRecognizerDirectionUp;
                    }
                } else {
                    if (velocity.x > 0) {
                        direction = UIPanGestureRecognizerDirectionRight;
                    } else {
                        direction = UIPanGestureRecognizerDirectionLeft;
                    }
                }
            }
            break;
        }
        case UIGestureRecognizerStateChanged: {
            switch (direction) {
                case UIPanGestureRecognizerDirectionUp: {
                    float dy = point.y - _currentPoint.y;
                    int index = (int)dy;
                    // Left half of the screen: vertical pan changes brightness
                    if (_currentPoint.x < self.view.frame.size.width / 2) {
                        blightView.alpha = 1.0f;
                        if (index > 0) {
                            [UIScreen mainScreen].brightness = [UIScreen mainScreen].brightness - 0.01;
                        } else {
                            [UIScreen mainScreen].brightness = [UIScreen mainScreen].brightness + 0.01;
                        }
                        blightPtogress.progress = [UIScreen mainScreen].brightness;
                    } else {
                        // Right half: vertical pan changes volume
                        voiceView.alpha = 1.0f;
                        if (index > 0) {
                            [self setVolumeDown];
                        } else {
                            [self setVolumeUp];
                        }
                        volumeProgress.progress = _systemVolume;
                    }
                    break;
                }
                case UIPanGestureRecognizerDirectionDown: {
                    float dy = point.y - _currentPoint.y;
                    int index = (int)dy;
                    // Left half of the screen: vertical pan changes brightness
                    if (_currentPoint.x < self.view.frame.size.width / 2) {
                        blightView.alpha = 1.0f;
                        if (index > 0) {
                            [UIScreen mainScreen].brightness = [UIScreen mainScreen].brightness - 0.01;
                        } else {
                            [UIScreen mainScreen].brightness = [UIScreen mainScreen].brightness + 0.01;
                        }
                        blightPtogress.progress = [UIScreen mainScreen].brightness;
                    } else {
                        // Right half: vertical pan changes volume
                        voiceView.alpha = 1.0f;
                        if (index > 0) {
                            [self setVolumeDown];
                        } else {
                            [self setVolumeUp];
                        }
                        volumeProgress.progress = _systemVolume;
                    }
                    break;
                }
                case UIPanGestureRecognizerDirectionLeft: {
                    if (_isGes == NO) {
                        NSLog(@"Left");
                        _isGes = YES;
                        self.progressDragging = YES;
                    }
                    if (FullScreen) {
                        seekView.alpha = 1.0f;
                        seekView.image = [UIImage imageNamed:@"icon_backoff"];
                    }
                    // Horizontal pan drives the seek position
                    if (tranPoint.x / sliberWith + _origional <= 0) {
                        self.slider.value = 0.0f;
                    } else {
                        self.slider.value = (tranPoint.x / sliberWith) * 100 + _origional;
                    }
                    break;
                }
                case UIPanGestureRecognizerDirectionRight: {
                    if (_isGes == NO) {
                        NSLog(@"Right");
                        _isGes = YES;
                        self.progressDragging = YES;
                    }
                    if (FullScreen) {
                        seekView.alpha = 1.0f;
                        seekView.image = [UIImage imageNamed:@"icon_forward"];
                    }
                    if (tranPoint.x / sliberWith + _origional <= 0) {
                        self.slider.value = 0.0f;
                    } else {
                        self.slider.value = (tranPoint.x / sliberWith) * 100 + _origional;
                    }
                    break;
                }
                default: {
                    break;
                }
            }
            break;
        }
        case UIGestureRecognizerStateEnded: {
            _isGes = NO;
            NSLog(@"end");
            _origional = self.slider.value; // remember the slider value when the pan ends
            direction = UIPanGestureRecognizerDirectionUndefined;
            [self updateIdleTime];
            [self.delegate seek:self.slider.value]; // hand the final position to the player
            [UIView animateWithDuration:0.5f animations:^{
                blightView.alpha = 0.0f;
                voiceView.alpha = 0.0f;
                seekView.alpha = 0.0f;
            }];
            break;
        }
        default:
            break;
    }
}
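The [self.delegate seek:] call above is what actually moves playback, and updateIdleTime presumably resets the auto-hide timer for the controls; both belong to this project, so the following is only a guess at the delegate's shape, with a hypothetical protocol name:
// Hypothetical delegate shape -- the original post never shows it
@protocol PlayerControlDelegate <NSObject>
- (void)seek:(float)progress; // progress on the same 0-100 scale the slider uses
@end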
Finally, add the two volume helper methods.
- (void)setVolumeUp {
    _systemVolume = _systemVolume + 0.01;
    NSLog(@"%f", _systemVolume);
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
    self.mpc.volume = _systemVolume; // MPMusicPlayerController's volume property is deprecated, hence the pragmas
#pragma GCC diagnostic pop
}

- (void)setVolumeDown {
    _systemVolume = _systemVolume - 0.01;
    NSLog(@"%f", _systemVolume);
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
    self.mpc.volume = _systemVolume;
#pragma GCC diagnostic pop
}
Using self.mpc.volume directly here is much more convenient than building out a whole MPVolumeView setup just to change the volume.
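That said, MPMusicPlayerController's volume property is deprecated (which is why the deprecation pragmas are needed above). If you ever have to drop it, the usual MPVolumeView route is to find the slider it hosts internally and set that instead; a rough sketch, which relies on the undocumented fact that the internal slider is a UISlider subclass, so treat it as fragile:
- (void)setSystemVolume:(float)volume {
    // sysVolumeView is the off-screen MPVolumeView added in viewDidLoad
    UISlider *volumeSlider = nil;
    for (UIView *subview in sysVolumeView.subviews) {
        if ([subview isKindOfClass:[UISlider class]]) {
            volumeSlider = (UISlider *)subview;
            break;
        }
    }
    [volumeSlider setValue:volume animated:NO];
}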
All in all there are not that many new concepts here; it is mainly a concrete application of UIPanGestureRecognizer
in practice, and I still learned a lot from it. The project schedule was brutal, though: work that should have taken at least 20 days had to be finished in 7 working days, so we had no choice but to build on top of the existing Baidu cloud player SDK demo. Luckily, it all worked out in the end.