这里简单阐述一下开发需求,用户要上直播课之前要测试麦克风是否可用,所以首先我们要使用录音的类录音,对声音的分贝进行监控,图形化界面上为用户展示声音的变化。
一、首先我们看一张UI图,补充了解一下上述需求
- 上面是一个麦克风的开关,负责开启测试和关闭测试
- 下面是一个简单的分贝图,实时展示声音的高低(这个进度条类似 mac 设置里面输入设备测试的进度条)
二、开始编写
- 首先是申请系统权限,(别忘了在plist文件里面设置相关权限)
// Check the current microphone (audio) authorization status and, if the user
// has not been asked yet, request record permission.
// NOTE: Info.plist must contain an NSMicrophoneUsageDescription entry,
// otherwise the app is killed when the prompt would be shown.
AVAuthorizationStatus audioAuthStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
if (audioAuthStatus == AVAuthorizationStatusNotDetermined) {
    [[AVAudioSession sharedInstance] requestRecordPermission:^void(BOOL granted) {
        if (!granted) {
            // Without permission the meter below will only ever read silence;
            // surface UI here that guides the user to Settings.
            NSLog(@"Microphone permission denied; level metering will not work.");
        }
    }];
}
- 定义两个全局变量,如下:
@property (nonatomic , strong) AVAudioRecorder *audioRecorder;// recorder used only for level metering; its output is discarded
@property (nonatomic , strong) NSTimer *timer;// repeating timer that polls the recorder's meter values
- 初始化这两个类,然后开始监听
// Configure the shared audio session for simultaneous play/record and create
// a recorder whose only purpose is level metering — the audio itself is
// thrown away by recording to /dev/null.
NSError *sessionError = nil;
if (![[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord error:&sessionError]) {
    NSLog(@"Failed to set audio session category: %@", sessionError);
}
NSDictionary<NSString *, id> *settings = @{
    AVSampleRateKey          : @44100.0f,
    AVFormatIDKey            : @(kAudioFormatAppleLossless),
    AVNumberOfChannelsKey    : @2,
    AVEncoderAudioQualityKey : @(AVAudioQualityMax),
};
// /dev/null: we only want the meters, not a file on disk.
NSURL *url = [NSURL fileURLWithPath:@"/dev/null"];
NSError *error = nil;
_audioRecorder = [[AVAudioRecorder alloc] initWithURL:url settings:settings error:&error];
if (_audioRecorder == nil) {
    // Creation failed (bad settings / URL); without this guard the calls
    // below would silently message nil and the meter would never move.
    NSLog(@"Failed to create AVAudioRecorder: %@", error);
} else {
    _audioRecorder.meteringEnabled = YES; // must be set before meter values are valid
    [_audioRecorder prepareToRecord];
    [self.audioRecorder record];
}
// Poll the recorder every 100 ms and map peak power (-40 dB ... 0 dB) onto a
// 0 ... 1 level for the volume view.
// Weak/strong dance: self retains _timer, the block-based timer copies its
// block, and a strong capture of self in that block would complete a retain
// cycle (self -> timer -> block -> self) so dealloc would never run.
__weak typeof(self) weakSelf = self;
_timer = [NSTimer timerWithTimeInterval:0.1 repeats:YES block:^(NSTimer * _Nonnull timer) {
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (strongSelf == nil || !strongSelf.audioRecorder.isRecording) {
        return;
    }
    // Refresh the meter values before reading them.
    [strongSelf.audioRecorder updateMeters];
    // peakPowerForChannel: returns -160 dB (silence) ... 0 dB (full scale).
    // Measured background noise stays below -40 dB, so clamp to -40 ... 0.
    float peakPower = [strongSelf.audioRecorder peakPowerForChannel:0];
    if (peakPower <= -40) {
        strongSelf.toolView.volumeView.level = 0.01;
    } else {
        // Linear map: -40 dB -> 0.0, 0 dB -> 1.0.
        // (The original special case for exactly 0 dB was redundant:
        // (0 + 40) * 2.5 / 100.0 == 1.0 already.)
        strongSelf.toolView.volumeView.level = (peakPower + 40) * 2.5 / 100.0;
    }
}];
// NSRunLoopCommonModes keeps the meter updating during scroll tracking.
[[NSRunLoop currentRunLoop] addTimer:_timer forMode:NSRunLoopCommonModes];
- peakPower 的取值范围是 -160 到 0 之间(单位为分贝),但经过测试背景噪音一般在 -40 以下,所以我这里就只取 -40 到 0 之间的值了。
录音监控的部分结束了,下面分享三角形进度条的代码,有需要的小伙伴可以直接复制使用
SHLiveCourseVolumeDBView.h
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
/// A triangular volume meter, similar to the input-level indicator in the
/// macOS Sound settings: the view is masked to a right triangle and the
/// filled portion grows from the left as the level rises.
@interface SHLiveCourseVolumeDBView : UIView
/// The filled (highlighted) portion of the triangle; its width tracks `level`.
/// NOTE(review): could be exposed readonly — callers only need to set `level`.
@property (nonatomic , strong) UIView *selectView;
/// Current level in 0.0 ... 1.0; setting it resizes `selectView`.
@property (nonatomic , assign) CGFloat level;
@end
NS_ASSUME_NONNULL_END
SHLiveCourseVolumeDBView.m
#import "SHLiveCourseVolumeDBView.h"
@implementation SHLiveCourseVolumeDBView

#pragma mark - Lifecycle

/// Builds the triangular meter: the view itself is masked to a right
/// triangle and hosts `selectView`, the filled portion.
/// The original code also called -[UIBezierPath addClip] here and in the
/// getter — that operates on the *current graphics context*, which does not
/// exist outside drawRect:, so those calls were incorrect no-ops and have
/// been removed. The CAShapeLayer mask alone does the clipping.
- (instancetype)initWithFrame:(CGRect)frame
{
    if (self = [super initWithFrame:frame]) {
        self.backgroundColor = UIColor.grayColor;
        self.layer.mask = [self triangleMaskLayer];
        [self addSubview:self.selectView];
    }
    return self;
}

#pragma mark - Public

/// Sets the current level (expected range 0.0 ... 1.0) and resizes the
/// filled portion accordingly. Values outside the range are not clamped.
- (void)setLevel:(CGFloat)level
{
    _level = level;
    CGRect rect = self.selectView.frame;
    rect.size.width = self.bounds.size.width * level;
    self.selectView.frame = rect;
}

#pragma mark - Private

/// Lazily creates the filled portion of the meter, masked to the same
/// triangle as the containing view so that shrinking its width reveals
/// only the left part of the triangle.
- (UIView *)selectView
{
    if (!_selectView) {
        _selectView = [[UIView alloc] initWithFrame:self.bounds];
        _selectView.backgroundColor = UIColor.lightGrayColor;
        _selectView.layer.mask = [self triangleMaskLayer];
    }
    return _selectView;
}

/// Returns a new mask layer whose path is the right triangle
/// bottom-left -> top-right -> bottom-right, filling self.bounds.
/// Extracted because the identical path was previously built twice.
- (CAShapeLayer *)triangleMaskLayer
{
    CGSize size = self.bounds.size;
    UIBezierPath *trianglePath = [UIBezierPath bezierPath];
    [trianglePath moveToPoint:CGPointMake(0, size.height)];
    [trianglePath addLineToPoint:CGPointMake(size.width, 0)];
    [trianglePath addLineToPoint:CGPointMake(size.width, size.height)];
    [trianglePath closePath];
    CAShapeLayer *maskLayer = [[CAShapeLayer alloc] init];
    maskLayer.frame = self.bounds;
    maskLayer.path = trianglePath.CGPath;
    return maskLayer;
}

@end
完美