It has been a while since I last wrote anything, so I'm taking some time to organize my notes on iOS audio/video development, mostly as a memo to myself. The principles of iOS audio/video development are already covered extensively online, so I won't repeat them here; the following posts will record how each step is implemented.
The first step in audio/video development is setting up a capture session. This post only covers video capture; the audio session will be covered in a later post. Without further ado, here is the code:
#import <UIKit/UIKit.h>

@interface BBVideoCapture : NSObject

/**
 Start capturing video

 @param preview The parent view in which the captured video is displayed
 */
- (void)startCapture:(UIView *)preview;

/**
 Stop capturing video
 */
- (void)stopCapture;

@end
#import <AVFoundation/AVFoundation.h>
#import "BBVideoCapture.h"
@interface BBVideoCapture () <AVCaptureVideoDataOutputSampleBufferDelegate>

/** Serial queue on which captured frames are delivered */
@property (nonatomic, strong) dispatch_queue_t captureQueue;
/** Capture session (held strongly so it is not deallocated while running) */
@property (nonatomic, strong) AVCaptureSession *captureSession;
/** Preview layer */
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;

@end
@implementation BBVideoCapture
- (void)startCapture:(UIView *)preview
{
    // 1. Create the capture session
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetHigh;
    self.captureSession = session;

    // 2. Configure the input device (default video device, i.e. the back camera)
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    // 2.1 Enable continuous autofocus; the device must be locked before changing its configuration
    if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        if ([device lockForConfiguration:nil]) {
            device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
            [device unlockForConfiguration];
        }
    }

    NSError *error = nil;
    AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
    if (input && [session canAddInput:input]) {
        [session addInput:input];
    }

    // 3. Add the video data output
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    // Sample buffers must be delivered on a serial queue so frames arrive in order
    self.captureQueue = dispatch_queue_create("com.bb.videoCaptureQueue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:self.captureQueue];

    // 3.1 kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange means the raw frames are YUV 4:2:0 (NV12).
    //     This format is chosen for the encoding step in a later post and can be ignored for now.
    NSDictionary *settings = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey :
                                   @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)};
    output.videoSettings = settings;
    output.alwaysDiscardsLateVideoFrames = YES;
    if ([session canAddOutput:output]) {
        [session addOutput:output];
    }

    // 4. Set the orientation of the recorded video
    AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo];
    if ([connection isVideoOrientationSupported]) {
        connection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }

    // 5. Add the preview layer
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    previewLayer.frame = preview.bounds;
    [preview.layer insertSublayer:previewLayer atIndex:0];
    self.previewLayer = previewLayer;

    // 6. Start capturing
    [self.captureSession startRunning];
}
- (void)stopCapture {
    [self.captureSession stopRunning];
    [self.previewLayer removeFromSuperlayer];
    // Release the layer and session so the camera is freed
    self.previewLayer = nil;
    self.captureSession = nil;
}
#pragma mark - Video data output delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Captured video frames arrive here; video encoding will be implemented in this callback in a later post.
}
@end
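For completeness, here is a minimal usage sketch. The view controller and its property are hypothetical and not part of the original post; the important points are to keep a strong reference to BBVideoCapture (otherwise it is deallocated and capture stops) and to add NSCameraUsageDescription to Info.plist so the camera permission prompt can appear.

#import <UIKit/UIKit.h>
#import "BBVideoCapture.h"

@interface BBViewController : UIViewController
/** Strong reference keeps the capture object alive while capturing */
@property (nonatomic, strong) BBVideoCapture *videoCapture;
@end

@implementation BBViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    self.videoCapture = [[BBVideoCapture alloc] init];
    // Start capturing and show the preview inside this controller's view
    [self.videoCapture startCapture:self.view];
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [self.videoCapture stopCapture];
}

@end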
The video capture session itself is fairly simple, and the code above is commented in detail. I consulted a lot of material when getting started and I'm grateful to everyone who shared it. Much of this code is based on coderWhy's examples, with minor changes; thanks again!
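As a small preview of the encoding step, below is a minimal sketch of what the delegate callback could do with a frame, assuming the NV12 format (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) configured above. It only locks the pixel buffer and reads its size and plane addresses; the actual VideoToolbox encoding is left to the later post.

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Get the raw pixel buffer carried by this sample buffer
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        return;
    }

    // Lock the base address before reading pixel data on the CPU
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    size_t width  = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    // NV12 has two planes: plane 0 is Y (luma), plane 1 is interleaved CbCr (chroma)
    void *yPlane  = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    void *uvPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    NSLog(@"frame %zux%zu, Y plane %p, CbCr plane %p", width, height, yPlane, uvPlane);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}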