Live-stream the iPhone camera feed over HTTP-FLV. Devices on the same LAN can watch the stream with VLC, and no intermediate server is involved, so this is point-to-point streaming within the LAN.
Implementation steps
1. Capture the iPhone camera feed
2. Hardware-encode the captured frames to H.264
3. Mux the encoded data into FLV tags with FFmpeg
4. Run an HTTP server that listens for HTTP connections and sends the data once a client connects
Code implementation
1. Capture the iPhone camera feed
_captureSession = [[AVCaptureSession alloc] init];
_captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
_captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError * error = nil;
_captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:&error];
if (_captureDeviceInput) {
[_captureSession addInput:_captureDeviceInput];
}
_captureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[_captureVideoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
NSDictionary * settings = [[NSDictionary alloc] initWithObjectsAndKeys:@(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange), kCVPixelBufferPixelFormatTypeKey, nil];
_captureVideoDataOutput.videoSettings = settings;
dispatch_queue_t queue = dispatch_queue_create("CaptureQueue", NULL);
[_captureVideoDataOutput setSampleBufferDelegate:self queue:queue];
[_captureSession addOutput:_captureVideoDataOutput];
_previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
_previewLayer.frame = CGRectMake(0, 100, self.view.bounds.size.width, self.view.bounds.size.height - 100);
[self.view.layer addSublayer:_previewLayer];
[_captureSession startRunning];
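Note: on iOS 10 and later the app must also declare NSCameraUsageDescription in Info.plist, otherwise the system terminates it as soon as the camera is accessed.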
2. Hardware-encode the captured frames to H.264
//Initialize the hardware encoder (VideoToolbox)
OSStatus status = VTCompressionSessionCreate(NULL, 1280, 720, kCMVideoCodecType_H264, NULL, NULL, NULL, didCompressH264, (__bridge void *)self, &_compressionSession);
if (status != noErr) {
NSLog(@"Create compressionSession error");
return;
}
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_High_AutoLevel);
//Disable frame reordering (B-frames): the packet-writing code below assumes dts == pts
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanFalse);
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, (__bridge CFTypeRef)(@(30)));
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, (__bridge CFTypeRef)(@(30)));
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)(@(800 * 1024)));
status = VTCompressionSessionPrepareToEncodeFrames(_compressionSession);
if (status != noErr) {
NSLog(@"Prepare error");
}
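VTCompressionSessionCompleteFrames does not belong in the setup path; it is used to drain the encoder when streaming stops. A minimal teardown sketch (the -stopEncode method name is only illustrative):

- (void)stopEncode {
    if (_compressionSession) {
        //Flush any frames still inside the encoder, then tear the session down
        VTCompressionSessionCompleteFrames(_compressionSession, kCMTimeInvalid);
        VTCompressionSessionInvalidate(_compressionSession);
        CFRelease(_compressionSession);
        _compressionSession = NULL;
    }
}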
//Encode the captured frames
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
VTEncodeInfoFlags flags;
OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession, imageBuffer, pts, dur, NULL, NULL, &flags);
if (status != noErr) {
NSLog(@"Encode fail");
}
//Software encoding with FFmpeg would also work here: the CVImageBufferRef delivered by the capture output is the raw pixel data (it is the same type as CVPixelBufferRef), so the YUV planes can be copied out and fed to FFmpeg, as sketched after this method
}
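For reference, a rough sketch of that software path: copy the two NV12 planes out of the CVPixelBufferRef into an AVFrame, which could then be handed to an FFmpeg encoder (e.g. via avcodec_send_frame). The fillFrame helper name is mine, and it assumes the encoder is configured for AV_PIX_FMT_NV12:

//Hypothetical helper: copy an NV12 CVPixelBufferRef into a freshly allocated AVFrame
static AVFrame * fillFrame(CVPixelBufferRef pixelBuffer) {
    AVFrame * frame = av_frame_alloc();
    frame->format = AV_PIX_FMT_NV12;
    frame->width = (int)CVPixelBufferGetWidth(pixelBuffer);
    frame->height = (int)CVPixelBufferGetHeight(pixelBuffer);
    av_frame_get_buffer(frame, 0);

    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    //Plane 0 is Y (1 byte per pixel), plane 1 is interleaved CbCr (2 bytes per chroma sample);
    //copy row by row because the capture stride can be wider than the visible width
    for (int plane = 0; plane < 2; plane++) {
        uint8_t * src = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, plane);
        size_t srcStride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, plane);
        size_t rows = CVPixelBufferGetHeightOfPlane(pixelBuffer, plane);
        size_t rowBytes = CVPixelBufferGetWidthOfPlane(pixelBuffer, plane) * (plane == 0 ? 1 : 2);
        for (size_t row = 0; row < rows; row++) {
            memcpy(frame->data[plane] + row * frame->linesize[plane], src + row * srcStride, rowBytes);
        }
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    //The caller still has to set frame->pts before sending the frame to the encoder
    return frame;
}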
3. Mux the encoded data into FLV tags with FFmpeg
int ret = avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", NULL);
if (ret < 0) {
NSLog(@"Could not allocate output format context!");
}
//Instead of writing to a file or a server URL, the muxed data is written to memory through the write_buffer callback and then sent out over HTTP, so a custom AVIOContext is created and assigned to the AVFormatContext
//The AVIOContext allocated here must later be released with avio_context_free() (see the teardown sketch below)
unsigned char * outBuffer = (unsigned char *)av_malloc(32768);
AVIOContext * avio_out = avio_alloc_context(outBuffer, 32768, 1, NULL, NULL, write_buffer, NULL);
ofmt_ctx->pb = avio_out;
ofmt_ctx->flags |= AVFMT_FLAG_CUSTOM_IO;
AVCodec * codec = avcodec_find_encoder(AV_CODEC_ID_H264);
out_stream = avformat_new_stream(ofmt_ctx, codec);
codec_ctx = avcodec_alloc_context3(codec);
AVRational dst_fps = {30, 1};
codec_ctx->codec_tag = 0;
codec_ctx->codec_id = AV_CODEC_ID_H264;
codec_ctx->codec_type = AVMEDIA_TYPE_VIDEO;
codec_ctx->width = 1280;
codec_ctx->height = 720;
codec_ctx->gop_size = 12;
codec_ctx->pix_fmt = AV_PIX_FMT_NV12;
codec_ctx->framerate = dst_fps;
codec_ctx->time_base = av_inv_q(dst_fps);
if(ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
codec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
ret = avcodec_parameters_from_context(out_stream->codecpar, codec_ctx);
if (ret < 0) {
NSLog(@"Could not initialize stream codec parameters!");
}
AVDictionary * codec_options = NULL;
av_dict_set(&codec_options, "profile", "high", 0);
av_dict_set(&codec_options, "preset", "superfast", 0);
av_dict_set(&codec_options, "tune", "zerolatency", 0);
ret = avcodec_open2(codec_ctx, codec, &codec_options);
if (ret < 0) {
NSLog(@"Could not open video encoder!");
}
//Copy the SPS/PPS (extradata) into the stream parameters; assigning the pointer directly would make both contexts free the same memory on cleanup
out_stream->codecpar->extradata = (uint8_t *)av_mallocz(codec_ctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
memcpy(out_stream->codecpar->extradata, codec_ctx->extradata, codec_ctx->extradata_size);
out_stream->codecpar->extradata_size = codec_ctx->extradata_size;
ret = avformat_write_header(ofmt_ctx, NULL);
if (ret < 0) {
NSLog(@"Could not write header!");
}
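When streaming stops, the muxer and the custom AVIOContext created above also have to be released; a minimal teardown sketch (FFmpeg may have swapped the internal I/O buffer, so it is freed through avio_out->buffer rather than the original outBuffer pointer):

//Finish the FLV stream and release the muxer and the custom I/O context
av_write_trailer(ofmt_ctx);
avcodec_free_context(&codec_ctx);
av_freep(&avio_out->buffer);
avio_context_free(&avio_out);
ofmt_ctx->pb = NULL;
avformat_free_context(ofmt_ctx);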
static int write_buffer(void * opaque, uint8_t * buf, int buf_size) {
//During avformat_write_header this callback receives the FLV file header; during av_write_frame it receives FLV tag data, which can be sent out over HTTP. I use the GCDWebServer library (see step 4 below)
//buf_size is capped at the 32768 passed when creating the AVIOContext: a tag smaller than 32768 arrives as one complete piece, a larger tag arrives in several partial pieces and has to be handled accordingly
return buf_size;//report that all bytes were consumed
}
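One possible implementation of the callback above is to append every chunk to a shared buffer that the HTTP handler drains later. The flvBuffer, flvBufferQueue and setupFLVBuffer names are illustrative, not part of the original code:

static NSMutableData * flvBuffer;       //FLV bytes produced by FFmpeg, waiting to be sent
static dispatch_queue_t flvBufferQueue; //serializes access between FFmpeg and the HTTP handler

//Call once before streaming starts, e.g. right before avformat_write_header
static void setupFLVBuffer(void) {
    flvBuffer = [NSMutableData data];
    flvBufferQueue = dispatch_queue_create("FLVBufferQueue", DISPATCH_QUEUE_SERIAL);
}

static int write_buffer(void * opaque, uint8_t * buf, int buf_size) {
    NSData * chunk = [NSData dataWithBytes:buf length:buf_size];
    dispatch_sync(flvBufferQueue, ^{
        [flvBuffer appendData:chunk];
    });
    return buf_size;
}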
static void didCompressH264(void *outputCallbackRefCon, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer ) {
//The H.264 data produced by the iOS hardware encoder is in AVCC format: each NALU is prefixed with 4 bytes giving its length. AVCC data written straight to a file cannot be played; it has to be converted to Annex B format, where each NALU starts with the 0x00000001 start code
if (status != noErr) {
NSLog(@"Compress H264 failed");
return;
}
if (!CMSampleBufferDataIsReady(sampleBuffer)) {
return;
}
CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
size_t length, totalLength;
char * dataPointer;
const char bytesHeader[] = "\x00\x00\x00\x01";
OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
bool keyFrame = !CFDictionaryContainsKey((CFDictionaryRef)(CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0)), (const void *)kCMSampleAttachmentKey_NotSync);
NSMutableData * pktData = [NSMutableData data];
if (keyFrame) {
CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
size_t sparameterSetSize, sparameterSetCount;
const uint8_t * sparameterSet;
status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
if (status == noErr) {
size_t pparameterSetSize, pparameterSetCount;
const uint8_t * pparameterSet;
status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
if (status == noErr) {
size_t headerLength = 4;
size_t length = 2 * headerLength + sparameterSetSize + pparameterSetSize;
unsigned char * buffer = (unsigned char *)malloc(sizeof(unsigned char) * length);
memcpy(buffer, bytesHeader, headerLength);
memcpy(buffer + headerLength, sparameterSet, sparameterSetSize);
memcpy(buffer + headerLength + sparameterSetSize, bytesHeader, headerLength);
memcpy(buffer + headerLength + sparameterSetSize + headerLength, pparameterSet, pparameterSetSize);
[pktData appendBytes:buffer length:length];
free(buffer);//the SPS/PPS bytes were copied into pktData, release the temporary buffer
}
}
}
size_t bufferOffset = 0;
int AVCCHeaderLength = 4;
while(bufferOffset < totalLength - AVCCHeaderLength) {
uint32_t NALUnitLength = 0;
memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
unsigned char * buffer = (unsigned char *)malloc(sizeof(unsigned char) * (NALUnitLength + AVCCHeaderLength));
memcpy(buffer, bytesHeader, AVCCHeaderLength);
memcpy(buffer + AVCCHeaderLength, dataPointer + bufferOffset + AVCCHeaderLength, NALUnitLength);
[pktData appendBytes:buffer length:NALUnitLength + AVCCHeaderLength];
free(buffer);//the NALU was copied into pktData, release the temporary buffer
bufferOffset += AVCCHeaderLength + NALUnitLength;
}
AVPacket pkt = {0};
av_init_packet(&pkt);
pkt.data = (uint8_t *)[pktData bytes];
pkt.size = (int)[pktData length];
//pkt_pts starts at 0 and is incremented once per encoded frame
pkt.pts = pkt_pts;
pkt.dts = pkt.pts;
if (keyFrame) {
pkt.flags = AV_PKT_FLAG_KEY;
} else {
pkt.flags = 0;
}
pkt.stream_index = 0;
av_packet_rescale_ts(&pkt, codec_ctx->time_base, out_stream->time_base);
av_write_frame(ofmt_ctx, &pkt);
pkt_pts++;
}
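4. Run an HTTP server that listens for HTTP connections and sends the data
This step is not shown above, so here is a minimal sketch using GCDWebServer, the library mentioned in the write_buffer comment. The port 8080, the /live.flv path, the _webServer ivar (assumed to be a strong property) and the flvBuffer / flvBufferQueue globals from the write_buffer sketch are my own assumptions; for simplicity it also assumes a single viewer that connects before avformat_write_header() runs, so the FLV header is still at the front of the buffer.

#import "GCDWebServer.h"
#import "GCDWebServerStreamedResponse.h"

//Pull whatever write_buffer has accumulated and hand it to GCDWebServer.
//An empty NSData would end the response, so poll again when nothing is pending yet.
static void sendPendingFLVData(GCDWebServerBodyReaderCompletionBlock completionBlock) {
    dispatch_async(flvBufferQueue, ^{
        if (flvBuffer.length > 0) {
            NSData * chunk = [flvBuffer copy];
            [flvBuffer setLength:0];
            completionBlock(chunk, nil);
        } else {
            dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.05 * NSEC_PER_SEC)),
                           flvBufferQueue, ^{ sendPendingFLVData(completionBlock); });
        }
    });
}

- (void)startHTTPServer {
    _webServer = [[GCDWebServer alloc] init];
    [_webServer addHandlerForMethod:@"GET"
                               path:@"/live.flv"
                       requestClass:[GCDWebServerRequest class]
                       processBlock:^GCDWebServerResponse *(GCDWebServerRequest *request) {
        return [GCDWebServerStreamedResponse responseWithContentType:@"video/x-flv"
                                                    asyncStreamBlock:^(GCDWebServerBodyReaderCompletionBlock completionBlock) {
            //Called every time the server is ready to send the next chunk of the response body
            sendPendingFLVData(completionBlock);
        }];
    }];
    [_webServer startWithPort:8080 bonjourName:nil];
}

With everything running, the stream can then be opened in VLC via Open Network Stream at http://<iPhone IP>:8080/live.flv (or whatever port and path the server above uses).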
The stream plays correctly in VLC; playing it with ffplay gives a black screen, and I have not yet found the cause.