Learning and Using Live Streaming: Capture

2017-12-06  OwenKing

// Capture audio and video
- (void)setupCaputureVideo
{
    // 1. Create the capture session. It must be strongly referenced, otherwise it is released.
    AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
    _captureSession = captureSession;

    // 2. Get the camera device (the front camera here; pass AVCaptureDevicePositionBack for the rear camera).
    AVCaptureDevice *videoDevice = [self getVideoDevice:AVCaptureDevicePositionFront];

    // 3. Get the audio device (microphone).
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

    // 4. Create the video device input.
    AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
    _currentVideoDeviceInput = videoDeviceInput;

    // 5. Create the audio device input.
    AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];

    // 6. Add the inputs to the session.
    // Note: always check whether an input can be added first; the session cannot add a nil input.
    // 6.1 Add the video input.
    if ([captureSession canAddInput:videoDeviceInput]) {
        [captureSession addInput:videoDeviceInput];
    }
    // 6.2 Add the audio input.
    if ([captureSession canAddInput:audioDeviceInput]) {
        [captureSession addInput:audioDeviceInput];
    }

    // 7. Create the video data output.
    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    // 7.1 Set the delegate that receives the video sample buffers.
    // Note: the queue must be a serial queue and must not be nil, otherwise no data is delivered.
    dispatch_queue_t videoQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
    [videoOutput setSampleBufferDelegate:self queue:videoQueue];
    if ([captureSession canAddOutput:videoOutput]) {
        [captureSession addOutput:videoOutput];
    }

    // 8. Create the audio data output.
    AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    // 8.1 Set the delegate that receives the audio sample buffers.
    // Note: the queue must be a serial queue and must not be nil, otherwise no data is delivered.
    dispatch_queue_t audioQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
    [audioOutput setSampleBufferDelegate:self queue:audioQueue];
    if ([captureSession canAddOutput:audioOutput]) {
        [captureSession addOutput:audioOutput];
    }

    // 9. Keep the video connection so the delegate can tell video data apart from audio data.
    _videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];

    // 10. Add the video preview layer.
    AVCaptureVideoPreviewLayer *previedLayer = [AVCaptureVideoPreviewLayer layerWithSession:captureSession];
    previedLayer.frame = [UIScreen mainScreen].bounds;
    [self.view.layer insertSublayer:previedLayer atIndex:0];
    _previedLayer = previedLayer;

    // 11. Start the session.
    [captureSession startRunning];
}
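One thing the method above does not show is authorization. Since iOS 10 the app must declare NSCameraUsageDescription and NSMicrophoneUsageDescription in Info.plist, and the session delivers no frames until the user has granted camera and microphone access. Below is a minimal sketch of requesting both permissions before running the setup; the wrapper method name requestCaptureAuthorization is my own assumption, not part of the original code.

// Sketch: request camera and microphone access, then start capture.
// The method name is hypothetical; only the AVCaptureDevice calls are real API.
- (void)requestCaptureAuthorization
{
    // Camera access (requires NSCameraUsageDescription in Info.plist).
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL videoGranted) {
        // Microphone access (requires NSMicrophoneUsageDescription in Info.plist).
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL audioGranted) {
            if (videoGranted && audioGranted) {
                // UI work (adding the preview layer) must happen on the main queue.
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self setupCaputureVideo];
                });
            }
        }];
    }];
}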

// Return the camera device for the given position (front or back).
// Note: devicesWithMediaType: still works but is deprecated since iOS 10;
// AVCaptureDeviceDiscoverySession is the modern replacement.
- (AVCaptureDevice *)getVideoDevice:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}
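The helper only returns a device for one position, but together with the _currentVideoDeviceInput ivar it is enough to switch cameras while streaming. The sketch below is an assumption about how that could look (the method name toggleCamera is not from the original post); the swap is wrapped in beginConfiguration / commitConfiguration so the session applies it atomically.

// Sketch (hypothetical method): toggle between the front and back camera.
- (void)toggleCamera
{
    AVCaptureDevicePosition current = _currentVideoDeviceInput.device.position;
    AVCaptureDevicePosition target = (current == AVCaptureDevicePositionFront)
                                         ? AVCaptureDevicePositionBack
                                         : AVCaptureDevicePositionFront;

    AVCaptureDevice *newDevice = [self getVideoDevice:target];
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newDevice error:nil];
    if (newInput == nil) {
        return;
    }

    [_captureSession beginConfiguration];
    [_captureSession removeInput:_currentVideoDeviceInput];
    if ([_captureSession canAddInput:newInput]) {
        [_captureSession addInput:newInput];
        _currentVideoDeviceInput = newInput;
    } else {
        // Put the old input back if the new one cannot be added.
        [_captureSession addInput:_currentVideoDeviceInput];
    }
    [_captureSession commitConfiguration];
    // Note: after swapping inputs the output's connections are rebuilt,
    // so _videoConnection should be re-fetched from the video output if it is still needed.
}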

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

// Called for every captured sample buffer; the data may be either video or audio.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (_videoConnection == connection) {
        NSLog(@"Captured video data");
    } else {
        NSLog(@"Captured audio data");
    }
}
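The delegate above only logs which kind of data arrived. In a real live-streaming pipeline the video sample buffer would be unpacked and handed to an encoder. The helper below is a sketch of that first step; its name, processVideoSampleBuffer:, is hypothetical, and it would be called from the video branch of the delegate.

// Sketch (hypothetical helper): pull the raw frame out of a video sample buffer.
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        return;
    }
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    NSLog(@"Video frame: %zu x %zu", width, height);
    // For a live stream, the pixel buffer would typically be passed to a
    // hardware encoder (for example VideoToolbox) at this point.
}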
