// AVCaptureSession audio/video capture (AVCaptureSession音频视频采集)

//
//  AudioVideoCaptureViewController.m
//  live
//
//  Created by lujunjie on 2016/10/31.
//  Copyright © 2016年 lujunjie. All rights reserved.
//

#import "AudioVideoCaptureViewController.h"
#import <AVFoundation/AVFoundation.h>
// Class extension: private capture state. The controller acts as the sample-buffer
// delegate for both the video and audio data outputs.
@interface AudioVideoCaptureViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>
@property (nonatomic,strong) AVCaptureSession           *mCaptureSession;       // bridges inputs to outputs
@property (nonatomic,strong) AVCaptureDeviceInput       *mCaptureDeviceInput;   // camera input
@property (nonatomic ,strong) AVCaptureDeviceInput      *mCaptureAudioDeviceInput;// reads input data from the AVCaptureDevice (microphone)
@property (nonatomic,strong) AVCaptureVideoDataOutput   *mCaptureVideoOutput;   // raw video frames
@property (nonatomic , strong) AVCaptureAudioDataOutput *mCaptureAudioOutput;   // raw audio buffers
@property (nonatomic,strong) dispatch_queue_t mProcessQueue;  // NOTE(review): declared but never assigned in this file
@property (nonatomic,strong) dispatch_queue_t mCaptureQueue;  // delegate queue for sample-buffer callbacks
@property (nonatomic,strong) dispatch_queue_t mEncodeQueue;   // queue the callbacks hand work off to

@property (nonatomic,strong) AVCaptureVideoPreviewLayer *mPreviewLayer;  // on-screen camera preview
@end

@implementation AudioVideoCaptureViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    // AVFoundation capture roles, for reference:
    //   a. AVCaptureDevice  — the abstract hardware device (camera / microphone).
    //   b. AVCaptureInput   — wraps a device and configures its ports as session input.
    //   c. AVCaptureOutput  — receives the data, e.g. movie files or raw frames.
    //   d. AVCaptureSession — the bridge coordinating data flow from inputs to outputs.
    [self startCapture];
}

- (void)didReceiveMemoryWarning {
    // Nothing extra to release here; defer to the superclass.
    [super didReceiveMemoryWarning];
}

/// Builds the capture graph (front camera + microphone -> data outputs),
/// attaches the preview layer, and starts the session.
- (void)startCapture {
    // Create the session and set the capture resolution.
    self.mCaptureSession = [[AVCaptureSession alloc] init];
    self.mCaptureSession.sessionPreset = AVCaptureSessionPreset640x480;

    // The sample-buffer delegate queue MUST be a serial queue so buffers are
    // delivered in order; the global concurrent queue does not guarantee that.
    self.mCaptureQueue = dispatch_queue_create("com.live.capture", DISPATCH_QUEUE_SERIAL);
    self.mEncodeQueue  = dispatch_queue_create("com.live.encode", DISPATCH_QUEUE_SERIAL);

    // Find the front camera.
    AVCaptureDevice *inputCamera = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if ([device position] == AVCaptureDevicePositionFront) {
            inputCamera = device;
            break; // found it — stop scanning
        }
    }
    if (inputCamera == nil) {
        NSLog(@"startCapture: no front camera available");
        return;
    }

    // Batch all graph mutations into one atomic configuration change.
    [self.mCaptureSession beginConfiguration];

    // Wrap the camera in an input; surface the error instead of discarding it.
    NSError *videoError = nil;
    self.mCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:inputCamera error:&videoError];
    if (self.mCaptureDeviceInput == nil) {
        NSLog(@"startCapture: camera input failed: %@", videoError);
        [self.mCaptureSession commitConfiguration];
        return;
    }
    if ([self.mCaptureSession canAddInput:self.mCaptureDeviceInput]) {
        [self.mCaptureSession addInput:self.mCaptureDeviceInput];
    }

    // Video data output: NV12 (bi-planar 4:2:0, full range); keep late frames.
    self.mCaptureVideoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [self.mCaptureVideoOutput setAlwaysDiscardsLateVideoFrames:NO];
    [self.mCaptureVideoOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)}];
    [self.mCaptureVideoOutput setSampleBufferDelegate:self queue:self.mCaptureQueue];
    if ([self.mCaptureSession canAddOutput:self.mCaptureVideoOutput]) {
        [self.mCaptureSession addOutput:self.mCaptureVideoOutput];
    }

    // Force portrait orientation on the video connection.
    AVCaptureConnection *connection = [self.mCaptureVideoOutput connectionWithMediaType:AVMediaTypeVideo];
    [connection setVideoOrientation:AVCaptureVideoOrientationPortrait];

    // Microphone input; again, check the result instead of passing error:nil.
    AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] lastObject];
    NSError *audioError = nil;
    self.mCaptureAudioDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&audioError];
    if (self.mCaptureAudioDeviceInput == nil) {
        // Continue video-only rather than aborting the whole session.
        NSLog(@"startCapture: microphone input failed: %@", audioError);
    } else if ([self.mCaptureSession canAddInput:self.mCaptureAudioDeviceInput]) {
        [self.mCaptureSession addInput:self.mCaptureAudioDeviceInput];
    }

    // Audio data output, sharing the same serial delegate queue.
    self.mCaptureAudioOutput = [[AVCaptureAudioDataOutput alloc] init];
    if ([self.mCaptureSession canAddOutput:self.mCaptureAudioOutput]) {
        [self.mCaptureSession addOutput:self.mCaptureAudioOutput];
    }
    [self.mCaptureAudioOutput setSampleBufferDelegate:self queue:self.mCaptureQueue];

    [self.mCaptureSession commitConfiguration];

    // On-screen preview of the camera feed.
    self.mPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.mCaptureSession];
    [self.mPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspect]; // aspect-fit scaling for the preview
    [self.mPreviewLayer setFrame:self.view.bounds];
    [self.view.layer addSublayer:self.mPreviewLayer];

    // -startRunning blocks until the session is up — keep it off the main thread.
    dispatch_async(self.mCaptureQueue, ^{
        [self.mCaptureSession startRunning];
    });
}

/// Sample-buffer callback for both the video and audio data outputs.
/// Runs on the capture delegate queue; hands work off to the encode queue.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Decide video vs. audio before dispatching so the block captures a plain BOOL.
    BOOL isVideo = (captureOutput == self.mCaptureVideoOutput);

    // dispatch_async, not dispatch_sync: synchronously blocking this delegate
    // queue until the encode queue finishes would stall frame delivery.
    // NOTE(review): if the block ever needs sampleBuffer itself, it must
    // CFRetain it here and CFRelease it inside the block — the buffer is not
    // guaranteed valid after this callback returns.
    dispatch_async(self.mEncodeQueue, ^{
        if (isVideo) {
            NSLog(@"视频:::sampleBuffer");
        } else {
            NSLog(@"音频:::sampleBuffer");
        }
    });
}

@end
// 原文地址 (original article): https://www.cnblogs.com/-ljj/p/6016407.html