iOS: Capturing Audio/Video Device Input and Output Streams

1. Add AVFoundation.framework and AssetsLibrary.framework.

The first gives access to the device's camera and microphone; the second is used to save the recorded video to the photo album.

2. Add the imports:

//Apple media framework
#import <AVFoundation/AVFoundation.h>
//C memory-management library
#import <malloc/malloc.h>
//Apple media-asset framework
#import <AssetsLibrary/AssetsLibrary.h>
3. Add the delegate protocols:

//AVCaptureVideoDataOutputSampleBufferDelegate: the video output delegate
//AVCaptureAudioDataOutputSampleBufferDelegate: the audio output delegate
@interface FourController : UIViewController<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>
4. The .h file:
//
//  FourController.h
//  VideoDemo
//
//  Created by skyzizhu on 15/12/17.
//  Copyright (c) 2015 skyzizhu. All rights reserved.
//
 
#import <UIKit/UIKit.h>
 
#import <AVFoundation/AVFoundation.h>
 
#import <malloc/malloc.h>
 
#import <AssetsLibrary/AssetsLibrary.h>
 
@interface FourController : UIViewController<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>
//Video data output
@property (nonatomic,strong)AVCaptureVideoDataOutput *videoOutput;
//Audio data output
@property (nonatomic,strong)AVCaptureAudioDataOutput *audioOutput;
//Current capture session
@property (nonatomic,strong)AVCaptureSession *mediaSession;
//Writer input for video
@property (nonatomic,strong)AVAssetWriterInput *videoWriterInput;
//Writer input for audio
@property (nonatomic,strong)AVAssetWriterInput *audioWriterInput;
//Stream writer
@property (nonatomic,strong)AVAssetWriter *assetWriter;
//Reserved: use this adaptor when capturing video only
@property (nonatomic,strong)AVAssetWriterInputPixelBufferAdaptor *videoAssetWriterPixelBufferAdaptor;
 
//layer
//Apple's streaming-media layer, used to display the captured media in real time
@property (strong,nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
 
 
//
@end

5. The .m file. First, get the device's camera and microphone (the input devices):

-(AVCaptureDeviceInput *)getVideoInput
{
    NSError *error = nil;
    //Default video device (the back camera)
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    //NOTE: the error is ignored here; input comes back nil if the device is unavailable
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    
    return input;
}
 
-(AVCaptureDeviceInput *)getAudioInput
{
    NSError *error = nil;
    //Default audio device (the built-in microphone)
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    
    return input;
}
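One caveat: deviceInputWithDevice:error: simply returns nil when the user has denied access, so on iOS 7 and later it is worth requesting camera and microphone permission up front. A minimal sketch of such a check (my addition, not part of the original walkthrough; the method name requestCaptureAccess is mine):

-(void)requestCaptureAccess
{
    //Ask for camera and microphone access before building the session
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        NSLog(@"camera access %@", granted ? @"granted" : @"denied");
    }];
    
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL granted) {
        NSLog(@"microphone access %@", granted ? @"granted" : @"denied");
    }];
}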

Next, configure the video and audio outputs (pixel format and other output-side settings). These are capture-output properties, distinct from the writer settings configured later. The video and audio outputs must be handed the same dispatch queue; sharing one queue keeps the two streams in sync, while separate queues lead to crashes.

-(AVCaptureVideoDataOutput *)getVideoOutputWithQueue:(dispatch_queue_t)queue
{
    
    if(_videoOutput != nil){
    
        return _videoOutput;
    
    }
    
    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    //Drop frames that arrive late instead of queueing them
    _videoOutput.alwaysDiscardsLateVideoFrames = YES;
    //Deliver frames as 32BGRA pixel buffers
    _videoOutput.videoSettings = [NSDictionary dictionaryWithObject:
                                 [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    
    [_videoOutput setSampleBufferDelegate:self queue:queue];
    
    return _videoOutput;
    
}
 
-(AVCaptureAudioDataOutput *)getAudioOutputWithQueue:(dispatch_queue_t)queue
{
    if(_audioOutput != nil){
        
        return _audioOutput;
        
    }
    
    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    
    [_audioOutput setSampleBufferDelegate:self queue:queue];
    
    return _audioOutput;
}
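The 32BGRA pixel format requested above is the one most processing code expects, but it is worth confirming the output actually supports it. A hedged sketch (my addition) using AVCaptureVideoDataOutput's availableVideoCVPixelFormatTypes list:

//Check that the output supports 32BGRA before relying on it
BOOL supports32BGRA = NO;
for (NSNumber *fmt in _videoOutput.availableVideoCVPixelFormatTypes) {
    if (fmt.unsignedIntValue == kCVPixelFormatType_32BGRA) {
        supports32BGRA = YES;
        break;
    }
}
NSLog(@"32BGRA supported: %d", supports32BGRA);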

Configure the capture session, which can be thought of as a recording "conversation", and add the inputs and outputs created above to it:

-(AVCaptureSession *)setMediaSession
{
    
    if(_mediaSession != nil){
    
        return _mediaSession;
    
    }
    
    _mediaSession = [[AVCaptureSession alloc] init];
    //Tell the system we are starting to configure the session
    [_mediaSession beginConfiguration];
    
    _mediaSession.sessionPreset = AVCaptureSessionPresetLow;
    
    //add input and output
    [_mediaSession addInput:[self getVideoInput]];
    
    [_mediaSession addInput:[self getAudioInput]];
    
    //dispatch_queue_t videoQueue = dispatch_queue_create("com.videos.queue", NULL);
    //One serial queue shared by both outputs (see the note above)
    dispatch_queue_t audioQueue = dispatch_queue_create("com.audios.queue", NULL);
    
    [_mediaSession addOutput:[self getVideoOutputWithQueue:audioQueue]];
    
    [_mediaSession addOutput:[self getAudioOutputWithQueue:audioQueue]];
    //Commit the whole configuration as one atomic change
    [_mediaSession commitConfiguration];
    //Start capturing
    [_mediaSession startRunning];
    
    return _mediaSession;
}
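A session refuses inputs and outputs it cannot handle, so a defensive version guards each add. A sketch of the guarded form (my addition, not the original code):

//Guarded add: only attach components the session will accept
AVCaptureDeviceInput *videoInput = [self getVideoInput];
if (videoInput != nil && [_mediaSession canAddInput:videoInput]) {
    [_mediaSession addInput:videoInput];
} else {
    NSLog(@"cannot add video input");
}
//Repeat the same canAddInput:/canAddOutput: check for the audio input and both outputs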

Create a view to host the preview layer for the live stream, and add a stop button that ends recording and saves the file to the photo album:

-(void)setLayer
{
    UIView *v = [[UIView alloc]initWithFrame:CGRectMake(50, 100, 280, 400)];
    
    [self.view addSubview:v];
    //Preview layer for the current stream
    _captureVideoPreviewLayer=[[AVCaptureVideoPreviewLayer alloc]initWithSession:self.mediaSession];
    
    CALayer *layer = v.layer;
    
    layer.masksToBounds=YES;
    
    _captureVideoPreviewLayer.frame=layer.bounds;
    //Display mode (video gravity)
    _captureVideoPreviewLayer.videoGravity=AVLayerVideoGravityResizeAspectFill;
    
    [layer addSublayer:_captureVideoPreviewLayer];
    
    //
    
    UIButton *stop = [[UIButton alloc]initWithFrame:CGRectMake(200, 550, 100, 60)];
    
    stop.center = CGPointMake(self.view.center.x, stop.center.y);
    
    stop.backgroundColor = [UIColor grayColor];
    
    [stop setTitle:@"停止" forState:UIControlStateNormal];
    
    [stop setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
    
    [self.view addSubview:stop];
    
    [stop addTarget:self action:@selector(stopAction:) forControlEvents:UIControlEventTouchUpInside];
    
}
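One refinement worth considering (my addition): the preview layer follows its capture connection's orientation, which may not match your UI. Locking it to portrait, assuming the preview connection supports orientation changes:

//Lock the preview to portrait so it matches the fixed-size writer settings below
if (_captureVideoPreviewLayer.connection.supportsVideoOrientation) {
    _captureVideoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
}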

Configure the AVAssetWriter, which writes the streams to a file in real time:

//save file path by document
- (NSString *)generateFilePathForMovie {
    //Use a .mov extension to match the AVFileTypeQuickTimeMovie container below
    return [NSString stringWithFormat:@"%@/play.mov",
            [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"]];
}
-(AVAssetWriter *)setMediaWriter
{
    //If a file already exists at this path, delete it first; otherwise the write fails
    if ([[NSFileManager defaultManager] fileExistsAtPath:[self generateFilePathForMovie]]) {
        NSLog(@"already exists");
        NSError *error;
        if ([[NSFileManager defaultManager] removeItemAtPath:[self generateFilePathForMovie] error:&error] == NO) {
            NSLog(@"removeitematpath %@ error :%@", [self generateFilePathForMovie], error);
        }
    }
    
    //
    NSError *error = nil;
    //Create the AVAssetWriter, which controls how the streams are written out
    //The writer takes a local file URL and writes to it automatically.
    _assetWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:[self generateFilePathForMovie]] fileType:AVFileTypeQuickTimeMovie error:&error];
    
    //[_assetWriter startSessionAtSourceTime:kCMTimeZero];
    
    //video: configure how the video stream is encoded.
    int bitRate = (300 + /*self.currentQuality*/5 * 90) * 1024;      //NORMAL 750 * 1024
    
    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:bitRate], AVVideoAverageBitRateKey,
                                   nil];
    
    
    //h264
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:480], AVVideoWidthKey,
                                   [NSNumber numberWithInt:320], AVVideoHeightKey,
                                   codecSettings, AVVideoCompressionPropertiesKey,
                                   nil];
    
    _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    
    _videoWriterInput.expectsMediaDataInRealTime = YES;
    
    /*Use this adaptor instead when writing video only.
    self.videoAssetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:
                                          [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA],kCVPixelBufferPixelFormatTypeKey, nil]];
     */
    
    //audio
    // Add the audio input
    //Configure the audio (AAC)
    AudioChannelLayout acl;
    
    bzero( &acl, sizeof(acl));
    
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    
    NSDictionary* audioOutputSettings = nil;
    // Both types of audio settings have been known to corrupt the output video file.
    if( /* DISABLES CODE */ (NO) ) {
        // should work from iphone 3GS on and from ipod 3rd generation
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
                               [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                               [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                               [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
                               [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                               nil];
    } else {
        // should work on any device, but requires more space
        audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
                               [ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
                               [ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
                               [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                               [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                               [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                               nil ];
    }
    
    _audioWriterInput = [AVAssetWriterInput
                        assetWriterInputWithMediaType: AVMediaTypeAudio
                        outputSettings: audioOutputSettings ];
    //expectsMediaDataInRealTime must be YES on both inputs so the writer keeps up with the live data
    _audioWriterInput.expectsMediaDataInRealTime = YES;
    
    //Add the video and audio writer inputs to the asset writer
    [_assetWriter addInput:_videoWriterInput];
        
    [_assetWriter addInput:_audioWriterInput];
    
    return _assetWriter;
}
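As an aside (my addition, an assumption about your needs rather than part of the original flow): all of this writer machinery exists because we want the raw sample buffers. If you only need a recording on disk, AVCaptureMovieFileOutput replaces the whole AVAssetWriter setup:

//Simpler alternative when per-frame access is not needed
//(the delegate must adopt AVCaptureFileOutputRecordingDelegate)
AVCaptureMovieFileOutput *movieOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([_mediaSession canAddOutput:movieOutput]) {
    [_mediaSession addOutput:movieOutput];
    NSURL *fileURL = [NSURL fileURLWithPath:[self generateFilePathForMovie]];
    [movieOutput startRecordingToOutputFileURL:fileURL recordingDelegate:self];
}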

The delegate method that receives the live sample buffers:

-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    //Log the payload size of the live sample buffer
    NSLog(@"%zu", CMSampleBufferGetTotalSampleSize(sampleBuffer));
   
    if(!CMSampleBufferDataIsReady(sampleBuffer)){
    
        NSLog( @"sample buffer is not ready. Skipping sample" );
        return;
        
    }
    //On the first buffer, start the writer and anchor its timeline
    if (_assetWriter.status == AVAssetWriterStatusUnknown)
    {
        CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        
        [_assetWriter startWriting];
        [_assetWriter startSessionAtSourceTime:startTime];
    }
    
    if(_assetWriter.status == AVAssetWriterStatusFailed){
        
        NSLog(@"error - %@",_assetWriter.error);
        
    }
    //While the writer is running, append each buffer to the matching input
    if(_assetWriter.status == AVAssetWriterStatusWriting){
        //Append audio (only when the input can accept more data)
        if([captureOutput isKindOfClass:[_audioOutput class]] && _audioWriterInput.readyForMoreMediaData){
            
            [_audioWriterInput appendSampleBuffer:sampleBuffer];
        
        }
        //Append video
        if([captureOutput isKindOfClass:[_videoOutput class]] && _videoWriterInput.readyForMoreMediaData){
        
            [_videoWriterInput appendSampleBuffer:sampleBuffer];
        }
    }
}

The button action:

#pragma mark - button action
 
-(void)stopAction:(UIButton *)bt
{
    //Stop capturing
    [_mediaSession stopRunning];
    //Completion handler runs once the writer has finished the file
    [_assetWriter finishWritingWithCompletionHandler:^{
        
        NSString *filePath = [self generateFilePathForMovie];
        
        NSData *data = [NSData dataWithContentsOfFile:filePath];
        
        NSLog(@"%@",filePath);
        
        NSLog(@"data = = = = %ld",data.length);
        //Save to the photo album
        [self saveMedia:filePath];        
 
    }];
    
}
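Apple recommends marking each writer input as finished before calling finishWritingWithCompletionHandler:, so the writer knows no more samples are coming. A hedged refinement of stopAction: (my addition):

//Call these after stopRunning and before finishWritingWithCompletionHandler:
[_videoWriterInput markAsFinished];
[_audioWriterInput markAsFinished];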
 
#pragma mark - save media
 
- (void)saveMedia:(NSString*)urlString{
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:urlString]
                                completionBlock:^(NSURL *assetURL, NSError *error) {
                                    
                                    NSLog(@"%@",assetURL);
                                    if (error && assetURL == nil) {
                                        NSLog(@"Save video fail:%@",error);
                                    } else {
                                        NSLog(@"Save video succeed.");
                                        
                                        if ([[NSFileManager defaultManager] fileExistsAtPath:[self generateFilePathForMovie]]) {
                                            NSError *error;
                                            if ([[NSFileManager defaultManager] removeItemAtPath:[self generateFilePathForMovie] error:&error] == NO) {
                                                NSLog(@"removeitematpath %@ error :%@", [self generateFilePathForMovie], error);
                                            }
                                        }
                                    }
                                }];
}
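For what it's worth, ALAssetsLibrary was deprecated in iOS 9; on iOS 8 and later the Photos framework covers the same ground. A sketch of the equivalent save (my addition; requires #import <Photos/Photos.h>):

- (void)saveMediaWithPhotos:(NSString *)path {
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
        //Create a photo-library asset from the finished movie file
        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:[NSURL fileURLWithPath:path]];
    } completionHandler:^(BOOL success, NSError *error) {
        NSLog(@"Save video %@ %@", success ? @"succeed." : @"fail:", error);
    }];
}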

Putting it together:

- (void)viewDidLoad {
    [super viewDidLoad];
    
    //write
    //Configure the writer before starting the session; otherwise the first sample buffers arrive before the writer is ready and the app crashes
    [self setMediaWriter];
    
    //session
    [self setMediaSession];
    
    //add media layer
    [self setLayer];
    // Do any additional setup after loading the view.
}

Going further:

To write video only, the AVAssetWriterInputPixelBufferAdaptor declared earlier is enough. Inside the delegate method:

#pragma mark - avcapturevideo delegate
 
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    //write
    //Extract the pixel buffer from the sample buffer
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    
    // a very dense way to keep track of the time at which this frame
    // occurs relative to the output stream, but it's just an example!
    
    //Write the frame through the pixel buffer adaptor (here on a fixed 14 fps timeline)
    static int64_t frameNumber = 0;
    if(self.videoWriterInput.readyForMoreMediaData)
        [self.videoAssetWriterPixelBufferAdaptor appendPixelBuffer:imageBuffer withPresentationTime:CMTimeMake(frameNumber, 14)];
    frameNumber++;
    
    /*
    //Alternatively, feed the frame to the image-based preview shown below
    [self receiveVideoFromSampleBuffer:sampleBuffer];
    */
}

If you don't want the system's live preview layer, you can convert each sample buffer to a UIImage and play the video by swapping the image on a UIImageView:

//Convert the sample buffer's pixel buffer to a UIImage and show it in a view
//(assumes the controller has a UIImageView property named imageView)
-(void)receiveVideoFromSampleBuffer:(CMSampleBufferRef)sampleBuffer{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer,0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    
    //Draw the 32BGRA pixels into a bitmap context and grab a CGImage
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,
                                                    width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    
    UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0
                                  orientation:UIImageOrientationRight];
    
    CGImageRelease(newImage);
    //UI updates must happen on the main thread
    [self.imageView performSelectorOnMainThread:@selector(setImage:)
                                     withObject:image waitUntilDone:YES];
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
    
    
}
