打开APP
userphoto
未登录

开通VIP,畅享免费电子书等14项超值服务

开通VIP
ios录制视频并写视频文件
转自:http://flhs-wdw.blog.sohu.com/207300574.html
iphone提供了AVFoundation库来方便的操作多媒体设备,AVAssetWriter这个类可以方便的将图像和音频写成一个完整的视频文件。甚至将整个应用的操作录制下来,也不是什么困难的事情。
这里先说一下如何将录像的视频写到指定文件中去:
首先准备好AVCaptureSession,当录制开始后,可以控制调用相关回调来取音视频的每一帧数据。
[cpp] view plaincopy
// Configure an AVCaptureSession (640x480 preset) that delivers raw video and
// audio sample buffers to `self` so they can be handed to an AVAssetWriter.
// NOTE(review): delegate callbacks run on the main queue here; for production
// use a dedicated serial queue so capture does not contend with UI work.
NSError *error = nil;
session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
[session setSessionPreset:AVCaptureSessionPreset640x480];

// Create the asset writer and its inputs before the first buffer arrives.
[self initVideoAudioWriter];

// Default camera and microphone inputs.
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
AVCaptureDevice *audioDevice1 = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput1 = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice1 error:&error];

// Video frames as BGRA pixel buffers; drop late frames instead of queueing
// them, which keeps latency bounded while recording.
videoOutput = [[AVCaptureVideoDataOutput alloc] init];
[videoOutput setAlwaysDiscardsLateVideoFrames:YES];
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                          forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
[videoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

// Audio output needs no settings on iOS (the original text contained a
// garbled copy of the video pixel-format settings here, which was invalid).
audioOutput = [[AVCaptureAudioDataOutput alloc] init];
[audioOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

[session addInput:videoInput];
[session addInput:audioInput1];
[session addOutput:videoOutput];
[session addOutput:audioOutput];
[session commitConfiguration];
[session startRunning];
回调函数:
[cpp] view plaincopy
// AVCapture{Video,Audio}DataOutput delegate callback. Called once per captured
// sample buffer; starts the writer session on the very first buffer (anchored
// at that buffer's timestamp so audio and video stay in sync), then routes
// video buffers to videoWriterInput and audio buffers to audioWriterInput.
// After FrameCount frames the writer is closed via -closeVideoWriter.
// Pre-ARC code: an explicit autorelease pool bounds per-frame temporaries.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    static int frame = 0;
    CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

    // Start writing exactly once.
    if (frame == 0 && videoWriter.status != AVAssetWriterStatusWriting) {
        [videoWriter startWriting];
        [videoWriter startSessionAtSourceTime:lastSampleTime];
    }

    if (captureOutput == videoOutput) {
        // Status > Writing means Completed / Failed / Cancelled: stop appending.
        if (videoWriter.status > AVAssetWriterStatusWriting) {
            NSLog(@"Warning: writer status is %ld", (long)videoWriter.status);
            if (videoWriter.status == AVAssetWriterStatusFailed)
                NSLog(@"Error: %@", videoWriter.error);
            [pool drain];  // the original leaked the pool on this early return
            return;
        }
        if ([videoWriterInput isReadyForMoreMediaData]) {
            if (![videoWriterInput appendSampleBuffer:sampleBuffer])
                NSLog(@"Unable to write to video input");
            else
                NSLog(@"already write video");
        }
    }
    else if (captureOutput == audioOutput) {
        if (videoWriter.status > AVAssetWriterStatusWriting) {
            NSLog(@"Warning: writer status is %ld", (long)videoWriter.status);
            if (videoWriter.status == AVAssetWriterStatusFailed)
                NSLog(@"Error: %@", videoWriter.error);
            [pool drain];
            return;
        }
        if ([audioWriterInput isReadyForMoreMediaData]) {
            if (![audioWriterInput appendSampleBuffer:sampleBuffer])
                NSLog(@"Unable to write to audio input");
            else
                NSLog(@"already write audio");
        }
    }

    // NOTE(review): `frame` counts audio and video buffers together, so
    // FrameCount is reached faster than FrameCount video frames — confirm
    // this is the intended stop condition.
    if (frame == FrameCount) {
        [self closeVideoWriter];
    }
    frame++;
    [pool drain];
}
剩下的工作就是初始化AVAssetWriter,包括音频与视频输入输出:
[cpp] view plaincopy
// Create the AVAssetWriter plus its H.264 video input and AAC audio input.
// Must be called once, before the capture session delivers any buffers.
// NOTE(review): the 480x320 output size does not match the 640x480 capture
// preset — the encoder will scale; confirm this is intended.
- (void)initVideoAudioWriter
{
    CGSize size = CGSizeMake(480, 320);
    NSString *betaCompressionDirectory =
        [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.mp4"];
    NSError *error = nil;

    // Remove any previous recording: AVAssetWriter fails if the file exists.
    unlink([betaCompressionDirectory UTF8String]);

    // ---- video writer ----
    // The file extension is .mp4, so use the MPEG-4 container (the original
    // passed AVFileTypeQuickTimeMovie, mismatching the extension).
    // NOTE(review): if `videoWriter` is a retain property this alloc/init
    // over-retains under MRC; balance with a release or autorelease.
    self.videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                 fileType:AVFileTypeMPEG4
                                                    error:&error];
    NSParameterAssert(videoWriter);
    if (error)
        NSLog(@"error = %@", [error localizedDescription]);

    // ~128 kbit/s average video bitrate.
    NSDictionary *videoCompressionProps =
        [NSDictionary dictionaryWithObjectsAndKeys:
            [NSNumber numberWithDouble:128.0 * 1024.0], AVVideoAverageBitRateKey,
            nil];
    NSDictionary *videoSettings =
        [NSDictionary dictionaryWithObjectsAndKeys:
            AVVideoCodecH264, AVVideoCodecKey,
            [NSNumber numberWithInt:size.width], AVVideoWidthKey,
            [NSNumber numberWithInt:size.height], AVVideoHeightKey,
            videoCompressionProps, AVVideoCompressionPropertiesKey,
            nil];
    self.videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                               outputSettings:videoSettings];
    NSParameterAssert(videoWriterInput);
    // Real-time capture: the input must not stall waiting for media data.
    videoWriterInput.expectsMediaDataInRealTime = YES;

    // Pixel-buffer adaptor, only needed when appending CVPixelBuffers directly.
    // NOTE(review): capture delivers 32BGRA but this requests 32ARGB — confirm
    // before switching to adaptor-based appends.
    NSDictionary *sourcePixelBufferAttributesDictionary =
        [NSDictionary dictionaryWithObjectsAndKeys:
            [NSNumber numberWithInt:kCVPixelFormatType_32ARGB],
            kCVPixelBufferPixelFormatTypeKey,
            nil];
    self.adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                         sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    if ([videoWriter canAddInput:videoWriterInput])
        NSLog(@"I can add this input");
    else
        NSLog(@"i can't add this input");

    // ---- audio writer ----
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    // AAC, mono, 44.1 kHz, 64 kbit/s. Wrong keys here typically produce a
    // silent audio track rather than an error.
    NSDictionary *audioOutputSettings =
        [NSDictionary dictionaryWithObjectsAndKeys:
            [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
            [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
            [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
            [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
            [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
            nil];
    audioWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                           outputSettings:audioOutputSettings] retain];
    audioWriterInput.expectsMediaDataInRealTime = YES;

    // Attach both inputs to the writer.
    [videoWriter addInput:audioWriterInput];
    [videoWriter addInput:videoWriterInput];
}
这里音频的参数一定要注意,如果填错了,可能会没声音,这个问题折磨了我不少时间。这样,当回调函数开始执行后,就会调用写视频和音频的函数,文件就被写到指定位置去了。如果想加些滤镜效果,只要有好的图像处理算法,针对图像每一帧进行处理就可以了。
本站仅提供存储服务,所有内容均由用户发布,如发现有害或侵权内容,请点击举报
打开APP,阅读全文并永久保存 查看更多类似文章
猜你喜欢
类似文章
【热】打开小程序,算一算2024你的财运
Objective
Objective-C基础教程读书笔记(8)
IOS- Json
FMDB官方使用文档
用objective-c 实现常用算法(冒泡、选择、快速、插入)
iOS开发 录音以及播放 AVAudioRecorder
更多类似文章 >>
生活服务
热点新闻
分享 收藏 导长图 关注 下载文章
绑定账号成功
后续可登录账号畅享VIP特权!
如果VIP功能使用有故障,
可点击这里联系客服!

联系客服