A recent project needed in-app video recording, so I used UIImagePickerController with a required resolution of 640x480. No matter which imagePickerController.videoQuality value I tried, though, the exported video never actually came out at 640x480, and when the device was rotated while recording, the exported video's orientation was wrong as well.
After combing through quite a few sites and articles, I found that developers abroad handle this by running the captured video through AVFoundation after capture to compress it and straighten out the orientation. I tidied up that code and made a few adjustments of my own, and it really shows how powerful AVFoundation is. The code below belongs in the delegate callback that fires once recording finishes; the sketch right after this paragraph shows the picker setup and the callback in question.
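For context, here is a minimal sketch of how the picker might be set up and where the processing is triggered. It assumes the presenting view controller declares UIImagePickerControllerDelegate and UINavigationControllerDelegate conformance; the startRecording method name is just for illustration, and only the UIImagePickerController calls themselves are standard API.

#import <UIKit/UIKit.h>
#import <MobileCoreServices/MobileCoreServices.h>

// Present the camera restricted to movie capture.
- (void)startRecording {
    if (![UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
        return;
    }
    UIImagePickerController *picker = [[UIImagePickerController alloc] init];
    picker.sourceType = UIImagePickerControllerSourceTypeCamera;
    picker.mediaTypes = @[(NSString *)kUTTypeMovie];
    // Asking for 640x480 here alone was not enough in practice,
    // hence the AVFoundation pass in the callback below.
    picker.videoQuality = UIImagePickerControllerQualityType640x480;
    picker.delegate = self;
    [self presentViewController:picker animated:YES completion:nil];
}

// Capture finished; everything that follows in this post is the body of this method.
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
    [picker dismissViewControllerAnimated:YES completion:nil];
    // ... AVFoundation processing as shown below ...
}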
AVAsset *firstAsset = [AVAsset assetWithURL:[info objectForKey:UIImagePickerControllerMediaURL]];
if (firstAsset != nil && [[firstAsset tracksWithMediaType:AVMediaTypeVideo] count] > 0) {
    // Create an AVMutableComposition. This object will hold our AVMutableCompositionTracks.
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

    // VIDEO TRACK
    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
                        ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                         atTime:kCMTimeZero
                          error:nil];

    AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);

    if ([[firstAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0) {
        // AUDIO TRACK
        AVMutableCompositionTrack *firstAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [firstAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
                                 ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                  atTime:kCMTimeZero
                                   error:nil];
    } else {
        NSLog(@"warning: video has no audio");
    }
    // FIXING ORIENTATION //
    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
    AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
    BOOL isFirstAssetPortrait_ = NO;
    // The track's preferredTransform encodes how the recorded frames must be
    // rotated for display; inspect it to work out the capture orientation.
    CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
    if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
        // 90° rotation: recorded in portrait
        FirstAssetOrientation_ = UIImageOrientationRight;
        isFirstAssetPortrait_ = YES;
    }
    if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
        // -90° rotation: portrait, upside down relative to the case above
        FirstAssetOrientation_ = UIImageOrientationLeft;
        isFirstAssetPortrait_ = YES;
    }
    if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
        // identity transform: landscape
        FirstAssetOrientation_ = UIImageOrientationUp;
    }
    if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
        // 180° rotation: landscape, the other way up
        FirstAssetOrientation_ = UIImageOrientationDown;
    }
    // Scale so the footage fits the 640x480 (or 480x640 for portrait) render size
    // set below: the landscape case fits the width, the portrait case fits the
    // height, which becomes the rendered width once the rotation transform is applied.
    CGFloat FirstAssetScaleToFitRatio = 640.0 / FirstAssetTrack.naturalSize.width;
    if (isFirstAssetPortrait_) {
        FirstAssetScaleToFitRatio = 480.0 / FirstAssetTrack.naturalSize.height;
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
        [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
    } else {
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
        [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
    }
    [FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
    MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, nil];

    AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
    MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
    MainCompositionInst.frameDuration = CMTimeMake(1, 30);
    // Swap width and height for portrait footage so the output matches the
    // orientation the user actually recorded in.
    if (isFirstAssetPortrait_) {
        MainCompositionInst.renderSize = CGSizeMake(480.0, 640.0);
    } else {
        MainCompositionInst.renderSize = CGSizeMake(640.0, 480.0);
    }
    // videoPath is the destination file path for the processed video, prepared elsewhere;
    // nothing may already exist at that path or the export will fail.
    NSURL *url = [NSURL fileURLWithPath:videoPath];
    // The 640x480 preset matches the required output resolution. The completion block
    // keeps the session alive, so a local variable is enough here.
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset640x480];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.videoComposition = MainCompositionInst;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            if (exporter.status == AVAssetExportSessionStatusCompleted) {
                // Handle the successfully exported video here
            } else {
                NSLog(@"error fixing orientation: %@", exporter.error);
            }
        });
    }];
}
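The snippet above uses a videoPath that is prepared elsewhere. As a rough sketch (writing into NSTemporaryDirectory and the converted-video.mp4 file name are my own choices for illustration, not something the original code prescribes), it could be set up like this before the export starts:

// Destination path for the processed video; remove any stale file first,
// because AVAssetExportSession fails if something already exists at the output URL.
NSString *videoPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"converted-video.mp4"];
if ([[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
    [[NSFileManager defaultManager] removeItemAtPath:videoPath error:nil];
}

In the completed branch of the completion handler, one option is to hand the file to UISaveVideoAtPathToSavedPhotosAlbum(videoPath, nil, nil, nil) so it lands in the photo library, or to pass it on to whatever upload code the project uses.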