Task:将传单图像合并到传单视频中。
Cases:
Case1
- 按后退按钮（用户将返回应用的传单列表屏幕），在此期间我们将 FlyerSnapShoot 合并到 FlyerVideo，它工作得很好。
- 转到手机图库,我们可以看到其中更新的视频。
Case2
- 按 iPhone Home 按钮时，我做与上面相同的操作，但遇到以下错误。
FAIL = Error Domain=AVFoundationErrorDomain Code=-11800“操作无法完成”UserInfo=0x17266d40 {NSLocalizedDescription=操作无法完成,NSUnderlyingError=0x172b3920“操作无法完成。(OSStatus 错误 -16980。) ", NSLocalizedFailureReason=发生未知错误 (-16980)}
Code:
/// Crops/scales the video at `src`, optionally composites `image` on top of
/// it, and exports the result to `dest` as a QuickTime movie.
///
/// @param src      URL of the source video asset.
/// @param dest     Output URL; must not point at an existing file.
/// @param crop     Crop rectangle; its size becomes the render size.
/// @param scale    Scale factor applied to the video transform (1.0 = none).
/// @param image    Optional overlay image composited above the video.
/// @param callback Invoked with the export status (AVAssetExportSessionStatus)
///                 and the error, if any. May be called on a background queue.
///
/// NOTE(review): AVVideoCompositionCoreAnimationTool renders through Core
/// Animation, which is suspended while the app is in the background. Starting
/// an export that uses the overlay path after the app has been backgrounded
/// fails with AVFoundationErrorDomain -11800 (OSStatus -16980) — start the
/// export while still in the foreground, or omit the animation tool when
/// running in the background.
- (void)modifyVideo:(NSURL *)src destination:(NSURL *)dest crop:(CGRect)crop
              scale:(CGFloat)scale overlay:(UIImage *)image
         completion:(void (^)(NSInteger, NSError *))callback {
    AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:src options:nil];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    AVMutableCompositionTrack *videoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];

    // Clamp to MAX_VIDEO_LENGTH seconds unless the source is shorter.
    CMTime inTime = CMTimeMake(MAX_VIDEO_LENGTH * VIDEOFRAME, VIDEOFRAME);
    if (CMTimeCompare(firstAsset.duration, inTime) < 0) {
        inTime = firstAsset.duration;
    }

    // FIX: `transform` was previously left uninitialized when the asset had
    // no video track, then read below — undefined behavior. Default to the
    // identity transform.
    CGAffineTransform transform = CGAffineTransformIdentity;
    NSError *editError = nil;

    // Insert the video track, preserving the source orientation.
    NSArray *videos = [firstAsset tracksWithMediaType:AVMediaTypeVideo];
    if (videos.count > 0) {
        AVAssetTrack *track = videos[0];
        // FIX: report insertion failures instead of passing error:nil; a
        // silently-failed insert previously surfaced only as an opaque
        // -11800 export error.
        if (![videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, inTime)
                                 ofTrack:track
                                  atTime:kCMTimeZero
                                   error:&editError]) {
            if (callback) callback(AVAssetExportSessionStatusFailed, editError);
            return;
        }
        transform = track.preferredTransform;
        videoTrack.preferredTransform = transform;
    }

    // Insert the audio track, if the source has one.
    NSArray *audios = [firstAsset tracksWithMediaType:AVMediaTypeAudio];
    if (audios.count > 0) {
        if (![audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, inTime)
                                 ofTrack:audios[0]
                                  atTime:kCMTimeZero
                                   error:&editError]) {
            if (callback) callback(AVAssetExportSessionStatusFailed, editError);
            return;
        }
    }

    mixComposition.naturalSize = crop.size;

    // Composition parameters: render at the crop size, VIDEOFRAME fps.
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, VIDEOFRAME);
    videoComposition.renderSize = crop.size;
    videoComposition.renderScale = 1.0;

    AVMutableVideoCompositionInstruction *passThroughInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    passThroughInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, inTime);

    AVMutableVideoCompositionLayerInstruction *passThroughLayer =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

    if (scale != 1.0) {
        // Scale first, then translate so the crop origin lands at (0, 0).
        CGAffineTransform scaleTransform = CGAffineTransformMakeScale(scale, scale);
        CGAffineTransform translateTransform =
            CGAffineTransformTranslate(CGAffineTransformIdentity,
                                       -crop.origin.x,
                                       -crop.origin.y);
        transform = CGAffineTransformConcat(transform, scaleTransform);
        transform = CGAffineTransformConcat(transform, translateTransform);
    }
    [passThroughLayer setTransform:transform atTime:kCMTimeZero];
    passThroughInstruction.layerInstructions = @[ passThroughLayer ];
    videoComposition.instructions = @[ passThroughInstruction ];

    // If an overlay image is given, composite it above the video with a
    // Core Animation layer tree (video below, image on top).
    if (image != nil) {
        CALayer *parentLayer = [CALayer layer];
        parentLayer.frame = CGRectMake(0, 0, crop.size.width, crop.size.height);

        CALayer *videoLayer = [CALayer layer];
        videoLayer.frame = CGRectMake(0, 0, crop.size.width, crop.size.height);
        [parentLayer addSublayer:videoLayer];

        CALayer *imageLayer = [CALayer layer];
        imageLayer.frame = CGRectMake(0, 0, crop.size.width, crop.size.height);
        imageLayer.contents = (id)image.CGImage;
        imageLayer.masksToBounds = YES;
        [parentLayer addSublayer:imageLayer];

        videoComposition.animationTool =
            [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                                                         inLayer:parentLayer];
    }

    // Export asynchronously; the callback receives the final status/error.
    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    exportSession.videoComposition = videoComposition;
    exportSession.outputURL = dest;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.shouldOptimizeForNetworkUse = YES;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // FIX: nil-check the optional completion block before invoking it.
        if (callback) callback(exportSession.status, exportSession.error);
    }];
}
我从 AppDelegate.m 调用这个函数
/// Requests background execution time and runs the pending merge work.
///
/// NOTE(review): if -goingToBg starts *asynchronous* work (such as an
/// AVAssetExportSession export), the task below is ended as soon as
/// -goingToBg returns — before the export finishes — and the suspended
/// export then fails (-11800). In that case, end the background task from
/// the export's completion handler instead of here. Confirm against
/// -goingToBg's implementation.
- (void)applicationDidEnterBackground:(UIApplication *)application
{
    bgTask = [application beginBackgroundTaskWithName:@"MyTask" expirationHandler:^{
        // System is about to expire our grant: end the task immediately.
        // FIX: guard against double-ending if the work block finished first.
        if (bgTask != UIBackgroundTaskInvalid) {
            [application endBackgroundTask:bgTask];
            bgTask = UIBackgroundTaskInvalid;
        }
    }];
    // Start the long-running task off the main thread and return immediately.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self goingToBg];
        // FIX: guard against double-ending if the expiration handler already
        // tore the task down.
        if (bgTask != UIBackgroundTaskInvalid) {
            [application endBackgroundTask:bgTask];
            bgTask = UIBackgroundTaskInvalid;
        }
    });
    NSLog(@"backgroundTimeRemaining: %f", [[UIApplication sharedApplication] backgroundTimeRemaining]);
}