I have an AVAsset with an AVAssetTrack whose size is, for example, (width = 1920, height = 1080). I need to fit this asset to a given screen size, for example (width = 320, height = 568). If the asset is in landscape orientation it should be rotated 90 degrees; if it is square, black bars should be added at the top and bottom. The code below handles most of the first step, transforming the AVAsset with a CGAffineTransform to fit the device screen, but the resulting rotated landscape asset ends up slightly zoomed in:
- (void)changeAsset:(AVAsset *)asset savetoURL:(NSURL *)toURL withSize:(CGSize)toSize offsetRatioPoint:(CGPoint *)offsetRatioPoint completion:(void (^)(NSURL *in_url, NSError *error))handler
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!videoTrack)
    {
        if (handler)
            handler(nil, [NSError errorWithDomain:@"com.myapp.error" code:-1 userInfo:@{@"message" : @"there are no video tracks in asset"}]);
        return;
    }
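    // Pick a uniform scale (from the 4/3-enlarged natural size) that fills the
    // target size; the dimension that overflows is cropped by the translation below.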
    CGFloat newHeight = [videoTrack naturalSize].height / 3 * 4;
    CGFloat newWidth = [videoTrack naturalSize].width / 3 * 4;
    const CGFloat videoAspectRatio = newWidth / newHeight;
    const CGFloat toSizeAspectRatio = toSize.width / toSize.height;
    CGFloat scale = 1.f;
    if (videoAspectRatio > toSizeAspectRatio)
    {
        scale = toSize.height / newHeight;
    }
    else
    {
        scale = toSize.width / newWidth;
    }
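    // Translate along the overflowing axis so the scaled video is either
    // centered or shifted by the caller-supplied offset ratio.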
    CGAffineTransform scaleTrans = CGAffineTransformMakeScale(scale, scale);
    CGAffineTransform translateTrans = CGAffineTransformIdentity;
    if (videoAspectRatio > toSizeAspectRatio)
    {
        if (offsetRatioPoint)
        {
            const CGFloat dx = offsetRatioPoint->x * newWidth * scale;
            translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
        }
        else
        {
            const CGFloat dx = 0.5f * (newWidth * scale - toSize.width);
            translateTrans = CGAffineTransformMakeTranslation(-dx, 0.f);
        }
    }
    else
    {
        if (offsetRatioPoint)
        {
            const CGFloat dy = offsetRatioPoint->y * newHeight * scale;
            translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
        }
        else
        {
            const CGFloat dy = 0.5f * (newHeight * scale - toSize.height);
            translateTrans = CGAffineTransformMakeTranslation(0.f, -dy);
        }
    }
    CGAffineTransform t1 = CGAffineTransformTranslate(translateTrans, toSize.height, -scale * toSize.width);
    // Rotate transformation
    CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
    CGAffineTransform finalTrans = CGAffineTransformConcat(scaleTrans, t2);
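    // Wrap the transform in a video composition rendered at the target size.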
    AVMutableVideoComposition *videoComposition = [[AVMutableVideoComposition alloc] init];
    videoComposition.renderSize = toSize;
    int32_t frameRate = 30;
    videoComposition.frameDuration = CMTimeMake(1, frameRate);
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [layerInstruction setTransform:finalTrans atTime:kCMTimeZero];
    instruction.layerInstructions = @[layerInstruction];
    videoComposition.instructions = @[instruction];
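    // Export as MPEG-4, overwriting any file already at the destination path.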
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset640x480];
    exporter.videoComposition = videoComposition;
    exporter.shouldOptimizeForNetworkUse = YES;
    NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:kCroppedFileName];
    if (toURL)
        exportPath = toURL.path;
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath] == YES)
        [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
    __block NSURL *outURL = [NSURL fileURLWithPath:exportPath];
    exporter.outputURL = outURL;
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.timeRange = instruction.timeRange;
    NSLog(@"%@", exportPath);
    [exporter exportAsynchronouslyWithCompletionHandler:^(void) {
        // Propagate export failures instead of returning a dead URL.
        if (exporter.status != AVAssetExportSessionStatusCompleted)
        {
            if (handler)
                handler(nil, exporter.error);
            return;
        }
        if (!toURL)
        {
            if ([[NSFileManager defaultManager] fileExistsAtPath:[GlobalConst fullMoviePath]] == YES)
                [[NSFileManager defaultManager] removeItemAtPath:[GlobalConst fullMoviePath] error:nil];
            NSError *error;
            if (![[NSFileManager defaultManager] moveItemAtPath:exportPath toPath:[GlobalConst fullMoviePath] error:&error]) {
                NSLog(@"Error %@", error);
            }
            outURL = [NSURL fileURLWithPath:[GlobalConst fullMoviePath]];
        }
        NSLog(@"%@", outURL);
        if (handler)
            handler(outURL, nil);
    }];
}
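For reference, the plain aspect-fit math I expect for a 90-degree-rotated landscape track looks like this (a minimal sketch only; `rotatedSize` and `fitScale` are hypothetical locals, and it ignores the 4/3 factor my method applies above):

    CGSize naturalSize = videoTrack.naturalSize; // e.g. 1920 x 1080
    // After a 90-degree rotation the width and height swap places.
    CGSize rotatedSize = CGSizeMake(naturalSize.height, naturalSize.width);
    // Aspect-fit: the smaller ratio guarantees both dimensions fit inside toSize.
    CGFloat fitScale = MIN(toSize.width / rotatedSize.width,
                           toSize.height / rotatedSize.height);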
That is what I have tried. Thanks in advance for any advice.