1
現在の時刻を表示するウォーターマークを作成しましたが、毎秒変更されるわけではありません。テキストを毎秒変更するにはどうしたらいいですか?私はセキュリティ映像のようなビデオに時間を割きたい以下は、ビデオの上にテキストを追加するためにうまく動作するコードです。Swift 3で1秒ごとに変化するビデオにテキストウォーターマークを追加するにはどうすればよいですか?
func waterMark(){
let filePath: String = Bundle.main.path(forResource: "Zombie", ofType: "mp4")!
let videoAsset = AVURLAsset(url: URL(fileURLWithPath: filePath), options: nil)
let mixComposition = AVMutableComposition()
let compositionVideoTrack: AVMutableCompositionTrack? = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
try? compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
compositionVideoTrack?.preferredTransform = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0].preferredTransform
let videoSize: CGSize = clipVideoTrack.naturalSize
let aLayer = CALayer()
aLayer.contents = (Any).self
aLayer.frame = CGRect(x: videoSize.width - 65, y: videoSize.height - 75, width: 57, height: 57)
aLayer.opacity = 0.65
let parentLayer = CALayer()
let videoLayer = CALayer()
parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(aLayer)
let titleLayer = CATextLayer()
let dateFormatter = DateFormatter()
dateFormatter.timeStyle = .medium
// titleLayer.backgroundColor = UIColor.black.cgColor
titleLayer.string = String((dateFormatter.string(from: Date() as Date)))
titleLayer.font = UIFont.systemFont(ofSize: 100)
titleLayer.shadowOpacity = 0.5
titleLayer.frame = parentLayer.frame
titleLayer.display()
//You may need to adjust this for proper display
parentLayer.addSublayer(titleLayer as? CALayer ?? CALayer())
let videoComp = AVMutableVideoComposition()
videoComp.renderSize = videoSize
videoComp.frameDuration = CMTimeMake(1, 30)
videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
let videoTrack = mixComposition.tracks(withMediaType: AVMediaTypeVideo)[0]
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
instruction.layerInstructions = [layerInstruction]
videoComp.instructions = [instruction]
let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
//AVAssetExportPresetPassthrough
assetExport?.videoComposition = videoComp
var paths: [Any] = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let documentsDirectory: String = paths[0] as? String ?? ""
let VideoName: String = "\(documentsDirectory)/mynewwatermarkedvideo.mp4"
let exportUrl = URL(fileURLWithPath: VideoName)
if FileManager.default.fileExists(atPath: VideoName) {
print(VideoName)
try? FileManager.default.removeItem(atPath: VideoName)
print("file found again")
}
assetExport?.outputFileType = AVFileTypeQuickTimeMovie
assetExport?.outputURL = exportUrl
assetExport?.shouldOptimizeForNetworkUse = true
//[strRecordedFilename setString: exportPath];
assetExport?.exportAsynchronously(completionHandler: {() -> Void in
DispatchQueue.main.async(execute: {() -> Void in
})
})
print("Completed")
}
///call
waterMark()
こんにちはMwcsMac、ありがとうございます。この方法は、リアルタイムで処理を行っていればうまくいくでしょう。残念ながら、私の知る限りNSTimersはオフラインレンダリングでは動作しません。 –