web-dev-qa-db-ja.com

ビデオの画像/テキストオーバーレイswift

私はSwiftを使用してビデオの透かし効果の画像オーバーレイを使用しています。これにはAVFoundationを使用していますが、どういうわけか成功しません。

以下はオーバーレイ画像/テキストの私のコードです

    let path = NSBundle.mainBundle().pathForResource("sample_movie", ofType:"mp4")
    let fileURL = NSURL(fileURLWithPath: path!)

    let composition = AVMutableComposition()
    var vidAsset = AVURLAsset(URL: fileURL, options: nil)

    // get video track
    let vtrack =  vidAsset.tracksWithMediaType(AVMediaTypeVideo)
    let videoTrack:AVAssetTrack = vtrack[0] as! AVAssetTrack
    let vid_duration = videoTrack.timeRange.duration
    let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

    var error: NSError?
    let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
    compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero, error: &error)

    compositionvideoTrack.preferredTransform = videoTrack.preferredTransform

    // Watermark Effect
    let size = videoTrack.naturalSize

    let imglogo = UIImage(named: "image.png")
    let imglayer = CALayer()
    imglayer.contents = imglogo?.CGImage
    imglayer.frame = CGRectMake(5, 5, 100, 100)
    imglayer.opacity = 0.6

    // create text Layer
    let titleLayer = CATextLayer()
    titleLayer.backgroundColor = UIColor.whiteColor().CGColor
    titleLayer.string = "Dummy text"
    titleLayer.font = UIFont(name: "Helvetica", size: 28)
    titleLayer.shadowOpacity = 0.5
    titleLayer.alignmentMode = kCAAlignmentCenter
    titleLayer.frame = CGRectMake(0, 50, size.width, size.height / 6)

    let videolayer = CALayer()
    videolayer.frame = CGRectMake(0, 0, size.width, size.height)

    let parentlayer = CALayer()
    parentlayer.frame = CGRectMake(0, 0, size.width, size.height)
    parentlayer.addSublayer(videolayer)
    parentlayer.addSublayer(imglayer)
    parentlayer.addSublayer(titleLayer)

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(1, 30)
    layercomposition.renderSize = size
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)

    // instruction for watermark
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    instruction.layerInstructions = NSArray(object: layerinstruction) as [AnyObject]
    layercomposition.instructions = NSArray(object: instruction) as [AnyObject]

    //  create new file to receive data
    let dirPaths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
    let docsDir: AnyObject = dirPaths[0]
    let movieFilePath = docsDir.stringByAppendingPathComponent("result.mov")
    let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)       

    // use AVAssetExportSession to export video
    let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
    assetExport.outputFileType = AVFileTypeQuickTimeMovie 
    assetExport.outputURL = movieDestinationUrl
    assetExport.exportAsynchronouslyWithCompletionHandler({
        switch assetExport.status{
        case  AVAssetExportSessionStatus.Failed:
            println("failed \(assetExport.error)")
        case AVAssetExportSessionStatus.Cancelled:
            println("cancelled \(assetExport.error)")
        default:
            println("Movie complete")


            // play video
            NSOperationQueue.mainQueue().addOperationWithBlock({ () -> Void in
                self.playVideo(movieDestinationUrl!)
            })
        }
    })    

このコードでは、オーバーレイを実現できません...何が間違っているのかわかりません...

質問:

  • このコードに欠けているものはありますか?または、このコードに問題がありますか?
  • このコードは、録画されたビデオまたはギャラリーからのビデオを含むすべてのビデオでのみ機能しますか?
13
EI Captain v2.0

@ElCaptainによって提供されたコードは機能します。足りないだけです:

    assetExport.videoComposition = layercomposition

AVAssetExportSessionのインスタンス化の直後にこれを追加できます

注:最初に提供されたコードは、ビデオトラックのみをエクスポートし、オーディオトラックはエクスポートしません。オーディオトラックが必要な場合は、compositionvideoTrackを構成した後に次のようなものを追加できます。

let compositionAudioTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    for audioTrack in audioTracks {
        try! compositionAudioTrack.insertTimeRange(audioTrack.timeRange, ofTrack: audioTrack, atTime: kCMTimeZero)
    }
11
Rey Hernandez

Swift 4:で機能するアップデートは次のとおりです。

import UIKit
import AVFoundation
import AVKit
import Photos

class ViewController: UIViewController {

var myurl: URL?

override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.

}

@IBAction func saveVideoTapper(_ sender: Any) {

    let path = Bundle.main.path(forResource: "sample_video", ofType:"mp4")
    let fileURL = NSURL(fileURLWithPath: path!)

    let composition = AVMutableComposition()
    let vidAsset = AVURLAsset(url: fileURL as URL, options: nil)

    // get video track
    let vtrack =  vidAsset.tracks(withMediaType: AVMediaType.video)
    let videoTrack: AVAssetTrack = vtrack[0]
    let vid_timerange = CMTimeRangeMake(start: CMTime.zero, duration: vidAsset.duration)

    let tr: CMTimeRange = CMTimeRange(start: CMTime.zero, duration: CMTime(seconds: 10.0, preferredTimescale: 600))
    composition.insertEmptyTimeRange(tr)

    let trackID:CMPersistentTrackID = CMPersistentTrackID(kCMPersistentTrackID_Invalid)

    if let compositionvideoTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: trackID) {

        do {
            try compositionvideoTrack.insertTimeRange(vid_timerange, of: videoTrack, at: CMTime.zero)
        } catch {
            print("error")
        }

        compositionvideoTrack.preferredTransform = videoTrack.preferredTransform

    } else {
        print("unable to add video track")
        return
    }


    // Watermark Effect
    let size = videoTrack.naturalSize

    let imglogo = UIImage(named: "image.png")
    let imglayer = CALayer()
    imglayer.contents = imglogo?.cgImage
    imglayer.frame = CGRect(x: 5, y: 5, width: 100, height: 100)
    imglayer.opacity = 0.6

    // create text Layer
    let titleLayer = CATextLayer()
    titleLayer.backgroundColor = UIColor.white.cgColor
    titleLayer.string = "Dummy text"
    titleLayer.font = UIFont(name: "Helvetica", size: 28)
    titleLayer.shadowOpacity = 0.5
    titleLayer.alignmentMode = CATextLayerAlignmentMode.center
    titleLayer.frame = CGRect(x: 0, y: 50, width: size.width, height: size.height / 6)


    let videolayer = CALayer()
    videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)

    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
    parentlayer.addSublayer(videolayer)
    parentlayer.addSublayer(imglayer)
    parentlayer.addSublayer(titleLayer)

    let layercomposition = AVMutableVideoComposition()
    layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    layercomposition.renderSize = size
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)

    // instruction for watermark
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: composition.duration)
    let videotrack = composition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
    instruction.layerInstructions = NSArray(object: layerinstruction) as [AnyObject] as! [AVVideoCompositionLayerInstruction]
    layercomposition.instructions = NSArray(object: instruction) as [AnyObject] as! [AVVideoCompositionInstructionProtocol]

    //  create new file to receive data
    let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    let docsDir = dirPaths[0] as NSString
    let movieFilePath = docsDir.appendingPathComponent("result.mov")
    let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)

    // use AVAssetExportSession to export video
    let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
    assetExport?.outputFileType = AVFileType.mov
    assetExport?.videoComposition = layercomposition

    // Check exist and remove old file
    FileManager.default.removeItemIfExisted(movieDestinationUrl as URL)

    assetExport?.outputURL = movieDestinationUrl as URL
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status {
        case AVAssetExportSession.Status.failed:
            print("failed")
            print(assetExport?.error ?? "unknown error")
        case AVAssetExportSession.Status.cancelled:
            print("cancelled")
            print(assetExport?.error ?? "unknown error")
        default:
            print("Movie complete")

            self.myurl = movieDestinationUrl as URL

            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: movieDestinationUrl as URL)
            }) { saved, error in
                if saved {
                    print("Saved")
                }
            }

            self.playVideo()

        }
    })

}


func playVideo() {
    let player = AVPlayer(url: myurl!)
    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.frame = self.view.bounds
    self.view.layer.addSublayer(playerLayer)
    player.play()
    print("playing...")
}



}


extension FileManager {
func removeItemIfExisted(_ url:URL) -> Void {
    if FileManager.default.fileExists(atPath: url.path) {
        do {
            try FileManager.default.removeItem(atPath: url.path)
        }
        catch {
            print("Failed to delete file")
        }
    }
}
}
0
brontea

@Rey Hernandezこれは私を大いに助けてくれました!オーディオアセットをビデオに追加する方法についてさらに明確にしたい場合は、ここにそれらを組み合わせるコードがあります

    let vtrack =  vidAsset.tracksWithMediaType(AVMediaTypeVideo)
    let videoTrack:AVAssetTrack = vtrack[0] 
    let vid_duration = videoTrack.timeRange.duration
    let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

    let atrack =  vidAsset.tracksWithMediaType(AVMediaTypeAudio)
    let audioTrack:AVAssetTrack = atrack[0]
    let audio_duration = audioTrack.timeRange.duration
    let audio_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)

    do {
        let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())

        try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)

        compositionvideoTrack.preferredTransform = videoTrack.preferredTransform



        let compositionAudioTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
        try! compositionAudioTrack.insertTimeRange(audio_timerange, ofTrack: audioTrack, atTime: kCMTimeZero)

        compositionvideoTrack.preferredTransform = audioTrack.preferredTransform

    } catch {
        print(error)
    }
0
spe

私にとって(私があなたのコードで見ているもの)、あなたはparentlayerを画面に追加していません。

CALayer()を作成して、videolayerimglayer、およびtitleLayerを新しいレイヤーに追加しますが、この最後のレイヤーは画面に追加しません。

yourView.layer.addSublayer(parentlayer)

これがお役に立てば幸いです

補足として、回転、縮尺、フォントをコピーして提供される配列UITextViewsに基づいてCATextLayersを作成する関数を次に示します。これらをAVVideoCompositionCoreAnimationToolに提供されるコンテナレイヤーに追加するだけです。

private static func createTextLayer(totalSize: CGSize,
                                        textView: UITextView) -> CATextLayer {
        let textLayer: CACenteredTextLayer = CACenteredTextLayer()
        textLayer.backgroundColor = UIColor.clear
        textLayer.foregroundColor = textView.textColor?.cgColor
        textLayer.masksToBounds = false
        textLayer.isWrapped = true

        let scale: CGFloat = UIScreen.main.scale

        if let font: UIFont = textView.font {
            let upscaledFont: UIFont = font.withSize(font.pointSize * scale)
            let attributedString = NSAttributedString(
                string: textView.text,
                attributes: [NSAttributedString.Key.font: upscaledFont,
                             NSAttributedString.Key.foregroundColor: textView.textColor ?? UIColor.white])
            textLayer.string = attributedString
        }

        // Set text alignment
        let alignment: CATextLayerAlignmentMode
        switch textView.textAlignment {
        case NSTextAlignment.left:
            alignment = CATextLayerAlignmentMode.left
        case NSTextAlignment.center:
            alignment = CATextLayerAlignmentMode.center
        default:
            alignment = CATextLayerAlignmentMode.right
        }
        textLayer.alignmentMode = alignment

        let originalFrame: CGRect = textView.frame

        // Also take scale into consideration
        let targetSize: CGSize = CGSize(width: originalFrame.width * scale,
                                        height: originalFrame.height * scale)

        // The CALayer positioning is inverted on the Y-axes, so apply this
        let Origin: CGPoint = CGPoint(x: originalFrame.Origin.x * scale,
                                      y: (totalSize.height - (originalFrame.Origin.y * scale)) - targetSize.height)

        textLayer.frame = CGRect(x: Origin.x,
                                 y: Origin.y,
                                 width: targetSize.width,
                                 height: targetSize.height)

        // Determine the scale
        textLayer.anchorPoint = CGPoint(x: 0.5,
                                        y: 0.5)

        var newTransform: CATransform3D = CATransform3DMakeScale(textView.transform.xScale,
                                                                 textView.transform.yScale,
                                                                 0)

        // Convert to degrees, invert the amount and convert back to radians to apply
        newTransform = CATransform3DRotate(newTransform,
                                           textView.transform.radiansFor3DTransform,
                                           0,
                                           0,
                                           1)
        textLayer.transform = newTransform

        return textLayer
}

これをCATextLayerのこのサブクラス化と組み合わせて、テキストを垂直方向に中央揃えにします。

final class CACenteredTextLayer: CATextLayer {
    override func draw(in ctx: CGContext) {
        guard let attributedString = string as? NSAttributedString else { return }

        let height = self.bounds.size.height
        let boundingRect: CGRect = attributedString.boundingRect(
            with: CGSize(width: bounds.width,
                         height: CGFloat.greatestFiniteMagnitude),
            options: NSStringDrawingOptions.usesLineFragmentOrigin,
            context: nil)
        let yDiff: CGFloat = (height - boundingRect.size.height) / 2

        ctx.saveGState()
        ctx.translateBy(x: 0.0, y: yDiff)
        super.draw(in: ctx)
        ctx.restoreGState()
    }
}

private extension CGAffineTransform {
    var xScale: CGFloat {
        return sqrt((a*a) + (c*c))
    }

    var yScale: CGFloat {
        return sqrt((b*b) + (d*d))
    }

    var radiansFor3DTransform: CGFloat {
        let radians: CGFloat = atan2(b, a);
        let degrees: CGFloat = -(radians * 180 / CGFloat.pi)
        let convertedRadians: CGFloat = CGFloat(degrees * (CGFloat.pi / 180))
        return convertedRadians
    }
}
0
Yasper