@MTattin
Created November 9, 2016 20:27
Swift3: Save a video to the camera roll ref: http://qiita.com/MTattin/items/ebb321adabe5282f0ebf
import UIKit
import AVFoundation
import Photos
///
/// Sample controller for saving to the photo album
///
class SaveAlbumSample: NSObject {
    // MARK: ---------------------------------------- static
    ///
    /// Output file path (as a string)
    ///
    static let ExportFileStr: String = NSHomeDirectory() + "/Documents/sample.mov"
    // MARK: ---------------------------------------- func
    ///
    /// Fixed-output sample
    ///
    static func SaveStartSample2(a: AVURLAsset, b: AVURLAsset) {
        do {
            ///
            /// Create the composition
            ///
            let mixCmp: AVMutableComposition = AVMutableComposition()
            ///
            /// Add video track A
            ///
            /// kCMPersistentTrackID_Invalid lets the track ID be assigned automatically
            ///
            let cmpTrcA: AVMutableCompositionTrack = mixCmp.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
            try cmpTrcA.insertTimeRange(CMTimeRangeMake(kCMTimeZero, a.duration), of: a.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
            ///
            /// Add video track B
            ///
            /// kCMPersistentTrackID_Invalid lets the track ID be assigned automatically
            ///
            let cmpTrcB: AVMutableCompositionTrack = mixCmp.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
            try cmpTrcB.insertTimeRange(CMTimeRangeMake(kCMTimeZero, b.duration), of: b.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
            ///
            /// Create the video composition instruction
            ///
            /// Its time range must cover the longer of the two assets
            ///
            let mainIns: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction.init()
            mainIns.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMaximum(a.duration, b.duration))
            mainIns.backgroundColor = UIColor.darkGray.cgColor
            ///
            /// Create layer instruction A (scale, translate, and fade track A)
            ///
            let lyrInsA: AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction.init(assetTrack: cmpTrcA)
            let scaleA: CGAffineTransform = CGAffineTransform.init(scaleX: 1.5, y: 1.5)
            let moveA: CGAffineTransform = CGAffineTransform.init(translationX: 100, y: 100)
            lyrInsA.setOpacity(0.5, at: kCMTimeZero)
            lyrInsA.setTransform(scaleA.concatenating(moveA), at: kCMTimeZero)
            ///
            /// Create layer instruction B (scale and fade track B)
            ///
            let lyrInsB: AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction.init(assetTrack: cmpTrcB)
            let scaleB: CGAffineTransform = CGAffineTransform.init(scaleX: 0.8, y: 0.8)
            lyrInsB.setOpacity(0.5, at: kCMTimeZero)
            lyrInsB.setTransform(scaleB, at: kCMTimeZero)
            ///
            /// Create the text layer
            ///
            /// CATextLayer takes its point size from fontSize, not from the
            /// UIFont assigned to font, so set fontSize explicitly
            ///
            let lyrText: CATextLayer = CATextLayer()
            lyrText.string = "TextLayer"
            lyrText.font = UIFont.systemFont(ofSize: 300) as CFTypeRef
            lyrText.fontSize = 300
            lyrText.foregroundColor = UIColor.yellow.cgColor
            lyrText.opacity = 0.5
            lyrText.shadowOpacity = 1
            lyrText.alignmentMode = kCAAlignmentCenter
            lyrText.frame = CGRect.init(x: 0, y: 0, width: UIScreen.main.bounds.size.width * 2, height: UIScreen.main.bounds.size.height * 2 * 0.5)
            ///
            /// Create the parent layer and the video layer
            ///
            let lyrParent: CALayer = CALayer()
            lyrParent.backgroundColor = UIColor.clear.cgColor
            lyrParent.frame = CGRect.init(x: 0, y: 0, width: UIScreen.main.bounds.size.width * 2, height: UIScreen.main.bounds.size.height * 2)
            let lyrVideo: CALayer = CALayer()
            lyrVideo.frame = CGRect.init(x: 0, y: 0, width: UIScreen.main.bounds.size.width * 2, height: UIScreen.main.bounds.size.height * 2)
            lyrParent.addSublayer(lyrVideo)
            lyrParent.addSublayer(lyrText)
            ///
            /// Attach the layer instructions to the instruction
            ///
            mainIns.layerInstructions = [ lyrInsA, lyrInsB ]
            ///
            /// Create the video composition
            ///
            let mainCmp: AVMutableVideoComposition = AVMutableVideoComposition.init()
            mainCmp.instructions = [ mainIns ]
            mainCmp.frameDuration = CMTimeMake(1, 30)
            mainCmp.renderSize = CGSize.init(width: UIScreen.main.bounds.size.width * 2, height: UIScreen.main.bounds.size.height * 2)
            mainCmp.animationTool = AVVideoCompositionCoreAnimationTool.init(postProcessingAsVideoLayer: lyrVideo, in: lyrParent)
            ///
            /// Output file setup
            ///
            ///
            /// Remove the file if it already exists
            ///
            if FileManager.default.fileExists(atPath: SaveAlbumSample.ExportFileStr) {
                try FileManager.default.removeItem(atPath: SaveAlbumSample.ExportFileStr)
            }
            ///
            /// Create the export session and save to the camera roll
            ///
            /// ```
            /// AVF_EXPORT NSString *const AVAssetExportPresetLowQuality NS_AVAILABLE(10_11, 4_0)
            /// AVF_EXPORT NSString *const AVAssetExportPresetMediumQuality NS_AVAILABLE(10_11, 4_0)
            /// AVF_EXPORT NSString *const AVAssetExportPresetHighestQuality NS_AVAILABLE(10_11, 4_0)
            /// AVF_EXPORT NSString *const AVAssetExportPreset640x480 NS_AVAILABLE(10_7, 4_0)
            /// AVF_EXPORT NSString *const AVAssetExportPreset960x540 NS_AVAILABLE(10_7, 4_0)
            /// AVF_EXPORT NSString *const AVAssetExportPreset1280x720 NS_AVAILABLE(10_7, 4_0)
            /// AVF_EXPORT NSString *const AVAssetExportPreset1920x1080 NS_AVAILABLE(10_7, 5_0)
            /// AVF_EXPORT NSString *const AVAssetExportPreset3840x2160 NS_AVAILABLE(10_10, 9_0)
            /// ```
            ///
            let exporter: AVAssetExportSession = AVAssetExportSession(asset: mixCmp, presetName: AVAssetExportPresetHighestQuality)!
            exporter.outputURL = URL.init(fileURLWithPath: SaveAlbumSample.ExportFileStr)
            exporter.videoComposition = mainCmp
            exporter.outputFileType = AVFileTypeQuickTimeMovie
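            ///
            /// Hedged addition, not in the original gist: writing the moov atom
            /// up front makes the exported file friendlier to progressive
            /// download or streaming later on
            ///
            exporter.shouldOptimizeForNetworkUse = true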
            exporter.exportAsynchronously(completionHandler: {
                ///
                /// Handle the result on the main thread
                ///
                DispatchQueue.main.async(execute: {
                    if exporter.status == AVAssetExportSessionStatus.completed {
                        PHPhotoLibrary.shared().performChanges({
                            _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exporter.outputURL!)
                        }, completionHandler: { (success, err) in
                            if success {
                                print("Save succeeded!")
                            } else {
                                print("Save failed! \(String(describing: err))")
                            }
                        })
                    } else {
                        print("Export failed \(exporter.status) \(String(describing: exporter.error))")
                    }
                })
            })
        } catch let e as NSError {
            print("Exception occurred \(e) \(e.localizedDescription)")
            return
        }
    }
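    // MARK: ---------------------------------------- note
    ///
    /// Sketch added for illustration, not part of the original gist:
    /// PHPhotoLibrary.performChanges fails unless the user has granted photo
    /// library access, so a caller would normally request authorization first.
    /// Assumes NSPhotoLibraryUsageDescription is present in Info.plist.
    ///
    static func RequestPhotosAuthorizationSample(_ completion: @escaping (Bool) -> Void) {
        PHPhotoLibrary.requestAuthorization { status in
            DispatchQueue.main.async {
                completion(status == .authorized)
            }
        }
    }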
}
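///
/// Usage sketch (hypothetical caller, not in the original gist). The resource
/// names "sampleA.mov" and "sampleB.mov" are assumptions; substitute any two
/// movies bundled with the app.
///
func saveAlbumUsageExample() {
    guard
        let urlA = Bundle.main.url(forResource: "sampleA", withExtension: "mov"),
        let urlB = Bundle.main.url(forResource: "sampleB", withExtension: "mov")
    else {
        return
    }
    SaveAlbumSample.RequestPhotosAuthorizationSample { granted in
        guard granted else { return }
        SaveAlbumSample.SaveStartSample2(a: AVURLAsset(url: urlA), b: AVURLAsset(url: urlB))
    }
}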