@kboy-silvergym
Created September 13, 2018 07:22
Save a video after muting it and attaching a logo image.
import AVFoundation
import Hydra
import Photos
import UIKit

struct VideoUtils {

    // File names for the exported videos
    private static let logoVideoName = "logo.mp4"
    private static let muteVideoName = "mute.mp4"

    // Processing type
    enum ProcessingType {
        case mute
        case attachLogo
    }

    // Attach the logo to the bottom-right corner of the video.
    // Reference: https://qiita.com/masapp/items/dd0589d2a894a2a32b82
    // The audio may also be turned off.
    static func createLogoPrintedVideo(path: String, isMuted: Bool) -> Promise<String> {
        let name = logoVideoName
        var types: [ProcessingType] = [.attachLogo]
        if isMuted {
            types.append(.mute)
        }
        return processMovie(path: path, types: types, exportVideoName: name)
    }
    // Return the path of a video with its audio removed.
    // Used when posting a video with the audio turned off.
    static func createMutedVideo(path: String) -> Promise<String> {
        let name = muteVideoName
        return processMovie(path: path, types: [.mute], exportVideoName: name)
    }
    // Process the video.
    // Adapted from https://qiita.com/tastas/items/225eb3c6aa864d950f65
    private static func processMovie(path: String, types: [ProcessingType], exportVideoName: String) -> Promise<String> {
        return Promise<String>(in: .background, { resolve, reject, _ in
            let baseMovieURL = URL(fileURLWithPath: path)
            let videoAsset = AVURLAsset(url: baseMovieURL, options: nil)
            let mixComposition: AVMutableComposition = AVMutableComposition()

            // MARK: - Video composition ---------------
            guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else {
                reject(ComposeVideoError.videoTrackIsNil)
                return
            }
            // Create the composition track for the base video
            let compositionVideoTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid)
            // Set the video time range
            try! compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoTrack, at: kCMTimeZero)
            // Preserve the original orientation
            compositionVideoTrack.preferredTransform = videoAsset.tracks(withMediaType: AVMediaType.video)[0].preferredTransform

            // MARK: - Audio composition ----------------------
            if !types.contains(.mute),
                let audioTrack = videoAsset.tracks(withMediaType: AVMediaType.audio).first {
                // Create the composition track for the base audio
                let compositionAudioTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
                // Set the audio time range
                try! compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: audioTrack, at: kCMTimeZero)
            }
            // ----------------------------------------------
            // Get the video size
            let videoSize: CGSize = videoTrack.naturalSize
            // Create the video composition used for compositing
            let videoComp: AVMutableVideoComposition = AVMutableVideoComposition()
            videoComp.renderSize = videoSize
            videoComp.frameDuration = CMTimeMake(1, 30)

            if types.contains(.attachLogo) {
                // Create a CALayer for the logo
                let logoImage = #imageLiteral(resourceName: "video_logomark")
                let logoLayer = CALayer()
                logoLayer.contents = logoImage.cgImage
                logoLayer.contentsGravity = kCAGravityResizeAspect
                logoLayer.frame = CGRect(x: videoSize.width * 7 / 12,
                                         y: videoSize.width / 48,
                                         width: videoSize.width / 3,
                                         height: videoSize.width / 6)
                // Create the parent layer: the video layer with the logo layer on top
                let parentLayer = CALayer()
                let videoLayer = CALayer()
                parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
                videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
                parentLayer.addSublayer(videoLayer)
                parentLayer.addSublayer(logoLayer)
                videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
            }
            // Set the instructions on the video composition
            let instruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
            instruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
            let layerInstruction: AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
            instruction.layerInstructions = [layerInstruction]
            videoComp.instructions = [instruction]

            // Create the export session from the composition
            let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
            // Apply the video composition
            assetExport?.videoComposition = videoComp
            // Configure the export file
            let videoName = exportVideoName
            let exportPath: String = NSTemporaryDirectory() + "/" + videoName
            let exportUrl: URL = URL(fileURLWithPath: exportPath)
            assetExport?.outputFileType = AVFileType.mp4
            assetExport?.outputURL = exportUrl
            assetExport?.shouldOptimizeForNetworkUse = true
            // Remove any existing file at the export path
            if FileManager.default.fileExists(atPath: exportPath) {
                try? FileManager.default.removeItem(atPath: exportPath)
            }
            assetExport?.exportAsynchronously(completionHandler: {
                if let error = assetExport?.error {
                    reject(error)
                } else {
                    resolve(exportPath)
                }
            })
        })
    }
    static func checkAuthorizationAndSaveVideo(path: String) -> Promise<URL> {
        return checkAuthorization().then { granted in
            if granted {
                return saveVideo(path: path)
            } else {
                return Promise<URL>(rejected: ClientError.cameraRollAccess)
            }
        }
    }
    /// Check access to the camera roll.
    private static func checkAuthorization() -> Promise<Bool> {
        return Promise<Bool>(in: .background, { resolve, _, _ in
            let status = PHPhotoLibrary.authorizationStatus()
            if status == .authorized {
                resolve(true)
            } else {
                PHPhotoLibrary.requestAuthorization({ newStatus in
                    resolve(newStatus == .authorized)
                })
            }
        })
    }
    /// Save the video to the photo album.
    private static func saveVideo(path: String) -> Promise<URL> {
        return Promise<URL>(in: .background, { resolve, reject, _ in
            let url = URL(fileURLWithPath: path)
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
            }, completionHandler: { success, error in
                if success {
                    let fetchOptions = PHFetchOptions()
                    fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: true)]
                    // After saving, fetch the PHAsset of the most recent video
                    // and resolve with its current file URL.
                    let fetchResult = PHAsset.fetchAssets(with: .video, options: fetchOptions).lastObject
                    PHImageManager().requestAVAsset(forVideo: fetchResult!, options: nil, resultHandler: { avurlAsset, _, _ in
                        let obj = avurlAsset as! AVURLAsset
                        resolve(obj.url)
                    })
                } else {
                    reject(error ?? ClientError.unknown)
                }
            })
        })
    }
    enum SaveVideoError: Error {
        case failed
    }
    static func saveRemoteVideo(url: URL) -> Promise<URL> {
        return Promise<URL>(in: .background, { resolve, reject, _ in
            guard let path = getSaveVideoPath() else {
                reject(ClientError.unknown)
                return
            }
            // Remove the previous file if it exists
            if FileManager.default.fileExists(atPath: path) {
                try? FileManager.default.removeItem(atPath: path)
            }
            let request = URLRequest(url: url)
            let task = URLSession.shared.dataTask(with: request) { (data, response, error) in
                guard let data = data else {
                    reject(ClientError.unknown)
                    return
                }
                if data.count == 0 {
                    reject(ClientError.unknown)
                } else {
                    // Save inside the app's sandbox
                    do {
                        let saveURL = URL(fileURLWithPath: path)
                        try data.write(to: saveURL)
                        resolve(saveURL)
                    } catch {
                        reject(error)
                    }
                }
            }
            task.resume()
        })
    }
}
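
A minimal usage sketch, assuming Hydra's `then`/`catch` chaining; `localPath` is a hypothetical path to a recorded video inside the app sandbox, and the error types (ClientError, ComposeVideoError) are defined elsewhere in the app:

// Usage sketch (assumption, not part of the gist above):
// process the recording, then hand the exported file to the camera-roll helper.
let localPath = NSTemporaryDirectory() + "recorded.mp4" // hypothetical input file

VideoUtils.createLogoPrintedVideo(path: localPath, isMuted: true)
    .then { exportPath -> Promise<URL> in
        // exportPath points at the temporary mp4 written by processMovie
        VideoUtils.checkAuthorizationAndSaveVideo(path: exportPath)
    }
    .then { savedURL in
        print("Saved to photo library: \(savedURL)")
    }
    .catch { error in
        print("Video processing failed: \(error)")
    }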