Convert a UIImage into a VImage video composition, applying effects (the chosen image remains in the background, the selected transparent video is placed in front, and effects are applied on top).
//
// VideoProcessor.swift
// VImage Editor
//
// Created by Onur Işık on 25.02.2022.
// Copyright © 2022 Coder ACJHP. All rights reserved.
//
import UIKit
import AVKit
import ImageIO
// Custom error type
enum MediaProcessorError: Error {
    // Thrown when the device does not support Metal
    case metalNotSupported
    // Thrown when the asset's video tracks cannot be loaded
    case cannotLoadVideoTracks
    // Thrown when the expected background image resource is not found
    case requiredMaterialsNotExist
    // Thrown when the Metal context cannot be created
    case cannotCreateMetalDevice
    // Thrown in all other cases
    case unexpected(code: Int)
}
// For each error type return an appropriate description
extension MediaProcessorError: CustomStringConvertible {
    public var description: String {
        switch self {
        case .metalNotSupported:
            return "Your device does not support MetalKit."
        case .cannotCreateMetalDevice:
            return "An internal error occurred while preparing the video processor."
        case .requiredMaterialsNotExist:
            return "Cannot load the selected background image."
        case .cannotLoadVideoTracks:
            return "The video tracks of the specified media item could not be found."
        case .unexpected:
            return "An unexpected error occurred."
        }
    }
}
extension MediaProcessorError: LocalizedError {
    public var errorDescription: String? {
        switch self {
        case .metalNotSupported:
            return NSLocalizedString(
                "Your device does not support MetalKit.",
                comment: "MetalKit Resource Not Found"
            )
        case .cannotCreateMetalDevice:
            return NSLocalizedString(
                "An internal error occurred while preparing the video processor.",
                comment: "Video Processor Error"
            )
        case .cannotLoadVideoTracks:
            return NSLocalizedString(
                "The video tracks of the specified media item could not be found.",
                comment: "Resource Not Found"
            )
        case .requiredMaterialsNotExist:
            return NSLocalizedString(
                "The background image of the specified media item could not be found.",
                comment: "Resource Not Found"
            )
        case .unexpected:
            return NSLocalizedString(
                "An unexpected error occurred.",
                comment: "Unexpected Error"
            )
        }
    }
}
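/// Blends a (transparent or black-matte) video over a still background image
/// using Core Image filters, optionally lowering the video's opacity, and can
/// animate one of several effects (twirl, chromatic aberration, zoom in/out)
/// over the composition.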
final class MediaProcessor {
    typealias CropTaskCompletion = (Result<URL, Error>) -> Void
    typealias ChromaKeyTaskCompletion = (Result<AVMutableVideoComposition, MediaProcessorError>) -> Void
    private var context: CIContext! = nil
    private var opacityFilter: CIFilter! = nil
    private var straightenFilter: CIFilter! = nil
    private var blendScreenFilter: CIFilter! = nil
    private var lanczosScaleFilter: CIFilter! = nil
    private var affineTransformFilter: CIFilter! = nil
    private var transverseChromaticAberration: TransverseChromaticAberration! = nil
    // Animation time constants
    private final let vhsTrackingLinesMaxInputTime = CGFloat(2048)

    init() {
        if let blendScreen = CIFilter(name: "CIScreenBlendMode"),
           let transformFilter = CIFilter(name: "CIAffineTransform"),
           let resizeFilter = CIFilter(name: "CILanczosScaleTransform"),
           let filterStraighten = CIFilter(name: "CIStraightenFilter"),
           let colorMatrixFilter = CIFilter(name: "CIColorMatrix") {
            blendScreenFilter = blendScreen
            lanczosScaleFilter = resizeFilter
            affineTransformFilter = transformFilter
            straightenFilter = filterStraighten
            opacityFilter = colorMatrixFilter
            transverseChromaticAberration = TransverseChromaticAberration()
        }
    }
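
    /// Builds an `AVMutableVideoComposition` that scales and crops each video
    /// frame to the background image, screen-blends the frame over the image
    /// (screen blending leaves the background untouched wherever the video
    /// pixel is black, which is what drops the video's dark backdrop), and
    /// then applies the requested effect.
    /// - Parameters:
    ///   - asset: The video asset to process.
    ///   - backgroundImage: The still image shown behind the video.
    ///   - effectType: The effect to animate over the composition.
    ///   - opacity: Alpha applied to the video before blending (0...1).
    ///   - completionHandler: Called on the main queue with the composition or an error.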
    final func removeBackgroundColorWithEffectFor(asset: AVAsset,
                                                  onBackgroundImage backgroundImage: UIImage,
                                                  effectType: SuperMode,
                                                  opacityOfVideo opacity: CGFloat = 1.0,
                                                  completionHandler: @escaping ChromaKeyTaskCompletion) {
        // Load asset tracks asynchronously
        let tracksKey = #keyPath(AVAsset.tracks)
        asset.loadValuesAsynchronously(forKeys: [tracksKey]) {
            DispatchQueue.main.async {
                var error: NSError? = nil
                switch asset.statusOfValue(forKey: tracksKey, error: &error) {
                case .loaded:
                    // The property loaded successfully. Continue processing.
                    guard let backgroundImage = autoreleasepool(invoking: { CIImage(image: backgroundImage) }) else {
                        completionHandler(.failure(.requiredMaterialsNotExist))
                        return
                    }
                    // Straighten filter constant
                    var zoomRotateFactor: CGFloat = .zero
                    // Zoom in/out filter constant
                    var zoomInOutFactor: CGFloat = 1.0
                    // Background image size
                    let baseSize = backgroundImage.extent.size
                    // Assign the prepared background image to the blend filter
                    self.blendScreenFilter.setValue(backgroundImage, forKey: kCIInputBackgroundImageKey)
                    // Adjust the color matrix (opacity) values
                    let colorMatrix: [CGFloat] = [0, 0, 0, opacity]
                    let alphaVector = CIVector(values: colorMatrix, count: 4)
                    self.opacityFilter.setValue(alphaVector, forKey: "inputAVector")
                    let composition = AVMutableVideoComposition(asset: asset, applyingCIFiltersWithHandler: { [weak self] request in
                        guard let self = self else {
                            request.finish(with: MediaProcessorError.unexpected(code: 0))
                            return
                        }
                        autoreleasepool {
                            let targetSize = backgroundImage.extent.size
                            let scale = targetSize.height / request.sourceImage.extent.height
                            self.lanczosScaleFilter.setValue(request.sourceImage, forKey: kCIInputImageKey)
                            self.lanczosScaleFilter.setValue(scale, forKey: kCIInputScaleKey)
                            let scaledImageSize = self.lanczosScaleFilter.outputImage?.extent.size ?? request.sourceImage.extent.size
                            // STEP 1:- Crop the video frame to fit the background image
                            let cropRect = CGRect(
                                x: (scaledImageSize.width - backgroundImage.extent.width) / 2,
                                y: (scaledImageSize.height - backgroundImage.extent.height) / 2,
                                width: backgroundImage.extent.width,
                                height: backgroundImage.extent.height
                            )
                            // Translate the cropped frame back to the origin
                            let imageAtOrigin = self.lanczosScaleFilter.outputImage?.cropped(to: cropRect).transformed(
                                by: CGAffineTransform(translationX: -cropRect.origin.x, y: -cropRect.origin.y)
                            )
                            // STEP 2:- Change the alpha value if needed
                            if opacity < 1.0 {
                                self.opacityFilter.setValue(imageAtOrigin, forKey: kCIInputImageKey)
                                self.blendScreenFilter.setValue(self.opacityFilter.outputImage, forKey: kCIInputImageKey)
                            } else {
                                self.blendScreenFilter.setValue(imageAtOrigin, forKey: kCIInputImageKey)
                            }
                            // STEP 3:- Blend the two images
                            guard let blendedImage = self.blendScreenFilter.outputImage else {
                                request.finish(with: request.sourceImage, context: nil)
                                return
                            }
                            // STEP 4:- (Optional) apply an effect if one is selected
                            if effectType != .None {
                                self.applyFilterIfNeeded(
                                    videoFrame: blendedImage,
                                    zoomRotateFactor: &zoomRotateFactor,
                                    zoomInOutFactor: &zoomInOutFactor,
                                    actualZoomingImageSize: baseSize,
                                    totalDuration: asset.duration.seconds,
                                    request: request,
                                    effectType: effectType
                                )
                            } else {
                                // STEP 4:- Return the processed result image
                                request.finish(with: blendedImage, context: nil)
                            }
                        }
                    })
                    // STEP 5:- Update the composition size
                    composition.renderSize = baseSize
                    completionHandler(.success(composition))
                case .failed:
                    // Examine the NSError pointer to determine the failure.
                    print(error?.localizedDescription ?? "Unknown error")
                    completionHandler(.failure(.cannotLoadVideoTracks))
                default:
                    // Handle all other cases.
                    completionHandler(.failure(.unexpected(code: 1)))
                }
            }
        }
    }
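
    /// Applies the selected effect to a single blended frame and finishes the
    /// request. The `inout` factors accumulate across successive frames, which
    /// is what animates the twirl and zoom effects over the video's duration.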
    private final func applyFilterIfNeeded(
        videoFrame image: CIImage,
        zoomRotateFactor: inout CGFloat,
        zoomInOutFactor: inout CGFloat,
        actualZoomingImageSize: CGSize,
        totalDuration: Double,
        request: AVAsynchronousCIImageFilteringRequest,
        effectType: SuperMode) {
        autoreleasepool {
            let seconds = CMTimeGetSeconds(request.compositionTime)
            let halfOfAnimationDuration = totalDuration / 2
            switch effectType {
            case .None: break
            case .Twrill:
                if seconds < halfOfAnimationDuration {
                    if seconds < halfOfAnimationDuration / 2 {
                        zoomRotateFactor += 0.001
                    } else {
                        zoomRotateFactor -= 0.001
                    }
                } else {
                    if seconds > halfOfAnimationDuration + (halfOfAnimationDuration / 2) {
                        zoomRotateFactor += 0.001
                    } else {
                        zoomRotateFactor -= 0.001
                    }
                }
                self.straightenFilter.setValue(image, forKey: kCIInputImageKey)
                self.straightenFilter.setValue(zoomRotateFactor, forKey: kCIInputAngleKey)
                guard let resultImage = self.straightenFilter.outputImage else {
                    request.finish(with: image, context: nil)
                    return
                }
                // Return the result image
                request.finish(with: resultImage, context: nil)
            case .Chromatic:
                self.transverseChromaticAberration.inputImage = image
                self.transverseChromaticAberration.inputBlur = CGFloat.random(in: 10...30)
                self.transverseChromaticAberration.inputFalloff = CGFloat.random(in: 0.1...0.2)
                guard let resultImage = self.transverseChromaticAberration.outputImage else {
                    request.finish(with: image, context: nil)
                    return
                }
                // Return the result image
                request.finish(with: resultImage, context: nil)
            case .ZoomInOut:
                // Center point of the base image (not the video frame)
                let centerPoint = CGPoint(
                    x: actualZoomingImageSize.width / 2,
                    y: actualZoomingImageSize.height / 2
                )
                // Create an affine transform that scales around the center point
                let affineTransform = CGAffineTransform(translationX: centerPoint.x, y: centerPoint.y)
                    .scaledBy(x: zoomInOutFactor, y: zoomInOutFactor)
                    .translatedBy(x: -centerPoint.x, y: -centerPoint.y)
                // Wrap it in an NSValue to pass it as a filter parameter
                let inputTransform = NSValue(cgAffineTransform: affineTransform)
                affineTransformFilter.setValue(image, forKey: kCIInputImageKey)
                affineTransformFilter.setValue(inputTransform, forKey: kCIInputTransformKey)
                // Update the zoom factor
                if seconds < halfOfAnimationDuration {
                    zoomInOutFactor += 0.001
                } else {
                    zoomInOutFactor -= 0.001
                }
                guard let resultImage = self.affineTransformFilter.outputImage else {
                    request.finish(with: image, context: nil)
                    return
                }
                // Return the result image
                request.finish(with: resultImage, context: nil)
            }
        }
    }
    // Helper function used to resize an image for display
    func downsampleUIImage(image: UIImage,
                           to pointSize: CGSize,
                           scale: CGFloat = UIScreen.main.scale,
                           completionHandler: @escaping (UIImage?) -> Void) {
        DispatchQueue.global(qos: .userInitiated).async {
            let originalScale = image.scale
            let originalOrientation = image.imageOrientation
            guard let imageData = image.pngData() else {
                DispatchQueue.main.async {
                    completionHandler(nil)
                }
                return
            }
            // Create a CGImageSource that represents the image
            let imageSourceOptions = [kCGImageSourceShouldCache: false] as CFDictionary
            guard let imageSource = CGImageSourceCreateWithData(imageData as CFData, imageSourceOptions) else {
                DispatchQueue.main.async {
                    completionHandler(nil)
                }
                return
            }
            // Calculate the desired dimension
            let maxDimensionInPixels = max(pointSize.width, pointSize.height) * scale
            // Perform the downsampling
            let downsampleOptions = [
                kCGImageSourceCreateThumbnailFromImageAlways: true,
                kCGImageSourceShouldCacheImmediately: true,
                kCGImageSourceCreateThumbnailWithTransform: true,
                kCGImageSourceThumbnailMaxPixelSize: maxDimensionInPixels
            ] as CFDictionary
            guard let downsampledImage = CGImageSourceCreateThumbnailAtIndex(imageSource, 0, downsampleOptions) else {
                DispatchQueue.main.async {
                    completionHandler(nil)
                }
                return
            }
            DispatchQueue.main.async {
                // Return the downsampled image as a UIImage
                completionHandler(UIImage(cgImage: downsampledImage, scale: originalScale, orientation: originalOrientation))
            }
        }
    }
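
    // Usage sketch (not in the original gist; `mediaProcessor` and `imageView`
    // are assumed to exist in the caller):
    //
    //   mediaProcessor.downsampleUIImage(image: original, to: imageView.bounds.size) { resized in
    //       imageView.image = resized
    //   }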
    // Resize the image based on the max height given as an argument
    private final func resizeImage(image: UIImage, newHeight: CGFloat) -> UIImage {
        let scale = newHeight / image.size.height
        let newWidth = image.size.width * scale
        let newSize = CGSize(width: newWidth, height: newHeight)
        UIGraphicsBeginImageContextWithOptions(newSize, false, UIScreen.main.scale)
        // End the context even on the early-return path below
        defer { UIGraphicsEndImageContext() }
        image.draw(in: CGRect(origin: .zero, size: newSize))
        guard let scaledImage = UIGraphicsGetImageFromCurrentImageContext() else {
            return image
        }
        return scaledImage
    }
}
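
// ----------------------------------------------------------------------------
// Usage sketch (not part of the original gist). `videoURL`, `chosenImage`,
// and `player` are assumed to exist in the caller, and `SuperMode` is the
// project's effect enum with at least the cases used above
// (.None, .Twrill, .Chromatic, .ZoomInOut):
//
//   let processor = MediaProcessor()
//   let asset = AVAsset(url: videoURL)
//   processor.removeBackgroundColorWithEffectFor(asset: asset,
//                                                onBackgroundImage: chosenImage,
//                                                effectType: .Chromatic,
//                                                opacityOfVideo: 0.8) { result in
//       switch result {
//       case .success(let composition):
//           // Attach the composition to a player item for playback
//           let playerItem = AVPlayerItem(asset: asset)
//           playerItem.videoComposition = composition
//           player.replaceCurrentItem(with: playerItem)
//       case .failure(let error):
//           print(error.localizedDescription)
//       }
//   }
// ----------------------------------------------------------------------------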