```swift
import AVKit
import Foundation
import UIKit

/// Convenience helpers for pulling thumbnails and basic metadata out of an AVPlayer / AVAsset.
class VideoHelper {
    static func getThumbnail(from player: AVPlayer, at time: CMTime, maximumSize: CGSize? = nil) -> CGImage? {
        guard let currentItem = player.currentItem else { return nil }
        return getThumbnail(from: currentItem.asset, at: time, maximumSize: maximumSize)
    }

    /// Synchronously copies a single frame at `time`; returns nil if the frame can't be generated.
    static func getThumbnail(from asset: AVAsset?, at time: CMTime, maximumSize: CGSize? = nil) -> CGImage? {
        do {
            guard let asset = asset else { return nil }
            let imgGenerator = AVAssetImageGenerator(asset: asset)
            imgGenerator.appliesPreferredTrackTransform = true
            if let size = maximumSize {
                imgGenerator.maximumSize = size
            }
            let cgImage = try imgGenerator.copyCGImage(at: time, actualTime: nil)
            return cgImage
        } catch {
            return nil
        }
    }

    /// Generates evenly spaced thumbnails, each scaled to the container height, enough to fill the container width.
    static func generateThumbnailImages(_ player: AVPlayer, _ containerSize: CGSize) -> [UIImage] {
        var images: [UIImage] = []
        guard let currentItem = player.currentItem else { return images }
        guard let track = currentItem.asset.tracks(withMediaType: .video).first else {
            return images
        }
        let assetSize = track.naturalSize.applying(track.preferredTransform)
        let height = containerSize.height
        let aspectRatio = abs(assetSize.width / assetSize.height)
        let width = height * aspectRatio
        let thumbnailSize = CGSize(width: width, height: height)
        let thumbnailCount = Int(ceil(containerSize.width / width))
        let interval = currentItem.asset.duration.seconds / Double(thumbnailCount)
        for i in 0..<thumbnailCount {
            let time = CMTime(seconds: Double(i) * interval, preferredTimescale: 1000)
            guard
                let thumbnail = VideoHelper.getThumbnail(
                    from: currentItem.asset, at: time, maximumSize: thumbnailSize)
            else {
                return images
            }
            images.append(UIImage(cgImage: thumbnail))
        }
        return images
    }

    static func getVideoAspectRatio(_ player: AVPlayer) -> CGFloat? {
        guard let track = player.currentItem?.asset.tracks(withMediaType: .video).first else {
            return nil
        }
        let assetSize = track.naturalSize.applying(track.preferredTransform)
        return assetSize.width / assetSize.height
    }

    static func getCurrentTime(_ player: AVPlayer) -> CMTime? {
        guard let currentItem = player.currentItem else { return nil }
        return currentItem.currentTime()
    }

    static func getDuration(_ player: AVPlayer) -> CMTime? {
        guard let currentItem = player.currentItem else { return nil }
        return currentItem.asset.duration
    }
}
```
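For reference, a minimal usage sketch of the helpers above (the bundled file name is a placeholder, and the asset's video track must be readable before thumbnails can be generated):

```swift
import AVKit
import UIKit

// Sketch only: assumes a local "video.mp4" is bundled with the app.
let url = Bundle.main.url(forResource: "video", withExtension: "mp4")!
let player = AVPlayer(url: url)

// Single frame at t = 2s, capped to 200x200.
let frame: CGImage? = VideoHelper.getThumbnail(
    from: player,
    at: CMTime(seconds: 2, preferredTimescale: 1000),
    maximumSize: CGSize(width: 200, height: 200))

// Evenly spaced thumbnails sized to fill a 300x50 strip.
let filmstrip: [UIImage] = VideoHelper.generateThumbnailImages(player, CGSize(width: 300, height: 50))
```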
```swift
import SwiftUI
import AVKit

/// Play/pause button, scrubbable thumbnail strip, and an optional trailing action button.
struct VideoPlayerControls: View {
    let player: AVPlayer
    /// Playback progress in 0...1, shared with the parent view.
    @Binding var currentTime: CGFloat
    var height: CGFloat = 50
    var actionImage: String = "plus"
    @State private var isPlaying: Bool = false
    @State private var isTracking: Bool = false
    @State private var timeObserver: Any?
    var action: (() -> Void)?

    var body: some View {
        HStack(spacing: 0) {
            Button {
                isPlaying ? player.pause() : player.play()
                isPlaying.toggle()
            } label: {
                Image(systemName: isPlaying ? "pause.fill" : "play.fill")
                    .resizable()
                    .padding()
                    .frame(width: height, height: height, alignment: .center)
            }
            .foregroundColor(.white)
            .overlay(Rectangle().frame(width: 1, height: nil).foregroundColor(Color.black), alignment: .trailing)

            VideoScrollPreview(player: player, isPlaying: $isPlaying, currentTime: $currentTime, isTracking: $isTracking)
                .padding(4)
                .frame(width: nil, height: height)

            if let action = action {
                Button {
                    action()
                } label: {
                    Image(systemName: actionImage)
                        .resizable()
                        .padding()
                        .frame(width: height, height: height, alignment: .center)
                }
                .foregroundColor(.white)
                .overlay(Rectangle().frame(width: 1, height: nil).foregroundColor(Color.black), alignment: .leading)
            }
        }
        .background(Color(UIColor.darkGray)) // SwiftUI has no Color.darkGray; bridge from UIColor
        .cornerRadius(5)
        .onAppear {
            startPeriodicTimeObserver()
        }
        .onDisappear {
            stopPeriodicTimeObserver()
        }
    }

    /// Keeps `currentTime` in sync with playback while the user isn't scrubbing.
    func startPeriodicTimeObserver() {
        timeObserver = player.addPeriodicTimeObserver(forInterval: CMTime(seconds: 0.1, preferredTimescale: CMTimeScale(NSEC_PER_SEC)), queue: nil) { time in
            guard isTracking == false else { return }
            guard let duration = VideoHelper.getDuration(player) else { return }
            self.currentTime = CGFloat(CMTimeGetSeconds(time) / CMTimeGetSeconds(duration))
            if self.currentTime == 1.0 {
                self.isPlaying = false
            }
        }
    }

    func stopPeriodicTimeObserver() {
        guard let observer = timeObserver else { return }
        player.removeTimeObserver(observer)
    }
}
```
```swift
import SwiftUI
import AVKit

/// Thumbnail filmstrip with a draggable playhead; scrubbing seeks the player.
struct VideoScrollPreview: View {
    let player: AVPlayer
    @Binding var isPlaying: Bool
    @Binding var currentTime: CGFloat
    @Binding var isTracking: Bool
    @State private var images: [UIImage] = []

    var body: some View {
        GeometryReader { geometry in
            ZStack {
                HStack(spacing: 0) {
                    ForEach(images, id: \.self) { image in
                        Image(uiImage: image)
                            .resizable()
                            .scaledToFit()
                    }
                }
                // Playhead indicator
                RoundedRectangle(cornerRadius: 10, style: .continuous)
                    .frame(width: 4, height: geometry.size.height + 4)
                    .position(x: currentTime * geometry.size.width, y: geometry.size.height / 2)
                    .foregroundColor(.white)
                    .shadow(radius: 10)
            }
            .gesture(
                DragGesture(minimumDistance: 0)
                    .onChanged({
                        isTracking = true
                        if isPlaying {
                            player.pause()
                        }
                        currentTime = min(geometry.size.width, max(0, $0.location.x)) / geometry.size.width
                        guard let duration = VideoHelper.getDuration(player) else { return }
                        let targetTime = CMTimeMultiplyByFloat64(duration, multiplier: Float64(currentTime))
                        player.seek(to: targetTime)
                    })
                    .onEnded({ _ in
                        isTracking = false
                        if isPlaying {
                            player.play()
                        }
                    })
            )
            .position(x: geometry.frame(in: .local).midX, y: geometry.frame(in: .local).midY)
            .onAppear {
                images = VideoHelper.generateThumbnailImages(player, geometry.size)
            }
        }
    }
}
```
Thank you for this important code, but VideoHelper is missing.
Oh yeah, totally forgot about it, just updated, please check. @arshiacont @inder663
@Martini024 Thanks for this. A quick question: let's assume that I have a video called video.mov. How would I use this in ContentView?
You should probably have another higher-order component that accepts video.mov's URL as a param, creates an AVPlayer instance with player = AVPlayer(url: videoUrl), and then passes player to VideoPlayerControls. This is generally how I use the component; hopefully it helps a bit.
The only thing I'm not sure about is whether AVPlayer supports loading .mov or not; I've only tested .mp4 so far. That's another research topic, so you'd better take some time to validate it.
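Roughly something like this (just a sketch, not part of the gist; VideoPlayerView is a made-up name and the frame height is arbitrary):

```swift
import SwiftUI
import AVKit

// Sketch: a thin wrapper that owns the AVPlayer and feeds it to the controls.
// Note: in a real app you'd likely create the AVPlayer once higher up
// (e.g. in a view model) so it isn't rebuilt every time the view is initialized.
struct VideoPlayerView: View {
    let player: AVPlayer
    @State private var currentTime: CGFloat = 0

    init(videoUrl: URL) {
        player = AVPlayer(url: videoUrl)
    }

    var body: some View {
        VideoPlayerControls(player: player, currentTime: $currentTime)
            .frame(height: 50)
    }
}

// Usage, e.g. with a bundled file:
// VideoPlayerView(videoUrl: Bundle.main.url(forResource: "video", withExtension: "mp4")!)
```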
@Martini024 Thanks for the prompt reply. Looks like AVPlayer supports .mov when using VideoPlayer, but I have an issue passing currentTime to VideoPlayerControls.
VideoPlayerControls(player: player, currentTime: CGFloat(0.0)) throws an error in:
```swift
struct ContentView: View {
    @State var player = AVPlayer(url: Bundle.main.url(forResource: "Video", withExtension: "mov")!)
    var body: some View {
        VStack {
            VideoPlayerControls(player: player, currentTime: CGFloat(0.0))
                .frame(width: .infinity, height: .infinity, alignment: .center)
        }
    }
}
```
Okay, I think you probably need some prior knowledge about SwiftUI. Check this on how to initialize a @Binding: https://stackoverflow.com/questions/56685964/swiftui-binding-initialize. In short, as a quick fix you can use VideoPlayerControls(player: player, currentTime: .constant(CGFloat(0.0))).
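If you want the playhead and scrubbing to actually update, pass a real @State binding instead of .constant; roughly like this (sketch only, building on your own ContentView above, with AVKit's built-in VideoPlayer for the video surface):

```swift
import SwiftUI
import AVKit

struct ContentView: View {
    @State private var player = AVPlayer(
        url: Bundle.main.url(forResource: "Video", withExtension: "mov")!)
    // 0...1 playback progress, written by VideoPlayerControls' time observer.
    @State private var currentTime: CGFloat = 0

    var body: some View {
        VStack {
            VideoPlayer(player: player)
            VideoPlayerControls(player: player, currentTime: $currentTime)
                .frame(height: 50)
                .padding(.horizontal)
        }
    }
}
```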
Thanks!
THANK YOU SO MUCH!!! You freaking genius
I recommend setting imgGenerator.maximumSize = size using the width/height values in generateThumbnailImages; otherwise it generates full-size images, which were around 60 MB each for me.
@lordzsolt Thanks for the heads-up! I've implemented imgGenerator.maximumSize using the computed width/height from generateThumbnailImages to avoid full-size thumbnails; much more memory-friendly now.
Thanks for sharing! Any chance you could point to an implementation of VideoHelper.generateThumbnailImages(_:_:)?