Created
June 5, 2013 11:14
-
-
Save tuler/5713172 to your computer and use it in GitHub Desktop.
Script to generate video thumbnails using Xuggler
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
@GrabResolver(name='xuggle', root='http://xuggle.googlecode.com/svn/trunk/repo/share/java/') | |
@Grab( 'xuggle:xuggle-xuggler:5.2' ) | |
import javax.imageio.ImageIO; | |
import java.io.File; | |
import java.awt.image.BufferedImage; | |
import com.xuggle.xuggler.Global; | |
import com.xuggle.xuggler.IContainer; | |
import com.xuggle.xuggler.IPacket; | |
import com.xuggle.xuggler.IPixelFormat; | |
import com.xuggle.xuggler.IStream; | |
import com.xuggle.xuggler.IStreamCoder; | |
import com.xuggle.xuggler.ICodec; | |
import com.xuggle.xuggler.IVideoPicture; | |
import com.xuggle.xuggler.IVideoResampler; | |
import com.xuggle.xuggler.Utils; | |
import com.xuggle.xuggler.io.IURLProtocolHandler | |
/**
 * Takes a media container, finds the first video stream, decodes that
 * stream, and then writes video frames at some interval based on the
 * video presentation time stamps.
 *
 * @author trebor
 */
public class DecodeAndCaptureFrames {

    /** The number of seconds between frames. */
    public static final double SECONDS_BETWEEN_FRAMES = 5

    /**
     * The number of PTS ticks between frames.
     * NOTE(review): name kept for compatibility, but Global.DEFAULT_PTS_PER_SECOND
     * is microsecond-based in Xuggler, so this value is in microseconds, not
     * nanoseconds — confirm against the Xuggler Global javadoc.
     */
    public static final long NANO_SECONDS_BETWEEN_FRAMES = (long) (Global.DEFAULT_PTS_PER_SECOND * SECONDS_BETWEEN_FRAMES)

    /**
     * Write the video frame out to a PNG file in the system's temporary
     * directory and report the frame's elapsed time on stdout.
     *
     * @param picture the video frame which contains the time stamp
     * @param image the buffered image to write out
     */
    private static void processFrame(IVideoPicture picture, BufferedImage image) {
        try {
            // make a temporary file name; the file is intentionally left behind —
            // the generated thumbnail IS the script's output
            File file = File.createTempFile("frame", ".png")

            // write out PNG
            ImageIO.write(image, "png", file)

            // indicate file written
            double seconds = ((double) picture.getPts()) / Global.DEFAULT_PTS_PER_SECOND
            System.out.printf("at elapsed time of %6.3f seconds wrote: %s\n", seconds, file)
        } catch (Exception e) {
            // best-effort: a failed thumbnail write should not abort the whole run
            e.printStackTrace()
        }
    }

    /**
     * Takes a media container (file) as the first argument, opens it,
     * reads through the file and captures {@code numberOfFrames} video
     * frames spread evenly across the stream's duration. The frames are
     * written as PNG files into the system's temporary directory.
     *
     * @param args args[0] is the input filename (required); args[1] is the
     *             number of thumbnails to capture (optional, defaults to 2)
     * @throws IllegalArgumentException if no filename is given or the file cannot be opened
     * @throws RuntimeException on any decoding, seeking, or capability failure
     */
    public static void main(String[] args) {
        if (args.length <= 0) {
            throw new IllegalArgumentException("must pass in a filename as the first argument")
        }
        String filename = args[0]

        // number of thumbnails to capture; generalized from the previous
        // hard-coded 2 — an optional second CLI argument, defaulting to 2
        int numberOfFrames = args.length > 1 ? args[1].toInteger() : 2
        int frames = 0

        // make sure that we can actually convert video pixel formats
        if (!IVideoResampler.isSupported(IVideoResampler.Feature.FEATURE_COLORSPACECONVERSION)) {
            throw new RuntimeException("you must install the GPL version of Xuggler (with IVideoResampler support) for this demo to work")
        }

        // create a Xuggler container object and open it for reading
        IContainer container = IContainer.make()
        if (container.open(filename, IContainer.Type.READ, null) < 0) {
            throw new IllegalArgumentException("could not open file: " + filename)
        }

        // duration and seek interval of the video stream, expressed in
        // stream.timeBase units; explicitly initialized (they were previously
        // declared without a value)
        long duration = 0
        long seekInterval = 0

        // query how many streams the call to open found, then iterate
        // through them to find the first video stream
        int numStreams = container.getNumStreams()
        int videoStreamId = -1
        IStreamCoder videoCoder = null
        for (int i = 0; i < numStreams; i++) {
            // find the stream object and its pre-configured decoder
            IStream stream = container.getStream(i)
            IStreamCoder coder = stream.getStreamCoder()
            if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO) {
                videoStreamId = i
                videoCoder = coder

                // keep duration of stream; without it we cannot space the captures
                duration = stream.duration
                if (duration == Global.NO_PTS) {
                    throw new RuntimeException("duration of stream is unknown")
                }

                // evenly space the captures: integral floor division (Groovy's
                // `/` on integral operands produces a BigDecimal, so intdiv is
                // the correct idiom here)
                seekInterval = duration.intdiv(numberOfFrames)
                println "${duration} / ${numberOfFrames} = ${seekInterval}"
                println "${duration * stream.timeBase.value} s / ${numberOfFrames} = ${seekInterval * stream.timeBase.value} s"
                println "${duration * stream.timeBase.value / 60} min / ${numberOfFrames} = ${seekInterval * stream.timeBase.value / 60} min"
                break
            }
        }
        if (videoStreamId == -1) {
            throw new RuntimeException("could not find video stream in container: " + filename)
        }

        // Now we have found the video stream in this file. Let's open up
        // our decoder so it can do work.
        if (videoCoder.open() < 0) {
            throw new RuntimeException("could not open video decoder for container: " + filename)
        }

        // if this stream is not in BGR24, we're going to need to convert it;
        // the VideoResampler does that for us
        IVideoResampler resampler = null
        if (videoCoder.getPixelType() != IPixelFormat.Type.BGR24) {
            resampler = IVideoResampler.make(
                videoCoder.getWidth(), videoCoder.getHeight(), IPixelFormat.Type.BGR24,
                videoCoder.getWidth(), videoCoder.getHeight(), videoCoder.getPixelType())
            if (resampler == null) {
                throw new RuntimeException("could not create color space resampler for: " + filename)
            }
        }

        // walk through the container packet by packet until we have captured
        // the requested number of frames
        IPacket packet = IPacket.make()
        while (container.readNextPacket(packet) >= 0 && frames < numberOfFrames) {
            if (packet.getStreamIndex() == videoStreamId) {
                // allocate a new picture to get the data out of Xuggler
                IVideoPicture picture = IVideoPicture.make(videoCoder.getPixelType(), videoCoder.getWidth(), videoCoder.getHeight())
                int offset = 0
                while (offset < packet.getSize()) {
                    // decode the video, checking for any errors
                    int bytesDecoded = videoCoder.decodeVideo(picture, packet, offset)
                    if (bytesDecoded < 0) {
                        throw new RuntimeException("got error decoding video in: " + filename)
                    }
                    offset += bytesDecoded

                    // Some decoders will consume data in a packet, but will not
                    // be able to construct a full video picture yet; always check
                    // whether the decode produced a complete picture.
                    if (picture.isComplete()) {
                        IVideoPicture newPic = picture

                        // a non-null resampler means the video is not BGR24 and
                        // must be converted
                        if (resampler != null) {
                            newPic = IVideoPicture.make(resampler.getOutputPixelFormat(), picture.getWidth(), picture.getHeight())
                            if (resampler.resample(newPic, picture) < 0) {
                                throw new RuntimeException("could not resample video from: " + filename)
                            }
                        }
                        if (newPic.getPixelType() != IPixelFormat.Type.BGR24) {
                            throw new RuntimeException("could not decode video as BGR 24 bit data in: " + filename)
                        }

                        // convert the BGR24 picture to a Java buffered image and
                        // write it out as this capture's thumbnail
                        BufferedImage javaImage = Utils.videoPictureToImage(newPic)
                        processFrame(newPic, javaImage)
                        frames++

                        // seek forward to where the next capture should come from
                        println "seeking from ${packet.pts} forward by ${seekInterval}"
                        def pos = packet.pts

                        // NOTE(review): this rewind-to-start before the forward
                        // seek (return value deliberately ignored) presumably
                        // works around forward-only seek behavior in Xuggler —
                        // confirm before removing
                        container.seekKeyFrame(videoStreamId, -1, 0)
                        if (container.seekKeyFrame(videoStreamId, pos + seekInterval, IContainer.SEEK_FLAG_ANY) < 0) {
                            throw new RuntimeException("could not seek video")
                        }
                    }
                }
            } else {
                // this packet isn't part of our video stream, so we silently drop it
            }
        }

        // Technically since we're exiting anyway, these will be cleaned up
        // by the garbage collector... but because we're nice people and
        // want to be invited places for Christmas, we're going to show how
        // to clean up.
        if (videoCoder != null) {
            videoCoder.close()
            videoCoder = null
        }
        if (container != null) {
            container.close()
            container = null
        }
    }
}
DecodeAndCaptureFrames.main(this.args) |
Yes, please share the jar with us — we need it urgently.
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Hi, I was not able to find the dependency jar for this source code. Can you share the Maven URL for the dependency?