Created July 17, 2010 21:55.
Save eagsalazar/479877 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
using Clutter; | |
using Gst; | |
using ClutterGst; | |
/**
 * Plays a video file onto a Clutter.Texture actor via a hand-built
 * GStreamer 0.10 pipeline: filesrc -> decodebin -> ffmpegcolorspace
 * -> ClutterGst.VideoSink.
 */
class AwesomeVideoActor {
    private Clutter.Stage stage;
    private Gst.Pipeline pipeline;
    private Gst.Bus bus;
    private Gst.Element src;
    private Gst.Element decode;
    private Gst.Element colorspace;
    private Clutter.Texture videoTexture;
    private ClutterGst.VideoSink sink;

    // Input args
    private string videoFile;
    // NOTE(review): scale and loopTime are stored but never read anywhere in
    // this class — TODO either apply them (texture scale / seek-based looping)
    // or drop them from the constructor.
    private double scale;
    private double loopTime;

    /**
     * Bus watch callback. We only care about ERROR messages; everything
     * else is ignored.
     *
     * FIX: the original printed a placeholder string ("WTF message ERROR")
     * with no detail. Parse the GError out of the message and report it.
     */
    private void busHandler(Gst.Message message) {
        if(message.type == MessageType.ERROR) {
            GLib.Error err;
            string debug_info;
            message.parse_error(out err, out debug_info);
            warning("GStreamer ERROR: %s (%s)", err.message,
                    debug_info ?? "no debug info");
        }
    }

    /**
     * Called when decodebin creates its dynamic source pad; links it to
     * the colorspace converter's sink pad.
     *
     * FIX: the original ignored the PadLinkReturn, so a failed link was
     * silent and the pipeline just showed nothing.
     */
    private void onDynamicLoad(Gst.Pad pad) {
        debug("Connecting dynamically created decodebin pad");
        var result = pad.link(colorspace.get_pad("sink"));
        if(result != Gst.PadLinkReturn.OK) {
            warning("Failed to link decodebin pad to colorspace: %d", (int) result);
        }
    }

    /**
     * Builds the pipeline, starts playback, and adds the video texture
     * actor to the stage.
     *
     * @param _videoFile path of the video file to play
     * @param _stage     stage the video texture is added to
     * @param _scale     currently unused — TODO confirm intended meaning
     * @param _loopTime  currently unused — TODO confirm intended meaning
     */
    public AwesomeVideoActor(string _videoFile, Clutter.Stage _stage, double _scale, double _loopTime) {
        videoFile = _videoFile;
        stage = _stage;
        scale = _scale;
        loopTime = _loopTime;

        // Clutter.Texture is an actor for drawing images on. Video frames
        // will be painted onto it by the ClutterGst sink.
        videoTexture = new Clutter.Texture();

        // Top-level bin (collection of gst elements).
        pipeline = new Gst.Pipeline("shitPipe");

        // The bus is where all gst threads post messages; watching it lets
        // us react to pipeline events from the main loop.
        bus = pipeline.get_bus();
        // Enables the bus to emit the "message" signal.
        bus.add_signal_watch();
        bus.message.connect(busHandler);

        // filesrc reads the file named by its "location" property.
        // (Discoverable via: gst-inspect-0.10 filesrc)
        src = Gst.ElementFactory.make("filesrc", "filesrc");
        src.set_property("location", videoFile);

        // decodebin decodes the stream for us, but its output pad only
        // appears once decoding starts, so we must hook it up from the
        // "new-decoded-pad" signal (see onDynamicLoad).
        decode = Gst.ElementFactory.make("decodebin", "decode");
        // The instance-method form (decode.new_decoded_pad.connect) is not
        // exposed by these bindings, so connect by signal name. With
        // connect_swapped, `this` arrives as the first argument and the
        // pad as the second, matching onDynamicLoad's signature.
        Signal.connect_swapped(decode, "new-decoded-pad", (GLib.Callback) onDynamicLoad, this);

        // ffmpegcolorspace converts between colorspaces; it has static
        // pads, so no dynamic-pad handling is needed for it.
        colorspace = Gst.ElementFactory.make("ffmpegcolorspace", "colorspace");

        // The ClutterGst sink copies decoded frames onto videoTexture.
        sink = new ClutterGst.VideoSink(videoTexture);

        // Put all elements under the pipeline's control and clock, then
        // link the static parts. decodebin -> colorspace is linked later,
        // dynamically, in onDynamicLoad.
        pipeline.add_many(src, decode, colorspace, sink);
        pipeline.link_many(src, decode);
        pipeline.link_many(colorspace, sink);

        // Start playing; decodebin's "new-decoded-pad" fires once the
        // stream type is known.
        pipeline.set_state(Gst.State.PLAYING);

        // Make the texture visible by adding it to the stage.
        stage.add_actor(videoTexture);
    }
}
/**
 * Entry point: initializes Clutter/ClutterGst, configures the default
 * stage, and starts playing a video on it.
 */
void main(string[] args) {
    // FIX: the original called Stage.get_default() and other Clutter API
    // BEFORE Clutter.init(). Both toolkits must be initialized before any
    // other call into them.
    Clutter.init(ref args);
    ClutterGst.init(ref args);

    var vid1 = "/home/esalazar/Desktop/Rendezvous.avi";
    // var vid2 = "/home/esalazar/Desktop/uPitts.mpg";

    // Each window has a default stage associated with it.
    var stage = Stage.get_default();
    stage.color = Color.from_string("black");
    stage.title = "Awesome";
    // FIX: the original set stage.x / stage.y, which position the stage;
    // an 800x600 window *size* was clearly intended.
    stage.width = 800;
    stage.height = 600;

    // Exit when the stage is closed (window x button).
    stage.hide.connect(Clutter.main_quit);
    // Exit on any keypress event
    // stage.key_press_event.connect(Clutter.main_quit);

    new AwesomeVideoActor(vid1, stage, 1.5, 5400);
    // new AwesomeVideoActor(vid2, stage, 0.4, 1500);

    stage.show_all();
    Clutter.main();
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment