Skip to content

Instantly share code, notes, and snippets.

@UriShX
Created November 24, 2020 13:18
Show Gist options
  • Save UriShX/fc34825843c36849af2dbc49afa1d0a6 to your computer and use it in GitHub Desktop.
Save UriShX/fc34825843c36849af2dbc49afa1d0a6 to your computer and use it in GitHub Desktop.
Processing sketch - stream video to v4l2loopback example
/**
* Streaming a Processing script to a v4l2loopback target example
* by Uri Shani
*
* Depends on running v4l2loopback before starting the script:
* '$ sudo modprobe v4l2loopback devices=3 video_nr=10,11,12 card_label="Loopback_1","Loopback_2","Virtual_cam" exclusive_caps=1,1,1'
*
* Ffmpeg is used first for streaming default webcam (/dev/video0) to two loopback devices,
* the sketch then uses Loopback_1 (/dev/video10) as its video source by listing the available cameras and selecting the second device (cameras[1]).
*
* Exiting the script should be done using the "q" key, otherwise the ffmpeg cloning process does not close.
*
* After exiting the script securely, run '$ sudo modprobe -r v4l2loopback' to remove the v4l2loopback devices.
*
* based on:
* 1. zCustomCommand example from Video Export library by Abe Pazos https://funprogramming.org/VideoExport-for-Processing/
* 2. Mirror Processing example by Daniel Shiffman.
*/
import processing.video.*;
import com.hamoid.*;
// Exports the processed sketch frames to /dev/video12 via ffmpeg
VideoExport videoExport;
// Runs a second ffmpeg process that clones /dev/video0 into the loopback devices
VideoExport ffmpegLoopback;
// Size of each cell in the grid
int cellSize = 10;
// Number of columns and rows in our system
int cols, rows;
// Variable for capture device
Capture video;
// Set true once the first camera frame has been read (see captureEvent)
boolean camReady = false;
/**
 * Starts the ffmpeg loopback-cloning process, discovers the v4l2loopback
 * capture device, and configures a second ffmpeg process that exports the
 * sketch's frames (with filters applied) to /dev/video12.
 */
void setup() {
  ffmpegLoopback = new VideoExport(this);
  // Equivalent shell command:
  // ffmpeg -i /dev/video0 -f v4l2 -vcodec rawvideo -pix_fmt yuv420p -vf hflip /dev/video10 -f v4l2 -vf hflip /dev/video11
  ffmpegLoopback.setFfmpegVideoSettings(
    new String[]{
      "[ffmpeg]",              // ffmpeg executable
      "-i", "/dev/video0",     // physical webcam as input
      "-an",                   // no audio
      "-f", "v4l2",
      "-vcodec", "rawvideo",   // raw frames out
      "-pix_fmt", "yuv420p",   // color space yuv420p
      "-vf", "hflip",          // flip video for loopback device 1
      "/dev/video10",          // first loopback output
      "-an",                   // no audio
      "-f", "v4l2",
      "-vcodec", "rawvideo",   // raw frames out
      "-vf", "hflip",          // flip video for loopback device 2
      "/dev/video11"           // second loopback output
    });
  ffmpegLoopback.startMovie();
  size(320, 240);
  frameRate(25);
  cols = width / cellSize;
  rows = height / cellSize;
  colorMode(RGB, 255, 255, 255, 100);
  delay(1000); // Needed so that Capture discovers loopback devices
  String[] cameras = Capture.list();
  if (cameras.length == 0) {
    println("There are no cameras available for capture.");
    exit();
    // exit() only schedules shutdown in Processing; return explicitly so the
    // cameras[] access below is never reached with an empty list.
    return;
  }
  println("Available cameras:");
  for (int i = 0; i < cameras.length; i++) {
    println(cameras[i]);
  }
  // Prefer the second device (the first loopback, /dev/video10); fall back to
  // the first device if only one camera was discovered, instead of throwing
  // an ArrayIndexOutOfBoundsException. See the GettingStartedCapture example
  // if this creates an error.
  int camIndex = cameras.length > 1 ? 1 : 0;
  video = new Capture(this, width, height, cameras[camIndex]);
  // Start capturing the images from the camera
  video.start();
  background(0);
  videoExport = new VideoExport(this);
  // Equivalent shell command:
  // ffmpeg -stream_loop -1 -re -i VID_20190925_102203.mp4 -vf format=yuv420p -f v4l2 /dev/video3
  // Everything as by default except -vf (video filter)
  videoExport.setFfmpegVideoSettings(
    new String[]{
      "[ffmpeg]",              // ffmpeg executable
      "-y",                    // overwrite old file
      "-f", "rawvideo",        // format rgb raw
      "-vcodec", "rawvideo",   // in codec rgb raw
      "-s", "[width]x[height]", // size
      "-pix_fmt", "rgb24",     // pix format rgb24
      "-r", "[fps]",           // frame rate
      "-i", "-",               // pipe input (frames pushed by saveFrame)
      // video filter with vignette, blur,
      // noise and text. font commented out
      "-vf", "vignette,gblur=sigma=1,noise=alls=10:allf=t+u," +
      "drawtext=text='Made with Processing':x=50:y=(h-text_h-50):fontsize=24:[email protected]",
      // drawtext=fontfile=/path/to/a/font/myfont.ttf:text='Made...
      "-an",                   // no audio
      "-f", "v4l2",
      "-vcodec", "rawvideo",   // raw frames out
      "-pix_fmt", "yuv420p",   // color space yuv420p
      "-crf", "[crf]",         // quality
      "-metadata", "comment=[comment]", // comment
      "/dev/video12"           // third loopback device as output
    });
  videoExport.startMovie();
}
// New images from camera
/**
 * Processing callback fired whenever a new camera frame is available.
 * Reads the frame and marks the camera as ready so draw() can start rendering.
 */
void captureEvent(Capture cam) {
  cam.read();
  camReady = true;
}
/**
 * Renders the camera feed as a grid of rotated, semi-transparent rectangles
 * (rotation driven by pixel brightness), then pushes the rendered frame to
 * the exporting ffmpeg process.
 */
void draw() {
  // Nothing to render until the first camera frame has arrived.
  if (!camReady) {
    return;
  }
  video.loadPixels();
  for (int col = 0; col < cols; col++) {
    for (int row = 0; row < rows; row++) {
      // Top-left pixel coordinate of this grid cell
      int x = col * cellSize;
      int y = row * cellSize;
      // Do not reverse video in P3 (use with reversed feed, or in client).
      // Mirrored variant: int loc = (video.width - x - 1) + y * video.width;
      int loc = x + y * video.width;
      float r = red(video.pixels[loc]);
      float g = green(video.pixels[loc]);
      float b = blue(video.pixels[loc]);
      // Sampled color with a fixed alpha for overlap blending
      color c = color(r, g, b, 75);
      // Translate to the cell center so rotation pivots correctly
      pushMatrix();
      translate(x + cellSize / 2, y + cellSize / 2);
      // Brightness (0..255) mapped onto a full revolution
      rotate((2 * PI * brightness(c) / 255.0));
      rectMode(CENTER);
      fill(c);
      noStroke();
      // Rects are larger than the cell for some overlap
      rect(0, 0, cellSize + 6, cellSize + 6);
      popMatrix();
    }
  }
  videoExport.saveFrame();
}
/**
 * Shuts down cleanly on 'q': ends both ffmpeg processes (export first, then
 * the loopback clone) with short delays so they can terminate, then exits.
 * Closing any other way leaves the ffmpeg cloning process running.
 */
void keyPressed() {
  if (key != 'q') {
    return;
  }
  videoExport.endMovie();
  delay(100);
  ffmpegLoopback.endMovie();
  delay(100);
  exit();
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment