using UnityEngine;
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Threading;

class BitmapEncoder
{
    public static void WriteBitmap(Stream stream, int width, int height, byte[] imageData)
    {
        using (BinaryWriter bw = new BinaryWriter(stream)) {

            // define the bitmap file header
            bw.Write ((UInt16)0x4D42);                              // bfType
            bw.Write ((UInt32)(14 + 40 + (width * height * 4)));    // bfSize
            bw.Write ((UInt16)0);                                   // bfReserved1
            bw.Write ((UInt16)0);                                   // bfReserved2
            bw.Write ((UInt32)14 + 40);                             // bfOffBits

            // define the bitmap information header
            bw.Write ((UInt32)40);                                  // biSize
            bw.Write ((Int32)width);                                // biWidth
            bw.Write ((Int32)height);                               // biHeight
            bw.Write ((UInt16)1);                                   // biPlanes
            bw.Write ((UInt16)32);                                  // biBitCount
            bw.Write ((UInt32)0);                                   // biCompression
            bw.Write ((UInt32)(width * height * 4));                // biSizeImage
            bw.Write ((Int32)0);                                    // biXPelsPerMeter
            bw.Write ((Int32)0);                                    // biYPelsPerMeter
            bw.Write ((UInt32)0);                                   // biClrUsed
            bw.Write ((UInt32)0);                                   // biClrImportant

            // write the pixel data, converting the 24-bit RGB input
            // to the 32-bit BGRA layout declared in the header above
            for (int imageIdx = 0; imageIdx < imageData.Length; imageIdx += 3) {
                bw.Write(imageData[imageIdx + 2]);
                bw.Write(imageData[imageIdx + 1]);
                bw.Write(imageData[imageIdx + 0]);
                bw.Write((byte)255); // opaque alpha
            }
        }
    }
}
/// <summary>
/// Captures frames from a Unity camera in real time
/// and writes them to disk using a background thread.
/// </summary>
///
/// <description>
/// Maximises speed and quality by reading back raw
/// texture data with no conversion and writing
/// frames in uncompressed BMP format.
/// Created by Richard Copperwaite.
/// </description>
///
[RequireComponent(typeof(Camera))]
public class ScreenRecorder : MonoBehaviour
{
    // Public Properties
    public int maxFrames; // maximum number of frames you want to record in one video
    public int frameRate = 30; // number of frames to capture per second

    // The Encoder Thread
    private Thread encoderThread;

    // Texture Readback Objects
    private RenderTexture tempRenderTexture;
    private Texture2D tempTexture2D;

    // Timing Data
    private float captureFrameTime;
    private float lastFrameTime;
    private int frameNumber;
    private int savingFrameNumber;

    // Encoder Thread Shared Resources
    private Queue<byte[]> frameQueue;
    private string persistentDataPath;
    private int screenWidth;
    private int screenHeight;
    private bool threadIsProcessing;
    private bool terminateThreadWhenDone;

    void Start ()
    {
        // Set target frame rate (optional)
        Application.targetFrameRate = frameRate;

        // Prepare the data directory
        persistentDataPath = Application.persistentDataPath + "/ScreenRecorder";
        print ("Capturing to: " + persistentDataPath + "/");
        if (!System.IO.Directory.Exists(persistentDataPath))
        {
            System.IO.Directory.CreateDirectory(persistentDataPath);
        }

        // Prepare textures and initial values
        screenWidth = GetComponent<Camera>().pixelWidth;
        screenHeight = GetComponent<Camera>().pixelHeight;

        tempRenderTexture = new RenderTexture(screenWidth, screenHeight, 0);
        tempTexture2D = new Texture2D(screenWidth, screenHeight, TextureFormat.RGB24, false);
        frameQueue = new Queue<byte[]> ();

        frameNumber = 0;
        savingFrameNumber = 0;

        captureFrameTime = 1.0f / (float)frameRate;
        lastFrameTime = Time.time;

        // Kill the encoder thread if running from a previous execution
        if (encoderThread != null && (threadIsProcessing || encoderThread.IsAlive)) {
            threadIsProcessing = false;
            encoderThread.Join();
        }

        // Start a new encoder thread
        threadIsProcessing = true;
        encoderThread = new Thread (EncodeAndSave);
        encoderThread.Start ();
    }
    void OnDisable()
    {
        // Reset target frame rate
        Application.targetFrameRate = -1;

        // Inform thread to terminate when finished processing frames
        terminateThreadWhenDone = true;
    }
    void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        if (frameNumber <= maxFrames)
        {
            // Check if render target size has changed; if so, terminate
            if (source.width != screenWidth || source.height != screenHeight)
            {
                threadIsProcessing = false;
                this.enabled = false;
                throw new UnityException("ScreenRecorder render target size has changed!");
            }

            // Calculate number of video frames to produce from this game frame
            // Generate 'padding' frames if desired framerate is higher than actual framerate
            float thisFrameTime = Time.time;
            int framesToCapture = ((int)(thisFrameTime / captureFrameTime)) - ((int)(lastFrameTime / captureFrameTime));

            // Capture the frame
            if (framesToCapture > 0)
            {
                Graphics.Blit (source, tempRenderTexture);

                RenderTexture.active = tempRenderTexture;
                // Read back the camera-sized temporary texture (not Screen.width/height,
                // which may differ from the camera's pixel dimensions)
                tempTexture2D.ReadPixels(new Rect(0, 0, screenWidth, screenHeight), 0, 0);
                RenderTexture.active = null;
            }

            // Add the required number of copies to the queue
            for (int i = 0; i < framesToCapture && frameNumber <= maxFrames; ++i)
            {
                // The queue is shared with the encoder thread, so guard the enqueue
                lock (frameQueue)
                {
                    frameQueue.Enqueue(tempTexture2D.GetRawTextureData());
                }
                frameNumber++;

                if (frameNumber % frameRate == 0)
                {
                    print ("Frame " + frameNumber);
                }
            }

            lastFrameTime = thisFrameTime;
        }
        else // the maximum frame count has been reached, so stop recording
        {
            // Inform thread to terminate when finished processing frames
            terminateThreadWhenDone = true;

            // Disable script
            this.enabled = false;
        }

        // Passthrough
        Graphics.Blit (source, destination);
    }
    private void EncodeAndSave()
    {
        print ("SCREENRECORDER IO THREAD STARTED");

        while (threadIsProcessing)
        {
            if (frameQueue.Count > 0)
            {
                // Dequeue the frame under a lock, since the main thread enqueues concurrently
                byte[] frameData;
                lock (frameQueue)
                {
                    frameData = frameQueue.Dequeue();
                }

                // Generate file path
                string path = persistentDataPath + "/frame" + savingFrameNumber + ".bmp";

                // Encode the frame as a bitmap and write it to the file
                using (FileStream fileStream = new FileStream(path, FileMode.Create))
                {
                    BitmapEncoder.WriteBitmap(fileStream, screenWidth, screenHeight, frameData);
                }

                // Done
                savingFrameNumber++;
                print ("Saved " + savingFrameNumber + " frames. " + frameQueue.Count + " frames remaining.");
            }
            else
            {
                if (terminateThreadWhenDone)
                {
                    break;
                }

                Thread.Sleep(1);
            }
        }

        terminateThreadWhenDone = false;
        threadIsProcessing = false;

        print ("SCREENRECORDER IO THREAD FINISHED");
    }
}
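Several commenters below ask how to use the script. A minimal usage sketch, assuming the code above is saved as ScreenRecorder.cs in the project; the RecordingSetup class name and the field values are illustrative only, and attaching the component to a camera in the Inspector and setting Max Frames / Frame Rate there works just as well:

using UnityEngine;

// Hypothetical bootstrap script: adds ScreenRecorder to the main camera at runtime.
// Captured frames are written to Application.persistentDataPath + "/ScreenRecorder".
public class RecordingSetup : MonoBehaviour
{
    void Start()
    {
        Camera targetCamera = Camera.main; // or any other camera in the scene

        ScreenRecorder recorder = targetCamera.gameObject.AddComponent<ScreenRecorder>();
        recorder.frameRate = 30;  // capture 30 video frames per second
        recorder.maxFrames = 300; // 300 frames at 30 fps = 10 seconds of footage
    }
}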
amazing and fast, thanks so much!! May I ask, how do you encode it to a video format?
edit: couldn't find how to convert it to mp4, but was able to use it to make gifs in-game! ^_^
how did you create the gifs in-game?
"You need to comment a part responsible for render texture change detection to make it work.", where is that code?
It works perfectly! Though I need to convert it into single GIF and make it shareable through android's share intent. I can't figure it out, any help?
How to use this code? Anyone please?
i wanna know how to use this script
Quick answer to the main question: to make a video you need to use a conversion library or write your own :D - a good option is http://www.aforgenet.com/framework/docs/html/4ee1742c-44d3-b250-d6aa-90cd2d606611.htm
More advanced example: https://stackoverflow.com/questions/9744026/image-sequence-to-video-stream
Simple Unity example: https://stackoverflow.com/questions/45677275/converting-texture2d-into-a-video
Also, how it currently works: the script needs to be on the camera and enabled (also set max frames - for example, at a rate of 30, max frames 60 is 2 seconds of recording). Then it "updates" whenever the screen is rendered: the OnRenderImage callback (https://docs.unity3d.com/ScriptReference/MonoBehaviour.OnRenderImage.html) is called, which is one of the base Unity MonoBehaviour messages (like Update, FixedUpdate, Awake, OnEnable, etc... https://docs.unity3d.com/ScriptReference/MonoBehaviour.html)
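If the goal is simply an mp4 from the frames this gist writes out, one hedged option (not from the gist or the comment above) is to shell out to ffmpeg. This assumes ffmpeg is installed and on the PATH; the FrameSequenceToVideo class and Convert method names are made up for the example:

using System.Diagnostics;

public static class FrameSequenceToVideo
{
    // Turns the frame0.bmp, frame1.bmp, ... sequence written by ScreenRecorder
    // into an H.264 mp4. 'framesDir' is the folder the script prints at startup
    // (Application.persistentDataPath + "/ScreenRecorder").
    public static void Convert(string framesDir, string outputPath, int frameRate)
    {
        ProcessStartInfo startInfo = new ProcessStartInfo
        {
            FileName = "ffmpeg",
            Arguments = string.Format(
                "-y -framerate {0} -i \"{1}/frame%d.bmp\" -c:v libx264 -pix_fmt yuv420p \"{2}\"",
                frameRate, framesDir, outputPath),
            UseShellExecute = false
        };

        using (Process process = Process.Start(startInfo))
        {
            process.WaitForExit();
        }
    }
}

The same thing can of course be run by hand from a terminal in the capture folder: ffmpeg -framerate 30 -i frame%d.bmp -c:v libx264 -pix_fmt yuv420p out.mp4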
Not working for me. I have set max frames to 60. It creates the folder, but nothing gets saved: it stops at the line if (frameQueue.Count > 0) in the EncodeAndSave() function,
and OnRenderImage(RenderTexture source, RenderTexture destination) is not called either.
Whoa, apologies for my lack of activity on this Gist, I wasn't getting notifications about it! Frankly, I can't believe people are still using it, I'd totally forgotten I wrote it and there are much better alternatives out there.
I would recommend Unity's Recorder, which is totally free on the Asset Store:
https://assetstore.unity.com/packages/essentials/unity-recorder-94079
Hello DashW
I would recommend Unity's Recorder, which is totally free on the Asset Store:
https://assetstore.unity.com/packages/essentials/unity-recorder-94079
OK, but can it be scripted in a build, instead of only in the editor?
Regards
works perfectly!
Hi, DashW! Is it possible to capture video from a specific camera (because I have 2 cameras in the game) with your script, and how? Thx
I had the same wish... extend the OnRenderImage handler with a loop over the inactive cameras (if you want to record more than one). You can use something like this to capture images from inactive cameras:
foreach (Camera inactiveCam in inactiveCameras)
{
    int resWidth = inactiveCam.pixelWidth;
    int resHeight = inactiveCam.pixelHeight;

    // Render the inactive camera into a temporary render texture
    RenderTexture rt = RenderTexture.GetTemporary(resWidth, resHeight, 24);
    inactiveCam.targetTexture = rt;
    inactiveCam.Render();

    // Read the result back into a Texture2D
    Texture2D ss = new Texture2D(resWidth, resHeight, TextureFormat.RGB24, false);
    RenderTexture.active = rt;
    ss.ReadPixels(inactiveCam.pixelRect, 0, 0);
    ss.Apply();

    // 'queues' here is a per-camera Dictionary<string, Queue<byte[]>> maintained by this variant
    queues[inactiveCam.name].Enqueue(ss.EncodeToPNG());

    inactiveCam.targetTexture = null;
    RenderTexture.active = null;
    RenderTexture.ReleaseTemporary(rt); // temporary RTs should be returned to the pool, not Release()d
}
Don't forget to extend the encoder as well
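To sketch what "extend the encoder" could look like: the queues and savedCounts dictionaries and the EncodeAndSaveAll name below are assumptions (not from the gist or the comment above), and the gist's persistentDataPath / threadIsProcessing / terminateThreadWhenDone fields and usings are reused. Since EncodeToPNG already produces a complete PNG file, the writer thread can loop over one queue per camera and dump the bytes straight to disk:

// Assumed per-camera state, populated before the encoder thread starts
private Dictionary<string, Queue<byte[]>> queues = new Dictionary<string, Queue<byte[]>>();
private Dictionary<string, int> savedCounts = new Dictionary<string, int>();

private void EncodeAndSaveAll()
{
    while (threadIsProcessing)
    {
        bool wroteFrame = false;

        foreach (KeyValuePair<string, Queue<byte[]>> entry in queues)
        {
            byte[] png = null;
            lock (entry.Value) // the main thread enqueues into the same queue
            {
                if (entry.Value.Count > 0)
                    png = entry.Value.Dequeue();
            }
            if (png == null)
                continue;

            if (!savedCounts.ContainsKey(entry.Key))
                savedCounts[entry.Key] = 0;

            // EncodeToPNG produced a complete PNG, so just write the bytes out
            string path = persistentDataPath + "/" + entry.Key + "_frame" + savedCounts[entry.Key] + ".png";
            File.WriteAllBytes(path, png);
            savedCounts[entry.Key]++;
            wroteFrame = true;
        }

        if (!wroteFrame)
        {
            if (terminateThreadWhenDone)
                break;
            Thread.Sleep(1);
        }
    }

    threadIsProcessing = false;
}

If you lock around the Dequeue like this, the main-thread Enqueue for each camera queue should be guarded the same way.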
I would recommend Unity's Recorder, which is totally free on the Asset Store:
https://assetstore.unity.com/packages/essentials/unity-recorder-94079
As someone else said, that official solution is only usable in the Editor; it uses the media encoder supplied in the UnityEditor.Media namespace, which is not included in builds.
Can this script be used in android or ios build?
Unity Recorder is no longer available on the asset store.
Unity Recorder is no longer available on the asset store.
It's a package now and you can find it in package manager.
Unity Recorder is no longer available on the asset store.
Would this work to capture audio with fmod? Unity recorder doesn't work with Fmod enabled.
simple and beautiful, works like a charm! You need to comment a part responsible for render texture change detection to make it work.
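For anyone asking where that part is (see the question above): it is the render-target size check near the top of OnRenderImage. One way to do what this comment describes is simply to comment that block out, at the cost of capturing mismatched frames if the render target size really does change:

// Inside OnRenderImage(), disable the size check like this:
//if (source.width != screenWidth || source.height != screenHeight)
//{
//    threadIsProcessing = false;
//    this.enabled = false;
//    throw new UnityException("ScreenRecorder render target size has changed!");
//}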