package com.example.android.screencapture;

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * Created by abalta on 3.01.2018.
 */
public class MediaCodecActivity extends AppCompatActivity {
    // parameters for the encoder
    private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 10; // 10 fps
    private static final int IFRAME_INTERVAL = 1; // 1 second between I-frames
    private static final int VIDEO_BITRATE = 1000 * 1000; // 1 Mbps
    final int TIMEOUT_USEC = 10000; // dequeue timeout: 10 ms

    MediaCodec mediaCodec;
    MediaFormat mediaFormat;
    MediaMuxer mediaMuxer;
    MediaCodec.BufferInfo mBufferInfo;
    CaptureImageView captureImageView;
    public static Bitmap ccc; // most recently captured frame
    Runnable runnable;
    volatile boolean mRunning = true;
    public int mTrackIndex;
    int generateIndex = 0;
    private int WIDTH = 1080;
    private int HEIGHT = 1248;
    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.codec_activity);
        captureImageView = findViewById(R.id.imageView);
        // The encoder is prepared in bufferEncoder() on each start; preparing
        // it here as well would leak the first codec and muxer.
        findViewById(R.id.btn_start).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                mRunning = true;
                bufferEncoder();
            }
        });
        findViewById(R.id.btn_finish).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                mRunning = false;
            }
        });
    }
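    // Configures an H.264 encoder for WIDTH x HEIGHT frames in a flexible
    // YUV420 color format, and an MPEG-4 muxer that writes to external storage.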
    private void prepareEncoder() {
        try {
            mBufferInfo = new MediaCodec.BufferInfo();
            mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
            mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_BITRATE);
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
            // other color formats tried: COLOR_FormatSurface (2130708361),
            // COLOR_FormatYUV420Flexible (2135033992), COLOR_FormatYUV420SemiPlanar (21)
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mediaCodec.start();
            try {
                File captureDir = new File(Environment.getExternalStorageDirectory(), "CAPTURE");
                captureDir.mkdirs(); // MediaMuxer creation fails if the directory doesn't exist
                String outputPath = new File(captureDir, "test.mp4").toString();
                mediaMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            } catch (IOException ioe) {
                throw new RuntimeException("MediaMuxer creation failed", ioe);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
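    // Renders the given view into an ARGB_8888 Bitmap by drawing it onto an
    // in-memory Canvas.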
    public static Bitmap loadBitmapFromView(View v) {
        ccc = Bitmap.createBitmap(v.getWidth(), v.getHeight(), Bitmap.Config.ARGB_8888);
        Canvas c = new Canvas(ccc);
        v.draw(c);
        return ccc;
    }
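    // Runs the encode loop on a background thread so the UI stays responsive;
    // the loop exits when btn_finish clears mRunning.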
    private void bufferEncoder() {
        runnable = new Runnable() {
            @Override
            public void run() {
                prepareEncoder();
                try {
                    // encode() loops internally until mRunning is cleared
                    encode();
                } finally {
                    release();
                }
            }
        };
        Thread thread = new Thread(runnable);
        thread.start();
    }
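    // Stops and frees the codec and muxer once encoding is finished.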
    private void release() {
        Log.i("TIME", "encodeEnd: " + System.currentTimeMillis());
        if (mediaCodec != null) {
            mediaCodec.stop();
            mediaCodec.release();
            mediaCodec = null;
            Log.i("CODEC", "RELEASE CODEC");
        }
        if (mediaMuxer != null) {
            try {
                mediaMuxer.stop();
            } catch (IllegalStateException e) {
                // stop() throws if the muxer was never started (no frames encoded)
                Log.w("CODEC", "muxer was never started", e);
            }
            mediaMuxer.release();
            mediaMuxer = null;
            Log.i("CODEC", "RELEASE MUXER");
        }
    }
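    // Feeds view captures into the encoder and drains encoded output into the
    // muxer. Devices up to Lollipop use the deprecated buffer-array API
    // (getInputBuffers/getOutputBuffers); newer ones use getInputBuffer/
    // getOutputBuffer. Note that loadBitmapFromView() is called from this
    // background thread, which is only safe while the view isn't animating.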
    public void encode() {
        Log.i("TIME", "encodeStart: " + System.currentTimeMillis());
        if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.LOLLIPOP) {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            while (mRunning) {
                // feed one captured frame, converted to NV21, into the encoder
                int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    Bitmap image = loadBitmapFromView(captureImageView);
                    byte[] input = getNV21(image.getWidth(), image.getHeight(), image);
                    ByteBuffer inputBuffer = inputBuffers[inputBufIndex];
                    inputBuffer.clear();
                    inputBuffer.put(input);
                    long ptsUsec = computePresentationTime(generateIndex);
                    mediaCodec.queueInputBuffer(inputBufIndex, 0, input.length, ptsUsec, 0);
                    generateIndex++;
                }
                // dequeueOutputBuffer blocks for up to TIMEOUT_USEC
                int status = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
                if (status == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    Log.d("CODEC", "no output from encoder available");
                } else if (status == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    outputBuffers = mediaCodec.getOutputBuffers();
                } else if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // emitted once before the first encoded buffer; the muxer
                    // can only be started once the track format is known
                    mTrackIndex = mediaMuxer.addTrack(mediaCodec.getOutputFormat());
                    mediaMuxer.start();
                } else if (status >= 0) {
                    // encoded sample
                    ByteBuffer encodedData = outputBuffers[status];
                    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                        // codec config data is already in the format from addTrack()
                        mBufferInfo.size = 0;
                    }
                    if (mBufferInfo.size != 0) {
                        encodedData.position(mBufferInfo.offset);
                        encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                        mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    }
                    mediaCodec.releaseOutputBuffer(status, false);
                }
            }
        } else {
            while (mRunning) {
                int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufIndex);
                    Bitmap image = loadBitmapFromView(captureImageView);
                    byte[] input = getNV21(image.getWidth(), image.getHeight(), image);
                    inputBuffer.clear();
                    inputBuffer.put(input);
                    long ptsUsec = computePresentationTime(generateIndex);
                    mediaCodec.queueInputBuffer(inputBufIndex, 0, input.length, ptsUsec, 0);
                    generateIndex++;
                }
                int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    Log.d("CODEC", "no output from encoder available");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // expected exactly once, before the first encoded buffer;
                    // start the muxer now that the track format is known
                    MediaFormat newFormat = mediaCodec.getOutputFormat();
                    mTrackIndex = mediaMuxer.addTrack(newFormat);
                    mediaMuxer.start();
                } else if (encoderStatus < 0) {
                    Log.i("CODEC", "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else {
                    ByteBuffer encodedData = mediaCodec.getOutputBuffer(encoderStatus);
                    if (encodedData == null) {
                        Log.i("CODEC", "encoderOutputBuffer " + encoderStatus + " was null");
                    } else {
                        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            // codec config data is already in the format from addTrack()
                            mBufferInfo.size = 0;
                        }
                        if (mBufferInfo.size != 0) {
                            encodedData.position(mBufferInfo.offset);
                            encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                            mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                        }
                    }
                    // always hand the buffer back to the codec, even when empty
                    mediaCodec.releaseOutputBuffer(encoderStatus, false);
                }
            }
        }
    }
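    // Generates the presentation timestamp for a frame, in microseconds,
    // spacing frames evenly at FRAME_RATE.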
    private static long computePresentationTime(int frameIndex) {
        // long arithmetic: frameIndex * 1000000 would overflow int within minutes
        return 132L + frameIndex * 1000000L / FRAME_RATE;
    }
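    // Converts an ARGB_8888 Bitmap into an NV21 (YUV420 semi-planar) byte
    // array: a full-resolution Y plane followed by interleaved V/U samples.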
    byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
        int[] argb = new int[inputWidth * inputHeight];
        scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
        byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
        encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
        scaled.recycle();
        return yuv;
    }
    void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
        final int frameSize = width * height;
        int yIndex = 0;
        int uvIndex = frameSize;
        int a, R, G, B, Y, U, V;
        int index = 0;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                a = (argb[index] & 0xff000000) >>> 24; // alpha is extracted but not used
                R = (argb[index] & 0xff0000) >> 16;
                G = (argb[index] & 0xff00) >> 8;
                B = (argb[index] & 0xff);
                // well-known RGB to YUV conversion (BT.601, studio swing)
                Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
                // NV21 has a full-size Y plane followed by a single interleaved
                // VU plane subsampled by 2 in each direction, i.e. for every
                // 4 Y samples there are 1 V and 1 U. The chroma is sampled on
                // every other pixel AND every other scanline.
                yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
                if (j % 2 == 0 && index % 2 == 0) {
                    yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                    yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
                }
                index++;
            }
        }
    }
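    // Alternative conversion kept for reference: YV12 is fully planar, with
    // the Y plane followed by the quarter-size V plane and then the U plane.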
    private byte[] getYV12(int inputWidth, int inputHeight, Bitmap scaled) {
        int[] argb = new int[inputWidth * inputHeight];
        scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
        byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
        encodeYV12(yuv, argb, inputWidth, inputHeight);
        scaled.recycle();
        return yuv;
    }
    private void encodeYV12(byte[] yuv420p, int[] argb, int width, int height) {
        final int frameSize = width * height;
        int yIndex = 0;
        int vIndex = frameSize;                   // YV12 stores the V plane first...
        int uIndex = frameSize + (frameSize / 4); // ...followed by the U plane
        int a, R, G, B, Y, U, V;
        int index = 0;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                a = (argb[index] & 0xff000000) >>> 24; // alpha is extracted but not used
                R = (argb[index] & 0xff0000) >> 16;
                G = (argb[index] & 0xff00) >> 8;
                B = (argb[index] & 0xff);
                // well-known RGB to YUV conversion (BT.601, studio swing)
                Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
                // YV12 has a full-size Y plane and two separate chroma planes
                // (V, then U), each subsampled by 2 in both directions: for
                // every 4 Y samples there are 1 V and 1 U, taken on every
                // other pixel AND every other scanline.
                yuv420p[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
                if (j % 2 == 0 && index % 2 == 0) {
                    yuv420p[vIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                    yuv420p[uIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
                }
                index++;
            }
        }
    }
}
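For a quick sanity check of the RGB-to-YUV coefficients used above (the common BT.601 studio-swing integer approximation), a standalone snippet such as the hypothetical YuvCheck below (not part of the gist) can be run on a desktop JVM; a pure white pixel should come out as Y=235 with neutral chroma U=V=128:

public class YuvCheck {
    public static void main(String[] args) {
        int R = 255, G = 255, B = 255; // pure white
        int Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;   // 235
        int U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128; // 128
        int V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;  // 128
        System.out.println("Y=" + Y + " U=" + U + " V=" + V);
    }
}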