Created
May 30, 2016 16:18
-
-
Save ds-hwang/78ee197882bbd7f6155a76055d1af288 to your computer and use it in GitHub Desktop.
webgl video
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
+++ b/third_party/WebKit/Source/modules/webgl/WebGLRenderingContextBase.cpp
@@ -4266,30 +4266,30 @@ void WebGLRenderingContextBase::texImage2D(GLenum target, GLint level, GLint int
 // Go through the fast path doing a GPU-GPU textures copy without a readback to system memory if possible.
 // Otherwise, it will fall back to the normal SW path.
- if (GL_TEXTURE_2D == target) {
- if (Extensions3DUtil::canUseCopyTextureCHROMIUM(target, internalformat, type, level)
- && video->copyVideoTextureToPlatformTexture(contextGL(), texture->object(), internalformat, type, m_unpackPremultiplyAlpha, m_unpackFlipY)) {
- return;
- }
-
- // Try using an accelerated image buffer, this allows YUV conversion to be done on the GPU.
- OwnPtr<ImageBufferSurface> surface = adoptPtr(new AcceleratedImageBufferSurface(IntSize(video->videoWidth(), video->videoHeight())));
- if (surface->isValid()) {
- OwnPtr<ImageBuffer> imageBuffer(ImageBuffer::create(std::move(surface)));
- if (imageBuffer) {
- // The video element paints an RGBA frame into our surface here. By using an AcceleratedImageBufferSurface,
- // we enable the WebMediaPlayer implementation to do any necessary color space conversion on the GPU (though it
- // may still do a CPU conversion and upload the results).
- video->paintCurrentFrame(imageBuffer->canvas(), IntRect(0, 0, video->videoWidth(), video->videoHeight()), nullptr);
-
- // This is a straight GPU-GPU copy, any necessary color space conversion was handled in the paintCurrentFrameInContext() call.
- if (imageBuffer->copyToPlatformTexture(contextGL(), texture->object(), internalformat, type,
- level, m_unpackPremultiplyAlpha, m_unpackFlipY)) {
- return;
- }
- }
- }
- }
+// if (GL_TEXTURE_2D == target) {
+// if (Extensions3DUtil::canUseCopyTextureCHROMIUM(target, internalformat, type, level)
+// && video->copyVideoTextureToPlatformTexture(contextGL(), texture->object(), internalformat, type, m_unpackPremultiplyAlpha, m_unpackFlipY)) {
+// return;
+// }
+//
+// // Try using an accelerated image buffer, this allows YUV conversion to be done on the GPU.
+// OwnPtr<ImageBufferSurface> surface = adoptPtr(new AcceleratedImageBufferSurface(IntSize(video->videoWidth(), video->videoHeight())));
+// if (surface->isValid()) {
+// OwnPtr<ImageBuffer> imageBuffer(ImageBuffer::create(std::move(surface)));
+// if (imageBuffer) {
+// // The video element paints an RGBA frame into our surface here. By using an AcceleratedImageBufferSurface,
+// // we enable the WebMediaPlayer implementation to do any necessary color space conversion on the GPU (though it
+// // may still do a CPU conversion and upload the results).
+// video->paintCurrentFrame(imageBuffer->canvas(), IntRect(0, 0, video->videoWidth(), video->videoHeight()), nullptr);
+//
+// // This is a straight GPU-GPU copy, any necessary color space conversion was handled in the paintCurrentFrameInContext() call.
+// if (imageBuffer->copyToPlatformTexture(contextGL(), texture->object(), internalformat, type,
+// level, m_unpackPremultiplyAlpha, m_unpackFlipY)) {
+// return;
+// }
+// }
+// }
+// }
 // Normal pure SW path.
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment