Created
December 18, 2019 02:46
-
-
Save PhanSon95/bbf7e25f700ec8a00b43406debdacb2c to your computer and use it in GitHub Desktop.
MonoscopicView
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
package com.fc2.contents.viewer.views.vr; | |
import android.content.Context; | |
import android.content.Intent; | |
import android.graphics.PointF; | |
import android.hardware.Sensor; | |
import android.hardware.SensorEvent; | |
import android.hardware.SensorEventListener; | |
import android.hardware.SensorManager; | |
import android.net.Uri; | |
import android.opengl.GLES20; | |
import android.opengl.GLSurfaceView; | |
import android.opengl.Matrix; | |
import android.support.annotation.AnyThread; | |
import android.support.annotation.BinderThread; | |
import android.support.annotation.UiThread; | |
import android.util.AttributeSet; | |
import android.view.Display; | |
import android.view.MotionEvent; | |
import android.view.OrientationEventListener; | |
import android.view.Surface; | |
import android.view.View; | |
import android.view.WindowManager; | |
import com.fc2.contents.viewer.views.vr.rendering.SceneRenderer; | |
import com.google.vr.sdk.base.Eye; | |
import javax.microedition.khronos.egl.EGLConfig; | |
import javax.microedition.khronos.opengles.GL10; | |
/**
 * A GLSurfaceView that renders media through a {@link SceneRenderer}, steering the virtual
 * camera from the device's orientation sensor plus touch drag input. Callers must invoke
 * {@link #initialize} right after inflation and forward onResume/onPause/destroy lifecycle
 * events.
 */
public class MonoscopicView extends GLSurfaceView {
  // We handle all the sensor orientation detection ourselves.
  private SensorManager sensorManager;
  private Sensor orientationSensor;
  // Converts TYPE_GAME_ROTATION_VECTOR events into a view matrix for the renderer.
  private PhoneOrientationListener phoneOrientationListener;

  // Loads and owns the media being displayed; created in initialize().
  private MediaLoader mediaLoader;
  private Renderer renderer;
  // Maps touch drags to yaw/pitch offsets on the renderer.
  private TouchTracker touchTracker;
  // May be null if initialize() hasn't run — presumably the overlay UI; see Renderer's null check.
  private VideoUiView uiView;
  // Tracks Display rotation changes that don't trigger an Activity config change.
  private DisplayOrientationListener displayOrientationListener;
  // Current Display rotation in degrees (0/90/180/270), written by displayOrientationListener.
  private int displayRotationDegrees;
/** | |
* Inflates a standard GLSurfaceView. | |
*/ | |
public MonoscopicView(Context context, AttributeSet attributeSet) { | |
super(context, attributeSet); | |
setPreserveEGLContextOnPause(true); | |
} | |
/** | |
* Finishes initialization. This should be called immediately after the View is inflated. | |
*/ | |
public void initialize(VideoUiView uiView) { | |
this.uiView = uiView; | |
mediaLoader = new MediaLoader(getContext()); | |
// Configure OpenGL. | |
renderer = new Renderer(uiView, mediaLoader); | |
setEGLContextClientVersion(2); | |
setRenderer(renderer); | |
setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY); | |
// Configure sensors and touch. | |
sensorManager = (SensorManager) getContext().getSystemService(Context.SENSOR_SERVICE); | |
// TYPE_GAME_ROTATION_VECTOR is the easiest sensor since it handles all the complex math for | |
// fusion. It's used instead of TYPE_ROTATION_VECTOR since the latter uses the mangetometer on | |
// devices. When used indoors, the magnetometer can take some time to settle depending on the | |
// device and amount of metal in the environment. | |
orientationSensor = sensorManager.getDefaultSensor(Sensor.TYPE_GAME_ROTATION_VECTOR); | |
phoneOrientationListener = new PhoneOrientationListener(); | |
// When a phone rotates from portrait <-> landscape or portrait <-> reverse landscape, this flow | |
// is used. However, this flow isn't used for landscape <-> reverse landscape changes. For that | |
// case, displayOrientationListener's onOrientationChanged callback is used. | |
displayOrientationListener = new DisplayOrientationListener(getContext()); | |
displayOrientationListener.recomputeDisplayOrientation(); | |
touchTracker = new TouchTracker(renderer); | |
setOnTouchListener(touchTracker); | |
} | |
/** | |
* Starts the sensor & video only when this View is active. | |
*/ | |
@Override | |
public void onResume() { | |
super.onResume(); | |
// Use the fastest sensor readings. | |
sensorManager.registerListener( | |
phoneOrientationListener, orientationSensor, SensorManager.SENSOR_DELAY_FASTEST); | |
displayOrientationListener.enable(); | |
mediaLoader.resume(); | |
} | |
/** | |
* Stops the sensors & video when the View is inactive to avoid wasting battery. | |
*/ | |
@Override | |
public void onPause() { | |
mediaLoader.pause(); | |
sensorManager.unregisterListener(phoneOrientationListener); | |
displayOrientationListener.disable(); | |
super.onPause(); | |
} | |
/** | |
* Destroys the underlying resources. If this is not called, the MediaLoader may leak. | |
*/ | |
public void destroy() { | |
uiView.setMediaPlayer(null); | |
mediaLoader.destroy(); | |
} | |
public void setVideoUri(Uri uri) { | |
this.mediaLoader.setVideoUri(uri); | |
} | |
/** | |
* Parses the Intent and loads the appropriate media. | |
*/ | |
public void loadMedia(String stereoFormat, String horizontalDegrees, String videoUrl) { | |
mediaLoader.handleIntent(stereoFormat, horizontalDegrees, videoUrl, uiView); | |
} | |
  /**
   * Detects fine-grained sensor events and saves them as a matrix.
   *
   * <p>Runs on the sensor (binder) thread; pushes results to touchTracker and renderer, which
   * handle their own cross-thread access.
   */
  private class PhoneOrientationListener implements SensorEventListener {
    // Scratch buffers reused on every event to avoid per-event allocation.
    private final float[] phoneInWorldSpaceMatrix = new float[16];
    private final float[] remappedPhoneMatrix = new float[16];
    private final float[] anglesRadians = new float[3];

    @Override
    @BinderThread
    public void onSensorChanged(SensorEvent event) {
      SensorManager.getRotationMatrixFromVector(phoneInWorldSpaceMatrix, event.values);

      // Extract the phone's roll and pass it on to touchTracker & renderer. Remapping is required
      // since we need the calculated roll of the phone to be independent of the phone's pitch &
      // yaw. Any operation that decomposes rotation to Euler angles needs to be performed
      // carefully.
      SensorManager.remapCoordinateSystem(
          phoneInWorldSpaceMatrix,
          SensorManager.AXIS_X, SensorManager.AXIS_MINUS_Z,
          remappedPhoneMatrix);
      SensorManager.getOrientation(remappedPhoneMatrix, anglesRadians);
      float roll = anglesRadians[2];
      // Subtract the display rotation so touch x/y still map to yaw/pitch after the screen
      // itself rotates.
      touchTracker.setRoll((float) (roll - Math.toRadians(displayRotationDegrees)));

      // Rotate from Android coordinates to OpenGL coordinates. Android's coordinate system
      // assumes Y points North and Z points to the sky. OpenGL has Y pointing up and Z pointing
      // toward the user.
      Matrix.rotateM(phoneInWorldSpaceMatrix, 0, 90, 1, 0, 0);
      renderer.setDeviceOrientation(phoneInWorldSpaceMatrix, roll);
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {
      // Intentionally a no-op: accuracy changes are not acted on.
    }
  }
/** | |
* Basic touch input system. | |
* | |
* <p>Mixing touch input and gyro input results in a complicated UI so this should be used | |
* carefully. This touch system implements a basic (X, Y) -> (yaw, pitch) transform. This works | |
* for basic UI but fails in edge cases where the user tries to drag scene up or down. There is no | |
* good UX solution for this. The least bad solution is to disable pitch manipulation and only let | |
* the user adjust yaw. This example tries to limit the awkwardness by restricting pitch | |
* manipulation to +/- 45 degrees. | |
* | |
* <p>It is also important to get the order of operations correct. To match what users expect, | |
* touch interaction manipulates the scene by rotating the world by the yaw offset and tilting the | |
* camera by the pitch offset. If the order of operations is incorrect, the sensors & touch | |
* rotations will have strange interactions. The roll of the phone is also tracked so that the | |
* x & y are correctly mapped to yaw & pitch no matter how the user holds their phone. | |
* | |
* <p>This class doesn't handle any scrolling inertia but Android's | |
* this code for a nicer UI. An even more advanced UI would reproject the user's touch point into | |
* 3D and drag the Mesh as the user moves their finger. However, that requires quaternion | |
* interpolation and is beyond the scope of this sample. | |
*/ | |
static class TouchTracker implements OnTouchListener { | |
// Arbitrary touch speed number. This should be tweaked so the scene smoothly follows the | |
// finger or derived from DisplayMetrics. | |
static final float PX_PER_DEGREES = 25; | |
// Touch input won't change the pitch beyond +/- 45 degrees. This reduces awkward situations | |
// where the touch-based pitch and gyro-based pitch interact badly near the poles. | |
static final float MAX_PITCH_DEGREES = 45; | |
// With every touch event, update the accumulated degrees offset by the new pixel amount. | |
private final PointF previousTouchPointPx = new PointF(); | |
private final PointF accumulatedTouchOffsetDegrees = new PointF(); | |
// The conversion from touch to yaw & pitch requires compensating for device roll. This is set | |
// on the sensor thread and read on the UI thread. | |
private volatile float rollRadians; | |
private final Renderer renderer; | |
public TouchTracker(Renderer renderer) { | |
this.renderer = renderer; | |
} | |
/** | |
* Converts ACTION_MOVE events to pitch & yaw events while compensating for device roll. | |
* | |
* @return true if we handled the event | |
*/ | |
@Override | |
public boolean onTouch(View v, MotionEvent event) { | |
switch (event.getAction()) { | |
case MotionEvent.ACTION_DOWN: | |
// Initialize drag gesture. | |
previousTouchPointPx.set(event.getX(), event.getY()); | |
return true; | |
case MotionEvent.ACTION_MOVE: | |
// Calculate the touch delta in screen space. | |
float touchX = (event.getX() - previousTouchPointPx.x) / PX_PER_DEGREES; | |
float touchY = (event.getY() - previousTouchPointPx.y) / PX_PER_DEGREES; | |
previousTouchPointPx.set(event.getX(), event.getY()); | |
float r = rollRadians; // Copy volatile state. | |
float cr = (float) Math.cos(r); | |
float sr = (float) Math.sin(r); | |
// To convert from screen space to the 3D space, we need to adjust the drag vector based | |
// on the roll of the phone. This is standard rotationMatrix(roll) * vector math but has | |
// an inverted y-axis due to the screen-space coordinates vs GL coordinates. | |
// Handle yaw. | |
accumulatedTouchOffsetDegrees.x -= cr * touchX - sr * touchY; | |
// Handle pitch and limit it to 45 degrees. | |
accumulatedTouchOffsetDegrees.y += sr * touchX + cr * touchY; | |
accumulatedTouchOffsetDegrees.y = | |
Math.max( | |
-MAX_PITCH_DEGREES, Math.min(MAX_PITCH_DEGREES, accumulatedTouchOffsetDegrees.y)); | |
renderer.setPitchOffset(accumulatedTouchOffsetDegrees.y); | |
renderer.setYawOffset(accumulatedTouchOffsetDegrees.x); | |
return true; | |
default: | |
return false; | |
} | |
} | |
@BinderThread | |
public void setRoll(float rollRadians) { | |
// We compensate for roll by rotating in the opposite direction. | |
this.rollRadians = -rollRadians; | |
} | |
} | |
/** | |
* Standard GL Renderer implementation. The notable code is the matrix multiplication in | |
* onDrawFrame and updatePitchMatrix. | |
*/ | |
public static class Renderer implements GLSurfaceView.Renderer { | |
private final SceneRenderer scene = SceneRenderer.createFor2D(); | |
// Arbitrary vertical field of view. Adjust as desired. | |
private static final int FIELD_OF_VIEW_DEGREES = 90; | |
private static final float Z_NEAR = .1f; | |
private static final float Z_FAR = 100; | |
private final float[] projectionMatrix = new float[16]; | |
// There is no model matrix for this scene so viewProjectionMatrix is used for the mvpMatrix. | |
private final float[] viewProjectionMatrix = new float[16]; | |
// Device orientation is derived from sensor data. This is accessed in the sensor's thread and | |
// the GL thread. | |
private final float[] deviceOrientationMatrix = new float[16]; | |
// Optional pitch and yaw rotations are applied to the sensor orientation. These are accessed on | |
// the UI, sensor and GL Threads. | |
private final float[] touchPitchMatrix = new float[16]; | |
private final float[] touchYawMatrix = new float[16]; | |
private float touchPitch; | |
private float deviceRoll; | |
private final float[] displayRotationMatrix = new float[16]; | |
// viewMatrix = touchPitch * deviceOrientation * touchYaw. | |
private final float[] viewMatrix = new float[16]; | |
private final float[] tempMatrix = new float[16]; | |
private final VideoUiView uiView; | |
private final MediaLoader mediaLoader; | |
public Renderer(VideoUiView uiView, MediaLoader mediaLoader) { | |
Matrix.setIdentityM(deviceOrientationMatrix, 0); | |
Matrix.setIdentityM(displayRotationMatrix, 0); | |
Matrix.setIdentityM(touchPitchMatrix, 0); | |
Matrix.setIdentityM(touchYawMatrix, 0); | |
this.uiView = uiView; | |
this.mediaLoader = mediaLoader; | |
} | |
@Override | |
public void onSurfaceCreated(GL10 gl, EGLConfig config) { | |
scene.glInit(); | |
if (uiView != null) { | |
scene.setVideoFrameListener(uiView.getFrameListener()); | |
} | |
mediaLoader.onGlSceneReady(scene); | |
} | |
@Override | |
public void onSurfaceChanged(GL10 gl, int width, int height) { | |
GLES20.glViewport(0, 0, width, height); | |
float aspectRatio = (float) width / height; | |
float verticalFovDegrees; | |
if (aspectRatio < 1) { | |
// For portrait mode, use the max FOV for the vertical FOV. | |
verticalFovDegrees = FIELD_OF_VIEW_DEGREES; | |
} else { | |
// When in landscape mode, we need to compute the vertical FOV to pass into | |
// Matrix.perspectiveM. As a quick calculation we could use | |
// verticalFovDegrees = FIELD_OF_VIEW_DEGREES / aspectRatio. However, this results in an | |
// incorrect FOV for large values of FIELD_OF_VIEW_DEGREES. The correct calculation should | |
// compute the ratios of the tan of the vertical & horizontal FOVs. | |
double horizontalHalfFovRadians = Math.toRadians(FIELD_OF_VIEW_DEGREES / 2); | |
double horizontalHalfFovTanAngle = Math.tan(horizontalHalfFovRadians); | |
double verticalHalfFovTanAngle = horizontalHalfFovTanAngle / aspectRatio; | |
double verticalHalfFovRadians = Math.atan(verticalHalfFovTanAngle); | |
verticalFovDegrees = (float) Math.toDegrees(2 * verticalHalfFovRadians); | |
} | |
Matrix.perspectiveM(projectionMatrix, 0, verticalFovDegrees, aspectRatio, Z_NEAR, Z_FAR); | |
} | |
@Override | |
public void onDrawFrame(GL10 gl) { | |
// Combine touch & sensor data. | |
// Orientation = pitch * sensor * yaw since that is closest to what most users expect the | |
// behavior to be. | |
synchronized (this) { | |
Matrix.multiplyMM(tempMatrix, 0, deviceOrientationMatrix, 0, touchYawMatrix, 0); | |
Matrix.multiplyMM(viewMatrix, 0, touchPitchMatrix, 0, tempMatrix, 0); | |
Matrix.multiplyMM(tempMatrix, 0, displayRotationMatrix, 0, viewMatrix, 0); | |
} | |
Matrix.multiplyMM(viewProjectionMatrix, 0, projectionMatrix, 0, tempMatrix, 0); | |
scene.glDrawFrame(viewProjectionMatrix, Eye.Type.MONOCULAR); | |
} | |
/** | |
* * Adjust the rendered scene to handle portrait, landscape, etc display rotations. | |
* * | |
* * @param displayRotationDegrees should be a multiple of 90 degrees. | |
*/ | |
public synchronized void setDisplayRotation(int displayRotationDegrees) { | |
Matrix.setRotateM(displayRotationMatrix, 0, displayRotationDegrees, 0, 0, 1); | |
} | |
/** | |
* Adjusts the GL camera's rotation based on device rotation. Runs on the sensor thread. | |
*/ | |
@BinderThread | |
public synchronized void setDeviceOrientation(float[] matrix, float deviceRoll) { | |
System.arraycopy(matrix, 0, deviceOrientationMatrix, 0, deviceOrientationMatrix.length); | |
this.deviceRoll = -deviceRoll; | |
updatePitchMatrix(); | |
} | |
/** | |
* Updates the pitch matrix after a physical rotation or touch input. The pitch matrix rotation | |
* is applied on an axis that is dependent on device rotation so this must be called after | |
* either touch or sensor update. | |
*/ | |
@AnyThread | |
private void updatePitchMatrix() { | |
// The camera's pitch needs to be rotated along an axis that is parallel to the real world's | |
// horizon. This is the <1, 0, 0> axis after compensating for the device's roll. | |
Matrix.setRotateM(touchPitchMatrix, 0, | |
-touchPitch, (float) Math.cos(deviceRoll), (float) Math.sin(deviceRoll), 0); | |
} | |
/** | |
* Set the pitch offset matrix. | |
*/ | |
@UiThread | |
public synchronized void setPitchOffset(float pitchDegrees) { | |
touchPitch = pitchDegrees; | |
updatePitchMatrix(); | |
} | |
/** | |
* Set the yaw offset matrix. | |
*/ | |
@UiThread | |
public synchronized void setYawOffset(float yawDegrees) { | |
Matrix.setRotateM(touchYawMatrix, 0, -yawDegrees, 0, 1, 0); | |
} | |
} | |
/** | |
* Detects coarse-grained sensor events and saves them as a matrix. | |
*/ | |
private class DisplayOrientationListener extends OrientationEventListener { | |
public DisplayOrientationListener(Context context) { | |
super(context); | |
} | |
@Override | |
@BinderThread | |
public void onOrientationChanged(int orientation) { | |
int counterClockwiseOrientation = 360 - orientation; | |
int roundedOrientation = (((counterClockwiseOrientation + 45) % 360) / 90) * 90; | |
if (Math.abs(displayRotationDegrees - roundedOrientation) > 90) { | |
recomputeDisplayOrientation(); | |
} | |
} | |
@AnyThread | |
public void recomputeDisplayOrientation() { | |
WindowManager windowManager = | |
(WindowManager) getContext().getSystemService(Context.WINDOW_SERVICE); | |
Display display = windowManager.getDefaultDisplay(); | |
switch (display.getRotation()) { | |
case Surface.ROTATION_90: | |
displayRotationDegrees = 90; | |
break; | |
case Surface.ROTATION_180: | |
displayRotationDegrees = 180; | |
break; | |
case Surface.ROTATION_270: | |
displayRotationDegrees = 270; | |
break; | |
default: | |
displayRotationDegrees = 0; | |
} | |
renderer.setDisplayRotation(displayRotationDegrees); | |
} | |
} | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment