Bug 1237691 - Implement Oculus Head Pose Prediction

MozReview-Commit-ID: 4hbKmZycEcn
This commit is contained in:
Kearwood (Kip) Gilbert 2016-02-16 12:53:33 -08:00
parent 52003e933f
commit e3fd6329c9
14 changed files with 116 additions and 40 deletions

View File

@ -351,7 +351,7 @@ HMDPositionVRDevice::GetState()
already_AddRefed<VRPositionState>
HMDPositionVRDevice::GetImmediateState()
{
gfx::VRHMDSensorState state = mHMD->GetSensorState();
gfx::VRHMDSensorState state = mHMD->GetImmediateSensorState();
RefPtr<VRPositionState> obj = new VRPositionState(mParent, state);
return obj.forget();

View File

@ -199,6 +199,7 @@ private:
DECL_GFX_PREF(Once, "dom.vr.oculus050.enabled", VROculus050Enabled, bool, true);
DECL_GFX_PREF(Once, "dom.vr.cardboard.enabled", VRCardboardEnabled, bool, false);
DECL_GFX_PREF(Once, "dom.vr.add-test-devices", VRAddTestDevices, int32_t, 1);
DECL_GFX_PREF(Live, "dom.vr.poseprediction.enabled", VRPosePredictionEnabled, bool, false);
DECL_GFX_PREF(Live, "dom.w3c_pointer_events.enabled", PointerEventsEnabled, bool, false);
DECL_GFX_PREF(Live, "dom.w3c_touch_events.enabled", TouchEventsEnabled, int32_t, 0);

View File

@ -79,13 +79,21 @@ VRDeviceProxy::ZeroSensor()
}
VRHMDSensorState
VRDeviceProxy::GetSensorState(double timeOffset)
VRDeviceProxy::GetSensorState()
{
VRManagerChild *vm = VRManagerChild::Get();
Unused << vm->SendKeepSensorTracking(mDeviceInfo.mDeviceID);
return mSensorState;
}
VRHMDSensorState
VRDeviceProxy::GetImmediateSensorState()
{
  // XXX TODO: perform an IPC round-trip to fetch the live sensor reading;
  // for now this falls back to the predictive per-frame state.
  VRHMDSensorState currentState = GetSensorState();
  return currentState;
}
void
VRDeviceProxy::UpdateSensorState(const VRHMDSensorState& aSensorState)
{

View File

@ -28,7 +28,8 @@ public:
void UpdateSensorState(const VRHMDSensorState& aSensorState);
const VRDeviceInfo& GetDeviceInfo() const { return mDeviceInfo; }
virtual VRHMDSensorState GetSensorState(double timeOffset = 0.0);
virtual VRHMDSensorState GetSensorState();
virtual VRHMDSensorState GetImmediateSensorState();
bool SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRight,
double zNear, double zFar);

View File

@ -187,13 +187,20 @@ VRDeviceProxyOrientationFallBack::ComputeStateFromLastSensor()
}
VRHMDSensorState
VRDeviceProxyOrientationFallBack::GetSensorState(double timeOffset)
VRDeviceProxyOrientationFallBack::GetSensorState()
{
StartSensorTracking();
ComputeStateFromLastSensor();
return mSensorState;
}
VRHMDSensorState
VRDeviceProxyOrientationFallBack::GetImmediateSensorState()
{
  // XXX TODO: return the true immediate sensor reading instead of
  // delegating to the per-frame state.
  VRHMDSensorState state = GetSensorState();
  return state;
}
} // namespace gfx
} // namespace mozilla

View File

@ -23,7 +23,8 @@ public:
explicit VRDeviceProxyOrientationFallBack(const VRDeviceUpdate& aDeviceUpdate);
virtual void ZeroSensor() override;
virtual VRHMDSensorState GetSensorState(double timeOffset = 0.0) override;
virtual VRHMDSensorState GetSensorState() override;
virtual VRHMDSensorState GetImmediateSensorState() override;
// ISensorObserver interface
void Notify(const hal::SensorData& SensorData) override;

View File

@ -264,7 +264,8 @@ public:
virtual bool KeepSensorTracking() = 0;
virtual void NotifyVsync(const TimeStamp& aVsyncTimestamp) = 0;
virtual VRHMDSensorState GetSensorState(double timeOffset = 0.0) = 0;
virtual VRHMDSensorState GetSensorState() = 0;
virtual VRHMDSensorState GetImmediateSensorState() = 0;
virtual void ZeroSensor() = 0;

View File

@ -51,7 +51,7 @@ HMDInfoCardboard::HMDInfoCardboard()
VRHMDSensorState
HMDInfoCardboard::GetSensorState(double timeOffset)
HMDInfoCardboard::GetSensorState()
{
// Actual sensor state is calculated on the main thread,
// within VRDeviceProxyOrientationFallBack
@ -60,6 +60,13 @@ HMDInfoCardboard::GetSensorState(double timeOffset)
return result;
}
VRHMDSensorState
HMDInfoCardboard::GetImmediateSensorState()
{
  // Cardboard's pose is computed on the main thread (see GetSensorState),
  // so the immediate state is the same as the per-frame state.
  VRHMDSensorState state = GetSensorState();
  return state;
}
void
HMDInfoCardboard::ZeroSensor()
{

View File

@ -25,7 +25,8 @@ public:
bool SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRight,
double zNear, double zFar) override;
VRHMDSensorState GetSensorState(double timeOffset) override;
virtual VRHMDSensorState GetSensorState() override;
virtual VRHMDSensorState GetImmediateSensorState() override;
void ZeroSensor() override;
bool KeepSensorTracking() override;
void NotifyVsync(const TimeStamp& aVsyncTimestamp) override;

View File

@ -41,6 +41,7 @@ static pfn_ovr_Destroy ovr_Destroy = nullptr;
static pfn_ovr_RecenterPose ovr_RecenterPose = nullptr;
static pfn_ovr_GetTrackingState ovr_GetTrackingState = nullptr;
static pfn_ovr_GetPredictedDisplayTime ovr_GetPredictedDisplayTime = nullptr;
static pfn_ovr_GetFovTextureSize ovr_GetFovTextureSize = nullptr;
static pfn_ovr_GetRenderDesc ovr_GetRenderDesc = nullptr;
@ -170,6 +171,7 @@ InitializeOculusCAPI()
REQUIRE_FUNCTION(ovr_RecenterPose);
REQUIRE_FUNCTION(ovr_GetTrackingState);
REQUIRE_FUNCTION(ovr_GetPredictedDisplayTime);
REQUIRE_FUNCTION(ovr_GetFovTextureSize);
REQUIRE_FUNCTION(ovr_GetRenderDesc);
@ -199,26 +201,6 @@ static bool InitializeOculusCAPI()
#endif
// Compute the per-eye render poses: rotate each eye's view offset by the
// head orientation, then translate by the head position. Both output poses
// share the head's orientation.
static void
do_CalcEyePoses(ovrPosef headPose,
                const ovrVector3f hmdToEyeViewOffset[2],
                ovrPosef outEyePoses[2])
{
  if (!hmdToEyeViewOffset || !outEyePoses) {
    return;
  }
  // The head orientation is loop-invariant; build the quaternion once.
  gfx::Quaternion headRot(headPose.Orientation.x,
                          headPose.Orientation.y,
                          headPose.Orientation.z,
                          headPose.Orientation.w);
  for (uint32_t eye = 0; eye < 2; ++eye) {
    Point3D eyeOffset(hmdToEyeViewOffset[eye].x,
                      hmdToEyeViewOffset[eye].y,
                      hmdToEyeViewOffset[eye].z);
    Point3D rotated = headRot.RotatePoint(eyeOffset);
    outEyePoses[eye].Orientation = headPose.Orientation;
    outEyePoses[eye].Position.x = rotated.x + headPose.Position.x;
    outEyePoses[eye].Position.y = rotated.y + headPose.Position.y;
    outEyePoses[eye].Position.z = rotated.z + headPose.Position.z;
  }
}
ovrFovPort
ToFovPort(const gfx::VRFieldOfView& aFOV)
{
@ -280,6 +262,10 @@ HMDInfoOculus::HMDInfoOculus(ovrSession aSession)
mDeviceInfo.mIsFakeScreen = true;
SetFOV(mDeviceInfo.mRecommendedEyeFOV[VRDeviceInfo::Eye_Left], mDeviceInfo.mRecommendedEyeFOV[VRDeviceInfo::Eye_Right], 0.01, 10000.0);
for (int i = 0; i < kMaxLatencyFrames; i++) {
mLastSensorState[i].Clear();
}
}
void
@ -355,15 +341,33 @@ HMDInfoOculus::ZeroSensor()
ovr_RecenterPose(mSession);
}
// Returns the sensor state to use for rendering the current frame.
// When pose prediction is enabled, the pose is predicted forward to the
// frame's expected display time; otherwise the most recent reading is used.
VRHMDSensorState
HMDInfoOculus::GetSensorState()
{
  // A display time of 0.0 asks the timeOffset overload for the current
  // (unpredicted) tracking state. Use a double literal, not 0.0f.
  double frameTiming = 0.0;
  if (gfxPrefs::VRPosePredictionEnabled()) {
    frameTiming = ovr_GetPredictedDisplayTime(mSession, mInputFrameID);
  }
  // Initialize directly instead of default-constructing then assigning.
  VRHMDSensorState result = GetSensorState(frameTiming);
  result.inputFrameID = mInputFrameID;
  // Cache the state keyed by frame ID so SubmitFrame() can recover the
  // exact pose that was used to render this frame.
  mLastSensorState[mInputFrameID % kMaxLatencyFrames] = result;
  return result;
}
VRHMDSensorState
HMDInfoOculus::GetImmediateSensorState()
{
  // Request the sensor state with no pose prediction applied.
  const double kNoPrediction = 0.0;
  return GetSensorState(kNoPrediction);
}
VRHMDSensorState
HMDInfoOculus::GetSensorState(double timeOffset)
{
VRHMDSensorState result;
result.Clear();
// XXX this is the wrong time base for timeOffset; we need to figure out how to synchronize
// the Oculus time base and the browser one.
ovrTrackingState state = ovr_GetTrackingState(mSession, ovr_GetTimeInSeconds() + timeOffset, true);
ovrTrackingState state = ovr_GetTrackingState(mSession, timeOffset, true);
ovrPoseStatef& pose(state.HeadPose);
result.timestamp = pose.TimeInSeconds;
@ -400,8 +404,6 @@ HMDInfoOculus::GetSensorState(double timeOffset)
result.linearAcceleration[1] = pose.LinearAcceleration.y;
result.linearAcceleration[2] = pose.LinearAcceleration.z;
}
mLastTrackingState = state;
return result;
}
@ -525,6 +527,13 @@ HMDInfoOculus::SubmitFrame(RenderTargetSet *aRTSet, int32_t aInputFrameID)
MOZ_ASSERT(rts->hmd != nullptr);
MOZ_ASSERT(rts->textureSet != nullptr);
VRHMDSensorState sensorState = mLastSensorState[aInputFrameID % kMaxLatencyFrames];
// It is possible to get a cache miss on mLastSensorState if latency is
// longer than kMaxLatencyFrames. An optimization would be to find a frame
// that is closer than the one selected with the modulus.
// If we hit this; however, latency is already so high that the site is
// un-viewable and a more accurate pose prediction is not likely to
// compensate.
ovrLayerEyeFov layer;
layer.Header.Type = ovrLayerType_EyeFov;
layer.Header.Flags = 0;
@ -545,7 +554,23 @@ HMDInfoOculus::SubmitFrame(RenderTargetSet *aRTSet, int32_t aInputFrameID)
const Point3D& r = rts->hmd->mDeviceInfo.mEyeTranslation[1];
const ovrVector3f hmdToEyeViewOffset[2] = { { l.x, l.y, l.z },
{ r.x, r.y, r.z } };
do_CalcEyePoses(rts->hmd->mLastTrackingState.HeadPose.ThePose, hmdToEyeViewOffset, layer.RenderPose);
for (uint32_t i = 0; i < 2; ++i) {
gfx::Quaternion o(sensorState.orientation[0],
sensorState.orientation[1],
sensorState.orientation[2],
sensorState.orientation[3]);
Point3D vo(hmdToEyeViewOffset[i].x, hmdToEyeViewOffset[i].y, hmdToEyeViewOffset[i].z);
Point3D p = o.RotatePoint(vo);
layer.RenderPose[i].Orientation.x = o.x;
layer.RenderPose[i].Orientation.y = o.y;
layer.RenderPose[i].Orientation.z = o.z;
layer.RenderPose[i].Orientation.w = o.w;
layer.RenderPose[i].Position.x = p.x + sensorState.position[0];
layer.RenderPose[i].Position.y = p.y + sensorState.position[1];
layer.RenderPose[i].Position.z = p.z + sensorState.position[2];
}
ovrLayerHeader *layers = &layer.Header;
ovrResult orv = ovr_SubmitFrame(mSession, aInputFrameID, nullptr, &layers, 1);

View File

@ -28,7 +28,8 @@ public:
bool SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRight,
double zNear, double zFar) override;
VRHMDSensorState GetSensorState(double timeOffset) override;
virtual VRHMDSensorState GetSensorState() override;
virtual VRHMDSensorState GetImmediateSensorState() override;
void ZeroSensor() override;
bool KeepSensorTracking() override;
void NotifyVsync(const TimeStamp& aVsyncTimestamp) override;
@ -67,8 +68,19 @@ protected:
ovrSession mSession;
ovrHmdDesc mDesc;
ovrFovPort mFOVPort[2];
ovrTrackingState mLastTrackingState;
int mInputFrameID;
VRHMDSensorState GetSensorState(double timeOffset);
// The maximum number of frames of latency that we would expect before we
// should give up applying pose prediction.
// If latency is greater than one second, then the experience is not likely
// to be corrected by pose prediction. Setting this value too
// high may result in unnecessary memory allocation.
// As the current fastest refresh rate is 90hz, 100 is selected as a
// conservative value.
static const int kMaxLatencyFrames = 100;
VRHMDSensorState mLastSensorState[kMaxLatencyFrames];
int32_t mInputFrameID;
};
} // namespace impl

View File

@ -462,14 +462,20 @@ HMDInfoOculus050::ZeroSensor()
}
VRHMDSensorState
HMDInfoOculus050::GetSensorState(double timeOffset)
HMDInfoOculus050::GetImmediateSensorState()
{
  // SDK 0.5.0 samples at the current time with no prediction offset (see
  // GetSensorState), so immediate and per-frame state are the same.
  VRHMDSensorState state = GetSensorState();
  return state;
}
VRHMDSensorState
HMDInfoOculus050::GetSensorState()
{
VRHMDSensorState result;
result.Clear();
// XXX this is the wrong time base for timeOffset; we need to figure out how to synchronize
// the Oculus time base and the browser one.
ovrTrackingState state = ovrHmd_GetTrackingState(mHMD, ovr_GetTimeInSeconds() + timeOffset);
ovrTrackingState state = ovrHmd_GetTrackingState(mHMD, ovr_GetTimeInSeconds());
ovrPoseStatef& pose(state.HeadPose);
result.timestamp = pose.TimeInSeconds;

View File

@ -26,7 +26,8 @@ public:
bool SetFOV(const VRFieldOfView& aFOVLeft, const VRFieldOfView& aFOVRight,
double zNear, double zFar) override;
VRHMDSensorState GetSensorState(double timeOffset) override;
virtual VRHMDSensorState GetSensorState() override;
virtual VRHMDSensorState GetImmediateSensorState() override;
void ZeroSensor() override;
bool KeepSensorTracking() override;
void NotifyVsync(const TimeStamp& aVsyncTimestamp) override;

View File

@ -4782,6 +4782,11 @@ pref("dom.vr.oculus050.enabled", true);
pref("dom.vr.cardboard.enabled", false);
// 0 = never; 1 = only if real devices aren't there; 2 = always
pref("dom.vr.add-test-devices", 0);
// Pose prediction reduces latency effects by returning future predicted HMD
// poses to callers of the WebVR API. This currently only has an effect for
// Oculus Rift on SDK 0.8 or greater. It is disabled by default for now due to
// frame uniformity issues with e10s.
pref("dom.vr.poseprediction.enabled", false);
// true = show the VR textures in our compositing output; false = don't.
// true might have performance impact
pref("gfx.vr.mirror-textures", false);