fix(client): 🐛 Fix Pico wrong prediction; refactoring (#2612)
zmerp authored Jan 13, 2025
1 parent 6def974 commit 4f7c9e9
Showing 6 changed files with 115 additions and 57 deletions.
38 changes: 13 additions & 25 deletions alvr/client_core/src/lib.rs
@@ -20,7 +20,7 @@ pub mod video_decoder;

use alvr_common::{
dbg_client_core, error,
glam::{Quat, UVec2, Vec2, Vec3},
glam::{UVec2, Vec2, Vec3},
parking_lot::{Mutex, RwLock},
warn, ConnectionState, DeviceMotion, LifecycleState, Pose, HAND_LEFT_ID, HAND_RIGHT_ID,
HEAD_ID,
@@ -247,7 +247,7 @@ impl ClientCoreContext
motion.angular_velocity *= velocity_multiplier;

if *id == *HEAD_ID {
*motion = predict_motion(target_timestamp, poll_timestamp, *motion);
*motion = motion.predict(poll_timestamp, target_timestamp);

let mut head_pose_queue = self.connection_context.head_pose_queue.write();

@@ -264,7 +264,7 @@ impl ClientCoreContext
} else if let Some(stats) = &*self.connection_context.statistics_manager.lock() {
let tracker_timestamp = poll_timestamp + stats.tracker_prediction_offset();

*motion = predict_motion(tracker_timestamp, poll_timestamp, *motion);
*motion = motion.predict(poll_timestamp, tracker_timestamp);
}
}

@@ -310,6 +310,16 @@ impl ClientCoreContext
}
}

pub fn get_total_prediction_offset(&self) -> Duration {
dbg_client_core!("get_total_prediction_offset");

if let Some(stats) = &*self.connection_context.statistics_manager.lock() {
stats.average_total_pipeline_latency()
} else {
Duration::ZERO
}
}

/// The callback should return true if the frame was successfully submitted to the decoder
pub fn set_decoder_input_callback(&self, callback: Box<DecoderCallback>) {
dbg_client_core!("set_decoder_input_callback");
@@ -395,25 +405,3 @@ impl Drop for ClientCoreContext
alvr_system_info::set_wifi_lock(false);
}
}

pub fn predict_motion(
target_timestamp: Duration,
current_timestamp: Duration,
motion: DeviceMotion,
) -> DeviceMotion {
let delta_time_s = target_timestamp
.saturating_sub(current_timestamp)
.as_secs_f32();

let delta_position = motion.linear_velocity * delta_time_s;
let delta_orientation = Quat::from_scaled_axis(motion.angular_velocity * delta_time_s);

DeviceMotion {
pose: Pose {
orientation: delta_orientation * motion.pose.orientation,
position: motion.pose.position + delta_position,
},
linear_velocity: motion.linear_velocity,
angular_velocity: motion.angular_velocity,
}
}
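
The removed free function lives on as a method on DeviceMotion (the new call sites above use motion.predict(from, to)), presumably defined in alvr_common, the one changed file whose diff is not shown here. Below is a minimal sketch reconstructed from the removed body; the signed delta, replacing saturating_sub, is an assumption, needed so that passing a `to` earlier than `from` predicts backwards, as the Pico workaround in stream.rs below requires.

use std::time::Duration;

use glam::Quat;

// Sketch only, placed inside alvr_common where DeviceMotion and Pose are defined.
impl DeviceMotion {
    // Hypothetical reconstruction: name and argument order come from the call
    // sites in this diff; the body mirrors the removed predict_motion.
    pub fn predict(&self, from: Duration, to: Duration) -> DeviceMotion {
        // Signed delta (assumption): negative when `to` is earlier than `from`.
        let delta_time_s = to.as_secs_f32() - from.as_secs_f32();

        let delta_position = self.linear_velocity * delta_time_s;
        let delta_orientation = Quat::from_scaled_axis(self.angular_velocity * delta_time_s);

        DeviceMotion {
            pose: Pose {
                orientation: delta_orientation * self.pose.orientation,
                position: self.pose.position + delta_position,
            },
            linear_velocity: self.linear_velocity,
            angular_velocity: self.angular_velocity,
        }
    }
}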
40 changes: 26 additions & 14 deletions alvr/client_openxr/src/interaction.rs
@@ -11,7 +11,7 @@ use alvr_common::{glam::Vec3, *};
use alvr_packets::{ButtonEntry, ButtonValue, StreamConfig, ViewParams};
use alvr_session::{BodyTrackingSourcesConfig, FaceTrackingSourcesConfig};
use openxr as xr;
use std::collections::HashMap;
use std::{collections::HashMap, time::Duration};
use xr::SpaceLocationFlags;

const IPD_CHANGE_EPS: f32 = 0.001;
@@ -474,11 +474,13 @@ pub fn get_head_data(
platform: Platform,
stage_reference_space: &xr::Space,
view_reference_space: &xr::Space,
time: xr::Time,
time: Duration,
last_view_params: &[ViewParams; 2],
) -> Option<(DeviceMotion, Option<[ViewParams; 2]>)> {
let xr_time = crate::to_xr_time(time);

let (head_location, head_velocity) = view_reference_space
.relate(stage_reference_space, time)
.relate(stage_reference_space, xr_time)
.ok()?;

if !head_location
@@ -491,7 +493,7 @@
let (view_flags, views) = xr_session
.locate_views(
xr::ViewConfigurationType::PRIMARY_STEREO,
time,
xr_time,
stage_reference_space,
)
.ok()?;
@@ -550,17 +552,19 @@ pub fn get_hand_data(
xr_session: &xr::Session<xr::OpenGlEs>,
platform: Platform,
reference_space: &xr::Space,
time: xr::Time,
time: Duration,
hand_source: &HandInteraction,
last_controller_pose: &mut Pose,
last_palm_pose: &mut Pose,
) -> (Option<DeviceMotion>, Option<[Pose; 26]>) {
let xr_time = crate::to_xr_time(time);

let controller_motion = if hand_source
.grip_action
.is_active(xr_session, xr::Path::NULL)
.unwrap_or(false)
{
if let Ok((location, velocity)) = hand_source.grip_space.relate(reference_space, time) {
if let Ok((location, velocity)) = hand_source.grip_space.relate(reference_space, xr_time) {
if location
.location_flags
.contains(xr::SpaceLocationFlags::ORIENTATION_VALID)
@@ -596,8 +600,10 @@
};

let hand_joints = if let Some(tracker) = &hand_source.skeleton_tracker {
let xr_now = crate::xr_runtime_now(xr_session.instance()).unwrap_or(xr_time);

if let Some(joint_locations) = reference_space
.locate_hand_joints(tracker, time)
.locate_hand_joints(tracker, xr_now)
.ok()
.flatten()
{
@@ -677,14 +683,16 @@ pub fn get_eye_gazes(
xr_session: &xr::Session<xr::OpenGlEs>,
sources: &FaceSources,
reference_space: &xr::Space,
time: xr::Time,
time: Duration,
) -> [Option<Pose>; 2] {
let xr_time = crate::to_xr_time(time);

'fb_eyes: {
let Some(tracker) = &sources.eye_tracker_fb else {
break 'fb_eyes;
};

if let Ok(gazes) = tracker.get_eye_gazes(reference_space, time) {
if let Ok(gazes) = tracker.get_eye_gazes(reference_space, xr_time) {
return [
gazes[0].map(crate::from_xr_pose),
gazes[1].map(crate::from_xr_pose),
@@ -702,7 +710,7 @@
return [None, None];
}

if let Ok(location) = eyes_space.locate(reference_space, time) {
if let Ok(location) = eyes_space.locate(reference_space, xr_time) {
[
location
.location_flags
@@ -715,11 +723,13 @@
}
}

pub fn get_fb_face_expression(context: &FaceSources, time: xr::Time) -> Option<Vec<f32>> {
pub fn get_fb_face_expression(context: &FaceSources, time: Duration) -> Option<Vec<f32>> {
let xr_time = crate::to_xr_time(time);

context
.face_tracker_fb
.as_ref()
.and_then(|t| t.get_face_expression_weights(time).ok().flatten())
.and_then(|t| t.get_face_expression_weights(xr_time).ok().flatten())
.map(|weights| weights.into_iter().collect())
}

@@ -767,12 +777,14 @@ pub fn get_fb_body_skeleton(

pub fn get_fb_body_tracking_points(
reference_space: &xr::Space,
time: xr::Time,
time: Duration,
body_tracker: &BodyTrackerFB,
joint_count: usize,
) -> Vec<(u64, DeviceMotion)> {
let xr_time = crate::to_xr_time(time);

if let Some(joint_locations) = body_tracker
.locate_body_joints(time, reference_space, joint_count)
.locate_body_joints(xr_time, reference_space, joint_count)
.ok()
.flatten()
{
6 changes: 5 additions & 1 deletion alvr/client_openxr/src/lib.rs
@@ -85,6 +85,10 @@ fn to_xr_fov(f: Fov) -> xr::Fovf {
}
}

fn from_xr_time(timestamp: xr::Time) -> Duration {
Duration::from_nanos(timestamp.as_nanos() as _)
}

fn to_xr_time(timestamp: Duration) -> xr::Time {
xr::Time::from_nanos(timestamp.as_nanos() as _)
}
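
These helpers make the Duration and xr::Time conversion symmetric, since both are nanosecond counts; this is what lets the interaction.rs functions above accept Duration and convert only at the OpenXR boundary. A quick sanity sketch, hypothetical and not part of the commit, assuming both helpers are in scope:

fn _check_round_trip() {
    // Holds for timestamps that fit in i64 nanoseconds.
    let t = Duration::from_nanos(1_000_000);
    assert_eq!(from_xr_time(to_xr_time(t)), t);
}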
@@ -452,7 +456,7 @@ pub fn entry_point() {
let (layer, display_time) = if let Some(stream) = &mut stream_context {
stream.render(frame_interval, vsync_time)
} else {
(lobby.render(frame_state.predicted_display_time), vsync_time)
(lobby.render(vsync_time), vsync_time)
};

let layers: &[&xr::CompositionLayerBase<_>] =
14 changes: 8 additions & 6 deletions alvr/client_openxr/src/lobby.rs
@@ -6,7 +6,7 @@ use alvr_client_core::graphics::{GraphicsContext, LobbyRenderer, LobbyViewParams
use alvr_common::{glam::UVec2, parking_lot::RwLock, Pose};
use alvr_system_info::Platform;
use openxr as xr;
use std::{rc::Rc, sync::Arc};
use std::{rc::Rc, sync::Arc, time::Duration};

// todo: add interaction?
pub struct Lobby {
@@ -85,12 +85,14 @@ impl Lobby {
self.renderer.update_hud_message(message);
}

pub fn render(&mut self, predicted_display_time: xr::Time) -> ProjectionLayerBuilder {
pub fn render(&mut self, vsync_time: Duration) -> ProjectionLayerBuilder {
let xr_vsync_time = xr::Time::from_nanos(vsync_time.as_nanos() as _);

let (flags, maybe_views) = self
.xr_session
.locate_views(
xr::ViewConfigurationType::PRIMARY_STEREO,
predicted_display_time,
xr_vsync_time,
&self.reference_space,
)
.unwrap();
@@ -108,7 +110,7 @@
&self.xr_session,
self.platform,
&self.reference_space,
predicted_display_time,
vsync_time,
&self.interaction_ctx.read().hands_interaction[0],
&mut Pose::default(),
&mut Pose::default(),
@@ -117,7 +119,7 @@
&self.xr_session,
self.platform,
&self.reference_space,
predicted_display_time,
vsync_time,
&self.interaction_ctx.read().hands_interaction[1],
&mut Pose::default(),
&mut Pose::default(),
@@ -132,7 +134,7 @@
.and_then(|(tracker, joint_count)| {
interaction::get_fb_body_skeleton(
&self.reference_space,
predicted_display_time,
xr_vsync_time,
tracker,
*joint_count,
)
49 changes: 39 additions & 10 deletions alvr/client_openxr/src/stream.rs
@@ -457,22 +457,42 @@ fn stream_input_loop(
return;
}

let Some(xr_now) = crate::xr_runtime_now(xr_session.instance()) else {
let Some(now) = crate::xr_runtime_now(xr_session.instance()).map(crate::from_xr_time)
else {
error!("Cannot poll tracking: invalid time");
return;
};

// All Pico headsets seem to have a problem with velocity values, to different degrees.
// Calculating velocities by differentiation yields jittery results. In the following
// workaround, we let the runtime predict to the target time, then manually predict back
// in time so that poses are returned at the "now" timestamp, as required by the
// ClientCore interface. This doesn't fix the issue completely, but most of the predicted
// time interval will be correct.
let target_time = if platform.is_pico() {
now + core_ctx.get_total_prediction_offset()
} else {
now
};

let Some((head_motion, local_views)) = interaction::get_head_data(
&xr_session,
platform,
stage_reference_space,
view_reference_space,
xr_now,
target_time,
&last_view_params,
) else {
continue;
};

let head_motion = if platform.is_pico() {
// Predict back in time, matching the prediction that is done later in the pipeline
head_motion.predict(target_time, now)
} else {
head_motion
};

if let Some(views) = local_views {
core_ctx.send_view_params(views);
last_view_params = views;
@@ -482,25 +502,34 @@

device_motions.push((*HEAD_ID, head_motion));

let (left_hand_motion, left_hand_skeleton) = crate::interaction::get_hand_data(
let (mut left_hand_motion, left_hand_skeleton) = crate::interaction::get_hand_data(
&xr_session,
platform,
stage_reference_space,
xr_now,
target_time,
&int_ctx.hands_interaction[0],
&mut last_controller_poses[0],
&mut last_palm_poses[0],
);
let (right_hand_motion, right_hand_skeleton) = crate::interaction::get_hand_data(
let (mut right_hand_motion, right_hand_skeleton) = crate::interaction::get_hand_data(
&xr_session,
platform,
stage_reference_space,
xr_now,
target_time,
&int_ctx.hands_interaction[1],
&mut last_controller_poses[1],
&mut last_palm_poses[1],
);

if platform.is_pico() {
if let Some(left_hand_motion) = &mut left_hand_motion {
*left_hand_motion = left_hand_motion.predict(target_time, now);
}
if let Some(right_hand_motion) = &mut right_hand_motion {
*right_hand_motion = right_hand_motion.predict(target_time, now);
}
}

// Note: When multimodal input is enabled, we are sure that when free hands are used
// (not holding controllers) the controller data is None.
if int_ctx.multimodal_hands_enabled || left_hand_skeleton.is_none() {
Expand All @@ -519,24 +548,24 @@ fn stream_input_loop(
&xr_session,
&int_ctx.face_sources,
stage_reference_space,
xr_now,
now,
),
fb_face_expression: interaction::get_fb_face_expression(&int_ctx.face_sources, xr_now),
fb_face_expression: interaction::get_fb_face_expression(&int_ctx.face_sources, now),
htc_eye_expression: interaction::get_htc_eye_expression(&int_ctx.face_sources),
htc_lip_expression: interaction::get_htc_lip_expression(&int_ctx.face_sources),
};

if let Some((tracker, joint_count)) = &int_ctx.body_sources.body_tracker_fb {
device_motions.append(&mut interaction::get_fb_body_tracking_points(
stage_reference_space,
xr_now,
now,
tracker,
*joint_count,
));
}

core_ctx.send_tracking(
Duration::from_nanos(xr_now.as_nanos() as u64),
Duration::from_nanos(now.as_nanos() as u64),
device_motions,
[left_hand_skeleton, right_hand_skeleton],
face_data,
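
To make the Pico round trip concrete, here is a toy calculation with hypothetical numbers, using plain glam and none of the ALVR types: with the head moving at 1 m/s and a 50 ms average pipeline latency, the pose the runtime predicts for now + 50 ms is rewound linearly by 50 ms so it can be stamped "now"; the server's own prediction then re-covers roughly the same interval.

use glam::Vec3;
use std::time::Duration;

fn main() {
    // Hypothetical: head moves at 1 m/s along +X; 50 ms total pipeline latency.
    let linear_velocity = Vec3::X;
    let offset = Duration::from_millis(50);

    // Toy value for the position the runtime predicts at now + 50 ms.
    let position_at_target = Vec3::new(1.05, 0.0, 0.0);

    // Linear rewind to "now", mirroring head_motion.predict(target_time, now).
    let position_at_now = position_at_target - linear_velocity * offset.as_secs_f32();

    assert!((position_at_now.x - 1.0).abs() < 1e-6);
}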
(Diff for the sixth changed file, 24 additions and 1 deletion, not loaded.)
