Only use physical coords internally in bevy_ui (#16375)
# Objective

We switch back and forth between logical and physical coordinates all
over the place. Systems have to query for cameras and `UiScale` when
they shouldn't need to. It's confusing and fragile, and new scale-factor
bugs are found constantly.

## Solution

* Use physical coordinates wherever possible in `bevy_ui`.
* Store physical coords in `ComputedNode` and tear out all the unneeded
scale factor calculations and queries.
* Add an `inverse_scale_factor` field to `ComputedNode` and mark nodes as
changed when their scale factor changes (see the sketch after this list).
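
A minimal, abridged sketch of the resulting shape of `ComputedNode`. The fields shown appear in this PR; the derives and the omission of the remaining fields (border, border radius, outlines, ...) are simplifications, not the real definition:

```rust
use bevy::prelude::*;

/// Abridged sketch of `ComputedNode` after this change -- the real component
/// has additional fields, all of which now also store physical pixels.
#[derive(Component, Debug, Clone, Copy)]
pub struct ComputedNode {
    /// Size of the node as computed by the layout algorithm, in physical pixels.
    pub size: Vec2,
    /// Size of the node before rounding, in physical pixels.
    pub unrounded_size: Vec2,
    /// New: reciprocal of the target's scale factor. Multiplying the physical
    /// values above by this gives their logical equivalents.
    pub inverse_scale_factor: f32,
}
```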

## Migration Guide

`ComputedNode`'s fields and methods now use physical coordinates.
`ComputedNode` also has a new field, `inverse_scale_factor`. Multiply the
physical coordinates by `inverse_scale_factor` to get the logical
values.
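
For example, a system that still wants logical sizes can convert them itself. This is a minimal sketch: the system and its logging are hypothetical, only the `size()` accessor and the `inverse_scale_factor` field come from `ComputedNode`:

```rust
use bevy::prelude::*;

// Hypothetical example system: `ComputedNode` now reports physical pixels, so
// multiply by `inverse_scale_factor` to recover logical values.
fn print_node_sizes(nodes: Query<(Entity, &ComputedNode)>) {
    for (entity, node) in &nodes {
        let physical_size = node.size();
        let logical_size = physical_size * node.inverse_scale_factor;
        info!("{entity}: physical {physical_size}, logical {logical_size}");
    }
}
```

Add it to a schedule as usual, e.g. `app.add_systems(Update, print_node_sizes)`.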

---------

Co-authored-by: atlv <email@atlasdostal.com>
ickshonpe and atlv24 authored Nov 22, 2024
1 parent 513be52 commit 8a3a8b5
Showing 15 changed files with 195 additions and 243 deletions.
13 changes: 4 additions & 9 deletions crates/bevy_ui/src/focus.rs
@@ -1,6 +1,5 @@
 use crate::{
-    CalculatedClip, ComputedNode, DefaultUiCamera, ResolvedBorderRadius, TargetCamera, UiScale,
-    UiStack,
+    CalculatedClip, ComputedNode, DefaultUiCamera, ResolvedBorderRadius, TargetCamera, UiStack,
 };
 use bevy_ecs::{
     change_detection::DetectChangesMut,
@@ -158,7 +157,6 @@ pub fn ui_focus_system(
     windows: Query<&Window>,
     mouse_button_input: Res<ButtonInput<MouseButton>>,
     touches_input: Res<Touches>,
-    ui_scale: Res<UiScale>,
     ui_stack: Res<UiStack>,
     mut node_query: Query<NodeQuery>,
 ) {
@@ -201,19 +199,16 @@ pub fn ui_focus_system(
             };
 
             let viewport_position = camera
-                .logical_viewport_rect()
-                .map(|rect| rect.min)
+                .physical_viewport_rect()
+                .map(|rect| rect.min.as_vec2())
                 .unwrap_or_default();
             windows
                 .get(window_ref.entity())
                 .ok()
-                .and_then(Window::cursor_position)
+                .and_then(Window::physical_cursor_position)
                 .or_else(|| touches_input.first_pressed_position())
                 .map(|cursor_position| (entity, cursor_position - viewport_position))
         })
-        // The cursor position returned by `Window` only takes into account the window scale factor and not `UiScale`.
-        // To convert the cursor position to logical UI viewport coordinates we have to divide it by `UiScale`.
-        .map(|(entity, cursor_position)| (entity, cursor_position / ui_scale.0))
         .collect();
 
     // prepare an iterator that contains all the nodes that have the cursor in their rect,
62 changes: 35 additions & 27 deletions crates/bevy_ui/src/layout/mod.rs
@@ -1,7 +1,7 @@
 use crate::{
     experimental::{UiChildren, UiRootNodes},
     BorderRadius, ComputedNode, ContentSize, DefaultUiCamera, Display, Node, Outline, OverflowAxis,
-    ScrollPosition, TargetCamera, UiScale,
+    ScrollPosition, TargetCamera, UiScale, Val,
 };
 use bevy_ecs::{
     change_detection::{DetectChanges, DetectChangesMut},
@@ -343,31 +343,33 @@ with UI components as a child of an entity without UI components, your UI layout
             maybe_scroll_position,
         )) = node_transform_query.get_mut(entity)
         {
-            let Ok((layout, unrounded_unscaled_size)) = ui_surface.get_layout(entity) else {
+            let Ok((layout, unrounded_size)) = ui_surface.get_layout(entity) else {
                 return;
             };
 
-            let layout_size =
-                inverse_target_scale_factor * Vec2::new(layout.size.width, layout.size.height);
-            let unrounded_size = inverse_target_scale_factor * unrounded_unscaled_size;
-            let layout_location =
-                inverse_target_scale_factor * Vec2::new(layout.location.x, layout.location.y);
+            let layout_size = Vec2::new(layout.size.width, layout.size.height);
+
+            let layout_location = Vec2::new(layout.location.x, layout.location.y);
 
             // The position of the center of the node, stored in the node's transform
             let node_center =
                 layout_location - parent_scroll_position + 0.5 * (layout_size - parent_size);
 
             // only trigger change detection when the new values are different
-            if node.size != layout_size || node.unrounded_size != unrounded_size {
+            if node.size != layout_size
+                || node.unrounded_size != unrounded_size
+                || node.inverse_scale_factor != inverse_target_scale_factor
+            {
                 node.size = layout_size;
                 node.unrounded_size = unrounded_size;
+                node.inverse_scale_factor = inverse_target_scale_factor;
             }
 
             let taffy_rect_to_border_rect = |rect: taffy::Rect<f32>| BorderRect {
-                left: rect.left * inverse_target_scale_factor,
-                right: rect.right * inverse_target_scale_factor,
-                top: rect.top * inverse_target_scale_factor,
-                bottom: rect.bottom * inverse_target_scale_factor,
+                left: rect.left,
+                right: rect.right,
+                top: rect.top,
+                bottom: rect.bottom,
             };
 
             node.bypass_change_detection().border = taffy_rect_to_border_rect(layout.border);
@@ -377,28 +379,35 @@ with UI components as a child of an entity without UI components, your UI layout
 
             if let Some(border_radius) = maybe_border_radius {
                 // We don't trigger change detection for changes to border radius
-                node.bypass_change_detection().border_radius =
-                    border_radius.resolve(node.size, viewport_size);
+                node.bypass_change_detection().border_radius = border_radius.resolve(
+                    node.size,
+                    viewport_size,
+                    inverse_target_scale_factor.recip(),
+                );
             }
 
             if let Some(outline) = maybe_outline {
                 // don't trigger change detection when only outlines are changed
                 let node = node.bypass_change_detection();
                 node.outline_width = if style.display != Display::None {
-                    outline
-                        .width
-                        .resolve(node.size().x, viewport_size)
-                        .unwrap_or(0.)
-                        .max(0.)
+                    match outline.width {
+                        Val::Px(w) => Val::Px(w / inverse_target_scale_factor),
+                        width => width,
+                    }
+                    .resolve(node.size().x, viewport_size)
+                    .unwrap_or(0.)
+                    .max(0.)
                 } else {
                     0.
                 };
 
-                node.outline_offset = outline
-                    .offset
-                    .resolve(node.size().x, viewport_size)
-                    .unwrap_or(0.)
-                    .max(0.);
+                node.outline_offset = match outline.offset {
+                    Val::Px(offset) => Val::Px(offset / inverse_target_scale_factor),
+                    offset => offset,
+                }
+                .resolve(node.size().x, viewport_size)
+                .unwrap_or(0.)
+                .max(0.);
             }
 
             if transform.translation.truncate() != node_center {
@@ -422,8 +431,7 @@ with UI components as a child of an entity without UI components, your UI layout
                 })
                 .unwrap_or_default();
 
-            let content_size = Vec2::new(layout.content_size.width, layout.content_size.height)
-                * inverse_target_scale_factor;
+            let content_size = Vec2::new(layout.content_size.width, layout.content_size.height);
             let max_possible_offset = (content_size - layout_size).max(Vec2::ZERO);
             let clamped_scroll_position = scroll_position.clamp(Vec2::ZERO, max_possible_offset);
 
@@ -1131,7 +1139,7 @@ mod tests {
                 .sum();
             let parent_width = world.get::<ComputedNode>(parent).unwrap().size.x;
             assert!((width_sum - parent_width).abs() < 0.001);
-            assert!((width_sum - 320.).abs() <= 1.);
+            assert!((width_sum - 320. * s).abs() <= 1.);
             s += r;
         }
     }
11 changes: 5 additions & 6 deletions crates/bevy_ui/src/picking_backend.rs
@@ -65,7 +65,6 @@ pub fn ui_picking(
     camera_query: Query<(Entity, &Camera, Has<IsDefaultUiCamera>)>,
     default_ui_camera: DefaultUiCamera,
    primary_window: Query<Entity, With<PrimaryWindow>>,
-    ui_scale: Res<UiScale>,
     ui_stack: Res<UiStack>,
     node_query: Query<NodeQuery>,
     mut output: EventWriter<PointerHits>,
@@ -95,15 +94,15 @@ pub fn ui_picking(
             let Ok((_, camera_data, _)) = camera_query.get(camera) else {
                 continue;
             };
-            let mut pointer_pos = pointer_location.position;
-            if let Some(viewport) = camera_data.logical_viewport_rect() {
-                pointer_pos -= viewport.min;
+            let mut pointer_pos =
+                pointer_location.position * camera_data.target_scaling_factor().unwrap_or(1.);
+            if let Some(viewport) = camera_data.physical_viewport_rect() {
+                pointer_pos -= viewport.min.as_vec2();
             }
-            let scaled_pointer_pos = pointer_pos / **ui_scale;
             pointer_pos_by_camera
                 .entry(camera)
                 .or_default()
-                .insert(pointer_id, scaled_pointer_pos);
+                .insert(pointer_id, pointer_pos);
         }
     }
 
53 changes: 25 additions & 28 deletions crates/bevy_ui/src/render/box_shadow.rs
@@ -4,7 +4,7 @@ use core::{hash::Hash, ops::Range};
 
 use crate::{
     BoxShadow, CalculatedClip, ComputedNode, DefaultUiCamera, RenderUiSystem, ResolvedBorderRadius,
-    TargetCamera, TransparentUi, UiBoxShadowSamples, UiScale, Val,
+    TargetCamera, TransparentUi, UiBoxShadowSamples, Val,
 };
 use bevy_app::prelude::*;
 use bevy_asset::*;
@@ -237,7 +237,6 @@ pub fn extract_shadows(
     mut commands: Commands,
     mut extracted_box_shadows: ResMut<ExtractedBoxShadows>,
     default_ui_camera: Extract<DefaultUiCamera>,
-    ui_scale: Extract<Res<UiScale>>,
     camera_query: Extract<Query<(Entity, &Camera)>>,
     box_shadow_query: Extract<
         Query<(
@@ -268,37 +267,36 @@ pub fn extract_shadows(
             continue;
         }
 
-        let ui_logical_viewport_size = camera_query
+        let ui_physical_viewport_size = camera_query
             .get(camera_entity)
             .ok()
-            .and_then(|(_, c)| c.logical_viewport_size())
-            .unwrap_or(Vec2::ZERO)
-            // The logical window resolution returned by `Window` only takes into account the window scale factor and not `UiScale`,
-            // so we have to divide by `UiScale` to get the size of the UI viewport.
-            / ui_scale.0;
+            .and_then(|(_, c)| {
+                c.physical_viewport_size()
+                    .map(|size| Vec2::new(size.x as f32, size.y as f32))
+            })
+            .unwrap_or(Vec2::ZERO);
 
-        let resolve_val = |val, base| match val {
+        let scale_factor = uinode.inverse_scale_factor.recip();
+
+        let resolve_val = |val, base, scale_factor| match val {
             Val::Auto => 0.,
-            Val::Px(px) => px,
+            Val::Px(px) => px * scale_factor,
             Val::Percent(percent) => percent / 100. * base,
-            Val::Vw(percent) => percent / 100. * ui_logical_viewport_size.x,
-            Val::Vh(percent) => percent / 100. * ui_logical_viewport_size.y,
-            Val::VMin(percent) => percent / 100. * ui_logical_viewport_size.min_element(),
-            Val::VMax(percent) => percent / 100. * ui_logical_viewport_size.max_element(),
+            Val::Vw(percent) => percent / 100. * ui_physical_viewport_size.x,
+            Val::Vh(percent) => percent / 100. * ui_physical_viewport_size.y,
+            Val::VMin(percent) => percent / 100. * ui_physical_viewport_size.min_element(),
+            Val::VMax(percent) => percent / 100. * ui_physical_viewport_size.max_element(),
         };
 
-        let spread_x = resolve_val(box_shadow.spread_radius, uinode.size().x);
-        let spread_ratio_x = (spread_x + uinode.size().x) / uinode.size().x;
+        let spread_x = resolve_val(box_shadow.spread_radius, uinode.size().x, scale_factor);
+        let spread_ratio = (spread_x + uinode.size().x) / uinode.size().x;
 
-        let spread = vec2(
-            spread_x,
-            (spread_ratio_x * uinode.size().y) - uinode.size().y,
-        );
+        let spread = vec2(spread_x, uinode.size().y * spread_ratio - uinode.size().y);
 
-        let blur_radius = resolve_val(box_shadow.blur_radius, uinode.size().x);
+        let blur_radius = resolve_val(box_shadow.blur_radius, uinode.size().x, scale_factor);
         let offset = vec2(
-            resolve_val(box_shadow.x_offset, uinode.size().x),
-            resolve_val(box_shadow.y_offset, uinode.size().y),
+            resolve_val(box_shadow.x_offset, uinode.size().x, scale_factor),
+            resolve_val(box_shadow.y_offset, uinode.size().y, scale_factor),
         );
 
         let shadow_size = uinode.size() + spread;
@@ -307,10 +305,10 @@ pub fn extract_shadows(
         }
 
         let radius = ResolvedBorderRadius {
-            top_left: uinode.border_radius.top_left * spread_ratio_x,
-            top_right: uinode.border_radius.top_right * spread_ratio_x,
-            bottom_left: uinode.border_radius.bottom_left * spread_ratio_x,
-            bottom_right: uinode.border_radius.bottom_right * spread_ratio_x,
+            top_left: uinode.border_radius.top_left * spread_ratio,
+            top_right: uinode.border_radius.top_right * spread_ratio,
+            bottom_left: uinode.border_radius.bottom_left * spread_ratio,
+            bottom_right: uinode.border_radius.bottom_right * spread_ratio,
         };
 
         extracted_box_shadows.box_shadows.insert(
@@ -373,7 +371,6 @@ pub fn queue_shadows(
             ),
             batch_range: 0..0,
             extra_index: PhaseItemExtraIndex::NONE,
-            inverse_scale_factor: 1.,
         });
     }
 }