From 6b6df6dd5f8a9b843470109dea44e367d949f4eb Mon Sep 17 00:00:00 2001 From: Omar Bassam Date: Wed, 9 Mar 2022 22:48:56 +0200 Subject: [PATCH 1/7] fixing conflicts with main --- examples/2d/mesh2d_manual.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/examples/2d/mesh2d_manual.rs b/examples/2d/mesh2d_manual.rs index b09a487a73d47..c1e0135a48d11 100644 --- a/examples/2d/mesh2d_manual.rs +++ b/examples/2d/mesh2d_manual.rs @@ -70,8 +70,8 @@ fn star( // Set the position attribute star.insert_attribute(Mesh::ATTRIBUTE_POSITION, v_pos); // And a RGB color attribute as well - let mut v_color: Vec = vec![Color::BLACK.as_linear_rgba_u32()]; - v_color.extend_from_slice(&[Color::YELLOW.as_linear_rgba_u32(); 10]); + let mut v_color: Vec = vec![bytemuck::cast([0 as u8, 0 as u8, 0 as u8, 255 as u8])]; + v_color.extend_from_slice(&[bytemuck::cast([255 as u8, 255 as u8, 0 as u8, 255 as u8]); 10]); star.insert_attribute(Mesh::ATTRIBUTE_COLOR, v_color); // Now, we specify the indices of the vertex that are going to compose the @@ -230,7 +230,6 @@ fn vertex(vertex: Vertex) -> VertexOutput { var out: VertexOutput; // Project the world position of the mesh into screen position out.clip_position = view.view_proj * mesh.model * vec4(vertex.position, 1.0); - // Unpack the `u32` from the vertex buffer into the `vec4` used by the fragment shader out.color = vec4((vec4(vertex.color) >> vec4(0u, 8u, 16u, 24u)) & vec4(255u)) / 255.0; return out; } From 03a9f7e493b038ddf7a1f24d11ede88d37e42d04 Mon Sep 17 00:00:00 2001 From: Omar Bassam Date: Fri, 11 Mar 2022 00:28:06 +0200 Subject: [PATCH 2/7] Adding Screen to world function --- Cargo.toml | 8 +++ crates/bevy_render/src/camera/camera.rs | 78 +++++++++++++++++++++++- crates/bevy_render/src/primitives/mod.rs | 12 ++++ examples/2d/mouse_tracking.rs | 43 +++++++++++++ examples/3d/screen_to_world.rs | 71 +++++++++++++++++++++ 5 files changed, 209 insertions(+), 3 deletions(-) create mode 100644 
examples/2d/mouse_tracking.rs create mode 100644 examples/3d/screen_to_world.rs diff --git a/Cargo.toml b/Cargo.toml index 5efa16115826e..6afa867321b33 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -165,11 +165,19 @@ path = "examples/2d/text2d.rs" name = "texture_atlas" path = "examples/2d/texture_atlas.rs" +[[example]] +name = "mouse_tracking" +path = "examples/2d/mouse_tracking.rs" + # 3D Rendering [[example]] name = "3d_scene" path = "examples/3d/3d_scene.rs" +[[example]] +name = "screen_to_world" +path = "examples/3d/screen_to_world.rs" + [[example]] name = "lighting" path = "examples/3d/lighting.rs" diff --git a/crates/bevy_render/src/camera/camera.rs b/crates/bevy_render/src/camera/camera.rs index 07e748fe8211f..b69632da8a47a 100644 --- a/crates/bevy_render/src/camera/camera.rs +++ b/crates/bevy_render/src/camera/camera.rs @@ -1,6 +1,10 @@ use crate::{ - camera::CameraProjection, prelude::Image, render_asset::RenderAssets, - render_resource::TextureView, view::ExtractedWindows, + camera::CameraProjection, + prelude::Image, + primitives::{Line, Plane}, + render_asset::RenderAssets, + render_resource::TextureView, + view::ExtractedWindows, }; use bevy_asset::{AssetEvent, Assets, Handle}; use bevy_ecs::{ @@ -12,7 +16,7 @@ use bevy_ecs::{ reflect::ReflectComponent, system::{QuerySet, Res}, }; -use bevy_math::{Mat4, UVec2, Vec2, Vec3}; +use bevy_math::{Mat4, UVec2, Vec2, Vec3, Vec4}; use bevy_reflect::{Reflect, ReflectDeserialize}; use bevy_transform::components::GlobalTransform; use bevy_utils::HashSet; @@ -138,6 +142,74 @@ impl Camera { None } } + + /// Given a position in screen space, compute the world-space line that corresponds to it. 
+ pub fn screen_to_world_ray( + &self, + pos_screen: Vec2, + windows: &Windows, + images: &Assets, + camera_transform: &GlobalTransform, + ) -> Line { + let camera_position = camera_transform.compute_matrix(); + let window_size = self.target.get_logical_size(windows, images).unwrap(); + let projection_matrix = self.projection_matrix; + + // Normalized device coordinate cursor position from (-1, -1, -1) to (1, 1, 1) + let cursor_ndc = (pos_screen / window_size) * 2.0 - Vec2::from([1.0, 1.0]); + let cursor_pos_ndc_near: Vec3 = cursor_ndc.extend(-1.0); + let cursor_pos_ndc_far: Vec3 = cursor_ndc.extend(1.0); + + // Use near and far ndc points to generate a ray in world space + // This method is more robust than using the location of the camera as the start of + // the ray, because ortho cameras have a focal point at infinity! + let ndc_to_world: Mat4 = camera_position * projection_matrix.inverse(); + let cursor_pos_near: Vec3 = ndc_to_world.project_point3(cursor_pos_ndc_near); + let cursor_pos_far: Vec3 = ndc_to_world.project_point3(cursor_pos_ndc_far); + let ray_direction = cursor_pos_far - cursor_pos_near; + Line::from_point_direction(cursor_pos_near, ray_direction) + } + + /// Given a position in screen space and a plane in world space, compute what point on the plane the point in screen space corresponds to. + /// In 2D, use `screen_to_point_2d`. + pub fn screen_to_point_on_plane( + &self, + pos_screen: Vec2, + plane: Plane, + windows: &Windows, + images: &Assets, + camera_transform: &GlobalTransform, + ) -> Option { + let world_ray = self.screen_to_world_ray(pos_screen, windows, images, camera_transform); + let d = world_ray.point.dot(plane.normal()); + if d == 0. { + None + } else { + let diff = world_ray.point.extend(1.0) - plane.normal_d(); + let p = diff.dot(plane.normal_d()); + let dist = p / d; + Some(world_ray.point - world_ray.direction * dist) + } + } + + /// Computes the world position for a given screen position. 
+ /// The output will always be on the XY plane with Z at zero. It is designed for 2D, but also works with a 3D camera. + /// For more flexibility in 3D, consider `screen_to_point_on_plane`. + pub fn screen_to_point_2d( + &self, + pos_screen: Vec2, + windows: &Windows, + images: &Assets, + camera_transform: &GlobalTransform, + ) -> Option { + self.screen_to_point_on_plane( + pos_screen, + Plane::new(Vec4::new(0., 0., 1., 0.)), + windows, + images, + camera_transform, + ) + } } #[allow(clippy::type_complexity)] diff --git a/crates/bevy_render/src/primitives/mod.rs b/crates/bevy_render/src/primitives/mod.rs index 161e8c24fd75a..2b4f10e87f44c 100644 --- a/crates/bevy_render/src/primitives/mod.rs +++ b/crates/bevy_render/src/primitives/mod.rs @@ -192,6 +192,18 @@ impl CubemapFrusta { } } +#[derive(Clone, Copy, Debug, Default)] +pub struct Line { + pub point: Vec3, + pub direction: Vec3, +} + +impl Line { + pub fn from_point_direction(point: Vec3, direction: Vec3) -> Self { + Self { point, direction } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/examples/2d/mouse_tracking.rs b/examples/2d/mouse_tracking.rs new file mode 100644 index 0000000000000..7cdffc33ed66f --- /dev/null +++ b/examples/2d/mouse_tracking.rs @@ -0,0 +1,43 @@ +use bevy::{prelude::*, render::camera::Camera}; + +fn main() { + App::new() + .add_plugins(DefaultPlugins) + .add_startup_system(setup) + .add_system(follow) + .run(); +} + +#[derive(Component)] +struct Follow; + +fn setup(mut commands: Commands, asset_server: Res) { + let texture_handle = asset_server.load("branding/icon.png"); + commands.spawn_bundle(OrthographicCameraBundle::new_2d()); + commands + .spawn_bundle(SpriteBundle { + texture: texture_handle, + ..Default::default() + }) + .insert(Follow); +} + +fn follow( + mut q: Query<&mut Transform, With>, + q_camera: Query<(&Camera, &GlobalTransform)>, + windows: Res, + images: Res>, + mut evr_cursor: EventReader, +) { + let (camera, camera_transform) = q_camera.single(); + if 
let Some(cursor) = evr_cursor.iter().next() { + for mut transform in q.iter_mut() { + let point: Option = + camera.screen_to_point_2d(cursor.position, &windows, &images, camera_transform); + println!("Point {:?}", point); + if let Some(point) = point { + transform.translation = point; + } + } + } +} diff --git a/examples/3d/screen_to_world.rs b/examples/3d/screen_to_world.rs new file mode 100644 index 0000000000000..5da3b87bb88f5 --- /dev/null +++ b/examples/3d/screen_to_world.rs @@ -0,0 +1,71 @@ +use bevy::{prelude::*, render::camera::Camera, render::primitives::Plane}; + +fn main() { + App::new() + .insert_resource(Msaa { samples: 4 }) + .add_plugins(DefaultPlugins) + .add_startup_system(setup) + .add_system(follow) + .run(); +} + +#[derive(Component)] +struct Follow; + +/// set up a simple 3D scene +fn setup( + mut commands: Commands, + mut meshes: ResMut>, + mut materials: ResMut>, +) { + // plane + commands.spawn_bundle(PbrBundle { + mesh: meshes.add(Mesh::from(shape::Plane { size: 5.0 })), + material: materials.add(Color::rgb(0.3, 0.5, 0.3).into()), + ..Default::default() + }); + // cube + commands + .spawn_bundle(PbrBundle { + mesh: meshes.add(Mesh::from(shape::Cube { size: 1.0 })), + material: materials.add(Color::rgb(0.8, 0.7, 0.6).into()), + transform: Transform::from_xyz(0.0, 0.5, 0.0), + ..Default::default() + }) + .insert(Follow); + // light + commands.spawn_bundle(PointLightBundle { + transform: Transform::from_xyz(4.0, 8.0, 4.0), + ..Default::default() + }); + // camera + commands.spawn_bundle(PerspectiveCameraBundle { + transform: Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), + ..Default::default() + }); +} + +fn follow( + mut q: Query<&mut Transform, With>, + q_camera: Query<(&Camera, &GlobalTransform)>, + windows: Res, + images: Res>, + mut evr_cursor: EventReader, +) { + // Assumes there is at least one camera + let (camera, camera_transform) = q_camera.iter().next().unwrap(); + if let Some(cursor) = 
evr_cursor.iter().next() { + for mut transform in q.iter_mut() { + let point: Option = camera.screen_to_point_on_plane( + cursor.position, + Plane::new(Vec4::new(0., 1., 0., 1.)), + &windows, + &images, + camera_transform, + ); + if let Some(point) = point { + transform.translation = point + Vec3::new(0., 0.5, 0.); + } + } + } +} From 3a243f6d3a4793faef15ec27fc0c5f711289ea5a Mon Sep 17 00:00:00 2001 From: Omar Bassam Date: Fri, 11 Mar 2022 00:56:33 +0200 Subject: [PATCH 3/7] Fixing conflicts --- examples/2d/mesh2d_manual.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/2d/mesh2d_manual.rs b/examples/2d/mesh2d_manual.rs index c1e0135a48d11..f6e4af029cae8 100644 --- a/examples/2d/mesh2d_manual.rs +++ b/examples/2d/mesh2d_manual.rs @@ -70,8 +70,8 @@ fn star( // Set the position attribute star.insert_attribute(Mesh::ATTRIBUTE_POSITION, v_pos); // And a RGB color attribute as well - let mut v_color: Vec = vec![bytemuck::cast([0 as u8, 0 as u8, 0 as u8, 255 as u8])]; - v_color.extend_from_slice(&[bytemuck::cast([255 as u8, 255 as u8, 0 as u8, 255 as u8]); 10]); + let mut v_color: Vec = vec![bytemuck::cast([0_u8, 0_u8, 0_u8, 255_u8])]; + v_color.extend_from_slice(&[bytemuck::cast([255_u8, 255_u8, 0_u8, 255_u8]); 10]); star.insert_attribute(Mesh::ATTRIBUTE_COLOR, v_color); // Now, we specify the indices of the vertex that are going to compose the From 842732a4aedf3e46ef9c306a3f4d4cf4c2391057 Mon Sep 17 00:00:00 2001 From: Omar Bassam Date: Fri, 11 Mar 2022 01:52:32 +0200 Subject: [PATCH 4/7] syncing with main --- examples/2d/mesh2d_manual.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/2d/mesh2d_manual.rs b/examples/2d/mesh2d_manual.rs index f6e4af029cae8..52b4915d960fa 100644 --- a/examples/2d/mesh2d_manual.rs +++ b/examples/2d/mesh2d_manual.rs @@ -70,8 +70,8 @@ fn star( // Set the position attribute star.insert_attribute(Mesh::ATTRIBUTE_POSITION, v_pos); // And a RGB color attribute as well - let mut 
v_color: Vec = vec![bytemuck::cast([0_u8, 0_u8, 0_u8, 255_u8])]; - v_color.extend_from_slice(&[bytemuck::cast([255_u8, 255_u8, 0_u8, 255_u8]); 10]); + let mut v_color: Vec = vec![Color::BLACK.as_linear_rgba_u32()]; + v_color.extend_from_slice(&[Color::YELLOW.as_linear_rgba_u32(); 10]); star.insert_attribute(Mesh::ATTRIBUTE_COLOR, v_color); // Now, we specify the indices of the vertex that are going to compose the From 3fb86b4e9c1abe3b4e81419e04d01708cf1d889e Mon Sep 17 00:00:00 2001 From: Omar Bassam Date: Fri, 11 Mar 2022 02:13:07 +0200 Subject: [PATCH 5/7] Syncing with main again --- examples/2d/mesh2d_manual.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/2d/mesh2d_manual.rs b/examples/2d/mesh2d_manual.rs index 52b4915d960fa..b09a487a73d47 100644 --- a/examples/2d/mesh2d_manual.rs +++ b/examples/2d/mesh2d_manual.rs @@ -230,6 +230,7 @@ fn vertex(vertex: Vertex) -> VertexOutput { var out: VertexOutput; // Project the world position of the mesh into screen position out.clip_position = view.view_proj * mesh.model * vec4(vertex.position, 1.0); + // Unpack the `u32` from the vertex buffer into the `vec4` used by the fragment shader out.color = vec4((vec4(vertex.color) >> vec4(0u, 8u, 16u, 24u)) & vec4(255u)) / 255.0; return out; } From 64e061869633de02e484c006762232dd13ea8fce Mon Sep 17 00:00:00 2001 From: Omar Bassam Date: Fri, 11 Mar 2022 02:17:02 +0200 Subject: [PATCH 6/7] Apply suggestions from code review Co-authored-by: Robert Swain --- crates/bevy_render/src/camera/camera.rs | 36 ++++++++++++++----------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/crates/bevy_render/src/camera/camera.rs b/crates/bevy_render/src/camera/camera.rs index b69632da8a47a..197f3960c18ac 100644 --- a/crates/bevy_render/src/camera/camera.rs +++ b/crates/bevy_render/src/camera/camera.rs @@ -151,22 +151,25 @@ impl Camera { images: &Assets, camera_transform: &GlobalTransform, ) -> Line { - let camera_position = camera_transform.compute_matrix(); + let 
view_matrix = camera_transform.compute_matrix(); let window_size = self.target.get_logical_size(windows, images).unwrap(); let projection_matrix = self.projection_matrix; - // Normalized device coordinate cursor position from (-1, -1, -1) to (1, 1, 1) - let cursor_ndc = (pos_screen / window_size) * 2.0 - Vec2::from([1.0, 1.0]); - let cursor_pos_ndc_near: Vec3 = cursor_ndc.extend(-1.0); - let cursor_pos_ndc_far: Vec3 = cursor_ndc.extend(1.0); + // Normalized device coordinate cursor position from (-1, -1, 1) to (1, 1, 0) where 0 is at the far plane + // and 1 is at the near plane. + let cursor_ndc = (pos_screen / window_size) * 2.0 - Vec2::ONE; + let cursor_pos_ndc_near: Vec3 = cursor_ndc.extend(1.0); + let cursor_pos_ndc_far: Vec3 = cursor_ndc.extend(0.0); // Use near and far ndc points to generate a ray in world space // This method is more robust than using the location of the camera as the start of // the ray, because ortho cameras have a focal point at infinity! - let ndc_to_world: Mat4 = camera_position * projection_matrix.inverse(); - let cursor_pos_near: Vec3 = ndc_to_world.project_point3(cursor_pos_ndc_near); - let cursor_pos_far: Vec3 = ndc_to_world.project_point3(cursor_pos_ndc_far); - let ray_direction = cursor_pos_far - cursor_pos_near; + let inverse_projection = projection_matrix.inverse(); + let cursor_pos_view_near = inverse_projection.project_point3(cursor_pos_ndc_near); + let cursor_pos_view_far = inverse_projection.project_point3(cursor_pos_ndc_far); + let cursor_pos_near = view_matrix.transform_point3(cursor_pos_view_near); + let cursor_pos_far = view_matrix.transform_point3(cursor_pos_view_far); + let ray_direction = (cursor_pos_far - cursor_pos_near).normalize(); Line::from_point_direction(cursor_pos_near, ray_direction) } @@ -181,14 +184,17 @@ impl Camera { camera_transform: &GlobalTransform, ) -> Option { let world_ray = self.screen_to_world_ray(pos_screen, windows, images, camera_transform); - let d = world_ray.point.dot(plane.normal()); - 
if d == 0. { + let plane_normal = plane.normal(); + let direction_dot_normal = world_ray.direction.dot(plane_normal); + if world_ray.point.extend(1.0).dot(plane.normal_d()).abs() < f32::EPSILON { + Some(world_ray.point) + } else if direction_dot_normal.abs() < f32::EPSILON { None } else { - let diff = world_ray.point.extend(1.0) - plane.normal_d(); - let p = diff.dot(plane.normal_d()); - let dist = p / d; - Some(world_ray.point - world_ray.direction * dist) + // https://www.scratchapixel.com/lessons/3d-basic-rendering/minimal-ray-tracer-rendering-simple-shapes/ray-plane-and-ray-disk-intersection + let p0 = plane_normal * plane.d(); + let t = (p0 - world_ray.point).dot(normal) / direction_dot_normal; + Some(world_ray.point + t * world_ray.direction) } } From 883350855ce61019fcb50e6c79d9442ddab6e38b Mon Sep 17 00:00:00 2001 From: Omar Bassam Date: Fri, 11 Mar 2022 03:11:36 +0200 Subject: [PATCH 7/7] fix normal to plane_normal --- crates/bevy_render/src/camera/camera.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/bevy_render/src/camera/camera.rs b/crates/bevy_render/src/camera/camera.rs index 197f3960c18ac..f3c8bf9a65eaf 100644 --- a/crates/bevy_render/src/camera/camera.rs +++ b/crates/bevy_render/src/camera/camera.rs @@ -193,7 +193,7 @@ impl Camera { } else { // https://www.scratchapixel.com/lessons/3d-basic-rendering/minimal-ray-tracer-rendering-simple-shapes/ray-plane-and-ray-disk-intersection let p0 = plane_normal * plane.d(); - let t = (p0 - world_ray.point).dot(normal) / direction_dot_normal; + let t = (p0 - world_ray.point).dot(plane_normal) / direction_dot_normal; Some(world_ray.point + t * world_ray.direction) } }