Change CameraMatrix::get_viewport_size to get_viewport_half_extents

Fixes #26637.
Fixes #19900.

The value returned by get_viewport_size was previously incorrect, being half the correct value: it was the half extents of the viewport rather than its full size. The function is renamed to get_viewport_half_extents and now returns a Vector2.

Code that called this function has been updated accordingly.
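
As a rough sketch of the call-site migration (variable names here are for illustration only):

    // Before: the name suggested a full viewport size, but the values were half extents.
    real_t w, h;
    cm.get_viewport_size(w, h);

    // After: the half extents are returned explicitly as a Vector2.
    Vector2 half_extents = cm.get_viewport_half_extents();
    real_t full_width = half_extents.x * 2.0; // double only where the full size is genuinely needed
    real_t full_height = half_extents.y * 2.0;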

This PR also fixes shadow culling when using orthogonal cameras: the size passed to CameraMatrix::set_orthogonal should be the full viewport HEIGHT (twice the y half extent from get_viewport_half_extents), not the half width that was previously passed.
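
A minimal sketch of the corrected call (z_near and z_far stand in for the per-split shadow distances used in the actual code):

    Vector2 vp_he = p_cam_projection.get_viewport_half_extents();
    // set_orthogonal expects the full viewport height as its size argument,
    // so the y half extent is doubled.
    camera_matrix.set_orthogonal(vp_he.y * 2.0, aspect, z_near, z_far, false);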

It also fixes state.ubo_data.viewport_size in rasterizer_scene_gles3.cpp so that it contains the width and height of the viewport in pixels, as stated in the documentation, rather than the previous value, which was the viewport half extents in world space and is presumed to have been a bug.
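
A condensed before/after sketch of that change (mirroring the render_scene hunk further down):

    // Before: world-space half extents of the near plane ended up in the UBO.
    p_cam_projection.get_viewport_size(state.ubo_data.viewport_size[0], state.ubo_data.viewport_size[1]);

    // After: the documented meaning, the viewport size in pixels, taken from the current render target.
    state.ubo_data.viewport_size[0] = storage->frame.current_rt->width;
    state.ubo_data.viewport_size[1] = storage->frame.current_rt->height;
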
lawnjelly 2020-01-21 18:39:16 +00:00
parent 11260fb87f
commit eaf8e5ce52
8 changed files with 37 additions and 40 deletions

View File

@@ -247,7 +247,7 @@ real_t CameraMatrix::get_z_near() const {
     return new_plane.d;
 }
-void CameraMatrix::get_viewport_size(real_t &r_width, real_t &r_height) const {
+Vector2 CameraMatrix::get_viewport_half_extents() const {
     const real_t *matrix = (const real_t *)this->matrix;
     ///////--- Near Plane ---///////
@@ -273,8 +273,7 @@ void CameraMatrix::get_viewport_size(real_t &r_width, real_t &r_height) const {
     Vector3 res;
     near_plane.intersect_3(right_plane, top_plane, &res);
-    r_width = res.x;
-    r_height = res.y;
+    return Vector2(res.x, res.y);
 }
 bool CameraMatrix::get_endpoints(const Transform &p_transform, Vector3 *p_8points) const {
@@ -563,9 +562,8 @@ CameraMatrix::operator String() const {
 real_t CameraMatrix::get_aspect() const {
-    real_t w, h;
-    get_viewport_size(w, h);
-    return w / h;
+    Vector2 vp_he = get_viewport_half_extents();
+    return vp_he.x / vp_he.y;
 }
 int CameraMatrix::get_pixels_per_meter(int p_for_pixel_width) const {

View File

@@ -73,7 +73,7 @@ struct CameraMatrix {
     Vector<Plane> get_projection_planes(const Transform &p_transform) const;
     bool get_endpoints(const Transform &p_transform, Vector3 *p_8points) const;
-    void get_viewport_size(real_t &r_width, real_t &r_height) const;
+    Vector2 get_viewport_half_extents() const;
     void invert();
     CameraMatrix inverse() const;

View File

@@ -2685,14 +2685,14 @@ void RasterizerSceneGLES2::_draw_sky(RasterizerStorageGLES2::Sky *p_sky, const C
     };
     if (!asymmetrical) {
-        float vw, vh, zn;
-        camera.get_viewport_size(vw, vh);
+        Vector2 vp_he = camera.get_viewport_half_extents();
+        float zn;
         zn = p_projection.get_z_near();
         for (int i = 0; i < 4; i++) {
             Vector3 uv = vertices[i * 2 + 1];
-            uv.x = (uv.x * 2.0 - 1.0) * vw;
-            uv.y = -(uv.y * 2.0 - 1.0) * vh;
+            uv.x = (uv.x * 2.0 - 1.0) * vp_he.x;
+            uv.y = -(uv.y * 2.0 - 1.0) * vp_he.y;
             uv.z = -zn;
             vertices[i * 2 + 1] = p_transform.basis.xform(uv).normalized();
             vertices[i * 2 + 1].z = -vertices[i * 2 + 1].z;

View File

@@ -2538,14 +2538,14 @@ void RasterizerSceneGLES3::_draw_sky(RasterizerStorageGLES3::Sky *p_sky, const C
     };
     if (!asymmetrical) {
-        float vw, vh, zn;
-        camera.get_viewport_size(vw, vh);
+        Vector2 vp_he = camera.get_viewport_half_extents();
+        float zn;
         zn = p_projection.get_z_near();
         for (int i = 0; i < 4; i++) {
             Vector3 uv = vertices[i * 2 + 1];
-            uv.x = (uv.x * 2.0 - 1.0) * vw;
-            uv.y = -(uv.y * 2.0 - 1.0) * vh;
+            uv.x = (uv.x * 2.0 - 1.0) * vp_he.x;
+            uv.y = -(uv.y * 2.0 - 1.0) * vp_he.y;
             uv.z = -zn;
             vertices[i * 2 + 1] = p_transform.basis.xform(uv).normalized();
             vertices[i * 2 + 1].z = -vertices[i * 2 + 1].z;
@@ -4181,11 +4181,15 @@ void RasterizerSceneGLES3::render_scene(const Transform &p_cam_transform, const
     state.ubo_data.shadow_dual_paraboloid_render_zfar = 0;
     state.ubo_data.opaque_prepass_threshold = 0.99;
-    p_cam_projection.get_viewport_size(state.ubo_data.viewport_size[0], state.ubo_data.viewport_size[1]);
+    int viewport_width_pixels = storage->frame.current_rt->width;
+    int viewport_height_pixels = storage->frame.current_rt->height;
+    state.ubo_data.viewport_size[0] = viewport_width_pixels;
+    state.ubo_data.viewport_size[1] = viewport_height_pixels;
     if (storage->frame.current_rt) {
-        state.ubo_data.screen_pixel_size[0] = 1.0 / storage->frame.current_rt->width;
-        state.ubo_data.screen_pixel_size[1] = 1.0 / storage->frame.current_rt->height;
+        state.ubo_data.screen_pixel_size[0] = 1.0 / viewport_width_pixels;
+        state.ubo_data.screen_pixel_size[1] = 1.0 / viewport_height_pixels;
     }
     _setup_environment(env, p_cam_projection, p_cam_transform, p_reflection_probe.is_valid());

View File

@@ -428,8 +428,7 @@ Vector3 SpatialEditorViewport::_get_screen_to_space(const Vector3 &p_vector3) {
     } else {
        cm.set_perspective(get_fov(), get_size().aspect(), get_znear() + p_vector3.z, get_zfar());
     }
-    float screen_w, screen_h;
-    cm.get_viewport_size(screen_w, screen_h);
+    Vector2 screen_he = cm.get_viewport_half_extents();
     Transform camera_transform;
     camera_transform.translate(cursor.pos);
@@ -437,7 +436,7 @@ Vector3 SpatialEditorViewport::_get_screen_to_space(const Vector3 &p_vector3) {
     camera_transform.basis.rotate(Vector3(0, 1, 0), -cursor.y_rot);
     camera_transform.translate(0, 0, cursor.distance);
-    return camera_transform.xform(Vector3(((p_vector3.x / get_size().width) * 2.0 - 1.0) * screen_w, ((1.0 - (p_vector3.y / get_size().height)) * 2.0 - 1.0) * screen_h, -(get_znear() + p_vector3.z)));
+    return camera_transform.xform(Vector3(((p_vector3.x / get_size().width) * 2.0 - 1.0) * screen_he.x, ((1.0 - (p_vector3.y / get_size().height)) * 2.0 - 1.0) * screen_he.y, -(get_znear() + p_vector3.z)));
 }
 void SpatialEditorViewport::_select_region() {

View File

@@ -85,9 +85,8 @@ Vector3 ARVRCamera::project_local_ray_normal(const Point2 &p_pos) const {
     Vector3 ray;
     CameraMatrix cm = arvr_interface->get_projection_for_eye(ARVRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar());
-    float screen_w, screen_h;
-    cm.get_viewport_size(screen_w, screen_h);
-    ray = Vector3(((cpos.x / viewport_size.width) * 2.0 - 1.0) * screen_w, ((1.0 - (cpos.y / viewport_size.height)) * 2.0 - 1.0) * screen_h, -get_znear()).normalized();
+    Vector2 screen_he = cm.get_viewport_half_extents();
+    ray = Vector3(((cpos.x / viewport_size.width) * 2.0 - 1.0) * screen_he.x, ((1.0 - (cpos.y / viewport_size.height)) * 2.0 - 1.0) * screen_he.y, -get_znear()).normalized();
     return ray;
 };
@@ -138,13 +137,12 @@ Vector3 ARVRCamera::project_position(const Point2 &p_point, float p_z_depth) con
     CameraMatrix cm = arvr_interface->get_projection_for_eye(ARVRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar());
-    Size2 vp_size;
-    cm.get_viewport_size(vp_size.x, vp_size.y);
+    Vector2 vp_he = cm.get_viewport_half_extents();
     Vector2 point;
     point.x = (p_point.x / viewport_size.x) * 2.0 - 1.0;
     point.y = (1.0 - (p_point.y / viewport_size.y)) * 2.0 - 1.0;
-    point *= vp_size;
+    point *= vp_he;
     Vector3 p(point.x, point.y, -p_z_depth);

View File

@@ -291,9 +291,8 @@ Vector3 Camera::project_local_ray_normal(const Point2 &p_pos) const {
     } else {
        CameraMatrix cm;
        cm.set_perspective(fov, viewport_size.aspect(), near, far, keep_aspect == KEEP_WIDTH);
-       float screen_w, screen_h;
-       cm.get_viewport_size(screen_w, screen_h);
-       ray = Vector3(((cpos.x / viewport_size.width) * 2.0 - 1.0) * screen_w, ((1.0 - (cpos.y / viewport_size.height)) * 2.0 - 1.0) * screen_h, -near).normalized();
+       Vector2 screen_he = cm.get_viewport_half_extents();
+       ray = Vector3(((cpos.x / viewport_size.width) * 2.0 - 1.0) * screen_he.x, ((1.0 - (cpos.y / viewport_size.height)) * 2.0 - 1.0) * screen_he.y, -near).normalized();
     }
     return ray;
@@ -402,13 +401,12 @@ Vector3 Camera::project_position(const Point2 &p_point, float p_z_depth) const {
     else
        cm.set_perspective(fov, viewport_size.aspect(), p_z_depth, far, keep_aspect == KEEP_WIDTH);
-    Size2 vp_size;
-    cm.get_viewport_size(vp_size.x, vp_size.y);
+    Vector2 vp_he = cm.get_viewport_half_extents();
     Vector2 point;
     point.x = (p_point.x / viewport_size.x) * 2.0 - 1.0;
     point.y = (1.0 - (p_point.y / viewport_size.y)) * 2.0 - 1.0;
-    point *= vp_size;
+    point *= vp_he;
     Vector3 p(point.x, point.y, -p_z_depth);

View File

@@ -1398,9 +1398,9 @@ bool VisualServerScene::_light_instance_update_shadow(Instance *p_instance, cons
     if (p_cam_orthogonal) {
-        float w, h;
-        p_cam_projection.get_viewport_size(w, h);
-        camera_matrix.set_orthogonal(w, aspect, distances[(i == 0 || !overlap) ? i : i - 1], distances[i + 1], false);
+        Vector2 vp_he = p_cam_projection.get_viewport_half_extents();
+        camera_matrix.set_orthogonal(vp_he.y * 2.0, aspect, distances[(i == 0 || !overlap) ? i : i - 1], distances[i + 1], false);
     } else {
        float fov = p_cam_projection.get_fov();
@@ -2088,8 +2088,8 @@ void VisualServerScene::_prepare_scene(const Transform p_cam_transform, const Ca
     float zn = p_cam_projection.get_z_near();
     Plane p(cam_xf.origin + cam_xf.basis.get_axis(2) * -zn, -cam_xf.basis.get_axis(2)); //camera near plane
-    float vp_w, vp_h; //near plane size in screen coordinates
-    p_cam_projection.get_viewport_size(vp_w, vp_h);
+    // near plane half width and height
+    Vector2 vp_half_extents = p_cam_projection.get_viewport_half_extents();
     switch (VSG::storage->light_get_type(ins->base)) {
@@ -2115,7 +2115,7 @@ void VisualServerScene::_prepare_scene(const Transform p_cam_transform, const Ca
        }
        float screen_diameter = points[0].distance_to(points[1]) * 2;
-       coverage = screen_diameter / (vp_w + vp_h);
+       coverage = screen_diameter / (vp_half_extents.x + vp_half_extents.y);
     } break;
     case VS::LIGHT_SPOT: {
@@ -2144,7 +2144,7 @@ void VisualServerScene::_prepare_scene(const Transform p_cam_transform, const Ca
        }
        float screen_diameter = points[0].distance_to(points[1]) * 2;
-       coverage = screen_diameter / (vp_w + vp_h);
+       coverage = screen_diameter / (vp_half_extents.x + vp_half_extents.y);
     } break;
     default: {