I am developing a VR application for Meta Quest in Unity. The user interacts with a controller, and the user's hand is visually represented in the virtual space.
Objective:
I want to cast a ray into the virtual space corresponding to the point where the hand touches an object (hereafter, the "panel") whose RenderTexture shows part of the virtual space. For debugging, a cube with scale (0.1, 0.1, 0.1) is displayed at the hit position for 0.1 seconds. Eventually, I plan to trigger a particle system on the cube that already exists at the hit position.
Additionally, the camera that renders to the RenderTexture is attached to other players and is always in motion.
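Roughly, that part of the setup looks like the following (a simplified sketch; the class and field names here are placeholders, not the actual project names): the other player's head camera writes into a shared RenderTexture, and the local panel's RawImage displays that texture.
using UnityEngine;
using UnityEngine.UI;

// Simplified sketch (placeholder names): the other player's head camera renders
// into a shared RenderTexture, and the local panel's RawImage shows that texture.
public class PanelSetupSketch : MonoBehaviour
{
    public Camera remoteHeadCamera;            // the other player's "ViewCamera"
    public RawImage panelImage;                // the RawImage on the local panel
    public RenderTexture sharedRenderTexture;  // the RenderTexture asset they share

    void Start()
    {
        remoteHeadCamera.targetTexture = sharedRenderTexture; // camera writes into the texture
        panelImage.texture = sharedRenderTexture;             // panel displays what the camera sees
    }
}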
Current Issue:
Currently, touching the panel does not trigger anything. With a previous method, a cube was indeed created on touch, but the ray was cast at significantly misaligned coordinates.
Referenced Articles:
How do you get the texture coordinate hit by the mouse on a UI raw image in Unity?
Method to display effects on objects touched via RenderTexture (Japanese)
The second link is a question I asked previously; that is the method which generates a cube on touch but casts the ray at greatly misaligned coordinates.
Code:
Here is the main code attached to the panel. Any suggestions or modifications to correct the issue would be greatly appreciated.
using UnityEngine;
using Photon.Pun;
using UnityEngine.UI;

public class PanelManager : MonoBehaviourPun
{
    public Camera displayRenderCamera;      // Camera that renders to the RenderTexture
    private RawImage displayGameObject;     // RawImage that displays the RenderTexture
    private Vector3? colliderPoint = null;  // Point where the hand intersects the panel

    void Start()
    {
        InitializeCameraAndPanel();
    }
    void Update()
    {
        // Grip held and index trigger released (pointing gesture) while the hand touches the panel
        bool gripHeld = OVRInput.Get(OVRInput.Button.PrimaryHandTrigger, OVRInput.Controller.RTouch);
        bool triggerNotPressed = !OVRInput.Get(OVRInput.Button.PrimaryIndexTrigger, OVRInput.Controller.RTouch);
        if (gripHeld && triggerNotPressed && colliderPoint != null)
        {
            InteractWithRenderTexture();
        }
        // Re-acquire the camera and panel references every frame (other players may join later)
        InitializeCameraAndPanel();
    }
    private void InitializeCameraAndPanel()
    {
        PhotonView[] allPhotonViews = FindObjectsOfType<PhotonView>();
        foreach (PhotonView view in allPhotonViews)
        {
            if (view.Owner != null)
            {
                if (view.Owner.ActorNumber != PhotonNetwork.LocalPlayer.ActorNumber)
                {
                    // Another player's head camera is the one rendering to the RenderTexture
                    GameObject camera = view.gameObject.transform.Find("Head/ViewCamera")?.gameObject;
                    if (camera != null)
                    {
                        displayRenderCamera = camera.GetComponent<Camera>();
                        Debug.Log(displayRenderCamera);
                    }
                }
                else if (view.Owner.ActorNumber == PhotonNetwork.LocalPlayer.ActorNumber)
                {
                    // The local player's panel shows the RenderTexture through a RawImage
                    GameObject panel = view.gameObject.transform.Find("Panel/Panel")?.gameObject;
                    if (panel != null)
                    {
                        displayGameObject = panel.GetComponent<RawImage>();
                    }
                }
            }
        }
    }
    private void InteractWithRenderTexture()
    {
        if (colliderPoint == null) return;
        Vector3 worldSpaceHitPoint = colliderPoint.Value;

        // Convert the world-space touch point to the RawImage's local space,
        // then to normalized UV coordinates within its rect
        Vector2 localHitPoint = displayGameObject.rectTransform.InverseTransformPoint(worldSpaceHitPoint);
        var rect = displayGameObject.rectTransform.rect;
        Vector2 textureCoord = localHitPoint - rect.min;
        textureCoord.x *= displayGameObject.uvRect.width / rect.width;
        textureCoord.y *= displayGameObject.uvRect.height / rect.height;
        textureCoord += displayGameObject.uvRect.min;

        // Cast a ray from the rendering camera through the corresponding viewport point
        Ray ray = displayRenderCamera.ViewportPointToRay(new Vector3(textureCoord.x, textureCoord.y, 0));

        // Debug: show a red cube for 0.1 s at a point 2 m along the ray
        Vector3 point = ray.GetPoint(2.0f);
        GameObject cube = GameObject.CreatePrimitive(PrimitiveType.Cube);
        cube.transform.position = point;
        cube.transform.localScale = new Vector3(0.1f, 0.1f, 0.1f);
        cube.GetComponent<Renderer>().material.color = Color.red;
        Destroy(cube, 0.1f);

        // Trigger the particle system on any cube the ray hits
        if (Physics.Raycast(ray, out var hit, 10.0f))
        {
            if (hit.transform.TryGetComponent<CubeManager>(out var cubeManager))
            {
                cubeManager.StartParticleSystem();
            }
        }
    }
    void OnTriggerEnter(Collider other)
    {
        if (other.CompareTag("rightHand"))
        {
            // Project the hand's position onto the panel's plane and remember it
            var plane = new Plane(transform.forward, transform.position);
            colliderPoint = plane.ClosestPointOnPlane(other.bounds.center);
        }
    }

    void OnTriggerExit(Collider other)
    {
        if (other.CompareTag("rightHand"))
        {
            colliderPoint = null;
        }
    }
}
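For reference, the CubeManager that InteractWithRenderTexture calls is, roughly, just a component on each target cube that plays a ParticleSystem. This is a simplified sketch (the field name is a placeholder), not the full component:
using UnityEngine;

// Simplified sketch of the CubeManager referenced above: it plays a
// ParticleSystem on the cube when StartParticleSystem() is called.
public class CubeManager : MonoBehaviour
{
    [SerializeField] private ParticleSystem effect;  // assigned in the Inspector

    public void StartParticleSystem()
    {
        if (effect != null && !effect.isPlaying)
        {
            effect.Play();
        }
    }
}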
I've revisited the RenderTexture settings to ensure the virtual space is rendered correctly.
How should I modify the code to accurately cast rays based on the touch position?
Thank you for your assistance!