In my Xamarin.Android application I have orientation data for the X, Y, and Z axes, obtained from the device's geomagnetic rotation vector composite sensor and processed with the SensorManager.GetOrientation() method. I want to apply this orientation data to the Rotation property of the CameraNode in UrhoSharp's scene. In other words, I want to control the scene's camera using the device's orientation sensors.
What I did so far in the SensorChanged event handler:
// app -> an instance of Urho.SimpleApplication
public void OnSensorChanged(SensorEvent e) {
if (e.Sensor.Type == SensorType.GeomagneticRotationVector) {
// 3x3 rotation matrix derived from the rotation-vector sample
var rm = new float[9];
SensorManager.GetRotationMatrixFromVector(rm, e.Values.ToArray());
// ov receives [azimuth, pitch, roll] in radians from SensorManager.GetOrientation
var ov = new float[3];
SensorManager.GetOrientation(rm, ov);
// NOTE(review): ov[0] is the azimuth and ov[1] is the pitch, so the two
// assignments below look index-swapped — a likely cause of the camera
// facing the wrong direction.
app.Pitch = (Urho.MathHelper.RadiansToDegrees(ov[0]) + 360) % 360; // map [-Pi...+Pi] to [0...360]
// NOTE(review): (x + 360) % 360 maps [-180...+180] to [0...360); the
// [-Pi/2...+Pi/2] range mentioned here applies to pitch, not yaw.
app.Yaw = (Urho.MathHelper.RadiansToDegrees(ov[1]) + 360) % 360; // map [-Pi/2...+Pi/2] to [0...360]
app.CameraNode.Rotation = new Urho.Quaternion(app.Pitch, app.Yaw, 0);
}
}
But unfortunately it does not work as expected, and the camera always looks in the wrong direction. Any ideas?
========================================================
Update:
The best solution is to use rotation matrices with the orientation sensor and to keep the OnSensorChanged method as short as possible!
using System.Linq;
using System.Threading.Tasks;
// Latest rotation-vector sample, shared with the polling task below.
// Written on the sensor thread, read by the task; assigning the array
// reference is atomic, so no extra locking is needed for this handoff.
private float[] SensorData = null;

// Keep this handler as short as possible: just capture the raw values.
// FIX: the parameter is declared nullable (SensorEvent?) but was
// dereferenced unconditionally; guard both e and e.Values.
public void OnSensorChanged(SensorEvent? e) {
SensorData = e?.Values?.ToArray();
}
// a task defined somewhere in the OnCreate async method
await Task.Run(async () => {
var RM = new float[9];   // rotation matrix built from the sensor sample
var outR = new float[9]; // rotation matrix after coordinate-system remap
var res = new float[3];  // [azimuth, pitch, roll] in radians
var Azimuth = 0.0f;
var Pitch = 0.0f;
var Roll = 0.0f;
while (true) {
// Snapshot the shared field once per iteration; it is written on the sensor thread.
var data = SensorData;
if (data == null) {
// FIX: 'return' here killed the task permanently when it started
// before the first sensor event arrived; wait and retry instead.
await Task.Delay(50);
continue;
}
SensorManager.GetRotationMatrixFromVector(RM, data);
// Remap because the Y and Z axes are swapped when the device is held upright.
var remap = SensorManager.RemapCoordinateSystem(RM, Android.Hardware.Axis.X, Android.Hardware.Axis.Z, outR);
if (!remap) {
// FIX: a single failed remap should not terminate the task either.
await Task.Delay(50);
continue;
}
_ = SensorManager.GetOrientation(outR, res);
Azimuth = (MathHelper.RadiansToDegrees(res[0]) + 360.0f) % 360.0f; // azimuth mapped to [0...360)
Pitch = MathHelper.RadiansToDegrees(res[1]); // altitude
Roll = MathHelper.RadiansToDegrees(-res[2]);
try {
// finally update our Camera Node's Rotation property
cn.Rotation = new Quaternion(Pitch, Azimuth, Roll);
}
catch {
// On [Urho.Application.Stop] this exception occurs.
// Break is also required to exit from while loop!
break;
}
RunOnUiThread(() => {
// update our TextViews
tvAzm.Text = $"Azimuth: {Azimuth,7:F2}";
tvPitch.Text = $"Pitch: {Pitch,7:F2}";
tvRoll.Text = $"Roll: {Roll,7:F2}";
});
// let's wait to avoid too frequent RunOnUiThread calls
await Task.Delay(50);
}
});
Finally, I solved it with some research and the help of @joe.
Here is the final version of method:
// [app] is an instance of Urho.SimpleApplication
public void OnSensorChanged(SensorEvent e) {
if (e.Sensor.Type == SensorType.GeomagneticRotationVector) {
// rotation matrix computed from the geomagnetic rotation-vector sample
var inR = new float[9];
SensorManager.GetRotationMatrixFromVector(inR, e.Values.ToArray());
var outR = new float[9];
// we need to remap the coordinate system, since the Y and Z axes will be swapped when we pick up the device
if (SensorManager.RemapCoordinateSystem(inR, Android.Hardware.Axis.X, Android.Hardware.Axis.Z, outR)) {
// ov receives [azimuth, pitch, roll] in radians
var ov = new float[3];
SensorManager.GetOrientation(outR, ov);
try {
// map radians to degrees in [0...360): ov[1] (pitch) drives Pitch, ov[0] (azimuth) drives Yaw
app.Pitch = (MathHelper.RadiansToDegrees(ov[1]) + 360) % 360;
app.Yaw = (MathHelper.RadiansToDegrees(ov[0]) + 360) % 360;
app.CameraNode.Rotation = new Quaternion(app.Pitch, app.Yaw, 0);
}
catch (System.Exception ex) {
// while Urho.SimpleApplication is not fully started, the [app] properties are not available
System.Diagnostics.Trace.WriteLine(ex.Message);
}
}
}
}