I have a COLMAP export of the camera poses in a file named "images.txt"; it looks like this:
# Image list with two lines of data per image:
# IMAGE_ID, QW, QX, QY, QZ, TX, TY, TZ, CAMERA_ID, NAME
# POINTS2D[] as (X, Y, POINT3D_ID)
# Number of images: 2, mean observations per image: 2
1 0.851773 0.0165051 0.503764 -0.142941 -0.737434 1.02973 3.74354 1 P1180141.JPG
2362.39 248.498 58396 1784.7 268.254 59027 1784.7 268.254 -1
2 0.851773 0.0165051 0.503764 -0.142941 -0.737434 1.02973 3.74354 1 P1180142.JPG
1190.83 663.957 23056 1258.77 640.354 59070
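Each image occupies two lines: the pose line ending with the image name, followed by its POINTS2D observations, which can be skipped here. A minimal sketch of reading only the pose lines, assuming this standard layout (the function name is just illustrative):

def read_colmap_poses(path):
    with open(path) as f:
        lines = [l.strip() for l in f]
    # drop '#' header comments; what remains alternates pose line / POINTS2D line
    data = [l for l in lines if l and not l.startswith('#')]
    poses = []
    for line in data[0::2]:                      # every other remaining line is a pose line
        elems = line.split()
        name = elems[9]
        qvec = [float(v) for v in elems[1:5]]    # QW, QX, QY, QZ
        tvec = [float(v) for v in elems[5:8]]    # TX, TY, TZ
        poses.append((name, qvec, tvec))
    return poses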
Then I want to import it into Blender, so I wrote the following code. First I read the text and split each pose line into a quaternion and a translation. Then I compute the optical center as -R^T * t and the camera rotation as R^T.
colmap_pose = r"/Users/chunibyo/Desktop/images.txt"
rc_pose = r'flight_log.csv'
image_list_file = r'select.bat'
pose_list_file = r'blender.py'
import math
import numpy as np
import open3d as o3d

def euler_from_quaternion(x, y, z, w):
    """
    Convert a quaternion into euler angles (roll, pitch, yaw)
    roll is rotation around x in radians (counterclockwise)
    pitch is rotation around y in radians (counterclockwise)
    yaw is rotation around z in radians (counterclockwise)
    """
    t0 = +2.0 * (w * x + y * z)
    t1 = +1.0 - 2.0 * (x * x + y * y)
    roll_x = math.atan2(t0, t1)

    t2 = +2.0 * (w * y - z * x)
    t2 = +1.0 if t2 > +1.0 else t2
    t2 = -1.0 if t2 < -1.0 else t2
    pitch_y = math.asin(t2)

    t3 = +2.0 * (w * z + x * y)
    t4 = +1.0 - 2.0 * (y * y + z * z)
    yaw_z = math.atan2(t3, t4)

    return roll_x, pitch_y, yaw_z  # in radians

def quaternion_rotation_matrix(q0, q1, q2, q3):
    # quaternion given as w, x, y, z
    """
    Convert a quaternion into a full three-dimensional rotation matrix.

    Input
    :param Q: A 4 element array representing the quaternion (q0,q1,q2,q3)

    Output
    :return: A 3x3 element matrix representing the full 3D rotation matrix.
             This rotation matrix converts a point in the local reference
             frame to a point in the global reference frame.
    """
    # First row of the rotation matrix
    r00 = 2 * (q0 * q0 + q1 * q1) - 1
    r01 = 2 * (q1 * q2 - q0 * q3)
    r02 = 2 * (q1 * q3 + q0 * q2)

    # Second row of the rotation matrix
    r10 = 2 * (q1 * q2 + q0 * q3)
    r11 = 2 * (q0 * q0 + q2 * q2) - 1
    r12 = 2 * (q2 * q3 - q0 * q1)

    # Third row of the rotation matrix
    r20 = 2 * (q1 * q3 - q0 * q2)
    r21 = 2 * (q2 * q3 + q0 * q1)
    r22 = 2 * (q0 * q0 + q3 * q3) - 1

    # 3x3 rotation matrix
    rot_matrix = np.array([[r00, r01, r02],
                           [r10, r11, r12],
                           [r20, r21, r22]])
    return rot_matrix

def rotmat2qvec(R):
    # rotation matrix -> quaternion (w, x, y, z)
    Rxx, Ryx, Rzx, Rxy, Ryy, Rzy, Rxz, Ryz, Rzz = R.flat
    K = np.array([
        [Rxx - Ryy - Rzz, 0, 0, 0],
        [Ryx + Rxy, Ryy - Rxx - Rzz, 0, 0],
        [Rzx + Rxz, Rzy + Ryz, Rzz - Rxx - Ryy, 0],
        [Ryz - Rzy, Rzx - Rxz, Rxy - Ryx, Rxx + Ryy + Rzz]]) / 3.0
    eigvals, eigvecs = np.linalg.eigh(K)
    qvec = eigvecs[[3, 0, 1, 2], np.argmax(eigvals)]
    if qvec[0] < 0:
        qvec *= -1
    return qvec

def qvec2rotmat(qvec):
    # quaternion (w, x, y, z) -> rotation matrix
    return np.array([
        [1 - 2 * qvec[2] ** 2 - 2 * qvec[3] ** 2,
         2 * qvec[1] * qvec[2] - 2 * qvec[0] * qvec[3],
         2 * qvec[3] * qvec[1] + 2 * qvec[0] * qvec[2]],
        [2 * qvec[1] * qvec[2] + 2 * qvec[0] * qvec[3],
         1 - 2 * qvec[1] ** 2 - 2 * qvec[3] ** 2,
         2 * qvec[2] * qvec[3] - 2 * qvec[0] * qvec[1]],
        [2 * qvec[3] * qvec[1] - 2 * qvec[0] * qvec[2],
         2 * qvec[2] * qvec[3] + 2 * qvec[0] * qvec[1],
         1 - 2 * qvec[1] ** 2 - 2 * qvec[2] ** 2]])
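
# Optional sanity check: rotmat2qvec and qvec2rotmat should round-trip on a unit
# quaternion, e.g. the first pose quaternion from images.txt above.
_q = np.array([0.851773, 0.0165051, 0.503764, -0.142941])
_q /= np.linalg.norm(_q)
assert np.allclose(rotmat2qvec(qvec2rotmat(_q)), _q, atol=1e-6)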

def main():
    with open(colmap_pose) as f:
        lines = f.readlines()

    rc_lines = []
    image_list = []
    pose_list = []
    point_cloud = []
    accuracy = 10

    for index, temp_line in enumerate(lines):
        line = temp_line.strip()
        # keep only the pose lines (they end with the image name);
        # the '#' header and the POINTS2D lines are skipped
        if not line.startswith('#') and line.lower().endswith(('.png', '.jpg')):
            temp = line.split()
            qw = float(temp[1])
            qx = float(temp[2])
            qy = float(temp[3])
            qz = float(temp[4])
            tx = float(temp[5])
            ty = float(temp[6])
            tz = float(temp[7])
            name = temp[9]

            # COLMAP stores world-to-camera: optical center = -R^T t, camera rotation = R^T
            rotation_matrix = qvec2rotmat([qw, qx, qy, qz])
            optical_center = -rotation_matrix.T @ np.array([tx, ty, tz])
            tx = float(optical_center[0])
            ty = float(optical_center[1])
            tz = float(optical_center[2])

            _qw, _qx, _qy, _qz = rotmat2qvec(rotation_matrix.T)
            roll_x, pitch_y, yaw_z = euler_from_quaternion(_qx, _qy, _qz, _qw)

            rc_lines.append(
                f"{name},{tx},{ty},{tz},{accuracy},{accuracy},{accuracy},{math.degrees(yaw_z)},{math.degrees(pitch_y)},{math.degrees(roll_x)},{accuracy},{accuracy},{accuracy}\n")
            image_list.append(f"xcopy {name} select\n")

            # f"camera_object{index}.rotation_euler = Euler(({yaw_z}, {pitch_y}, {roll_x}), 'ZYX')\n" \
            # f"camera_object{index}.rotation_quaternion = Quaternion(({qw}, {qx}, {qy}, {qz}))\n" \
            blender_script = f"camera_data{index} = bpy.data.cameras.new(name='{name}')\n" \
                             f"camera_object{index} = bpy.data.objects.new('{name}', camera_data{index})\n" \
                             f"bpy.context.scene.collection.objects.link(camera_object{index})\n" \
                             f"camera_object{index}.location = ({tx}, {ty}, {tz})\n" \
                             f"camera_object{index}.rotation_euler = Euler(({yaw_z}, {pitch_y}, {roll_x}), 'ZYX')\n" \
                             f"camera_object{index}.rotation_mode = 'ZYX'\n" \
                             f"bpy.data.cameras['{name}'].lens = 30\n\n"
            pose_list.append(blender_script)
            point_cloud.append([tx, ty, tz])

    # csv flight log
    with open(rc_pose, 'w') as f:
        f.writelines(rc_lines)

    # batch file that copies the selected images
    with open(image_list_file, 'w') as f:
        f.write('@echo off\nmkdir select\n')
        f.writelines(image_list)

    # blender import script
    with open(pose_list_file, 'w') as f:
        f.write('import bpy\nfrom mathutils import Euler, Quaternion\n\n')
        f.writelines(pose_list)

    # ply with the camera centers
    pcd = o3d.geometry.PointCloud()
    pcd.points = o3d.utility.Vector3dVector(np.array(point_cloud))
    o3d.io.write_point_cloud("pcl.ply", pcd, write_ascii=True)


if __name__ == '__main__':
    main()
But I get the wrong camera rotations; the original poses and the imported poses are shown in the figure.
Could you give me some help, please?
After loading R and T, try this, and use the quaternion for the camera orientation.

import numpy as np
from scipy.spatial.transform import Rotation

w2c = np.eye(4)
w2c[:3, :3] = R
w2c[:3, 3] = T
c2w = np.linalg.inv(w2c)

# quaternion (note: scipy's as_quat() returns it in (x, y, z, w) order)
q = Rotation.from_matrix(c2w[:3, :3]).as_quat()

# translation
t = c2w[:3, 3]
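
For reference, a minimal sketch of applying such a camera-to-world pose to a Blender camera. It assumes the usual convention mismatch: COLMAP/OpenCV cameras look along +Z with +Y down, while Blender cameras look along -Z with +Y up, so the local Y and Z axes of the camera-to-world rotation are flipped before use. The object name and the example R/T values are placeholders:

import numpy as np
import bpy
from mathutils import Matrix

# placeholder world-to-camera pose; in practice R = qvec2rotmat([qw, qx, qy, qz]), T = (tx, ty, tz)
R = np.eye(3)
T = np.array([-0.737434, 1.02973, 3.74354])

w2c = np.eye(4)
w2c[:3, :3] = R
w2c[:3, 3] = T
c2w = np.linalg.inv(w2c)

# COLMAP/OpenCV camera frame -> Blender camera frame: flip the local Y and Z axes
c2w[:3, 1:3] *= -1

cam_data = bpy.data.cameras.new(name='colmap_cam')
cam_obj = bpy.data.objects.new('colmap_cam', cam_data)
bpy.context.scene.collection.objects.link(cam_obj)

cam_obj.rotation_mode = 'QUATERNION'   # set the mode before assigning the quaternion
cam_obj.rotation_quaternion = Matrix(c2w[:3, :3].tolist()).to_quaternion()
cam_obj.location = c2w[:3, 3].tolist()

Blender's rotation_quaternion expects (w, x, y, z), which is why the conversion goes through mathutils here instead of reusing scipy's (x, y, z, w) output directly.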