diff --git a/.gitignore b/.gitignore index 06f7000..8948850 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ .vscode +.venv *.png *.bin *.db3 diff --git a/packages/camera/scripts/camera_placement_params.yaml b/packages/camera/scripts/camera_placement_params.yaml new file mode 100644 index 0000000..712878a --- /dev/null +++ b/packages/camera/scripts/camera_placement_params.yaml @@ -0,0 +1,106 @@ +intrinsics: + image_size: [1920, 1200] + 1: + camera_matrix: [672.2824725267757, 0, 984.0472159818853, 0, 672.6886411532304, 602.96669930345, 0, 0, 1] + distortion_coefs: [-0.09715103386082896, 0.06788948036532018, -0.0007157453506997161, 0.0003048354358359307, -0.003636308978789861] + 2: + camera_matrix: [685.7143789189881, 0, 991.0247637161314, 0, 686.3020333004097, 601.2442243349392, 0, 0, 1] + distortion_coefs: [-0.09781628655937251, 0.07153618281495966, -0.001066517414175782, 0.0004679942401339674, -0.003645360450147547] + +camera_positions: + 0: + 1: + rotation: [-135, 0, 90] + translation: [1.3, -0.6, 1] + 2: + rotation: [-135, 0, -90] + translation: [-1.3, -0.6, 1] + + 1: + 1: + rotation: [-110, 0, 90] + translation: [1.3, -0.6, 1] + 2: + rotation: [-110, 0, -90] + translation: [-1.3, -0.6, 1] + + 2: + 1: + rotation: [-110, 0, 90] + translation: [1, -0.6, 1] + 2: + rotation: [-110, 0, -90] + translation: [-1, -0.6, 1] + + 3: + 1: + rotation: [-100, 0, 60] + translation: [1.6, -0.6, 0.7] + 2: + rotation: [-100, 0, -60] + translation: [-1.6, -0.6, 0.7] + + 4: + 1: + rotation: [-110, 0, 45] + translation: [1.3, -1.3, 1] + 2: + rotation: [-110, 0, -45] + translation: [-1.3, -1.3, 1] + + + 5: + 1: + rotation: [-110, 0, 45] + translation: [1.3, -1.3, 1] + 2: + rotation: [-110, 0, -45] + translation: [-1.3, -1.3, 1] + + 6: + 1: + rotation: [-110, 0, 45] + translation: [1, -1.3, 1] + 2: + rotation: [-110, 0, -45] + translation: [-1, -1.3, 1] + + 7: + 1: + rotation: [-110, 0, 90] + translation: [1.3, 0, 1] + 2: + rotation: [-100, 0, -30] + translation: [-0.8, -1.6, 0.7] + + 8: # stereopair with 20cm gap + 1: + rotation: [-90, 0, 0] + translation: [0.1, -1.7, 0.2] + 2: + rotation: [-90, 0, 0] + translation: [-0.1, -1.7, 0.2] + + 9: # stereopair with 10cm gap + 1: + rotation: [-90, 0, 0] + translation: [0.05, -1.7, 0.2] + 2: + rotation: [-90, 0, 0] + translation: [-0.05, -1.7, 0.2] + + 10: # stereopair with 20cm gap facing slightly up + 1: + rotation: [-80, 0, 0] + translation: [0.1, -1.9, 0.2] + 2: + rotation: [-80, 0, 0] + translation: [-0.1, -1.9, 0.2] + + 11: # stereopair with 10cm gap facing slightly up + 1: + rotation: [-80, 0, 0] + translation: [0.05, -1.9, 0.2] + 2: + rotation: [-80, 0, 0] + translation: [-0.05, -1.9, 0.2] diff --git a/packages/camera/scripts/camera_placement_visual.py b/packages/camera/scripts/camera_placement_visual.py new file mode 100644 index 0000000..02e8057 --- /dev/null +++ b/packages/camera/scripts/camera_placement_visual.py @@ -0,0 +1,758 @@ +import argparse +import json +import os +from typing import Dict, List, Tuple + +import cv2 +import numpy as np +import rosbag2_py +import yaml +from cv_bridge import CvBridge +from geometry_msgs.msg import Point, TransformStamped +from rclpy.serialization import serialize_message +from scipy.spatial.transform import Rotation +from sensor_msgs.msg import CameraInfo +from visualization_msgs.msg import ImageMarker, Marker + +from scipy.ndimage import center_of_mass +from typing import Tuple, List + +SEC_MULTIPLIER = 10**9 +MS_MULTIPLIER = 10**6 +MCS_MULTIPLIER = 10**3 +NANO_MULTIPLIER = 1 + +TABLE_LENGTH = 2.74 
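+
+# NOTE: rotations in camera_placement_params.yaml are xyz Euler angles in degrees
+# (scipy's lowercase/extrinsic "xyz" convention, applied in CameraParameters below),
+# translations are in metres. A quick sanity check of the convention, with
+# illustrative values rather than ones taken from the config:
+#
+#     >>> from scipy.spatial.transform import Rotation
+#     >>> Rotation.from_euler("xyz", [-90, 0, 0], degrees=True).apply([0.0, 0.0, 1.0])
+#     # ~ [0, 1, 0]: a camera pitched by -90 degrees has its optical axis (+z) turned to +y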
+TABLE_WIDTH = 1.525
+FPS_LATENCY_MS = 15
+
+last_marker_id = 0
+last_table_marker_id = None
+
+
+def get_new_marker_id() -> int:
+    global last_marker_id
+    last_marker_id += 1
+    return last_marker_id
+
+
+class CameraParameters:
+    def __init__(
+        self,
+        image_size,
+        camera_matrix,
+        dist_coefs,
+        camera_id,
+        rotation_vector,
+        translation_vector,
+        yaw_pitch_roll_order=False,
+    ):
+        self.camera_matrix = np.array(camera_matrix, dtype=float).reshape((3, 3))
+        self.dist_coefs = np.array(dist_coefs, dtype=float)
+        self.camera_id = camera_id
+        self.image_size = image_size
+
+        self.mapx, self.mapy = cv2.initUndistortRectifyMap(
+            self.camera_matrix,
+            self.dist_coefs,
+            None,
+            self.camera_matrix,
+            self.image_size,
+            cv2.CV_32FC1,
+        )
+
+        self.rotation_vector = np.array(rotation_vector, dtype=float)
+        self.translation_vector = np.array(translation_vector, dtype=float)
+        if yaw_pitch_roll_order:
+            # rotation_vector holds Euler angles in degrees
+            self.rotation_matrix = Rotation.from_euler(
+                "xyz", rotation_vector, degrees=True
+            ).as_matrix()
+        else:
+            # rotation_vector holds a Rodrigues (axis-angle) vector
+            self.rotation_matrix = cv2.Rodrigues(self.rotation_vector.reshape((3, 1)))[0]
+
+        self.static_transformation = TransformStamped()
+        self.static_transformation.child_frame_id = f"camera_{self.camera_id}"
+        self.static_transformation.header.frame_id = "world"
+
+        self.static_transformation.transform.translation.x = self.translation_vector[0]
+        self.static_transformation.transform.translation.y = self.translation_vector[1]
+        self.static_transformation.transform.translation.z = self.translation_vector[2]
+
+        qx, qy, qz, qw = Rotation.from_matrix(self.rotation_matrix).as_quat().tolist()
+
+        self.static_transformation.transform.rotation.x = qx
+        self.static_transformation.transform.rotation.y = qy
+        self.static_transformation.transform.rotation.z = qz
+        self.static_transformation.transform.rotation.w = qw
+
+    def undistort(self, image: cv2.Mat) -> cv2.Mat:
+        return cv2.remap(image, self.mapx, self.mapy, cv2.INTER_NEAREST)
+
+    def publish_camera_info(self, writer: rosbag2_py.SequentialWriter, current_time: int) -> None:
+        camera_info_msg = CameraInfo()
+        camera_info_msg.header.frame_id = f"camera_{self.camera_id}"
+        # current_time is in nanoseconds: whole seconds go into sec, the remainder into nanosec
+        camera_info_msg.header.stamp.sec = current_time // SEC_MULTIPLIER
+        camera_info_msg.header.stamp.nanosec = current_time % SEC_MULTIPLIER
+        camera_info_msg.height = self.image_size[1]
+        camera_info_msg.width = self.image_size[0]
+
+        camera_info_msg.distortion_model = "plumb_bob"
+        camera_info_msg.d = self.dist_coefs.flatten().tolist()
+
+        camera_info_msg.k = self.camera_matrix.flatten().tolist()
+        # P is the 3x4 projection matrix (12 values): K with a zero translation column appended
+        camera_info_msg.p = np.hstack((self.camera_matrix, np.zeros((3, 1)))).flatten().tolist()
+
+        writer.write(
+            f"/camera_{self.camera_id}/info",
+            serialize_message(camera_info_msg),
+            current_time,
+        )
+
+    def publish_transform(self, writer: rosbag2_py.SequentialWriter, current_time: int) -> None:
+        self.static_transformation.header.stamp.sec = current_time // SEC_MULTIPLIER
+        self.static_transformation.header.stamp.nanosec = current_time % SEC_MULTIPLIER
+
+        writer.write("/tf", serialize_message(self.static_transformation), current_time)
+
+
+def init_writer(export_file: str) -> rosbag2_py.SequentialWriter:
+    writer = rosbag2_py.SequentialWriter()
+    writer.open(
+        rosbag2_py.StorageOptions(uri=export_file, storage_id="mcap"),
+        rosbag2_py.ConverterOptions(
+            input_serialization_format="cdr", output_serialization_format="cdr"
+        ),
+    )
+
+    writer.create_topic(
+        rosbag2_py.TopicMetadata(
+            name="/triangulation/ball_marker",
+            type="visualization_msgs/msg/Marker",
+            serialization_format="cdr",
+        )
+    )
+    writer.create_topic(
rosbag2_py.TopicMetadata( + name="/triangulation/ball_table_projection", + type="visualization_msgs/msg/Marker", + serialization_format="cdr", + ) + ) + writer.create_topic( + rosbag2_py.TopicMetadata( + name="/triangulation/trajectory", + type="visualization_msgs/msg/Marker", + serialization_format="cdr", + ) + ) + writer.create_topic( + rosbag2_py.TopicMetadata( + name="/triangulation/intersection_points", + type="visualization_msgs/msg/Marker", + serialization_format="cdr", + ) + ) + writer.create_topic( + rosbag2_py.TopicMetadata( + name="/triangulation/table_plane", + type="visualization_msgs/msg/Marker", + serialization_format="cdr", + ) + ) + writer.create_topic( + rosbag2_py.TopicMetadata( + name="/tf", + type="geometry_msgs/msg/TransformStamped", + serialization_format="cdr", + ) + ) + + for i in range(2): + writer.create_topic( + rosbag2_py.TopicMetadata( + name=f"/camera_{i + 1}/image", + type="sensor_msgs/msg/CompressedImage", + serialization_format="cdr", + ) + ) + writer.create_topic( + rosbag2_py.TopicMetadata( + name=f"/camera_{i + 1}/ball_center", + type="visualization_msgs/msg/ImageMarker", + serialization_format="cdr", + ) + ) + writer.create_topic( + rosbag2_py.TopicMetadata( + name=f"/camera_{i + 1}/info", + type="sensor_msgs/msg/CameraInfo", + serialization_format="cdr", + ) + ) + + return writer + + +def init_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser() + + parser.add_argument( + "--params-file", + help="yaml file with intrinsic parameters and cameras' positions", + required=True, + ) + parser.add_argument("--export", help="some_file.mcap", required=True) + + return parser + + +def init_ball_marker( + marker_id: int, current_time: int, position: List[int], camera_id: int, ttl=100 +) -> Marker: + msg = Marker() + msg.header.frame_id = f"camera_{camera_id}" + msg.header.stamp.sec = current_time // SEC_MULTIPLIER + msg.header.stamp.nanosec = current_time % SEC_MULTIPLIER + msg.ns = "ball_markers" + msg.id = marker_id + msg.type = Marker.SPHERE + msg.action = Marker.ADD + msg.pose.position.x, msg.pose.position.y, msg.pose.position.z = position + + msg.pose.orientation.x = 0.0 + msg.pose.orientation.y = 0.0 + msg.pose.orientation.z = 0.0 + msg.pose.orientation.w = 1.0 + + msg.scale.x = 0.04 + msg.scale.y = 0.04 + msg.scale.z = 0.04 + + msg.color.r = 1.0 + msg.color.g = 0.5 + msg.color.b = 0.0 + msg.color.a = 1.0 + + msg.lifetime.nanosec = ttl * 10**6 # ttl in milliseconds + + return msg + + +def init_detection_center_marker( + marker_id: int, current_time: int, position: List[int], camera_id: int, ttl=100 +) -> ImageMarker: + msg = ImageMarker() + msg.header.frame_id = f"camera_{camera_id}" + msg.header.stamp.sec = current_time // SEC_MULTIPLIER + msg.header.stamp.nanosec = current_time % SEC_MULTIPLIER + msg.ns = "ball_markers" + msg.id = marker_id + msg.type = ImageMarker.CIRCLE + msg.action = ImageMarker.ADD + + msg.position.x = float(position[0]) + msg.position.y = float(position[1]) + msg.position.z = 0.0 + + msg.scale = 3.0 + msg.filled = 255 + + msg.fill_color.r = 1.0 # orange color + msg.fill_color.g = 0.0 + msg.fill_color.b = 0.0 + msg.fill_color.a = 1.0 # alpha (1.0 = opaque, 0.0 = transparent) + + msg.outline_color.r = 1.0 # orange color + msg.outline_color.g = 0.0 + msg.outline_color.b = 0.0 + msg.outline_color.a = 1.0 # alpha (1.0 = opaque, 0.0 = transparent) + + msg.lifetime.nanosec = ttl * 10**6 # ttl = 10ms + + return msg + + +def init_camera_info( + writer: rosbag2_py.SequentialWriter, params_path: str, camera_ids=[1, 2] +) -> 
Tuple[List[CameraParameters], np.ndarray]: + intrinsics = [] + for camera_id in camera_ids: + with open(params_path, mode="r", encoding="utf-8") as file: + data = yaml.safe_load(file) + intrinsics.append( + CameraParameters( + data["parameters"][camera_id]["image_size"], + data["parameters"][camera_id]["camera_matrix"], + data["parameters"][camera_id]["distortion_coefs"], + camera_id, + data["parameters"][camera_id]["rotation"], + data["parameters"][camera_id]["translation"], + ) + ) + complanar_aruco_points = np.array(data["triangulated_common_points"], dtype=np.float64) + + centroid = np.mean(complanar_aruco_points, axis=0) + _, _, VT = np.linalg.svd(complanar_aruco_points[:10] - centroid, full_matrices=False) + print("normal is", VT[-1, :]) + return intrinsics, VT[-1, :] + + +def publish_table_plain(writer: rosbag2_py.SequentialWriter, simulation_time: int) -> None: + global last_table_marker_id + if last_table_marker_id: + marker = Marker() + marker.id = last_table_marker_id + marker.action = 2 # DELETE + writer.write("/triangulation/table_plane", serialize_message(marker), simulation_time) + + # publish blue table plain + marker = Marker() + marker.header.frame_id = "table" + marker.id = get_new_marker_id() + last_table_marker_id = marker.id + marker.type = marker.CUBE + marker.action = marker.ADD + marker.scale.x = TABLE_LENGTH + marker.scale.y = TABLE_WIDTH + marker.scale.z = 0.01 + + marker.color.r = 0.0 + marker.color.g = 0.0 + marker.color.b = 0.8 + marker.color.a = 0.3 + + marker.pose.position.x = 0.0 + marker.pose.position.y = 0.0 + marker.pose.position.z = 0.0 + ( + marker.pose.orientation.x, + marker.pose.orientation.y, + marker.pose.orientation.z, + marker.pose.orientation.w, + ) = Rotation.from_euler("xyz", [0, 0, 90], degrees=True).as_quat().tolist() + writer.write("/triangulation/table_plane", serialize_message(marker), simulation_time) + + # publish white border + marker = Marker() + marker.id = get_new_marker_id() + marker.header.frame_id = "table" + marker.type = marker.LINE_STRIP + marker.action = marker.ADD + marker.scale.x = 0.01 + + marker.color.r = 1.0 + marker.color.g = 1.0 + marker.color.b = 1.0 + marker.color.a = 0.9 + + ( + marker.pose.orientation.x, + marker.pose.orientation.y, + marker.pose.orientation.z, + marker.pose.orientation.w, + ) = Rotation.from_euler("xyz", [0, 0, 90], degrees=True).as_quat().tolist() + + coords = [ + (-TABLE_WIDTH / 2, -TABLE_LENGTH / 2), + (-TABLE_WIDTH / 2, TABLE_LENGTH / 2), + (TABLE_WIDTH / 2, TABLE_LENGTH / 2), + (TABLE_WIDTH / 2, -TABLE_LENGTH / 2), + (-TABLE_WIDTH / 2, -TABLE_LENGTH / 2), + ] + + for cur_y, cur_x in coords: + new_point = Point() + new_point.x = cur_x + new_point.y = cur_y + marker.points.append(new_point) + + writer.write("/triangulation/table_plane", serialize_message(marker), simulation_time) + + # publish length line + marker.id = get_new_marker_id() + marker.points = [] + + coords = [ + (0.0, -TABLE_LENGTH / 2), + (0.0, TABLE_LENGTH / 2), + ] + + for cur_y, cur_x in coords: + new_point = Point() + new_point.x = cur_x + new_point.y = cur_y + marker.points.append(new_point) + + writer.write("/triangulation/table_plane", serialize_message(marker), simulation_time) + + # publish width line + marker.id = get_new_marker_id() + marker.points = [] + + coords = [ + (-TABLE_WIDTH / 2, 0.0), + (TABLE_WIDTH / 2, 0.0), + ] + + for cur_y, cur_x in coords: + new_point = Point() + new_point.x = cur_x + new_point.y = cur_y + marker.points.append(new_point) + + writer.write("/triangulation/table_plane", 
serialize_message(marker), simulation_time)
+
+
+def get_cam2world_transform(
+    table_plane_normal: np.ndarray, table_orientation_points: List[List[float]]
+) -> Tuple[np.ndarray, ...]:
+    edge_table_orient_point = np.array(table_orientation_points[0], dtype=float)
+    middle_table_orient_point = np.array(table_orientation_points[1], dtype=float)
+    x_vector = middle_table_orient_point - edge_table_orient_point
+    # subtract the projection of the normal onto x (note the squared norm in the
+    # projection coefficient) so that z is orthogonal to x
+    z_vector = (
+        table_plane_normal
+        - (table_plane_normal.dot(x_vector) / np.linalg.norm(x_vector, ord=2) ** 2) * x_vector
+    )
+    y_vector = np.cross(x_vector, z_vector)
+
+    x_vector /= np.linalg.norm(x_vector, ord=2)
+    y_vector /= np.linalg.norm(y_vector, ord=2)
+    z_vector /= np.linalg.norm(z_vector, ord=2)
+
+    R = np.concatenate(
+        (y_vector.reshape((3, 1)), x_vector.reshape((3, 1)), z_vector.reshape((3, 1))),
+        axis=1,
+    )
+    table_line_middle = (edge_table_orient_point + middle_table_orient_point) / 2
+    T = table_line_middle.reshape((3, 1))
+    R_inv = np.linalg.inv(R)
+
+    return R, T, R_inv, R_inv @ T, table_line_middle
+
+
+def publish_cam2table_transform(
+    writer: rosbag2_py.SequentialWriter, R: np.ndarray, T: np.ndarray
+) -> None:
+    static_transformation = TransformStamped()
+    static_transformation.child_frame_id = "table"
+    static_transformation.header.frame_id = "world"
+
+    static_transformation.transform.translation.x = T[0, 0]
+    static_transformation.transform.translation.y = T[1, 0]
+    static_transformation.transform.translation.z = T[2, 0]
+    static_transformation.header.stamp.sec = 0
+    static_transformation.header.stamp.nanosec = 0
+
+    qx, qy, qz, qw = Rotation.from_matrix(R).as_quat().tolist()
+
+    static_transformation.transform.rotation.x = qx
+    static_transformation.transform.rotation.y = qy
+    static_transformation.transform.rotation.z = qz
+    static_transformation.transform.rotation.w = qw
+
+    writer.write("/tf", serialize_message(static_transformation), 0)
+
+
+class Transformation:
+    def __init__(self, R, t):
+        self.R = R
+        self.t = t.reshape((3, 1))
+        self.R_inv = np.linalg.inv(self.R)
+
+    def __call__(self, point):
+        return self.R @ point + self.t
+
+    # composition: the right-hand transformation is applied first
+    def __mul__(self, other):
+        return Transformation(self.R @ other.R, self.R @ other.t + self.t)
+
+    def transform(self, point):
+        return self(point)
+
+    def inverse_transform(self, point):
+        return self.R_inv @ (point - self.t)
+
+
+class Image:
+    def __init__(
+        self, camera_matrix, camera_transformation, distortion_coefs, image_size=(1200, 1920)
+    ):
+        self.camera_matrix = camera_matrix
+        self.camera_transformation = camera_transformation
+        self.distortion_coefs = distortion_coefs
+        self.image_size = image_size
+
+    # point is given in world coordinates; the result is a homogeneous pixel
+    # coordinate (not yet divided by its last component)
+    def project_point_to_image(self, point):
+        if point.shape != (3, 1):
+            point = point.reshape((3, 1))
+        return self.camera_matrix @ self.camera_transformation(point)
+
+    def project_points_to_image(self, points):
+        return
np.array([self.project_point_to_image(point) for point in points]) + + def normilize_image_point(self, image_point): + x_normalised = (image_point[0] - self.camera_matrix[0, 2]) / self.camera_matrix[0, 0] + y_normalised = (image_point[1] - self.camera_matrix[1, 2]) / self.camera_matrix[1, 1] + return np.array([x_normalised, y_normalised, 1]).reshape(3, 1) + + def project_ball_to_image(self, center, radius: float) -> np.ndarray: + def valid_coords(x, y): + return x >= 0 and x < self.image_size[1] and y >= 0 and y < self.image_size[0] + + center = center.reshape((3, 1)) + camera_matrix_inv = np.linalg.inv(self.camera_matrix) + + transformed_center = self.camera_transformation(center) + projected_center = self.camera_matrix @ transformed_center + projected_center /= projected_center[2] + + if ( + np.linalg.norm( + projected_center.flatten() + - np.array([self.image_size[1] / 2, self.image_size[0] / 2, 1]) + ) + > 2000 + ): + return np.zeros(self.image_size) + + image = np.zeros(self.image_size) + checked_pixels = set() + + pixels_to_check = {(int(projected_center[0][0]), int(projected_center[1][0]))} + while pixels_to_check: + x, y = pixels_to_check.pop() + + image_point_camera_ray = camera_matrix_inv @ np.array([x, y, 1]).reshape((3, 1)) + image_point_world_ray = self.camera_transformation.inverse_transform( + image_point_camera_ray + ) - self.camera_transformation.inverse_transform(np.array([0, 0, 0]).reshape((3, 1))) + ball_center_world_ray = center - self.camera_transformation.inverse_transform( + np.array([0, 0, 0]).reshape((3, 1)) + ) + + distance = np.linalg.norm( + np.cross(ball_center_world_ray.flatten(), image_point_world_ray.flatten()), ord=2 + ) / np.linalg.norm(image_point_world_ray, ord=2) + if distance <= radius: + if valid_coords(x, y): + image[y, x] = 1 + # adding all 8 neighbours to the queue + for dx in range(-1, 2): + for dy in range(-1, 2): + if (x + dx, y + dy) not in checked_pixels: + pixels_to_check.add((x + dx, y + dy)) + checked_pixels.add((x + dx, y + dy)) + + return image + + +def get_bbox(mask: np.ndarray) -> List[float]: + if not np.any(mask): + return [0.0, 0.0, 0.0, 0.0] + # x_min, y_min, x_max, y_max + horizontal_indicies = np.where(np.any(mask, axis=0))[0] + vertical_indicies = np.where(np.any(mask, axis=1))[0] + x1, x2 = horizontal_indicies[[0, -1]] + y1, y2 = vertical_indicies[[0, -1]] + bbox = list(map(float, [x1, y1, x2, y2])) + return bbox + + +def get_mask_center(mask): + bbox = get_bbox(mask) + centroid_x = (bbox[0] + bbox[2]) / 2 + centroid_y = (bbox[1] + bbox[3]) / 2 + return np.array([centroid_x, centroid_y]) + + +def get_mask_centroid(mask): + return np.array(center_of_mass(mask)) + + +class StereoScene: + def __init__(self, left_camera: Image, right_camera: Image, table_middle_normal): + self.left_camera = left_camera + self.right_camera = right_camera + self.table_middle_normal = table_middle_normal + self.last_n_clicks = 0 + + def project_ball_to_images(self, center, radius): + left_image = self.left_camera.project_ball_to_image(center, radius) + right_image = self.right_camera.project_ball_to_image(center, radius) + return left_image, right_image + + def triangulate_position(self, points_by_view_1, points_by_view_2, world2cam, cam2cam): + print("triangulating") + # print(points_by_view) + world2cam_Rt = np.column_stack((world2cam.R, world2cam.t)) + world2second_cam = cam2cam * world2cam + world2second_cam_Rt = np.column_stack((world2second_cam.R, world2second_cam.t)) + proj_1 = self.left_camera.camera_matrix @ world2cam_Rt + proj_2 = 
self.right_camera.camera_matrix @ world2second_cam_Rt
+
+        res = cv2.triangulatePoints(proj_1, proj_2, points_by_view_1, points_by_view_2)
+        res /= res[3, :]  # normalizing
+
+        # TODO preserve 4D points?
+        return res[:3, :]
+
+
+def evaluate_camera_position(
+    world2master: Transformation,
+    master2second: Transformation,
+    center_extractor,
+    camera_params_1: CameraParameters,
+    camera_params_2: CameraParameters,
+    simulation_time
+):
+    """Triangulate a grid of synthetic balls and report the mean/std of the position error."""
+    NUMBER_OF_SPHERES = 6
+    image_1 = Image(camera_params_1.camera_matrix, world2master, camera_params_1.dist_coefs)
+    image_2 = Image(
+        camera_params_2.camera_matrix, master2second * world2master, camera_params_2.dist_coefs
+    )
+    stereo_scene = StereoScene(image_1, image_2, None)
+
+    sphere_centers = []
+    for y in np.linspace(-TABLE_LENGTH / 2, TABLE_LENGTH / 2, NUMBER_OF_SPHERES):
+        for x in np.linspace(-TABLE_WIDTH / 2, TABLE_WIDTH / 2, NUMBER_OF_SPHERES):
+            for z in np.linspace(0, 1, NUMBER_OF_SPHERES):
+                sphere_centers.append((x, y, z))
+
+    sphere_centers = np.array(sphere_centers).T
+    points_1 = []
+    points_2 = []
+    valid_sphere_centers = []
+
+    for i in range(sphere_centers.shape[1]):
+        mask_1, mask_2 = stereo_scene.project_ball_to_images(sphere_centers[:, i : (i + 1)], 0.02)
+        ball_marker = init_ball_marker(
+            get_new_marker_id(),
+            simulation_time,
+            sphere_centers[:, i : (i + 1)].flatten(),
+            1,
+            ttl=999,  # just under one second; keeps lifetime.nanosec within its uint32 range
+        )
+        ball_marker.header.frame_id = "table"
+        # NOTE: relies on the module-level writer created in the __main__ block below
+        writer.write(
+            "/triangulation/ball_marker",
+            serialize_message(ball_marker),
+            simulation_time,
+        )
+
+        if np.sum(mask_1) == 0 or np.sum(mask_2) == 0:
+            continue
+
+        points_1.append(center_extractor(mask_1))
+        points_2.append(center_extractor(mask_2))
+        valid_sphere_centers.append(sphere_centers[:, i])
+
+    points_1 = np.array(points_1).T
+    points_2 = np.array(points_2).T
+    sphere_centers = np.array(valid_sphere_centers).T
+
+    triangulated_points = stereo_scene.triangulate_position(
+        points_1, points_2, world2master, master2second
+    )
+
+    # Euclidean distance between the true and triangulated points
+    distances = np.linalg.norm(sphere_centers - triangulated_points, axis=0)
+
+    mean_distance = np.mean(distances)
+    std_distance = np.std(distances)
+
+    print(f"Mean distance error: {mean_distance}")
+    print(f"Standard deviation of distance error: {std_distance}")
+    print("evaluation complete")
+    print()
+
+
+if __name__ == "__main__":
+    # init all modules
+    parser = init_parser()
+    args = parser.parse_args()
+    writer = init_writer(args.export)
+
+    table_plane_normal = np.array([0, 0, 1])
+    publish_table_plain(writer, 100)
+
+    with open(args.params_file, mode="r", encoding="utf-8") as yaml_params_file:
+        params = yaml.safe_load(yaml_params_file)
+    yaml_intrinsics_dict = params["intrinsics"]
+    publish_cam2table_transform(writer, np.eye(3), np.zeros((3, 1)))
+
+    for position_idx in params["camera_positions"].keys():
+        print(position_idx)
+        R_1, T_1 = (
+            params["camera_positions"][position_idx][1]["rotation"],
+            params["camera_positions"][position_idx][1]["translation"],
+        )
+        R_1 = np.array(R_1)
+        T_1 = np.array(T_1)
+
+        R_2, T_2 = (
+            params["camera_positions"][position_idx][2]["rotation"],
+            params["camera_positions"][position_idx][2]["translation"],
+        )
+        R_2 = np.array(R_2)
+        T_2 = np.array(T_2)
+
+        intrinsics = [
+            CameraParameters(
+                yaml_intrinsics_dict["image_size"],
+                yaml_intrinsics_dict[1]["camera_matrix"],
+                yaml_intrinsics_dict[1]["distortion_coefs"],
+                1,
+                R_1,
+                T_1,
+                yaw_pitch_roll_order=True,
+            ),
+            CameraParameters(
+                yaml_intrinsics_dict["image_size"],
+                yaml_intrinsics_dict[2]["camera_matrix"],
+                yaml_intrinsics_dict[2]["distortion_coefs"],
+                2,
+                R_2,
+                T_2,
+                yaw_pitch_roll_order=True,
+            ),
+        ]
+        for cam_params in intrinsics:
+            cam_params.publish_transform(writer, SEC_MULTIPLIER * position_idx)
+            cam_params.publish_camera_info(writer, SEC_MULTIPLIER * position_idx)
+        publish_table_plain(writer, SEC_MULTIPLIER * position_idx)
+
+        # world -> camera transforms for both cameras (assuming the yaml rotations and
+        # translations are world-to-camera extrinsics)
+        world2master = Transformation(
+            intrinsics[0].rotation_matrix, intrinsics[0].translation_vector
+        )
+        world2slave = Transformation(
+            intrinsics[1].rotation_matrix, intrinsics[1].translation_vector
+        )
+        # relative transform master -> slave: R_rel = R_2 @ R_1^-1, t_rel = t_2 - R_rel @ t_1
+        rotation_master2slave = world2slave.R @ world2master.R_inv
+        translation_master2slave = world2slave.t - rotation_master2slave @ world2master.t
+        master2slave = Transformation(rotation_master2slave, translation_master2slave)
+
+        evaluate_camera_position(
+            world2master,
+            master2slave,
+            get_mask_center,
+            intrinsics[0],
+            intrinsics[1],
+            SEC_MULTIPLIER * position_idx
+        )
+
+    del writer
diff --git a/research.ipynb b/research.ipynb
new file mode 100644
index 0000000..6aace41
--- /dev/null
+++ b/research.ipynb
@@ -0,0 +1,659 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Overview of triangulation precision dependencies\n",
+    "\n",
+    "This research is part of the \"Handy\" project conducted by the HSE Robotics Group.\n",
+    "Its aims are the following:\n",
+    "- to determine the best placement of cameras around the table\n",
+    "- to measure precision fluctuations caused by various center-determining algorithms, segmentation errors and FOV\n",
+    "\n",
+    "Let us assume that the intrinsic parameters of the cameras are known with ideal precision. The cameras' positions relative to each other\n",
+    "(the stereo calibration parameters) are likewise known with maximum precision.\n",
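+    "\n",
+    "Throughout the notebook we rely on the standard pinhole relation: a homogeneous world point $X$ observed by a camera with intrinsics $K$ and pose $[R \\mid t]$ projects to the pixel $x \\sim K\\,[R \\mid t]\\,X$, and `cv2.triangulatePoints` recovers $X$ from a pair of such constraints coming from the two calibrated views."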
+ ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "import cv2\n", + "from scipy.spatial.transform import Rotation\n", + "from scipy.ndimage import center_of_mass\n", + "\n", + "from typing import Tuple, List" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "# let us introduce intrinsic parameters and assume that they provide zero reprojection error for an arbitrary image-world correlation\n", + "\n", + "IMAGE_SIZE = (1200, 1920)\n", + "K_1 = np.array(\n", + " [\n", + " 672.2824725267757,\n", + " 0,\n", + " 984.0472159818853,\n", + " 0,\n", + " 672.6886411532304,\n", + " 602.96669930345,\n", + " 0,\n", + " 0,\n", + " 1,\n", + " ]\n", + ").reshape((3, 3))\n", + "distortion_coefs_1 = np.array(\n", + " [\n", + " -0.09715103386082896,\n", + " 0.06788948036532018,\n", + " -0.0007157453506997161,\n", + " 0.0003048354358359307,\n", + " -0.003636308978789861,\n", + " ]\n", + ")\n", + "K_2 = np.array(\n", + " [\n", + " 685.7143789189881,\n", + " 0,\n", + " 991.0247637161314,\n", + " 0,\n", + " 686.3020333004097,\n", + " 601.2442243349392,\n", + " 0,\n", + " 0,\n", + " 1,\n", + " ]\n", + ").reshape((3, 3))\n", + "distortion_coefs_2 = np.array(\n", + " [\n", + " -0.09781628655937251,\n", + " 0.07153618281495966,\n", + " -0.001066517414175782,\n", + " 0.0004679942401339674,\n", + " -0.003645360450147547,\n", + " ]\n", + ")\n", + "# reference:\n", + "# https://github.com/robotics-laboratory/handy/blob/table-dataset-1/datasets/TableOrange2msRecord_22_04/camera_params.yaml\n", + "stereo_cam_rotation = [0.01175419895518242, 2.170836441913732, 2.19333242876324]\n", + "stereo_cam_translation = [-0.06677747450343367, -1.174973690204363, 1.140354306665756]\n", + "\n", + "# other constant and measurements\n", + "TABLE_LENGTH = 2.74\n", + "TABLE_WIDTH = 1.525" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[-0.03576951]\n", + " [-0.00441021]\n", + " [ 1. 
]]\n", + "[[1.65632969e+03]\n", + " [6.02966699e+02]\n", + " [1.00000000e+00]]\n" + ] + } + ], + "source": [ + "inv = np.linalg.inv(K_1)\n", + "# x, y = \n", + "# point = np.array([IMAGE_SIZE[0], IMAGE_SIZE[1], 1])\n", + "# point[0]\n", + "print(inv @ np.array([IMAGE_SIZE[1]/2, IMAGE_SIZE[0]/2, 1]).reshape((3, 1)))\n", + "print(K_1 @ np.array([1, 0, 1]).reshape((3, 1)))\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Any\n", + "\n", + "\n", + "class Transformation:\n", + " def __init__(self, R, t):\n", + " self.R = R\n", + " self.t = t.reshape((3, 1))\n", + " self.R_inv = np.linalg.inv(self.R)\n", + "\n", + " def __call__(self, point):\n", + " return self.R @ point + self.t\n", + " \n", + " def __mul__(self, other):\n", + " return Transformation(self.R @ other.R, self.R @ other.t + self.t)\n", + "\n", + " def transform(self, point):\n", + " return self(point)\n", + "\n", + " def inverse_transform(self, point):\n", + " return self.R_inv @ (point - self.t)\n", + "\n", + " # right transformation is applied first\n", + " def __mult__(self, other):\n", + " return Transformation(self.R @ other.R, self.t + other.t)\n", + " \n", + "\n", + "class Image:\n", + " def __init__(self, camera_matrix, camera_transformation, distortion_coefs, image_size=IMAGE_SIZE):\n", + " self.camera_matrix = camera_matrix\n", + " self.camera_transformation = camera_transformation\n", + " self.distortion_coefs = distortion_coefs\n", + " self.image_size = image_size\n", + "\n", + " def normilise_image_point(self, point):\n", + " x_normalised = (point[0] - self.camera_matrix[0, 2]) / self.camera_matrix[0, 0]\n", + " y_normalised = (point[1] - self.camera_matrix[1, 2]) / self.camera_matrix[1, 1]\n", + " return np.array([x_normalised, y_normalised, 1]).reshape(3, 1)\n", + "\n", + " # in world coordinates\n", + " def project_point_to_image(self, point):\n", + " if point.shape != (3, 1):\n", + " point = point.reshape((3, 1))\n", + " return self.camera_matrix @ self.camera_transformation(point)\n", + "\n", + " # transformed_point = self.camera_transformation(point)\n", + " # # transformed_point = transformed_point / transformed_point[2]\n", + " # projected_point = self.camera_matrix @ transformed_point\n", + " # return projected_point\n", + " \n", + " def project_points_to_image(self, points):\n", + " return np.array([self.project_point_to_image(point) for point in points])\n", + " \n", + " def normilize_image_point(self, image_point):\n", + " x_normalised = (image_point[0] - self.camera_matrix[0, 2]) / self.camera_matrix[0, 0]\n", + " y_normalised = (image_point[1] - self.camera_matrix[1, 2]) / self.camera_matrix[1, 1]\n", + " return np.array([x_normalised, y_normalised, 1]).reshape(3, 1)\n", + " \n", + "\n", + " def project_ball_to_image(self, \n", + " center, radius: float) -> np.ndarray:\n", + "\n", + " def valid_coords(x, y):\n", + " return x >= 0 and x < self.image_size[1] and y >= 0 and y < self.image_size[0]\n", + "\n", + " center = center.reshape((3, 1))\n", + " camera_matrix_inv = np.linalg.inv(self.camera_matrix)\n", + "\n", + " transformed_center = self.camera_transformation(center)\n", + " projected_center = self.camera_matrix @ transformed_center\n", + " projected_center /= projected_center[2]\n", + "\n", + " if np.linalg.norm(projected_center.flatten() - np.array([IMAGE_SIZE[1]/2, IMAGE_SIZE[0]/2, 1])) > 2000:\n", + " return np.zeros(self.image_size)\n", + "\n", + " image = np.zeros(self.image_size)\n", + " 
checked_pixels = set()\n", + "\n", + " pixels_to_check = {(int(projected_center[0][0]), int(projected_center[1][0]))}\n", + " while pixels_to_check:\n", + " x, y = pixels_to_check.pop()\n", + "\n", + " image_point_camera_ray = camera_matrix_inv @ np.array([x, y, 1]).reshape((3, 1))\n", + " image_point_world_ray = self.camera_transformation.inverse_transform(image_point_camera_ray) - \\\n", + " self.camera_transformation.inverse_transform(np.array([0, 0, 0]).reshape((3, 1)))\n", + " ball_center_world_ray = center - self.camera_transformation.inverse_transform(np.array([0, 0, 0]).reshape((3, 1)))\n", + "\n", + " distance = np.linalg.norm(\n", + " np.cross(ball_center_world_ray.flatten(), image_point_world_ray.flatten()), ord=2) / \\\n", + " np.linalg.norm(image_point_world_ray, ord=2)\n", + " if distance <= radius:\n", + " if valid_coords(x, y):\n", + " image[y, x] = 1\n", + " # adding all 8 neighbours to the queue\n", + " for dx in range(-1, 2):\n", + " for dy in range(-1, 2):\n", + " if (x + dx, y + dy) not in checked_pixels:\n", + " pixels_to_check.add((x + dx, y + dy))\n", + " checked_pixels.add((x + dx, y + dy))\n", + "\n", + " return image\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjAAAAFnCAYAAAC4knO9AAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAJ6tJREFUeJzt3Xt4VNWh9/HfhCSTcJkJATKTKQGiUi4FUQFjvFBb8hKQWjhyWrE5ii2FliZWBBHSFqzWGoqn2oMXUB8FnlcU6/MKtFSpMQioxAAREBAicpBgZRIkZoaLuZH1/uHJPo4EvE1MVvh+nmc/T2attfdea89y+Lln7z0uY4wRAACARWJauwMAAABfFgEGAABYhwADAACsQ4ABAADWIcAAAADrEGAAAIB1CDAAAMA6BBgAAGAdAgwAALAOAQYAAFinTQeYhx9+WH369FFCQoIyMjK0efPm1u4SAABoA9psgHn22Wc1Y8YM3XnnnXrzzTc1ZMgQZWdnq7KysrW7BgAAWpmrrf6YY0ZGhoYPH66HHnpIktTY2Ki0tDTdcsstmjNnTiv3DgAAtKbY1u5Ac+rq6lRaWqr8/HynLCYmRllZWSouLm52ndraWtXW1jqvGxsbVVVVpW7dusnlcrV4nwEAwNdnjNGxY8cUCAQUE3PmL4raZID58MMPderUKfl8vohyn8+nvXv3NrtOQUGB7rrrrm+iewAAoIUdOnRIPXv2PGN9mwwwX0V+fr5mzJjhvA6FQurVq5eu1DWKVVwr9gwAAHxRDarXa3pBXbp0OWu7Nhlgunfvrg4dOqiioiKivKKiQn6/v9l13G633G73aeWxilOsiwADAIAV/ufK3M+7/KNN3oUUHx+voUOHqqioyClrbGxUUVGRMjMzW7FnAACgLWiTZ2AkacaMGZo0aZKGDRumSy+9VH/5y1904sQJ/fSnP23trgEAgFbWZgPM9ddfryNHjmjevHkKBoO66KKLtHbt2tMu7AUAAOeeNvscmK8rHA7L6/Xqao3jGhgAACzRYOq1XqsVCoXk8XjO2K5NXgMDAABwNgQYAABgHQIMAACwDgEGAABYhwADAACsQ4ABAADWIcAAAADrEGAAAIB1CDAAAMA6BBgAAGAdAgwAALAOAQYAAFiHAAMAAKxDgAEAANYhwAAAAOsQYAAAgHUIMAAAwDoEGAAAYB0CDAAAsA4BBgAAWIcAAwAArEOAAQAA1iHAAAAA6xBgAACAdQgwAADAOgQYAABgHQIMAACwDgEGAABYhwADAACsQ4ABAADWIcAAAADrEGAAAIB1CDAAAMA6BBgAAGAdAgwAALAOAQYAAFiHAAMAAKxDgAEAANYhwAAAAOsQYAAAgHUIMAAAwDpRDzAFBQUaPny4unTpopSUFI0fP15lZWURbWpqapSbm6tu3bqpc+fOmjBhgioqKiLalJeXa+zYserYsaNSUlI0a9YsNTQ0RLu7AADAQlEPMBs2bFBubq7eeOMNFRYWqr6+XqNGjdKJEyecNrfddpv+/ve/67nnntOGDRv0wQcf6LrrrnPqT506pbFjx6qurk6bNm3SsmXLtHTpUs2bNy/a3QUAABZyGWNMS+7gyJEjSklJ0YYNGzRixAiFQiH16NFDTz/9tP793/9dkrR3714NGDBAxcXFuuyyy/Tiiy/qBz/4gT744AP5fD5J0uLFizV79mwdOXJE8fHxn7vfcDgsr9erqzVOsa64lhwiAACIkgZTr/VarVAoJI/Hc8Z2LX4NTCgUkiQlJydLkkpLS1VfX6+srCynTf/+/dWrVy8VFxdLkoqLizV48GAnvEhSdna2wuGwdu/e3ex+amtrFQ6HIxYAANA+tWiAaWxs1PTp03XFFVdo0KBBkqRgMKj4+HglJSVFtPX5fAoGg06bT4eXpvqmuuYUFBTI6/U6S1paWpRHAwAA2ooWDTC5ubnatWuXVqxY0ZK7kSTl5+crFAo5y6FDh1p8nwAAoHXEttSG8/LytGbNGm3cuFE9e/Z0yv1+v+rq6lRdXR1xFqaiokJ+v99ps3nz5ojtNd2l1NTms9xut9xud5RHAQAA2qKon4ExxigvL08rV67UunXrlJ6eHlE/dOhQxcXFqaioyCkrKytTeXm5Mj
MzJUmZmZnauXOnKisrnTaFhYXyeDwaOHBgtLsMAAAsE/UzMLm5uXr66ae1evVqdenSxblmxev1KjExUV6vV5MnT9aMGTOUnJwsj8ejW265RZmZmbrsssskSaNGjdLAgQN14403asGCBQoGg/rd736n3NxczrIAAIDo30btcrmaLV+yZIluvvlmSZ88yG7mzJl65plnVFtbq+zsbD3yyCMRXw8dPHhQ06ZN0/r169WpUydNmjRJ8+fPV2zsF8tc3EYNAIB9vuht1C3+HJjWQoABAMA+beY5MAAAANFGgAEAANYhwAAAAOsQYAAAgHUIMAAAwDoEGAAAYB0CDAAAsA4BBgAAWIcAAwAArEOAAQAA1iHAAAAA6xBgAACAdQgwAADAOgQYAABgHQIMAACwDgEGAABYhwADAACsQ4ABAADWIcAAAADrEGAAAIB1CDAAAMA6BBgAAGAdAgwAALAOAQYAAFiHAAMAAKxDgAEAANYhwAAAAOsQYAAAgHUIMAAAwDoEGAAAYB0CDAAAsA4BBgAAWIcAAwAArEOAAQAA1iHAAAAA6xBgAACAdQgwAADAOgQYAABgHQIMAACwDgEGAABYp8UDzPz58+VyuTR9+nSnrKamRrm5uerWrZs6d+6sCRMmqKKiImK98vJyjR07Vh07dlRKSopmzZqlhoaGlu4uAACwQIsGmC1btujRRx/VhRdeGFF+22236e9//7uee+45bdiwQR988IGuu+46p/7UqVMaO3as6urqtGnTJi1btkxLly7VvHnzWrK7AADAEi0WYI4fP66cnBw9/vjj6tq1q1MeCoX0xBNP6P7779f3v/99DR06VEuWLNGmTZv0xhtvSJJeeuklvf3223rqqad00UUXacyYMfrDH/6ghx9+WHV1dS3VZQAAYIkWCzC5ubkaO3assrKyIspLS0tVX18fUd6/f3/16tVLxcXFkqTi4mINHjxYPp/PaZOdna1wOKzdu3e3VJcBAIAlYltioytWrNCbb76pLVu2nFYXDAYVHx+vpKSkiHKfz6dgMOi0+XR4aapvqmtObW2tamtrndfhcPjrDAEAALRhUT8Dc+jQId16661avny5EhISor35MyooKJDX63WWtLS0b2zfAADgmxX1AFNaWqrKykpdcsklio2NVWxsrDZs2KCFCxcqNjZWPp9PdXV1qq6ujlivoqJCfr9fkuT3+0+7K6npdVObz8rPz1coFHKWQ4cORXtoAACgjYh6gBk5cqR27typ7du3O8uwYcOUk5Pj/B0XF6eioiJnnbKyMpWXlyszM1OSlJmZqZ07d6qystJpU1hYKI/Ho4EDBza7X7fbLY/HE7EAAID2KerXwHTp0kWDBg2KKOvUqZO6devmlE+ePFkzZsxQcnKyPB6PbrnlFmVmZuqyyy6TJI0aNUoDBw7UjTfeqAULFigYDOp3v/udcnNz5Xa7o91lAABgmRa5iPfzPPDAA4qJidGECRNUW1ur7OxsPfLII059hw4dtGbNGk2bNk2ZmZnq1KmTJk2apLvvvrs1ugsAANoYlzHGtHYnWkI4HJbX69XVGqdYV1xrdwcAAHwBDaZe67VaoVDorJeD8FtIAADAOgQYAABgHQIMAACwDgEGAABYhwADAACsQ4ABAADWIcAAAADrEGAAAIB1CDAAAMA6BBgAAGAdAgwAALAOAQYAAFiHAAMAAKxDgAEAANYhwAAAAOsQYAAAgHUIMAAAwDoEGAAAYB0CDAAAsA4BBgAAWIcAAwAArEOAAQAA1iHAAAAA6xBgAACAdQgwAADAOgQYAABgHQIMAACwDgEGAABYhwADAACsQ4ABAADWIcAAAADrEGAAAIB1CDAAAMA6BBgAAGAdAgwAALAOAQYAAFiHAAMAAKxDgAEAANYhwAAAAOsQYAAAgHUIMAAAwDotEmD+9a9/6T/+4z/UrVs3JSYmavDgwdq6datTb4zRvHnzlJqaqsTERGVlZWnfvn0R26iqqlJOTo48Ho+SkpI0efJkHT9+vCW6CwAALBP1APPRRx/piiuuUFxcnF588UW9/fbb+vOf/6yuXbs6bRYsWKCFCxdq8eLFKikpUadOnZSdna2amhqnTU5Ojnbv3q3CwkKtWbNGGzdu1NSpU6PdXQAAYCGXMcZEc4Nz5szR66+/rldffbXZemOMAoGAZs6cqdtvv12SFAqF5PP5tHTpUk2cOFF79uzRwIEDtWXLFg0bNkyStHbtWl1zzTV6//33FQgEPrcf4XBYXq9XV2ucYl1x0RsgAABoMQ2mXuu1WqFQSB6P54zton4G5m9/+5uGDRumH/3oR0pJSdHFF1+sxx9/3Kk/cOCAgsGgsrKynDKv16uMjAwVFxdLkoqLi5WUlOSEF0nKyspSTEyMSkpKmt1vbW2twuFwxAIAANqnqAeY//7v/9aiRYvUt29f/fOf/9S0adP061//WsuWLZMkBYNBSZLP54tYz+fzOXXBYFApKSkR9bGxsUpOTnbafFZBQYG8Xq+zpKWlRXtoAACgjYh6gGlsbNQll1yie++9VxdffLGmTp2qKVOmaPHixdHeVYT8/HyFQiFnOXToUIvuDwAAtJ6oB5jU1FQNHDgwomzAgAEqLy+XJPn9fklSRUVFRJuKigqnzu/3q7KyMqK+oaFBVVVVTpvPcrvd8ng8EQsAAGifoh5grrjiCpWVlUWUvfPOO+rdu7ckKT09XX6/X0VFRU59OBxWSUmJMjMzJUmZmZmqrq5WaWmp02bdunVqbGxURkZGtLsMAAAsExvtDd522226/PLLde+99+rHP/6xNm/erMcee0yPPfaYJMnlcmn69Om655571LdvX6Wnp2vu3LkKBAIaP368pE/O2IwePdr56qm+vl55eXmaOHHiF7oDCQAAtG9RDzDDhw/XypUrlZ+fr7vvvlvp6en6y1/+opycHKfNHXfcoRMnTmjq1Kmqrq7WlVdeqbVr1yohIcFps3z5cuXl5WnkyJGKiYnRhAkTtHDhwmh3FwAAWCjqz4FpK3gODAAA9mm158AAAAC0NAIMAACwDgEGAABYhwADAACsQ4ABAADWIcAAAADrEGAAAIB1CDAAAMA6BBgAAGAdAgwAALAOAQYAAFiHAAMAAKxDgAEAANYhwAAAAOsQYAAAgHUIMAAAwDoEGAAAYB0CDAAAsA4BBgAAWIcAAwAArEOAAQAA1iHAAAAA6xBgAACAdQgwAADAOgQYAABgHQIMAACwDgEGAABYhwADAACsQ4ABAADWIcAAAADrEGAAAIB1CDAAAMA6BBgAAGAdAgwAALAOAQYAAFiHAAMAAKxDgAEAANYhwAAAAOsQYAAAgHUIMAAAwDoEGAAAYJ2oB5hTp05p7ty5Sk9PV2Jios4//3z94Q9/kDHGaWOM0bx585SamqrExERlZWVp3759EdupqqpSTk6OPB6PkpKSNHnyZB0/fjza3QUAABaKeoD505/+pEWLFumhhx7Snj179Kc//UkLFizQgw8+6LRZsGCBFi5cqMWLF6ukpESdOnVSdna2ampqnDY5OTnavXu3CgsLtWbNGm3cuFFTp
06NdncBAICFXObTp0ai4Ac/+IF8Pp+eeOIJp2zChAlKTEzUU089JWOMAoGAZs6cqdtvv12SFAqF5PP5tHTpUk2cOFF79uzRwIEDtWXLFg0bNkyStHbtWl1zzTV6//33FQgEPrcf4XBYXq9XV2ucYl1x0RwiAABoIQ2mXuu1WqFQSB6P54zton4G5vLLL1dRUZHeeecdSdKOHTv02muvacyYMZKkAwcOKBgMKisry1nH6/UqIyNDxcXFkqTi4mIlJSU54UWSsrKyFBMTo5KSkmb3W1tbq3A4HLEAAID2KTbaG5wzZ47C4bD69++vDh066NSpU/rjH/+onJwcSVIwGJQk+Xy+iPV8Pp9TFwwGlZKSEtnR2FglJyc7bT6roKBAd911V7SHAwAA2qCon4H561//quXLl+vpp5/Wm2++qWXLluk///M/tWzZsmjvKkJ+fr5CoZCzHDp0qEX3BwAAWk/Uz8DMmjVLc+bM0cSJEyVJgwcP1sGDB1VQUKBJkybJ7/dLkioqKpSamuqsV1FRoYsuukiS5Pf7VVlZGbHdhoYGVVVVOet/ltvtltvtjvZwAABAGxT1MzAnT55UTEzkZjt06KDGxkZJUnp6uvx+v4qKipz6cDiskpISZWZmSpIyMzNVXV2t0tJSp826devU2NiojIyMaHcZAABYJupnYK699lr98Y9/VK9evfSd73xH27Zt0/3336+f/exnkiSXy6Xp06frnnvuUd++fZWenq65c+cqEAho/PjxkqQBAwZo9OjRmjJlihYvXqz6+nrl5eVp4sSJX+gOJAAA0L5FPcA8+OCDmjt3rn71q1+psrJSgUBAv/jFLzRv3jynzR133KETJ05o6tSpqq6u1pVXXqm1a9cqISHBabN8+XLl5eVp5MiRiomJ0YQJE7Rw4cJodxcAAFgo6s+BaSt4DgwAAPZptefAAAAAtDQCDAAAsA4BBgAAWIcAAwAArEOAAQAA1iHAAAAA6xBgAACAdQgwAADAOgQYAABgHQIMAACwDgEGAABYhwADAACsQ4ABAADWIcAAAADrEGAAAIB1CDAAAMA6BBgAAGAdAgwAALAOAQYAAFiHAAMAAKxDgAEAANYhwAAAAOsQYAAAgHUIMAAAwDoEGAAAYB0CDAAAsA4BBgAAWIcAAwAArEOAAQAA1iHAAAAA6xBgAACAdQgwAADAOgQYAABgHQIMAACwDgEGAABYhwADAACsQ4ABAADWIcAAAADrEGAAAIB1CDAAAMA6XzrAbNy4Uddee60CgYBcLpdWrVoVUW+M0bx585SamqrExERlZWVp3759EW2qqqqUk5Mjj8ejpKQkTZ48WcePH49o89Zbb+mqq65SQkKC0tLStGDBgi8/OgAA0C596QBz4sQJDRkyRA8//HCz9QsWLNDChQu1ePFilZSUqFOnTsrOzlZNTY3TJicnR7t371ZhYaHWrFmjjRs3aurUqU59OBzWqFGj1Lt3b5WWluq+++7T73//ez322GNfYYgAAKC9cRljzFde2eXSypUrNX78eEmfnH0JBAKaOXOmbr/9dklSKBSSz+fT0qVLNXHiRO3Zs0cDBw7Uli1bNGzYMEnS2rVrdc011+j9999XIBDQokWL9Nvf/lbBYFDx8fGSpDlz5mjVqlXau3fvF+pbOByW1+vV1RqnWFfcVx0iAAD4BjWYeq3XaoVCIXk8njO2i+o1MAcOHFAwGFRWVpZT5vV6lZGRoeLiYklScXGxkpKSnPAiSVlZWYqJiVFJSYnTZsSIEU54kaTs7GyVlZXpo48+imaXAQCAhWKjubFgMChJ8vl8EeU+n8+pCwaDSklJiexEbKySk5Mj2qSnp5+2jaa6rl27nrbv2tpa1dbWOq/D4fDXHA0AAGir2s1dSAUFBfJ6vc6SlpbW2l0CAAAtJKoBxu/3S5IqKioiyisqKpw6v9+vysrKiPqGhgZVVVVFtGluG5/ex2fl5+crFAo5y6FDh77+gAAAQJsU1QCTnp4uv9+voqIipywcDqukpESZmZmSpMzMTFVXV6u0tNRps27dOjU2NiojI8Nps3HjRtXX1zttCgsL1a9fv2a/PpIkt9stj8cTsQAAgPbpSweY48ePa/v27dq+fbukTy7c3b59u8rLy+VyuTR9+nTdc889+tvf/qadO3fqpptuUiAQcO5UGjBggEaPHq0pU6Zo8+bNev3115WXl6eJEycqEAhIkn7yk58oPj5ekydP1u7du/Xss8/qv/7rvzRjxoyoDRwAANjrS1/Eu3XrVn3ve99zXjeFikmTJmnp0qW64447dOLECU2dOlXV1dW68sortXbtWiUkJDjrLF++XHl5eRo5cqRiYmI0YcIELVy40Kn3er166aWXlJubq6FDh6p79+6aN29exLNiAADAuetrPQemLeM5MAAA2KdVngMDAADwTSDAAAAA6xBgAACAdQgwAADAOgQYAABgHQIMAACwDgEGAABYhwADAACsQ4ABAADWIcAAAADrfOnfQgIAAG2RUcfOjermr1f6gBrFxTfKGJcOvuNWuCpWHx2JVUO9S5KrtTsaFQQYAACsZhToU6exNx3V8O+F5UurkzuhUa4YSUaqrY1RzYkYHXrXrS3rPNqyzqPyfW7V19n9JQwBBgAAaxn17lej3yw+qD7frjn95IpLcic0yp3QKG+3Bg269IQm/rpCu0o66/nHemjnG51UX2fnWRkCDAAAlurmr1f+I+Xq06/mi63gkhI7NWr498MacsVx7Xi9s/7foz204/XOamy0K8QQYAAAsNT/+dFH6tP/46+0brz7kyAzKOO41j7dTSseTFH1h7Gy5WyM3V+AAQBwjnLFGF0w+GO5vmbeSOzUqPE/P6I7n3xP/S4+KclEpX8tjQADAICF4t2fXP8SDS6XNHDYCc19/D0NvuyEbAgxBBgAACx0qkEKHY3ulSA9AvWa/dBBK0IMAQYAAAs11MfovbKEqG+3R6BecywIMQQYAAAstfWVLjrVEP2LbrsH6jXjz4fk61kf9W1HCwEGAABL7djUWeX73C2y7UCfWt04K6jYuMYW2f7XRYABAMBSHx+P0QtPdW+RszBySd+9tloXXXlcbfGrJAIMAADWcumlv3bV21s7tUjGiE9o1PV5lYqLJ8AAAIAoqjkRo4d+8y1VVca1yPb7X3xSfS/8ag/La0kEGAAArObSe2UJ+susnjr2UfQfsB/vbtQlI46prX2NRIABAMB2xqXNRR6teCjlf36cMYpc0sDhJxTTIbqb/boIMAAAtAPGuLTy8R5aODvtf37TKHrcCW3vTiQCDAAA7cSpBpdeerar7ru1lw69mxC1b33CH8XKtK1vkAgwAAC0Ly5tfaWL5t6Yrlf/kaSGr/mVkjHSzjc6ybSxkzAEGAAA2h2XDh90a8EtvbRwTk+9tzdBjae+WpAJlrv16pokSS3wrJmvIfqXKwMAgDahrjZG/1yRrE1rvRpxbbWuvfmo0s6vUWyc+fw8YqTKD+J1361pOvJBy9yi/XUQYAAAaNdcOlYdq3/8325av6qrLrjwpC7PDmvo1cfUPbVOCR0b
I7JMY6NLVUditW1jFz3/WA/999sJamtnXyQCDAAA5wiXThzroB2vd9GOTZ3VqUujuvaoV59+NRG3SNfVuPTOjo766MNYmca2F1yaEGAAADjXGJdOhDvoRLiD3t+f0Nq9+Uq4iBcAAFiHAAMAAKxDgAEAANYhwAAAAOsQYAAAgHUIMAAAwDoEGAAAYB0CDAAAsA4BBgAAWIcAAwAArNNuf0rAGCNJalC9ZFq5MwAA4AtpUL2k//13/EzabYA5evSoJOk1vdDKPQEAAF/WsWPH5PV6z1jfbgNMcnKyJKm8vPysB6A9C4fDSktL06FDh+TxeFq7O9+4c338EsfgXB+/xDE418cv2XcMjDE6duyYAoHAWdu12wATE/PJ5T1er9eKN6wleTyec/oYnOvjlzgG5/r4JY7BuT5+ya5j8EVOPHARLwAAsA4BBgAAWKfdBhi3260777xTbre7tbvSas71Y3Cuj1/iGJzr45c4Buf6+KX2ewxc5vPuUwIAAGhj2u0ZGAAA0H4RYAAAgHUIMAAAwDoEGAAAYJ12GWAefvhh9enTRwkJCcrIyNDmzZtbu0tRUVBQoOHDh6tLly5KSUnR+PHjVVZWFtHm6quvlsvlilh++ctfRrQpLy/X2LFj1bFjR6WkpGjWrFlqaGj4Jofylf3+978/bXz9+/d36mtqapSbm6tu3bqpc+fOmjBhgioqKiK2YfP4JalPnz6nHQOXy6Xc3FxJ7W8ObNy4Uddee60CgYBcLpdWrVoVUW+M0bx585SamqrExERlZWVp3759EW2qqqqUk5Mjj8ejpKQkTZ48WcePH49o89Zbb+mqq65SQkKC0tLStGDBgpYe2hd2tmNQX1+v2bNna/DgwerUqZMCgYBuuukmffDBBxHbaG7ezJ8/P6JNWz0GnzcHbr755tPGNnr06Ig27XkOSGr2M8Hlcum+++5z2tg8B5pl2pkVK1aY+Ph48+STT5rdu3ebKVOmmKSkJFNRUdHaXfvasrOzzZIlS8yuXbvM9u3bzTXXXGN69epljh8/7rT57ne/a6ZMmWIOHz7sLKFQyKlvaGgwgwYNMllZWWbbtm3mhRdeMN27dzf5+fmtMaQv7c477zTf+c53IsZ35MgRp/6Xv/ylSUtLM0VFRWbr1q3msssuM5dffrlTb/v4jTGmsrIyYvyFhYVGknnllVeMMe1vDrzwwgvmt7/9rXn++eeNJLNy5cqI+vnz5xuv12tWrVplduzYYX74wx+a9PR08/HHHzttRo8ebYYMGWLeeOMN8+qrr5oLLrjA3HDDDU59KBQyPp/P5OTkmF27dplnnnnGJCYmmkcfffSbGuZZne0YVFdXm6ysLPPss8+avXv3muLiYnPppZeaoUOHRmyjd+/e5u67746YF5/+7GjLx+Dz5sCkSZPM6NGjI8ZWVVUV0aY9zwFjTMTYDx8+bJ588knjcrnM/v37nTY2z4HmtLsAc+mll5rc3Fzn9alTp0wgEDAFBQWt2KuWUVlZaSSZDRs2OGXf/e53za233nrGdV544QUTExNjgsGgU7Zo0SLj8XhMbW1tS3Y3Ku68804zZMiQZuuqq6tNXFycee6555yyPXv2GEmmuLjYGGP/+Jtz6623mvPPP980NjYaY9r3HPjsB3djY6Px+/3mvvvuc8qqq6uN2+02zzzzjDHGmLfffttIMlu2bHHavPjii8blcpl//etfxhhjHnnkEdO1a9eI8c+ePdv069evhUf05TX3j9dnbd682UgyBw8edMp69+5tHnjggTOuY8sxOFOAGTdu3BnXORfnwLhx48z3v//9iLL2MgeatKuvkOrq6lRaWqqsrCynLCYmRllZWSouLm7FnrWMUCgk6X9/uLLJ8uXL1b17dw0aNEj5+fk6efKkU1dcXKzBgwfL5/M5ZdnZ2QqHw9q9e/c30/Gvad++fQoEAjrvvPOUk5Oj8vJySVJpaanq6+sj3v/+/furV69ezvvfHsb/aXV1dXrqqaf0s5/9TC6Xyylv73OgyYEDBxQMBiPec6/Xq4yMjIj3PCkpScOGDXPaZGVlKSYmRiUlJU6bESNGKD4+3mmTnZ2tsrIyffTRR9/QaKInFArJ5XIpKSkponz+/Pnq1q2bLr74Yt13330RXxvafgzWr1+vlJQU9evXT9OmTdPRo0edunNtDlRUVOgf//iHJk+efFpde5oD7erHHD/88EOdOnUq4oNZknw+n/bu3dtKvWoZjY2Nmj59uq644goNGjTIKf/JT36i3r17KxAI6K233tLs2bNVVlam559/XpIUDAabPT5NdW1dRkaGli5dqn79+unw4cO66667dNVVV2nXrl0KBoOKj48/7UPb5/M5Y7N9/J+1atUqVVdX6+abb3bK2vsc+LSm/jY3nk+/5ykpKRH1sbGxSk5OjmiTnp5+2jaa6rp27doi/W8JNTU1mj17tm644YaIH+779a9/rUsuuUTJycnatGmT8vPzdfjwYd1///2S7D4Go0eP1nXXXaf09HTt379fv/nNbzRmzBgVFxerQ4cO59wcWLZsmbp06aLrrrsuory9zYF2FWDOJbm5udq1a5dee+21iPKpU6c6fw8ePFipqakaOXKk9u/fr/PPP/+b7mbUjRkzxvn7wgsvVEZGhnr37q2//vWvSkxMbMWetY4nnnhCY8aMifjZ+fY+B3Bm9fX1+vGPfyxjjBYtWhRRN2PGDOfvCy+8UPHx8frFL36hgoIC6x8xP3HiROfvwYMH68ILL9T555+v9evXa+TIka3Ys9bx5JNPKicnRwkJCRHl7W0OtKuvkLp3764OHTqcdtdJRUWF/H5/K/Uq+vLy8rRmzRq98sor6tmz51nbZmRkSJLeffddSZLf72/2+DTV2SYpKUnf/va39e6778rv96uurk7V1dURbT79/ren8R88eFAvv/yyfv7zn5+1XXueA039Pdt/836/X5WVlRH1DQ0Nqqqqalfzoim8HDx4UIWFhRFnX5qTkZGhhoYGvffee5LaxzFoct5556l79+4Rc/5cmAOS9Oqrr6qsrOxzPxck++dAuwow8fHxGjp0qIqKipyyxsZGFRUVKTMzsxV7Fh3GGOXl5WnlypVat27daaf6mrN9+3ZJUmpqqiQpMzNTO3fujPiPuenDbuDAgS3S75Z0/Phx7d+/X6mpqRo6dKji4uIi3v+ysjKVl5c77397Gv+SJUuUkpKisWPHnrVde54D6enp8vv9Ee95OBxWSUlJxHteXV2t0tJSp826devU2NjohLvMzExt3LhR9fX1TpvCwkL169evzZ02b05TeNm3b59efvlldevW7XPX2b59u2JiYpyvVmw/Bp/2/vvv6+jRoxFzvr3PgSZPPPGEhg4dqiFDhnxuW+vnQGtfRRxtK1asMG632yxdutS8/fbbZurUqSYpKSnijgtbTZs2zXi9XrN+/fqI2+BOnjxpjDHm3XffNXfffbfZunWrOXDggFm9erU577zzzIgRI5xtNN1CO2rUKLN9+3azdu1a06NHjzZ7C+1nzZw506xfv94cOHDAvP766yYrK8t0797dVFZ
WGmM+uY26V69eZt26dWbr1q0mMzPTZGZmOuvbPv4mp06dMr169TKzZ8+OKG+Pc+DYsWNm27ZtZtu2bUaSuf/++822bducO2zmz59vkpKSzOrVq81bb71lxo0b1+xt1BdffLEpKSkxr732munbt2/ELbTV1dXG5/OZG2+80ezatcusWLHCdOzYsc3cPnq2Y1BXV2d++MMfmp49e5rt27dHfDY03U2yadMm88ADD5jt27eb/fv3m6eeesr06NHD3HTTTc4+2vIxONv4jx07Zm6//XZTXFxsDhw4YF5++WVzySWXmL59+5qamhpnG+15DjQJhUKmY8eOZtGiRaetb/scaE67CzDGGPPggw+aXr16mfj4eHPppZeaN954o7W7FBWSml2WLFlijDGmvLzcjBgxwiQnJxu3220uuOACM2vWrIhngBhjzHvvvWfGjBljEhMTTffu3c3MmTNNfX19K4zoy7v++utNamqqiY+PN9/61rfM9ddfb959912n/uOPPza/+tWvTNeuXU3Hjh3Nv/3bv5nDhw9HbMPm8Tf55z//aSSZsrKyiPL2OAdeeeWVZuf9pEmTjDGf3Eo9d+5c4/P5jNvtNiNHjjztuBw9etTccMMNpnPnzsbj8Zif/vSn5tixYxFtduzYYa688krjdrvNt771LTN//vxvaoif62zH4MCBA2f8bGh6NlBpaanJyMgwXq/XJCQkmAEDBph777034h94Y9ruMTjb+E+ePGlGjRplevToYeLi4kzv3r3NlClTTvuf1vY8B5o8+uijJjEx0VRXV5+2vu1zoDkuY4xp0VM8AAAAUdauroEBAADnBgIMAACwDgEGAABYhwADAACsQ4ABAADWIcAAAADrEGAAAIB1CDAAAMA6BBgAAGAdAgwAALAOAQYAAFiHAAMAAKzz/wGDrl4sxpaV7wAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "image = Image(K_1, Transformation(np.eye(3), np.zeros((3, 1))), distortion_coefs_1)\n", + "projected_ball_mask = image.project_ball_to_image(np.array([2.3, 1.5, 2]).reshape((3, 1)), 0.1)\n", + "plt.imshow(projected_ball_mask)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Visually testing `Image` class" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "import plotly.graph_objects as go\n", + "import numpy as np\n", + "\n", + "\n", + "def display_ball(position, radius, color):\n", + " u, v = np.mgrid[0:2*np.pi:20j, 0:np.pi:10j]\n", + " x = position[0] + radius * np.cos(u) * np.sin(v)\n", + " y = position[1] + radius * np.sin(u) * np.sin(v)\n", + " z = position[2] + radius * np.cos(v)\n", + " \n", + " return go.Surface(x=x, y=y, z=z, colorscale=[[0, color], [1, color]], showscale=False)\n", + "\n", + "def display_camera(position, 3d_rotation_matrix, color=\"red\", length=0.1):\n", + " pass\n", + "\n", + "\n", + "def display_table(length, width):\n", + " corners = np.array([\n", + " [-length / 2, -width / 2],\n", + " [length / 2, -width / 2],\n", + " [length / 2, width / 2],\n", + " [-length / 2, width / 2],\n", + " [-length / 2, -width / 2]])\n", + "\n", + "\n", + " return go.Surface(\n", + " x=[[0, length, length, 0]],\n", + " y=[[0, 0, width, width]],\n", + " z=[[0, 0, 0, 0]],\n", + " colorscale=[[0, 'brown'], [1, 'brown']],\n", + " showscale=False\n", + " )\n", + "\n", + "\n", + "def get_bbox(mask: np.ndarray) -> List[float]:\n", + " if not np.any(mask):\n", + " return [0., 0., 0., 0.]\n", + " # x_min, y_min, x_max, y_max\n", + " horizontal_indicies = np.where(np.any(mask, axis=0))[0]\n", + " vertical_indicies = np.where(np.any(mask, axis=1))[0]\n", + " x1, x2 = horizontal_indicies[[0, -1]]\n", + " y1, y2 = vertical_indicies[[0, -1]]\n", + " bbox = list(map(float, [x1, y1, x2, y2]))\n", + " return bbox\n", + "\n", + "\n", + "def get_mask_center(mask):\n", + " bbox = get_bbox(mask)\n", + " centroid_x = (bbox[0] + bbox[2]) / 2\n", + " centroid_y = (bbox[1] + bbox[3]) / 2\n", + " return np.array([centroid_x, centroid_y])\n", + "\n", + "\n", + "def get_mask_centroid(mask):\n", + " return np.array(center_of_mass(mask))\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "import dash\n", + "from dash import dcc, html\n", + "from dash.dependencies import Input, Output\n", + "import plotly.graph_objects as go\n", + "import plotly.express as px\n", + "\n", + "\n", + "class StereoScene:\n", + " def __init__(self, left_camera: Image, right_camera: Image, table_middle_normal):\n", + " self.left_camera = left_camera\n", + " self.right_camera = right_camera\n", + " self.table_middle_normal = table_middle_normal\n", + " self.last_n_clicks = 0\n", + "\n", + " def project_ball_to_images(self, center, radius):\n", + " left_image = self.left_camera.project_ball_to_image(center, radius)\n", + " right_image = self.right_camera.project_ball_to_image(center, radius)\n", + " return left_image, right_image\n", + " \n", + " def launch_3D_scene(self):\n", + " app = dash.Dash(__name__)\n", + "\n", + " app.layout = html.Div([\n", + " dcc.Graph(id='3d-scene'),\n", + " dcc.Graph(id='left-image'),\n", + " dcc.Graph(id='right-image'),\n", + " html.Label('Ball X Position'),\n", + " dcc.Slider(id='ball-x', min=-1.5, max=1.5, step=0.1, value=0.13),\n", + " html.Label('Ball Y 
Position'),\n", + " dcc.Slider(id='ball-y', min=-1.5, max=1.5, step=0.1, value=0),\n", + " html.Label('Ball Z Position'),\n", + " dcc.Slider(id='ball-z', min=0, max=2, step=0.1, value=1),\n", + " html.Label('Master camera pitch'),\n", + " dcc.Input(id='pitch_1', type='range'),\n", + " html.Label('Master camera yaw'),\n", + " dcc.Input(id='yaw_1', type='range'),\n", + " html.Label('Master camera roll'),\n", + " dcc.Input(id='roll_1', type='range'),\n", + " html.Label('Slave camera pitch'),\n", + " dcc.Input(id='pitch_2', type='range'),\n", + " html.Label('Slave camera pitch'),\n", + " dcc.Input(id='yaw_2', type='range'),\n", + " html.Label('Slave camera pitch'),\n", + " dcc.Input(id='roll_2', type='range'),\n", + " html.Button('Evaluate camera placement', id='evaluate-placement'),\n", + " ])\n", + "\n", + " @app.callback(\n", + " [Output('3d-scene', 'figure'),\n", + " Output('left-image', 'figure'),\n", + " Output('right-image', 'figure')],\n", + " [Input('ball-x', 'value'),\n", + " Input('ball-y', 'value'),\n", + " Input('ball-z', 'value'),\n", + " Input('evaluate-placement', 'n_clicks')]\n", + " )\n", + " def update_scene(ball_x, ball_y, ball_z, n_clicks):\n", + " ball_center = np.array([ball_x, ball_y, ball_z]).reshape((3, 1))\n", + " left_image, right_image = self.project_ball_to_images(ball_center, 0.04)\n", + " \n", + " fig_3d = go.Figure(layout=go.Layout(\n", + " scene=dict(\n", + " aspectmode='data')))\n", + " # fig_3d.add_trace(display_plane((0, 0, 1), (TABLE_LENGTH, TABLE_WIDTH), (0, 0, 0)))\n", + " fig_3d.add_trace(display_ball((ball_x, ball_y, ball_z), 0.04, \"green\"))\n", + "\n", + " fig_3d.add_trace(go.Cone(x=[0], y=[0], z=[0], u=[0], v=[0], w=[-1], colorscale=[[0, \"red\"], [1, \"red\"]]))\n", + "\n", + " second_camera_direction = self.right_camera.camera_transformation.R @ np.array([0, 0, 1]).reshape((3, 1))\n", + " second_cam_cone_position_direction = dict(zip([\"x\", \"y\", \"z\", \"u\", \"v\", \"w\"], \n", + " [*self.right_camera.camera_transformation.t.tolist()] + \n", + " [*(-second_camera_direction).tolist()]))\n", + " fig_3d.add_trace(go.Cone(**second_cam_cone_position_direction,\n", + " colorscale=[[0, \"blue\"], [1, \"blue\"]]))\n", + " \n", + " display_arrow([0, 0, 0], [1, 0, 0], \"red\", figure=fig_3d, length=10)\n", + "\n", + " fig_left = go.Figure(data=go.Heatmap(z=left_image),\n", + " layout=go.Layout(\n", + " scene=dict(\n", + " aspectmode='data')))\n", + " fig_right = go.Figure(data=go.Heatmap(z=right_image),\n", + " layout=go.Layout(\n", + " scene=dict(\n", + " aspectmode='data')))\n", + "\n", + " center_1 = get_mask_center(left_image)\n", + " center_2 = get_mask_center(right_image)\n", + " points_1 = np.array([[center_1[0]], [center_1[1]]])\n", + " points_2 = np.array([[center_2[0]], [center_2[1]]])\n", + " print(self.triangulate_position(points_1, points_2,\n", + " self.left_camera.camera_transformation, self.right_camera.camera_transformation))\n", + " \n", + " if self.last_n_clicks != n_clicks:\n", + " self.last_n_clicks = n_clicks\n", + " print(\"Evaluating camera placement\")\n", + " \n", + " return fig_3d, fig_left, fig_right\n", + "\n", + " return app\n", + "\n", + " def triangulate_position(self, \n", + " points_by_view_1, points_by_view_2, world2cam, cam2cam\n", + "):\n", + " print(\"triangulating\")\n", + " # print(points_by_view)\n", + " world2cam_Rt = np.column_stack((world2cam.R, world2cam.t))\n", + " world2second_cam = cam2cam * world2cam\n", + " world2second_cam_Rt = np.column_stack((world2second_cam.R, world2second_cam.t))\n", + " proj_1 = 
self.left_camera.camera_matrix @ world2cam_Rt\n", + " proj_2 = self.right_camera.camera_matrix @ world2second_cam_Rt\n", + "\n", + " res = cv2.triangulatePoints(\n", + " proj_1, proj_2, points_by_view_1, points_by_view_2\n", + " )\n", + " res /= res[3, :] # normalizing\n", + "\n", + " # TODO preserve 4D points?\n", + " return res[:3, :]" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + " \n", + " " + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "triangulating\n", + "[[1.30151015e-01]\n", + " [4.40582071e-05]\n", + " [1.00013910e+00]]\n", + "Evaluating camera placement\n", + "triangulating\n", + "[[1.30151015e-01]\n", + " [4.40582071e-05]\n", + " [1.00013910e+00]]\n" + ] + } + ], + "source": [ + "camera_1_transform = Transformation(np.eye(3), np.array([0, 0, 0]))\n", + "image_1 = Image(K_1, camera_1_transform, distortion_coefs_1)\n", + "camera_2_transform = Transformation(cv2.Rodrigues(np.array(stereo_cam_rotation))[0], \n", + " np.array(stereo_cam_translation))\n", + "image_2 = Image(K_2, camera_2_transform, distortion_coefs_2)\n", + "\n", + "scene = StereoScene(image_1, image_2, None)\n", + "app = scene.launch_3D_scene()\n", + "app.run_server(host=\"localhost\", port=\"8060\",\n", + " debug=True, use_reloader=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[Ссылка на веб интерфейс для визуального контроля](http://localhost:8060)" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": {}, + "outputs": [], + "source": [ + "def evaluate_camera_position(\n", + " world2master: Transformation,\n", + " master2second: Transformation,\n", + " center_extractor,\n", + " camera_matrix_1,\n", + " camera_matrix_2,\n", + "):\n", + " NUMBER_OF_SPHERES = 6\n", + "\n", + " image_1 = Image(K_1, world2master, distortion_coefs_1)\n", + " image_2 = Image(K_2, master2second * world2master, distortion_coefs_2)\n", + " stereo_scene = StereoScene(image_1, image_2, None)\n", + "\n", + " sphere_centers = []\n", + " for x in np.linspace(-TABLE_LENGTH / 2, TABLE_LENGTH / 2, NUMBER_OF_SPHERES):\n", + " for y in np.linspace(-TABLE_WIDTH / 2, TABLE_WIDTH / 2, NUMBER_OF_SPHERES):\n", + " for z in np.linspace(0, 1, NUMBER_OF_SPHERES):\n", + " sphere_centers.append((x, y, z))\n", + "\n", + " sphere_centers = np.array(sphere_centers).T\n", + " points_1 = []\n", + " points_2 = []\n", + " valid_sphere_centers = []\n", + " world2second = master2second * world2master\n", + "\n", + " for i in range(sphere_centers.shape[1]): \n", + " mask_1, mask_2 = stereo_scene.project_ball_to_images(\n", + " sphere_centers[:, i : (i + 1)], 0.02\n", + " )\n", + "\n", + " if np.sum(mask_1) == 0 or np.sum(mask_2) == 0:\n", + " continue\n", + "\n", + " points_1.append(center_extractor(mask_1))\n", + " points_2.append(center_extractor(mask_2))\n", + " valid_sphere_centers.append(sphere_centers[:, i])\n", + "\n", + " points_1 = np.array(points_1).T\n", + " points_2 = np.array(points_2).T\n", + " sphere_centers = np.array(valid_sphere_centers).T\n", + " print(sphere_centers.shape)\n", + "\n", + " triangulated_points = stereo_scene.triangulate_position(points_1, points_2, world2master, master2second)\n", + "\n", + "\n", + " # Calculate the Euclidean distance between the true and triangulated points\n", + " distances = np.linalg.norm(sphere_centers - triangulated_points, axis=0)\n", + " \n", + " # 
Calculate mean and standard deviation of the distances\n", + " mean_distance = np.mean(distances)\n", + " std_distance = np.std(distances)\n", + " \n", + " print(f\"Mean distance error: {mean_distance}\")\n", + " print(f\"Standard deviation of distance error: {std_distance}\")\n", + " print(\"evalution complete\")\n", + " print()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(3, 54)\n", + "triangulating\n", + "Mean distance error: 0.005709064422212816\n", + "Standard deviation of distance error: 0.008145795684027528\n", + "evalution complete\n", + "\n" + ] + } + ], + "source": [ + "# main test: opposite camera placement\n", + "# world2master = Transformation(Rotation.from_euler(\"xyz\", [0, -45, -90], degrees=True).as_matrix(), np.array([-TABLE_LENGTH/4, TABLE_WIDTH, 0.5]))\n", + "# master2second = Transformation(Rotation.from_euler(\"xyz\", [], degrees=True).as_matrix(), np.array([0, 0, 0]))\n", + "\n", + "world2master = Transformation(\n", + " Rotation.from_euler(\"xyz\", [0, -45, -90], degrees=True).as_matrix(),\n", + " np.array([-TABLE_LENGTH / 4, TABLE_WIDTH, 0.5]),\n", + ")\n", + "master2second = Transformation(\n", + " cv2.Rodrigues(np.array(stereo_cam_rotation))[0], np.array(stereo_cam_translation))\n", + "evaluate_camera_position(world2master, master2second, get_mask_center, K_1, K_2)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "world2master = Transformation(\n", + " Rotation.from_euler(\"xyz\", [0, -45, -90], degrees=True).as_matrix(),\n", + " np.array([-TABLE_LENGTH / 4, TABLE_WIDTH, 0.5]),\n", + ")\n", + "master2second = Transformation(\n", + " cv2.Rodrigues(np.array(stereo_cam_rotation))[0], np.array(stereo_cam_translation))\n", + "evaluate_camera_position(world2master, master2second, get_mask_center, K_1, K_2)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +}