/*
 * COPYRIGHT AND PERMISSION NOTICE
 * Penn Software MSCKF_VIO
 * Copyright (C) 2017 The Trustees of the University of Pennsylvania
 * All rights reserved.
 */

// NOTE: the include targets below were reconstructed from the symbols
// actually used in this file.
#include <iostream>
#include <iomanip>
#include <cmath>
#include <iterator>
#include <algorithm>
#include <fstream>

#include <Eigen/SVD>
#include <Eigen/QR>
#include <Eigen/SparseCore>
#include <Eigen/SPQRSupport>
#include <boost/math/distributions/chi_squared.hpp>

#include <eigen_conversions/eigen_msg.h>
#include <tf_conversions/tf_eigen.h>
#include <sensor_msgs/image_encodings.h>
#include <cv_bridge/cv_bridge.h>
#include <rosbag/bag.h>
#include <rosbag/view.h>
#include <std_msgs/Int32.h>

#include <msckf_vio/msckf_vio.h>
#include <msckf_vio/math_utils.hpp>
#include <msckf_vio/utils.h>

using namespace std;
using namespace Eigen;

namespace msckf_vio{

// Static member variables in IMUState class.
StateIDType IMUState::next_id = 0;
double IMUState::gyro_noise = 0.001;
double IMUState::acc_noise = 0.01;
double IMUState::gyro_bias_noise = 0.001;
double IMUState::acc_bias_noise = 0.01;
Vector3d IMUState::gravity = Vector3d(0, 0, -GRAVITY_ACCELERATION);
Isometry3d IMUState::T_imu_body = Isometry3d::Identity();

// Static member variables in CAMState class.
Isometry3d CAMState::T_cam0_cam1 = Isometry3d::Identity();

// Static member variables in Feature class.
FeatureIDType Feature::next_id = 0;
double Feature::observation_noise = 0.01;
Feature::OptimizationConfig Feature::optimization_config;

map<int, double> MsckfVio::chi_squared_test_table;

MsckfVio::MsckfVio(ros::NodeHandle& pnh):
  is_gravity_set(false),
  is_first_img(true),
  image_sub(10),
  nan_flag(false),
  last_time_bound(0),
  nh(pnh) {
  return;
}

bool MsckfVio::loadParameters() {
  // Photometry flag
  nh.param("PHOTOMETRIC", PHOTOMETRIC, false);
  nh.param("PrintImages", PRINTIMAGES, false);
  nh.param("GroundTruth", GROUNDTRUTH, false);

  // Frame id
  nh.param<string>("fixed_frame_id", fixed_frame_id, "world");
  nh.param<string>("child_frame_id", child_frame_id, "robot");
  nh.param("publish_tf", publish_tf, true);
  nh.param("frame_rate", frame_rate, 40.0);
  nh.param("position_std_threshold", position_std_threshold, 8.0);

  nh.param("rotation_threshold", rotation_threshold, 0.2618);
  nh.param("translation_threshold", translation_threshold, 0.4);
  nh.param("tracking_rate_threshold", tracking_rate_threshold, 0.5);

  // Feature optimization parameters
  nh.param("feature/config/translation_threshold",
      Feature::optimization_config.translation_threshold, 0.2);

  // Noise related parameters
  nh.param("noise/gyro", IMUState::gyro_noise, 0.001);
  nh.param("noise/acc", IMUState::acc_noise, 0.01);
  nh.param("noise/gyro_bias", IMUState::gyro_bias_noise, 0.001);
  nh.param("noise/acc_bias", IMUState::acc_bias_noise, 0.01);
  nh.param("noise/feature", Feature::observation_noise, 0.01);

  // Use variance instead of standard deviation.
  IMUState::gyro_noise *= IMUState::gyro_noise;
  IMUState::acc_noise *= IMUState::acc_noise;
  IMUState::gyro_bias_noise *= IMUState::gyro_bias_noise;
  IMUState::acc_bias_noise *= IMUState::acc_bias_noise;
  Feature::observation_noise *= Feature::observation_noise;

  // Set the initial IMU state.
  // The initial orientation and position will be set to the origin
  // implicitly. But the initial velocity and bias can be
  // set by parameters.
  // TODO: is it reasonable to set the initial bias to 0?
  nh.param("initial_state/velocity/x",
      state_server.imu_state.velocity(0), 0.0);
  nh.param("initial_state/velocity/y",
      state_server.imu_state.velocity(1), 0.0);
  nh.param("initial_state/velocity/z",
      state_server.imu_state.velocity(2), 0.0);

  // The initial covariance of orientation and position can be
  // set to 0. But for velocity, bias and extrinsic parameters,
  // there should be nontrivial uncertainty.
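  // The 21-dimensional IMU error state is ordered as
  //   [ dtheta (3) | gyro bias (3) | velocity (3) | acc bias (3) |
  //     position (3) | extrinsic rotation (3) | extrinsic translation (3) ],
  // which is the indexing used when filling state_cov below and again in
  // processModel() and measurementUpdate().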
  double gyro_bias_cov, acc_bias_cov, velocity_cov;
  nh.param("initial_covariance/velocity", velocity_cov, 0.25);
  nh.param("initial_covariance/gyro_bias", gyro_bias_cov, 1e-4);
  nh.param("initial_covariance/acc_bias", acc_bias_cov, 1e-2);

  double extrinsic_rotation_cov, extrinsic_translation_cov;
  nh.param("initial_covariance/extrinsic_rotation_cov",
      extrinsic_rotation_cov, 3.0462e-4);
  nh.param("initial_covariance/extrinsic_translation_cov",
      extrinsic_translation_cov, 1e-4);

  // Get the initial irradiance covariance
  nh.param("initial_covariance/irradiance_frame_bias",
      irradiance_frame_bias, 0.1);

  // Get the photometric patch size
  nh.param("patch_size_n", N, 3);

  // Get camera information (used for back projection)
  nh.param("cam0/distortion_model", cam0.distortion_model, string("radtan"));
  nh.param("cam1/distortion_model", cam1.distortion_model, string("radtan"));

  vector<int> cam0_resolution_temp(2);
  nh.getParam("cam0/resolution", cam0_resolution_temp);
  cam0.resolution[0] = cam0_resolution_temp[0];
  cam0.resolution[1] = cam0_resolution_temp[1];

  vector<int> cam1_resolution_temp(2);
  nh.getParam("cam1/resolution", cam1_resolution_temp);
  cam1.resolution[0] = cam1_resolution_temp[0];
  cam1.resolution[1] = cam1_resolution_temp[1];

  vector<double> cam0_intrinsics_temp(4);
  nh.getParam("cam0/intrinsics", cam0_intrinsics_temp);
  cam0.intrinsics[0] = cam0_intrinsics_temp[0];
  cam0.intrinsics[1] = cam0_intrinsics_temp[1];
  cam0.intrinsics[2] = cam0_intrinsics_temp[2];
  cam0.intrinsics[3] = cam0_intrinsics_temp[3];

  vector<double> cam1_intrinsics_temp(4);
  nh.getParam("cam1/intrinsics", cam1_intrinsics_temp);
  cam1.intrinsics[0] = cam1_intrinsics_temp[0];
  cam1.intrinsics[1] = cam1_intrinsics_temp[1];
  cam1.intrinsics[2] = cam1_intrinsics_temp[2];
  cam1.intrinsics[3] = cam1_intrinsics_temp[3];

  vector<double> cam0_distortion_coeffs_temp(4);
  nh.getParam("cam0/distortion_coeffs", cam0_distortion_coeffs_temp);
  cam0.distortion_coeffs[0] = cam0_distortion_coeffs_temp[0];
  cam0.distortion_coeffs[1] = cam0_distortion_coeffs_temp[1];
  cam0.distortion_coeffs[2] = cam0_distortion_coeffs_temp[2];
  cam0.distortion_coeffs[3] = cam0_distortion_coeffs_temp[3];

  vector<double> cam1_distortion_coeffs_temp(4);
  nh.getParam("cam1/distortion_coeffs", cam1_distortion_coeffs_temp);
  cam1.distortion_coeffs[0] = cam1_distortion_coeffs_temp[0];
  cam1.distortion_coeffs[1] = cam1_distortion_coeffs_temp[1];
  cam1.distortion_coeffs[2] = cam1_distortion_coeffs_temp[2];
  cam1.distortion_coeffs[3] = cam1_distortion_coeffs_temp[3];

  state_server.state_cov = MatrixXd::Zero(21, 21);
  for (int i = 3; i < 6; ++i)
    state_server.state_cov(i, i) = gyro_bias_cov;
  for (int i = 6; i < 9; ++i)
    state_server.state_cov(i, i) = velocity_cov;
  for (int i = 9; i < 12; ++i)
    state_server.state_cov(i, i) = acc_bias_cov;
  for (int i = 15; i < 18; ++i)
    state_server.state_cov(i, i) = extrinsic_rotation_cov;
  for (int i = 18; i < 21; ++i)
    state_server.state_cov(i, i) = extrinsic_translation_cov;

  // Transformation offsets between the frames involved.
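  // cam0/T_cam_imu follows the Kalibr convention: it maps points from the
  // IMU frame into the cam0 frame, so its inverse (T_cam0_imu) carries the
  // camera-to-IMU rotation and translation stored in the IMU state below.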
  Isometry3d T_imu_cam0 = utils::getTransformEigen(nh, "cam0/T_cam_imu");
  Isometry3d T_cam0_imu = T_imu_cam0.inverse();
  state_server.imu_state.R_imu_cam0 = T_cam0_imu.linear().transpose();
  state_server.imu_state.t_cam0_imu = T_cam0_imu.translation();

  true_state_server.imu_state.R_imu_cam0 = T_cam0_imu.linear().transpose();
  true_state_server.imu_state.t_cam0_imu = T_cam0_imu.translation();

  CAMState::T_cam0_cam1 =
    utils::getTransformEigen(nh, "cam1/T_cn_cnm1");
  IMUState::T_imu_body =
    utils::getTransformEigen(nh, "T_imu_body").inverse();

  // Maximum number of camera states to be stored
  nh.param("max_cam_state_size", max_cam_state_size, 30);
  //cam_state_size = max_cam_state_size;

  ROS_INFO("===========================================");
  ROS_INFO("fixed frame id: %s", fixed_frame_id.c_str());
  ROS_INFO("child frame id: %s", child_frame_id.c_str());
  ROS_INFO("publish tf: %d", publish_tf);
  ROS_INFO("frame rate: %f", frame_rate);
  ROS_INFO("position std threshold: %f", position_std_threshold);
  ROS_INFO("Keyframe rotation threshold: %f", rotation_threshold);
  ROS_INFO("Keyframe translation threshold: %f", translation_threshold);
  ROS_INFO("Keyframe tracking rate threshold: %f", tracking_rate_threshold);
  ROS_INFO("gyro noise: %.10f", IMUState::gyro_noise);
  ROS_INFO("gyro bias noise: %.10f", IMUState::gyro_bias_noise);
  ROS_INFO("acc noise: %.10f", IMUState::acc_noise);
  ROS_INFO("acc bias noise: %.10f", IMUState::acc_bias_noise);
  ROS_INFO("observation noise: %.10f", Feature::observation_noise);
  ROS_INFO("initial velocity: %f, %f, %f",
      state_server.imu_state.velocity(0),
      state_server.imu_state.velocity(1),
      state_server.imu_state.velocity(2));
  ROS_INFO("initial gyro bias cov: %f", gyro_bias_cov);
  ROS_INFO("initial acc bias cov: %f", acc_bias_cov);
  ROS_INFO("initial velocity cov: %f", velocity_cov);
  ROS_INFO("initial extrinsic rotation cov: %f", extrinsic_rotation_cov);
  ROS_INFO("initial extrinsic translation cov: %f", extrinsic_translation_cov);

  cout << T_imu_cam0.linear() << endl;
  cout << T_imu_cam0.translation().transpose() << endl;

  cout << "OpenCV version : " << CV_VERSION << endl;
  cout << "Major version : " << CV_MAJOR_VERSION << endl;
  cout << "Minor version : " << CV_MINOR_VERSION << endl;
  cout << "Subminor version : " << CV_SUBMINOR_VERSION << endl;

  ROS_INFO("max camera state #: %d", max_cam_state_size);
  ROS_INFO("===========================================");
  return true;
}

bool MsckfVio::createRosIO() {
  // Activate the bag-playing parameter, for debugging.
  nh.setParam("/play_bag", true);

  // Message types on the publishers below are restored from their use
  // elsewhere in this node.
  odom_pub = nh.advertise<nav_msgs::Odometry>("odom", 10);
  feature_pub = nh.advertise<sensor_msgs::PointCloud2>(
      "feature_point_cloud", 10);
  marker_pub = nh.advertise<visualization_msgs::MarkerArray>(
      "/visualization_marker_array", 10);

  reset_srv = nh.advertiseService("reset",
      &MsckfVio::resetCallback, this);

  imu_sub = nh.subscribe("imu", 100,
      &MsckfVio::imuCallback, this);
  truth_sub = nh.subscribe("ground_truth", 100,
      &MsckfVio::truthCallback, this);

  cam0_img_sub.subscribe(nh, "cam0_image", 10);
  cam1_img_sub.subscribe(nh, "cam1_image", 10);
  feature_sub.subscribe(nh, "features", 10);

  image_sub.connectInput(cam0_img_sub, cam1_img_sub, feature_sub);
  image_sub.registerCallback(&MsckfVio::imageCallback, this);

  mocap_odom_sub = nh.subscribe("mocap_odom", 10,
      &MsckfVio::mocapOdomCallback, this);
  mocap_odom_pub = nh.advertise<nav_msgs::Odometry>("gt_odom", 1);

  return true;
}

bool MsckfVio::initialize() {
  if (!loadParameters()) return false;
  ROS_INFO("Finish loading ROS parameters...");

  // Initialize state server
  state_server.continuous_noise_cov =
    Matrix<double, 12, 12>::Zero();
  state_server.continuous_noise_cov.block<3, 3>(0, 0) =
    Matrix3d::Identity()*IMUState::gyro_noise;
  state_server.continuous_noise_cov.block<3, 3>(3, 3) =
    Matrix3d::Identity()*IMUState::gyro_bias_noise;
  state_server.continuous_noise_cov.block<3, 3>(6, 6) =
    Matrix3d::Identity()*IMUState::acc_noise;
  state_server.continuous_noise_cov.block<3, 3>(9, 9) =
    Matrix3d::Identity()*IMUState::acc_bias_noise;

  // Initialize the chi squared test table
  // (quantile at probability 0.2 for each degree of freedom).
  for (int i = 1; i < 100; ++i) {
    boost::math::chi_squared chi_squared_dist(i);
    chi_squared_test_table[i] =
      boost::math::quantile(chi_squared_dist, 0.2);
  }

  if (!createRosIO()) return false;
  ROS_INFO("Finish creating ROS IO...");

  /*
  rosbag::Bag bag;
  bag.open("/home/raphael/dev/MSCKF_ws/bag/TUM/dataset-corridor1_1024_16.bag",
      rosbag::bagmode::Read);
  for (rosbag::MessageInstance const m : rosbag::View(bag)) {
    std_msgs::Int32::ConstPtr i = m.instantiate<std_msgs::Int32>();
    if (i != NULL)
      std::cout << i->data << std::endl;
  }
  bag.close();
  */

  return true;
}

void MsckfVio::imuCallback(const sensor_msgs::ImuConstPtr& msg){
  // IMU msgs are pushed back into a buffer instead of
  // being processed immediately. The IMU msgs are processed
  // when the next image is available, in which way, we can
  // easily handle the transfer delay.
  imu_msg_buffer.push_back(*msg);

  if (!is_gravity_set) {
    if (imu_msg_buffer.size() < 200) return;
    //if (imu_msg_buffer.size() < 10) return;
    initializeGravityAndBias();
    is_gravity_set = true;
  }

  return;
}

void MsckfVio::truthCallback(const geometry_msgs::TransformStampedPtr& msg){
  static bool first_truth_odom_msg = true;

  // errorstate
  /*if(not ErrorState)
    return;
  */

  // If this is the first ground truth message, set
  // the initial frame.
  if (first_truth_odom_msg) {
    Quaterniond orientation;
    Vector3d translation;
    tf::vectorMsgToEigen(
        msg->transform.translation, translation);
    tf::quaternionMsgToEigen(
        msg->transform.rotation, orientation);
    mocap_initial_orientation = QtoV(orientation);
    mocap_initial_position = translation;
    first_truth_odom_msg = false;
  }

  // Transform the ground truth.
  Quaterniond orientation;
  Vector3d translation;
  //tf::vectorMsgToEigen(
  //    msg->transform.translation, translation);
  //tf::quaternionMsgToEigen(
  //    msg->transform.rotation, orientation);
  tf::vectorMsgToEigen(
      msg->transform.translation, translation);
  tf::quaternionMsgToEigen(
      msg->transform.rotation, orientation);

  Eigen::Vector4d q = quaternionMultiplication(
      quaternionConjugate(mocap_initial_orientation), QtoV(orientation));
  translation -= mocap_initial_position;

  msg->transform.rotation.x = q(0);
  msg->transform.rotation.y = q(1);
  msg->transform.rotation.z = q(2);
  msg->transform.rotation.w = q(3);
  msg->transform.translation.x = translation(0);
  msg->transform.translation.y = translation(1);
  msg->transform.translation.z = translation(2);

  truth_msg_buffer.push_back(*msg);
}

void MsckfVio::imageCallback(
    const sensor_msgs::ImageConstPtr& cam0_img,
    const sensor_msgs::ImageConstPtr& cam1_img,
    const CameraMeasurementConstPtr& feature_msg) {

  // Return if the gravity vector has not been set.
  if (!is_gravity_set) return;

  // Start the system if the first image is received.
  // The frame where the first image is received will be
  // the origin.
  if (is_first_img) {
    is_first_img = false;
    state_server.imu_state.time = feature_msg->header.stamp.toSec();
  }

  static double max_processing_time = 0.0;
  static int critical_time_cntr = 0;
  double processing_start_time = ros::Time::now().toSec();

  // Propagate the IMU state using the IMU msgs
  // that are received before the image feature_msg.
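  // batchImuProcessing() integrates every buffered IMU message whose stamp
  // falls between the previous state time and this image's timestamp, so the
  // filter state is brought exactly up to the feature measurement time.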
  ros::Time start_time = ros::Time::now();
  batchImuProcessing(feature_msg->header.stamp.toSec());

  // Save the true state in a separate state vector.
  batchTruthProcessing(feature_msg->header.stamp.toSec());

  if (GROUNDTRUTH) {
    state_server.imu_state.position = true_state_server.imu_state.position;
    state_server.imu_state.orientation = true_state_server.imu_state.orientation;
    state_server.imu_state.position_null = true_state_server.imu_state.position_null;
    state_server.imu_state.orientation_null = true_state_server.imu_state.orientation_null;

    state_server.imu_state.R_imu_cam0 = true_state_server.imu_state.R_imu_cam0;
    state_server.imu_state.t_cam0_imu = true_state_server.imu_state.t_cam0_imu;
  }
  double imu_processing_time = (
      ros::Time::now()-start_time).toSec();

  // Augment the state vector.
  start_time = ros::Time::now();
  if (PHOTOMETRIC) {
    truePhotometricStateAugmentation(feature_msg->header.stamp.toSec());
    PhotometricStateAugmentation(feature_msg->header.stamp.toSec());
  } else
    stateAugmentation(feature_msg->header.stamp.toSec());
  double state_augmentation_time = (
      ros::Time::now()-start_time).toSec();

  // Add new observations for existing features or new
  // features in the map server.
  start_time = ros::Time::now();
  addFeatureObservations(feature_msg);
  double add_observations_time = (
      ros::Time::now()-start_time).toSec();

  // Add new images to the moving window.
  start_time = ros::Time::now();
  manageMovingWindow(cam0_img, cam1_img, feature_msg);
  double manage_moving_window_time = (
      ros::Time::now()-start_time).toSec();

  // Perform measurement update if necessary.
  start_time = ros::Time::now();
  removeLostFeatures();
  double remove_lost_features_time = (
      ros::Time::now()-start_time).toSec();

  start_time = ros::Time::now();
  pruneCamStateBuffer();
  double prune_cam_states_time = (
      ros::Time::now()-start_time).toSec();

  // Publish the odometry.
  start_time = ros::Time::now();
  publish(feature_msg->header.stamp);
  double publish_time = (
      ros::Time::now()-start_time).toSec();

  // Reset the system if necessary.
  onlineReset();

  double processing_end_time = ros::Time::now().toSec();
  double processing_time =
    processing_end_time - processing_start_time;
  if (processing_time > 1.0/frame_rate) {
    ++critical_time_cntr;
    ROS_INFO("\033[1;31mTotal processing time %f/%d...\033[0m",
        processing_time, critical_time_cntr);
    printf("IMU processing time: %f/%f\n",
        imu_processing_time, imu_processing_time/processing_time);
    printf("State augmentation time: %f/%f\n",
        state_augmentation_time, state_augmentation_time/processing_time);
    printf("Add observations time: %f/%f\n",
        add_observations_time, add_observations_time/processing_time);
    printf("Remove lost features time: %f/%f\n",
        remove_lost_features_time, remove_lost_features_time/processing_time);
    printf("Remove camera states time: %f/%f\n",
        prune_cam_states_time, prune_cam_states_time/processing_time);
    printf("Publish time: %f/%f\n",
        publish_time, publish_time/processing_time);
  }

  return;
}

void MsckfVio::manageMovingWindow(
    const sensor_msgs::ImageConstPtr& cam0_img,
    const sensor_msgs::ImageConstPtr& cam1_img,
    const CameraMeasurementConstPtr& feature_msg) {

  // Save the exposure time into the moving window and clamp it to [1, 100] ms.
  cam0.moving_window[state_server.imu_state.id].exposureTime_ms =
    strtod(cam0_img->header.frame_id.data(), NULL) / 1000000;
  cam1.moving_window[state_server.imu_state.id].exposureTime_ms =
    strtod(cam1_img->header.frame_id.data(), NULL) / 1000000;
  if (cam0.moving_window[state_server.imu_state.id].exposureTime_ms < 1)
    cam0.moving_window[state_server.imu_state.id].exposureTime_ms = 1;
  if (cam1.moving_window[state_server.imu_state.id].exposureTime_ms < 1)
    cam1.moving_window[state_server.imu_state.id].exposureTime_ms = 1;
  if (cam0.moving_window[state_server.imu_state.id].exposureTime_ms > 100)
    cam0.moving_window[state_server.imu_state.id].exposureTime_ms = 100;
  if (cam1.moving_window[state_server.imu_state.id].exposureTime_ms > 100)
    cam1.moving_window[state_server.imu_state.id].exposureTime_ms = 100;

  // Get the current image.
  cv_bridge::CvImageConstPtr cam0_img_ptr = cv_bridge::toCvShare(cam0_img,
      sensor_msgs::image_encodings::MONO8);
  cv_bridge::CvImageConstPtr cam1_img_ptr = cv_bridge::toCvShare(cam1_img,
      sensor_msgs::image_encodings::MONO8);

  // Save the image information into the moving window.
  cam0.moving_window[state_server.imu_state.id].image = cam0_img_ptr->image.clone();
  cam1.moving_window[state_server.imu_state.id].image = cam1_img_ptr->image.clone();

  // TODO: handle any massive overflow correctly
  // (should be pruned before this ever triggers).
  while (cam0.moving_window.size() > 100) {
    cam1.moving_window.erase(cam1.moving_window.begin());
    cam0.moving_window.erase(cam0.moving_window.begin());
  }
}

void MsckfVio::initializeGravityAndBias() {

  // Initialize gravity and gyro bias.
  Vector3d sum_angular_vel = Vector3d::Zero();
  Vector3d sum_linear_acc = Vector3d::Zero();

  for (const auto& imu_msg : imu_msg_buffer) {
    Vector3d angular_vel = Vector3d::Zero();
    Vector3d linear_acc = Vector3d::Zero();

    tf::vectorMsgToEigen(imu_msg.angular_velocity, angular_vel);
    tf::vectorMsgToEigen(imu_msg.linear_acceleration, linear_acc);

    sum_angular_vel += angular_vel;
    sum_linear_acc += linear_acc;
  }

  state_server.imu_state.gyro_bias =
    sum_angular_vel / imu_msg_buffer.size();
  //IMUState::gravity =
  //  -sum_linear_acc / imu_msg_buffer.size();
  // This is the gravity in the IMU frame.
  Vector3d gravity_imu =
    sum_linear_acc / imu_msg_buffer.size();

  // Initialize the initial orientation, so that the estimation
  // is consistent with the inertial frame.
  double gravity_norm = gravity_imu.norm();
  IMUState::gravity = Vector3d(0.0, 0.0, -gravity_norm);

  Quaterniond q0_i_w = Quaterniond::FromTwoVectors(
      gravity_imu, -IMUState::gravity);
  state_server.imu_state.orientation =
    rotationToQuaternion(q0_i_w.toRotationMatrix().transpose());

  // printf("gravity Norm %f\n", gravity_norm);
  // printf("linear_acc %f, %f, %f\n", gravity_imu(0), gravity_imu(1), gravity_imu(2));
  // printf("quaterniond: %f, %f, %f, %f\n", q0_i_w.w(), q0_i_w.x(), q0_i_w.y(), q0_i_w.z());
  return;
}

bool MsckfVio::resetCallback(
    std_srvs::Trigger::Request& req,
    std_srvs::Trigger::Response& res) {

  cout << "reset" << endl;
  ROS_WARN("Start resetting msckf vio...");
  // Temporarily shutdown the subscribers to prevent the
  // state from updating.
  imu_sub.shutdown();

  // Reset the IMU state.
  IMUState& imu_state = state_server.imu_state;
  imu_state.time = 0.0;
  imu_state.orientation = Vector4d(0.0, 0.0, 0.0, 1.0);
  imu_state.position = Vector3d::Zero();
  imu_state.velocity = Vector3d::Zero();
  imu_state.gyro_bias = Vector3d::Zero();
  imu_state.acc_bias = Vector3d::Zero();
  imu_state.orientation_null = Vector4d(0.0, 0.0, 0.0, 1.0);
  imu_state.position_null = Vector3d::Zero();
  imu_state.velocity_null = Vector3d::Zero();

  // Remove all existing camera states.
  state_server.cam_states.clear();

  // Reset the state covariance.
  double gyro_bias_cov, acc_bias_cov, velocity_cov;
  nh.param("initial_covariance/velocity", velocity_cov, 0.25);
  nh.param("initial_covariance/gyro_bias", gyro_bias_cov, 1e-4);
  nh.param("initial_covariance/acc_bias", acc_bias_cov, 1e-2);

  double extrinsic_rotation_cov, extrinsic_translation_cov;
  nh.param("initial_covariance/extrinsic_rotation_cov",
      extrinsic_rotation_cov, 3.0462e-4);
  nh.param("initial_covariance/extrinsic_translation_cov",
      extrinsic_translation_cov, 1e-4);

  // Reset the irradiance covariance.
  nh.param("initial_covariance/irradiance_frame_bias",
      irradiance_frame_bias, 0.1);

  state_server.state_cov = MatrixXd::Zero(21, 21);
  for (int i = 3; i < 6; ++i)
    state_server.state_cov(i, i) = gyro_bias_cov;
  for (int i = 6; i < 9; ++i)
    state_server.state_cov(i, i) = velocity_cov;
  for (int i = 9; i < 12; ++i)
    state_server.state_cov(i, i) = acc_bias_cov;
  for (int i = 15; i < 18; ++i)
    state_server.state_cov(i, i) = extrinsic_rotation_cov;
  for (int i = 18; i < 21; ++i)
    state_server.state_cov(i, i) = extrinsic_translation_cov;

  // Clear all existing features in the map.
  map_server.clear();

  // Clear the IMU msg buffer.
  imu_msg_buffer.clear();

  // Reset the starting flags.
  is_gravity_set = false;
  is_first_img = true;

  // Restart the subscribers.
  imu_sub = nh.subscribe("imu", 100,
      &MsckfVio::imuCallback, this);
  truth_sub = nh.subscribe("ground_truth", 100,
      &MsckfVio::truthCallback, this);

  // feature_sub = nh.subscribe("features", 40,
  //     &MsckfVio::featureCallback, this);

  // TODO: When can the reset fail?
  res.success = true;
  ROS_WARN("Resetting msckf vio completed...");
  return true;
}

void MsckfVio::mocapOdomCallback(const nav_msgs::OdometryConstPtr& msg) {
  static bool first_mocap_odom_msg = true;

  // If this is the first mocap odometry message, set
  // the initial frame.
  if (first_mocap_odom_msg) {
    Quaterniond orientation;
    Vector3d translation;
    tf::pointMsgToEigen(
        msg->pose.pose.position, translation);
    tf::quaternionMsgToEigen(
        msg->pose.pose.orientation, orientation);
    //tf::vectorMsgToEigen(
    //    msg->transform.translation, translation);
    //tf::quaternionMsgToEigen(
    //    msg->transform.rotation, orientation);
    mocap_initial_frame.linear() = orientation.toRotationMatrix();
    mocap_initial_frame.translation() = translation;
    first_mocap_odom_msg = false;
  }

  // Transform the ground truth.
  Quaterniond orientation;
  Vector3d translation;
  //tf::vectorMsgToEigen(
  //    msg->transform.translation, translation);
  //tf::quaternionMsgToEigen(
  //    msg->transform.rotation, orientation);
  tf::pointMsgToEigen(
      msg->pose.pose.position, translation);
  tf::quaternionMsgToEigen(
      msg->pose.pose.orientation, orientation);

  Eigen::Isometry3d T_b_v_gt;
  T_b_v_gt.linear() = orientation.toRotationMatrix();
  T_b_v_gt.translation() = translation;
  Eigen::Isometry3d T_b_w_gt = mocap_initial_frame.inverse() * T_b_v_gt;

  //Eigen::Vector3d body_velocity_gt;
  //tf::vectorMsgToEigen(msg->twist.twist.linear, body_velocity_gt);
  //body_velocity_gt = mocap_initial_frame.linear().transpose() *
  //  body_velocity_gt;

  // Ground truth tf.
  if (publish_tf) {
    tf::Transform T_b_w_gt_tf;
    tf::transformEigenToTF(T_b_w_gt, T_b_w_gt_tf);
    tf_pub.sendTransform(tf::StampedTransform(
          T_b_w_gt_tf, msg->header.stamp, fixed_frame_id,
          child_frame_id+"_mocap"));
  }

  // Ground truth odometry.
  nav_msgs::Odometry mocap_odom_msg;
  mocap_odom_msg.header.stamp = msg->header.stamp;
  mocap_odom_msg.header.frame_id = fixed_frame_id;
  mocap_odom_msg.child_frame_id = child_frame_id+"_mocap";

  tf::poseEigenToMsg(T_b_w_gt, mocap_odom_msg.pose.pose);
  //tf::vectorEigenToMsg(body_velocity_gt,
  //    mocap_odom_msg.twist.twist.linear);

  mocap_odom_pub.publish(mocap_odom_msg);
  return;
}

void MsckfVio::calcErrorState() {

  // IMU error
  err_state_server.imu_state.id = state_server.imu_state.id;
  err_state_server.imu_state.time = state_server.imu_state.time;

  err_state_server.imu_state.orientation =
    quaternionMultiplication(true_state_server.imu_state.orientation,
        quaternionConjugate(state_server.imu_state.orientation));

  // Convert to the small angle approximation.
  err_state_server.imu_state.orientation *= 2;
  err_state_server.imu_state.orientation(3) = 0;

  err_state_server.imu_state.position =
    true_state_server.imu_state.position - state_server.imu_state.position;
  err_state_server.imu_state.velocity =
    true_state_server.imu_state.velocity - state_server.imu_state.velocity;

  err_state_server.imu_state.gyro_bias =
    true_state_server.imu_state.gyro_bias - state_server.imu_state.gyro_bias;
  err_state_server.imu_state.acc_bias =
    true_state_server.imu_state.acc_bias - state_server.imu_state.acc_bias;

  err_state_server.imu_state.R_imu_cam0 =
    true_state_server.imu_state.R_imu_cam0 - state_server.imu_state.R_imu_cam0;
  err_state_server.imu_state.t_cam0_imu =
    true_state_server.imu_state.t_cam0_imu - state_server.imu_state.t_cam0_imu;

  err_state_server.imu_state.orientation_null =
    true_state_server.imu_state.orientation_null - state_server.imu_state.orientation_null;
  err_state_server.imu_state.position_null =
    true_state_server.imu_state.position_null - state_server.imu_state.position_null;
  err_state_server.imu_state.velocity_null =
    true_state_server.imu_state.velocity_null - state_server.imu_state.velocity_null;

  auto cam_state_iter = state_server.cam_states.begin();
  auto true_cam_state_iter = true_state_server.cam_states.begin();
  auto err_cam_state_iter = err_state_server.cam_states.begin();

  for (int i = 0; i < state_server.cam_states.size();
      ++i, ++cam_state_iter, ++err_cam_state_iter, ++true_cam_state_iter) {

    // Calculate the error in the camera rotation.
    Eigen::Vector4d q = cam_state_iter->second.orientation;
    Eigen::Vector4d p = quaternionConjugate(true_cam_state_iter->second.orientation);
    err_cam_state_iter->second.orientation = quaternionMultiplication(p, q);

    // Small angle approximation.
    err_cam_state_iter->second.orientation *= 2;
    err_cam_state_iter->second.orientation(3) = 0;

    // Calculate the error of the state position.
    err_cam_state_iter->second.position =
      true_cam_state_iter->second.position - cam_state_iter->second.position;
  }
}

void MsckfVio::batchTruthProcessing(const double& time_bound) {

  // Count how many truth msgs in the buffer are used.
  int used_truth_msg_cntr = 0;

  for (const auto& truth_msg : truth_msg_buffer) {
    double truth_time = truth_msg.header.stamp.toSec();
    if (truth_time < true_state_server.imu_state.time) {
      ++used_truth_msg_cntr;
      continue;
    }
    if (truth_time > time_bound) break;

    // Convert the msgs.
    Eigen::Vector3d m_translation;
    Eigen::Vector4d m_rotation;
    tf::vectorMsgToEigen(truth_msg.transform.translation, m_translation);

    m_rotation[0] = truth_msg.transform.rotation.x;
    m_rotation[1] = truth_msg.transform.rotation.y;
    m_rotation[2] = truth_msg.transform.rotation.z;
    m_rotation[3] = truth_msg.transform.rotation.w;
    quaternionNormalize(m_rotation);

    // Execute the process model.
    processTruthtoIMU(truth_time, m_rotation, m_translation);
    ++used_truth_msg_cntr;
  }
  last_time_bound = time_bound;

  // Set the state ID for the new IMU state.
  true_state_server.imu_state.id = IMUState::next_id;
  err_state_server.imu_state.id = IMUState::next_id;

  // Remove all used truth msgs.
  truth_msg_buffer.erase(truth_msg_buffer.begin(),
      truth_msg_buffer.begin()+used_truth_msg_cntr);
}

void MsckfVio::processTruthtoIMU(const double& time,
    const Vector4d& m_rot,
    const Vector3d& m_trans){

  IMUState& imu_state = true_state_server.imu_state;
  Vector4d& q = imu_state.orientation;
  Vector3d& v = imu_state.velocity;
  Vector3d& p = imu_state.position;

  double dt = time - imu_state.time;

  // Finite-difference velocity from the previous position.
  v = (m_trans-p)/dt;
  p = m_trans;
  q = m_rot;

  // Update the state corresponding to the null space.
  imu_state.orientation_null = imu_state.orientation;
  imu_state.position_null = imu_state.position;
  imu_state.velocity_null = imu_state.velocity;

  // Update the state info.
  true_state_server.imu_state.time = time;
}

void MsckfVio::batchImuProcessing(const double& time_bound) {
  // Count how many IMU msgs in the buffer are used.
  int used_imu_msg_cntr = 0;

  for (const auto& imu_msg : imu_msg_buffer) {
    double imu_time = imu_msg.header.stamp.toSec();
    if (imu_time < state_server.imu_state.time) {
      ++used_imu_msg_cntr;
      continue;
    }
    if (imu_time > time_bound) break;

    // Convert the msgs.
    Vector3d m_gyro, m_acc;
    tf::vectorMsgToEigen(imu_msg.angular_velocity, m_gyro);
    tf::vectorMsgToEigen(imu_msg.linear_acceleration, m_acc);

    // Execute the process model.
    processModel(imu_time, m_gyro, m_acc);
    ++used_imu_msg_cntr;
  }

  // Set the state ID for the new IMU state.
  state_server.imu_state.id = IMUState::next_id++;

  // Remove all used IMU msgs.
  imu_msg_buffer.erase(imu_msg_buffer.begin(),
      imu_msg_buffer.begin()+used_imu_msg_cntr);

  return;
}

void MsckfVio::processModel(const double& time,
    const Vector3d& m_gyro,
    const Vector3d& m_acc) {

  // Remove the bias from the measured gyro and acceleration.
  IMUState& imu_state = state_server.imu_state;
  Vector3d gyro = m_gyro - imu_state.gyro_bias;
  Vector3d acc = m_acc - imu_state.acc_bias;
  double dtime = time - imu_state.time;

  // Compute the discrete transition and noise covariance matrix.
  Matrix<double, 21, 21> F = Matrix<double, 21, 21>::Zero();
  Matrix<double, 21, 12> G = Matrix<double, 21, 12>::Zero();

  F.block<3, 3>(0, 0) = -skewSymmetric(gyro);
  F.block<3, 3>(0, 3) = -Matrix3d::Identity();
  F.block<3, 3>(6, 0) = -quaternionToRotation(
      imu_state.orientation).transpose()*skewSymmetric(acc);
  F.block<3, 3>(6, 9) = -quaternionToRotation(
      imu_state.orientation).transpose();
  F.block<3, 3>(12, 6) = Matrix3d::Identity();

  G.block<3, 3>(0, 0) = -Matrix3d::Identity();
  G.block<3, 3>(3, 3) = Matrix3d::Identity();
  G.block<3, 3>(6, 6) = -quaternionToRotation(
      imu_state.orientation).transpose();
  G.block<3, 3>(9, 9) = Matrix3d::Identity();

  // Approximate the matrix exponential to the 3rd order,
  // which can be considered to be accurate enough assuming
  // dtime is within 0.01s.
  Matrix<double, 21, 21> Fdt = F * dtime;
  Matrix<double, 21, 21> Fdt_square = Fdt * Fdt;
  Matrix<double, 21, 21> Fdt_cube = Fdt_square * Fdt;
  Matrix<double, 21, 21> Phi = Matrix<double, 21, 21>::Identity() +
    Fdt + 0.5*Fdt_square + (1.0/6.0)*Fdt_cube;

  // Propagate the state using 4th order Runge-Kutta.
  predictNewState(dtime, gyro, acc);

  // Propagate the state covariance matrix.
  Matrix<double, 21, 21> Q = Phi*G*state_server.continuous_noise_cov*
    G.transpose()*Phi.transpose()*dtime;
  state_server.state_cov.block<21, 21>(0, 0) =
    Phi*state_server.state_cov.block<21, 21>(0, 0)*Phi.transpose() + Q;

  if (state_server.cam_states.size() > 0) {
    state_server.state_cov.block(
        0, 21, 21, state_server.state_cov.cols()-21) =
      Phi * state_server.state_cov.block(
          0, 21, 21, state_server.state_cov.cols()-21);
    state_server.state_cov.block(
        21, 0, state_server.state_cov.rows()-21, 21) =
      state_server.state_cov.block(
          21, 0, state_server.state_cov.rows()-21, 21) * Phi.transpose();
  }

  MatrixXd state_cov_fixed = (state_server.state_cov +
      state_server.state_cov.transpose()) / 2.0;
  state_server.state_cov = state_cov_fixed;

  // Update the state corresponding to the null space.
  imu_state.orientation_null = imu_state.orientation;
  imu_state.position_null = imu_state.position;
  imu_state.velocity_null = imu_state.velocity;

  // Update the state info.
  state_server.imu_state.time = time;
  return;
}

void MsckfVio::predictNewState(const double& dt,
    const Vector3d& gyro,
    const Vector3d& acc) {

  // TODO: Will performing the forward integration using
  // the inverse of the quaternion give better accuracy?
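  // Quaternion kinematics: q_dot = 0.5 * Omega(gyro) * q, where Omega is the
  // 4x4 matrix built below. For a constant gyro rate over dt this ODE has the
  // closed-form solution
  //   q(t+dt) = (cos(|w|dt/2) I + sin(|w|dt/2)/|w| * Omega) q(t),
  // which is evaluated at dt and dt/2 to supply the full- and half-step
  // rotations needed by the RK4 integration of velocity and position.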
  double gyro_norm = gyro.norm();
  Matrix4d Omega = Matrix4d::Zero();
  Omega.block<3, 3>(0, 0) = -skewSymmetric(gyro);
  Omega.block<3, 1>(0, 3) = gyro;
  Omega.block<1, 3>(3, 0) = -gyro;

  Vector4d& q = state_server.imu_state.orientation;
  Vector3d& v = state_server.imu_state.velocity;
  Vector3d& p = state_server.imu_state.position;

  // Some pre-calculation
  Vector4d dq_dt, dq_dt2;
  if (gyro_norm > 1e-5) {
    dq_dt = (cos(gyro_norm*dt*0.5)*Matrix4d::Identity() +
        1/gyro_norm*sin(gyro_norm*dt*0.5)*Omega) * q;
    dq_dt2 = (cos(gyro_norm*dt*0.25)*Matrix4d::Identity() +
        1/gyro_norm*sin(gyro_norm*dt*0.25)*Omega) * q;
  }
  else {
    dq_dt = (Matrix4d::Identity()+0.5*dt*Omega) *
      cos(gyro_norm*dt*0.5) * q;
    dq_dt2 = (Matrix4d::Identity()+0.25*dt*Omega) *
      cos(gyro_norm*dt*0.25) * q;
  }
  Matrix3d dR_dt_transpose = quaternionToRotation(dq_dt).transpose();
  Matrix3d dR_dt2_transpose = quaternionToRotation(dq_dt2).transpose();

  // k1 = f(tn, yn)
  Vector3d k1_v_dot = quaternionToRotation(q).transpose()*acc +
    IMUState::gravity;
  Vector3d k1_p_dot = v;

  // k2 = f(tn+dt/2, yn+k1*dt/2)
  Vector3d k1_v = v + k1_v_dot*dt/2;
  Vector3d k2_v_dot = dR_dt2_transpose*acc +
    IMUState::gravity;
  Vector3d k2_p_dot = k1_v;

  // k3 = f(tn+dt/2, yn+k2*dt/2)
  Vector3d k2_v = v + k2_v_dot*dt/2;
  Vector3d k3_v_dot = dR_dt2_transpose*acc +
    IMUState::gravity;
  Vector3d k3_p_dot = k2_v;

  // k4 = f(tn+dt, yn+k3*dt)
  Vector3d k3_v = v + k3_v_dot*dt;
  Vector3d k4_v_dot = dR_dt_transpose*acc +
    IMUState::gravity;
  Vector3d k4_p_dot = k3_v;

  // yn+1 = yn + dt/6*(k1+2*k2+2*k3+k4)
  q = dq_dt;
  quaternionNormalize(q);
  v = v + dt/6*(k1_v_dot+2*k2_v_dot+2*k3_v_dot+k4_v_dot);
  p = p + dt/6*(k1_p_dot+2*k2_p_dot+2*k3_p_dot+k4_p_dot);

  return;
}

void MsckfVio::stateAugmentation(const double& time) {

  const Matrix3d& R_i_c = state_server.imu_state.R_imu_cam0;
  const Vector3d& t_c_i = state_server.imu_state.t_cam0_imu;

  // Add a new camera state to the state server.
  Matrix3d R_w_i = quaternionToRotation(
      state_server.imu_state.orientation);
  Matrix3d R_w_c = R_i_c * R_w_i;
  Vector3d t_c_w = state_server.imu_state.position +
    R_w_i.transpose()*t_c_i;

  state_server.cam_states[state_server.imu_state.id] =
    CAMState(state_server.imu_state.id);
  CAMState& cam_state = state_server.cam_states[
    state_server.imu_state.id];

  cam_state.time = time;
  cam_state.orientation = rotationToQuaternion(R_w_c);
  cam_state.position = t_c_w;

  cam_state.orientation_null = cam_state.orientation;
  cam_state.position_null = cam_state.position;

  // Update the covariance matrix of the state.
  // To simplify computation, the matrix J below is the nontrivial block
  // in Equation (16) in "A Multi-State Constraint Kalman Filter for Vision
  // -aided Inertial Navigation".
  Matrix<double, 6, 21> J = Matrix<double, 6, 21>::Zero();
  J.block<3, 3>(0, 0) = R_i_c;
  J.block<3, 3>(0, 15) = Matrix3d::Identity();
  J.block<3, 3>(3, 0) = skewSymmetric(R_w_i.transpose()*t_c_i);
  //J.block<3, 3>(3, 0) = -R_w_i.transpose()*skewSymmetric(t_c_i);
  J.block<3, 3>(3, 12) = Matrix3d::Identity();
  J.block<3, 3>(3, 18) = Matrix3d::Identity();

  // Resize the state covariance matrix.
  size_t old_rows = state_server.state_cov.rows();
  size_t old_cols = state_server.state_cov.cols();
  state_server.state_cov.conservativeResize(old_rows+6, old_cols+6);

  // Rename some matrix blocks for convenience.
  const Matrix<double, 21, 21>& P11 =
    state_server.state_cov.block<21, 21>(0, 0);
  const MatrixXd& P12 =
    state_server.state_cov.block(0, 21, 21, old_cols-21);

  // Fill in the augmented state covariance.
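  // With J the Jacobian of the new camera pose w.r.t. the existing state,
  // the augmented covariance has the block structure
  //   P <- [ P              (J [P11 P12])^T ]
  //        [ J [P11 P12]    J P11 J^T       ],
  // which is exactly what the three block assignments below fill in.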
  state_server.state_cov.block(old_rows, 0, 6, old_cols) << J*P11, J*P12;
  state_server.state_cov.block(0, old_cols, old_rows, 6) =
    state_server.state_cov.block(old_rows, 0, 6, old_cols).transpose();
  state_server.state_cov.block<6, 6>(old_rows, old_cols) =
    J * P11 * J.transpose();

  // Fix the covariance to be symmetric
  MatrixXd state_cov_fixed = (state_server.state_cov +
      state_server.state_cov.transpose()) / 2.0;
  state_server.state_cov = state_cov_fixed;

  return;
}

void MsckfVio::truePhotometricStateAugmentation(const double& time) {

  const Matrix3d& true_R_i_c = true_state_server.imu_state.R_imu_cam0;
  const Vector3d& true_t_c_i = true_state_server.imu_state.t_cam0_imu;

  // Add a new camera state to the state server.
  Matrix3d true_R_w_i = quaternionToRotation(
      true_state_server.imu_state.orientation);
  Matrix3d true_R_w_c = true_R_i_c * true_R_w_i;
  Vector3d true_t_c_w = true_state_server.imu_state.position +
    true_R_w_i.transpose()*true_t_c_i;

  true_state_server.cam_states[true_state_server.imu_state.id] =
    CAMState(true_state_server.imu_state.id);
  CAMState& true_cam_state = true_state_server.cam_states[
    true_state_server.imu_state.id];

  // Manage the error state size.
  err_state_server.cam_states[err_state_server.imu_state.id] =
    CAMState(err_state_server.imu_state.id);
  CAMState& err_cam_state = err_state_server.cam_states[
    err_state_server.imu_state.id];

  err_cam_state.time = time;

  true_cam_state.time = time;
  true_cam_state.orientation = rotationToQuaternion(true_R_w_c);
  true_cam_state.position = true_t_c_w;

  true_cam_state.orientation_null = true_cam_state.orientation;
  true_cam_state.position_null = true_cam_state.position;

  return;
}

void MsckfVio::PhotometricStateAugmentation(const double& time) {

  const Matrix3d& R_i_c = state_server.imu_state.R_imu_cam0;
  const Vector3d& t_c_i = state_server.imu_state.t_cam0_imu;

  // Add a new camera state to the state server.
  Matrix3d R_w_i = quaternionToRotation(
      state_server.imu_state.orientation);
  Matrix3d R_w_c = R_i_c * R_w_i;
  Vector3d t_c_w = state_server.imu_state.position +
    R_w_i.transpose()*t_c_i;

  state_server.cam_states[state_server.imu_state.id] =
    CAMState(state_server.imu_state.id);
  CAMState& cam_state = state_server.cam_states[
    state_server.imu_state.id];

  cam_state.time = time;
  cam_state.orientation = rotationToQuaternion(R_w_c);
  cam_state.position = t_c_w;

  cam_state.orientation_null = cam_state.orientation;
  cam_state.position_null = cam_state.position;

  // Update the covariance matrix of the state.
  // To simplify computation, the matrix J below is the nontrivial block
  // in Equation (16) in "A Multi-State Constraint Kalman Filter for Vision
  // -aided Inertial Navigation".
  Matrix<double, 6, 21> J = Matrix<double, 6, 21>::Zero();
  J.block<3, 3>(0, 0) = R_i_c;
  J.block<3, 3>(0, 15) = Matrix3d::Identity();
  J.block<3, 3>(3, 0) = skewSymmetric(R_w_i.transpose()*t_c_i);
  //J.block<3, 3>(3, 0) = -R_w_i.transpose()*skewSymmetric(t_c_i);
  J.block<3, 3>(3, 12) = Matrix3d::Identity();
  J.block<3, 3>(3, 18) = Matrix3d::Identity();

  // Resize the state covariance matrix.
  size_t old_rows = state_server.state_cov.rows();
  size_t old_cols = state_server.state_cov.cols();

  // Add 7 instead of 6: camera pose (6) plus irradiance bias eta = b_l.
  state_server.state_cov.conservativeResizeLike(
      Eigen::MatrixXd::Zero(old_rows+7, old_cols+7));

  // Rename some matrix blocks for convenience.
  const Matrix<double, 21, 21>& P11 =
    state_server.state_cov.block<21, 21>(0, 0);
  const MatrixXd& P12 =
    state_server.state_cov.block(0, 21, 21, old_cols-21);

  // Fill in the augmented state covariance.
  state_server.state_cov.block(old_rows, 0, 6, old_cols) << J*P11, J*P12;
  state_server.state_cov.block(0, old_cols, old_rows, 6) =
    state_server.state_cov.block(old_rows, 0, 6, old_cols).transpose();
  state_server.state_cov.block<6, 6>(old_rows, old_cols) =
    J * P11 * J.transpose();

  // Add the photometry variance P_eta (surrounded by zeros).
  state_server.state_cov(old_rows+6, old_cols+6) = irradiance_frame_bias;

  // Fix the covariance to be symmetric
  MatrixXd state_cov_fixed = (state_server.state_cov +
      state_server.state_cov.transpose()) / 2.0;
  state_server.state_cov = state_cov_fixed;

  return;
}

void MsckfVio::addFeatureObservations(
    const CameraMeasurementConstPtr& msg) {

  StateIDType state_id = state_server.imu_state.id;
  int curr_feature_num = map_server.size();
  int tracked_feature_num = 0;

  // Add new observations for existing features or new
  // features in the map server.
  for (const auto& feature : msg->features) {
    if (map_server.find(feature.id) == map_server.end()) {
      // This is a new feature.
      map_server[feature.id] = Feature(feature.id);
      map_server[feature.id].observations[state_id] =
        Vector4d(feature.u0, feature.v0,
            feature.u1, feature.v1);
    } else {
      // This is an old feature.
      map_server[feature.id].observations[state_id] =
        Vector4d(feature.u0, feature.v0,
            feature.u1, feature.v1);
      ++tracked_feature_num;
    }
  }

  tracking_rate =
    static_cast<double>(tracked_feature_num) /
    static_cast<double>(curr_feature_num);

  return;
}

void MsckfVio::PhotometricMeasurementJacobian(
    const StateIDType& cam_state_id,
    const FeatureIDType& feature_id,
    MatrixXd& H_x, MatrixXd& H_y, VectorXd& r) {

  // Prepare all the required data.
  const CAMState& cam_state = state_server.cam_states[cam_state_id];
  const Feature& feature = map_server[feature_id];
  const StateIDType anchor_state_id = feature.observations.begin()->first;
  const CAMState anchor_state = state_server.cam_states[anchor_state_id];

  // Cam0 pose.
  Matrix3d R_w_c0 = quaternionToRotation(cam_state.orientation);
  const Vector3d& t_c0_w = cam_state.position;

  // Cam1 pose.
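  // Convention used throughout: cam_state.orientation stores the
  // world-to-camera rotation R_w_c and cam_state.position the camera origin
  // in the world frame, so a world point p_w maps into the camera as
  // p_c = R_w_c * (p_w - t_c_w). The cam1 pose follows by chaining the
  // fixed stereo extrinsics T_cam0_cam1.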
  Matrix3d R_c0_c1 = CAMState::T_cam0_cam1.linear();
  Matrix3d R_w_c1 = CAMState::T_cam0_cam1.linear() * R_w_c0;
  Vector3d t_c1_w = t_c0_w - R_w_c1.transpose()*CAMState::T_cam0_cam1.translation();

  // Photometric observation and residual.
  std::vector<double> photo_z;
  std::vector<double> photo_r;

  // Individual Jacobians.
  Matrix<double, 1, 2> dI_dhj = Matrix<double, 1, 2>::Zero();
  Matrix<double, 2, 3> dh_dCpij = Matrix<double, 2, 3>::Zero();
  Matrix<double, 2, 3> dh_dGpij = Matrix<double, 2, 3>::Zero();
  Matrix<double, 2, 6> dh_dXplj = Matrix<double, 2, 6>::Zero();
  Matrix<double, 3, 1> dGpj_drhoj = Matrix<double, 3, 1>::Zero();
  Matrix<double, 3, 6> dGpj_XpAj = Matrix<double, 3, 6>::Zero();

  Matrix<double, 3, 3> dCpij_dGpij = Matrix<double, 3, 3>::Zero();
  Matrix<double, 3, 3> dCpij_dCGtheta = Matrix<double, 3, 3>::Zero();
  Matrix<double, 3, 3> dCpij_dGpC = Matrix<double, 3, 3>::Zero();

  // One line of the NxN Jacobians.
  Eigen::Matrix<double, 1, 1> H_rhoj;
  Eigen::Matrix<double, 1, 6> H_plj;
  Eigen::Matrix<double, 1, 6> H_pAj;

  // Combined Jacobians.
  Eigen::MatrixXd H_rho(N*N, 1);
  Eigen::MatrixXd H_pl(N*N, 6);
  Eigen::MatrixXd H_pA(N*N, 6);

  auto frame = cam0.moving_window.find(cam_state_id)->second.image;
  auto anchor_frame = cam0.moving_window.find(anchor_state_id)->second.image;

  // Observation.
  const Vector4d& z = feature.observations.find(cam_state_id)->second;

  // Estimate the photometric measurement.
  std::vector<double> estimate_irradiance;
  std::vector<double> estimate_photo_z;
  IlluminationParameter estimated_illumination;
  feature.estimate_FrameIrradiance(cam_state, cam_state_id, cam0,
      estimate_irradiance, estimated_illumination);

  // Calculated here, because we need the raw 'estimate_irradiance'
  // later for the Jacobian.
  for (auto& estimate_irradiance_j : estimate_irradiance)
    estimate_photo_z.push_back(estimate_irradiance_j *
        estimated_illumination.frame_gain * estimated_illumination.feature_gain +
        estimated_illumination.frame_bias + estimated_illumination.feature_bias);

  int count = 0;
  double dx, dy;

  // Gradient visualization parameters.
  cv::Point2f gradientVector(0, 0);

  // Residual change visualization.
  cv::Point2f residualVector(0, 0);
  double res_sum = 0;

  for (auto point : feature.anchorPatch_3d) {
    //cout << "____feature-measurement_____\n" << endl;

    Eigen::Vector3d p_c0 = R_w_c0 * (point-t_c0_w);
    cv::Point2f p_in_c0 = feature.projectPositionToCamera(
        cam_state, cam_state_id, cam0, point);
    cv::Point2f p_in_anchor = feature.projectPositionToCamera(
        anchor_state, anchor_state_id, cam0, point);

    // Add the observation.
    photo_z.push_back(feature.PixelIrradiance(p_in_c0, frame));

    // Calculate the photometric residual.
    photo_r.push_back(photo_z[count] - estimate_photo_z[count]);
    //cout << "residual: " << photo_r.back() << endl;

    // Add the Jacobians.
    cv::Point2f pixelDistance = feature.pixelDistanceAt(
        anchor_state, anchor_state_id, cam0, point);

    // Calculate the derivative in the anchor frame; use the position for
    // the derivative calculation. The frame derivative is calculated by
    // convolving with the kernel [-1, 0, 1].
    dx = feature.Kernel(p_in_anchor, anchor_frame, "Sobel_x");
    dy = feature.Kernel(p_in_anchor, anchor_frame, "Sobel_y");
    // dx = feature.PixelIrradiance(cv::Point2f(p_in_anchor.x+1, p_in_anchor.y), anchor_frame)
    //    - feature.PixelIrradiance(cv::Point2f(p_in_anchor.x-1, p_in_anchor.y), anchor_frame);
    // dy = feature.PixelIrradiance(cv::Point2f(p_in_anchor.x, p_in_anchor.y+1), anchor_frame)
    //    - feature.PixelIrradiance(cv::Point2f(p_in_anchor.x, p_in_anchor.y-1), anchor_frame);
    dI_dhj(0, 0) = dx/(pixelDistance.x);
    dI_dhj(0, 1) = dy/(pixelDistance.y);

    gradientVector.x += dx;
    gradientVector.y += dy;

    residualVector.x += dx * photo_r[count];
    residualVector.y += dy * photo_r[count];
    res_sum += photo_r[count];

    // dh / d{}^Cp_{ij}
    dh_dCpij(0, 0) = 1 / p_c0(2);
    dh_dCpij(1, 1) = 1 / p_c0(2);
    dh_dCpij(0, 2) = -(p_c0(0))/(p_c0(2)*p_c0(2));
    dh_dCpij(1, 2) = -(p_c0(1))/(p_c0(2)*p_c0(2));

    // The orientation relates the camera frame and the world frame.
    dCpij_dGpij = quaternionToRotation(cam_state.orientation);
    dh_dGpij = dh_dCpij * dCpij_dGpij;

    // dh / d X_{pl}
    dCpij_dCGtheta = skewSymmetric(p_c0);
    dCpij_dGpC = -quaternionToRotation(cam_state.orientation);
    dh_dXplj.block<2, 3>(0, 0) = dh_dCpij * dCpij_dCGtheta;
    dh_dXplj.block<2, 3>(0, 3) = dh_dCpij * dCpij_dGpC;

    // d{}^Gp_{ij} / d rho_j
    double rho = feature.anchor_rho;
    // The isometry T_anchor_w takes a vector in the anchor frame to the world frame.
    dGpj_drhoj = -feature.T_anchor_w.linear() *
      Eigen::Vector3d(feature.anchorPatch_ideal[count].x/(rho*rho),
          feature.anchorPatch_ideal[count].y/(rho*rho),
          1/(rho*rho));

    dGpj_XpAj.block<3, 3>(0, 0) = -feature.T_anchor_w.linear() *
      skewSymmetric(Eigen::Vector3d(
            feature.anchorPatch_ideal[count].x/(rho),
            feature.anchorPatch_ideal[count].y/(rho),
            1/(rho)));
    dGpj_XpAj.block<3, 3>(0, 3) = Matrix3d::Identity();

    // Intermediate Jacobians.
    H_rhoj = dI_dhj * dh_dGpij * dGpj_drhoj; // 1 x 1
    H_plj = dI_dhj * dh_dXplj;               // 1 x 6
    H_pAj = dI_dhj * dh_dGpij * dGpj_XpAj;   // 1 x 6

    H_rho.block<1, 1>(count, 0) = H_rhoj;
    H_pl.block<1, 6>(count, 0) = H_plj;
    H_pA.block<1, 6>(count, 0) = H_pAj;

    //cout << "H_pl\n" << H_plj << endl;
    //cout << "H_pA\n" << H_pAj << endl;
    //cout << "H_rho\n" << H_rhoj << endl;

    count++;
  }

  MatrixXd H_xl = MatrixXd::Zero(N*N, 21+state_server.cam_states.size()*7);
  MatrixXd H_yl = MatrixXd::Zero(N*N, N*N+state_server.cam_states.size()+1);

  // Set the anchor Jacobian.
  // Get the position of the anchor in the cam states.
  auto cam_state_anchor = state_server.cam_states.find(feature.observations.begin()->first);
  int cam_state_cntr_anchor = std::distance(state_server.cam_states.begin(), cam_state_anchor);
  H_xl.block(0, 21+cam_state_cntr_anchor*7, N*N, 6) = -H_pA;

  // Set the frame Jacobian.
  // Get the position of the current frame in the cam states.
  auto cam_state_iter = state_server.cam_states.find(cam_state_id);
  int cam_state_cntr = std::distance(state_server.cam_states.begin(), cam_state_iter);

  // Set the Jacobian of the state.
  H_xl.block(0, 21+cam_state_cntr*7, N*N, 6) = -H_pl;

  // Set ones for the irradiance bias.
  H_xl.block(0, 21+cam_state_cntr*7+6, N*N, 1) = Eigen::ArrayXd::Ones(N*N);

  // Set the irradiance error block.
  H_yl.block(0, 0, N*N, N*N) = estimated_illumination.feature_gain *
    estimated_illumination.frame_gain *
    Eigen::MatrixXd::Identity(N*N, N*N);

  // TODO: make this calculation more fluent.
  for (int i = 0; i < N*N; i++)
    H_yl(i, N*N+cam_state_cntr) = estimate_irradiance[i];
  H_yl.block(0, N*N+state_server.cam_states.size(), N*N, 1) = -H_rho;

  H_x = H_xl;
  H_y = H_yl;

  // TODO: make this more fluent as well.
  count = 0;
  for (auto data : photo_r)
    r[count++] = data;

  std::stringstream ss;
  ss << "INFO:" << " anchor: " << cam_state_cntr_anchor << " frame: " << cam_state_cntr;
  if (PRINTIMAGES) {
    feature.MarkerGeneration(marker_pub, state_server.cam_states);
    feature.VisualizePatch(cam_state, cam_state_id, cam0, photo_r, ss,
        gradientVector, residualVector);
  }

  return;
}

void MsckfVio::PhotometricFeatureJacobian(
    const FeatureIDType& feature_id,
    const std::vector<StateIDType>& cam_state_ids,
    MatrixXd& H_x, VectorXd& r) {

  // Stop playing the bagfile if printing images.
  if (PRINTIMAGES) {
    std::cout << "stopped playback" << std::endl;
    nh.setParam("/play_bag", false);
  }

  // Error state.
  calcErrorState();
  /*
  std::cout << "--- error state: ---\n " << std::endl;
  std::cout << "imu orientation:\n " << err_state_server.imu_state.orientation << std::endl;
  std::cout << "imu position\n" << err_state_server.imu_state.position << std::endl;

  int count = 0;
  for (auto cam_state : err_state_server.cam_states) {
    std::cout << " - cam " << count++ << " - \n" << std::endl;
    std::cout << "orientation: " << cam_state.second.orientation(0) << " "
              << cam_state.second.orientation(1) << " "
              << cam_state.second.orientation(2) << " " << std::endl;
    std::cout << "position:" << cam_state.second.position(0) << " "
              << cam_state.second.position(1) << " "
              << cam_state.second.position(2) << std::endl;
  }
  */

  const auto& feature = map_server[feature_id];

  // Check how many camera states in the provided camera
  // ids have actually seen this feature.
  vector<StateIDType> valid_cam_state_ids(0);
  for (const auto& cam_id : cam_state_ids) {
    if (feature.observations.find(cam_id) ==
        feature.observations.end()) continue;

    valid_cam_state_ids.push_back(cam_id);
  }

  int jacobian_row_size = 0;
  jacobian_row_size = N * N * valid_cam_state_ids.size();

  MatrixXd H_xi = MatrixXd::Zero(jacobian_row_size,
      21+state_server.cam_states.size()*7);
  MatrixXd H_yi = MatrixXd::Zero(jacobian_row_size,
      N*N+state_server.cam_states.size()+1);
  VectorXd r_i = VectorXd::Zero(jacobian_row_size);
  int stack_cntr = 0;

  for (const auto& cam_id : valid_cam_state_ids) {
    MatrixXd H_xl;
    MatrixXd H_yl;
    Eigen::VectorXd r_l = VectorXd::Zero(N*N);

    PhotometricMeasurementJacobian(cam_id, feature.id, H_xl, H_yl, r_l);
    auto cam_state_iter = state_server.cam_states.find(cam_id);
    int cam_state_cntr = std::distance(
        state_server.cam_states.begin(), cam_state_iter);

    // Stack the Jacobians.
    H_xi.block(stack_cntr, 0, H_xl.rows(), H_xl.cols()) = H_xl;
    H_yi.block(stack_cntr, 0, H_yl.rows(), H_yl.cols()) = H_yl;
    r_i.segment(stack_cntr, N*N) = r_l;
    stack_cntr += N*N;
  }

  // Project the residual and Jacobians onto the nullspace
  // of H_yj.
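  // The photometric error terms (irradiance, illumination and inverse
  // depth) are not kept in the filter state, so the measurement model
  //   r = H_x * dx + H_y * dy + n
  // is projected onto the left nullspace of H_y: with A satisfying
  // A^T H_y = 0, the update uses A^T r = (A^T H_x) dx + A^T n. The kernel
  // of H_y^T computed below spans exactly that nullspace.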
  // Get the nullspace.
  FullPivLU<MatrixXd> lu(H_yi.transpose());
  MatrixXd A_null_space = lu.kernel();

  /*
  JacobiSVD<MatrixXd> svd_helper(H_yi, ComputeFullU | ComputeThinV);
  int sv_size = 0;
  Eigen::VectorXd singularValues = svd_helper.singularValues();
  for (int i = 0; i < singularValues.size(); i++)
    if (singularValues[i] > 1e-12)
      sv_size++;

  MatrixXd A = svd_helper.matrixU().rightCols(jacobian_row_size - singularValues.size());
  */

  H_x = A_null_space.transpose() * H_xi;
  r = A_null_space.transpose() * r_i;

  if (PRINTIMAGES) {
    ofstream myfile;
    myfile.open("/home/raphael/dev/MSCKF_ws/log.txt");
    myfile << "Hx\n" << H_x << "r\n" << r
           << "from residual estimated error state: "
           << H_x.colPivHouseholderQr().solve(r) << endl;
    myfile.close();
    cout << "---------- LOGGED -------- " << endl;
  }
  if (PRINTIMAGES) {
    std::cout << "resume playback" << std::endl;
    nh.setParam("/play_bag", true);
  }

  return;
}

void MsckfVio::measurementJacobian(
    const StateIDType& cam_state_id,
    const FeatureIDType& feature_id,
    Matrix<double, 4, 6>& H_x, Matrix<double, 4, 3>& H_f, Vector4d& r) {

  // Prepare all the required data.
  const CAMState& cam_state = state_server.cam_states[cam_state_id];
  const Feature& feature = map_server[feature_id];

  // Cam0 pose.
  Matrix3d R_w_c0 = quaternionToRotation(cam_state.orientation);
  const Vector3d& t_c0_w = cam_state.position;

  // Cam1 pose.
  Matrix3d R_c0_c1 = CAMState::T_cam0_cam1.linear();
  Matrix3d R_w_c1 = CAMState::T_cam0_cam1.linear() * R_w_c0;
  Vector3d t_c1_w = t_c0_w - R_w_c1.transpose()*CAMState::T_cam0_cam1.translation();

  // 3d feature position in the world frame,
  // and its observation with the stereo cameras.
  const Vector3d& p_w = feature.position;
  const Vector4d& z = feature.observations.find(cam_state_id)->second;

  // Convert the feature position from the world frame to
  // the cam0 and cam1 frame.
  Vector3d p_c0 = R_w_c0 * (p_w-t_c0_w);
  Vector3d p_c1 = R_w_c1 * (p_w-t_c1_w);

  // Compute the Jacobians.
  Matrix<double, 4, 3> dz_dpc0 = Matrix<double, 4, 3>::Zero();
  dz_dpc0(0, 0) = 1 / p_c0(2);
  dz_dpc0(1, 1) = 1 / p_c0(2);
  dz_dpc0(0, 2) = -p_c0(0) / (p_c0(2)*p_c0(2));
  dz_dpc0(1, 2) = -p_c0(1) / (p_c0(2)*p_c0(2));

  Matrix<double, 4, 3> dz_dpc1 = Matrix<double, 4, 3>::Zero();
  dz_dpc1(2, 0) = 1 / p_c1(2);
  dz_dpc1(3, 1) = 1 / p_c1(2);
  dz_dpc1(2, 2) = -p_c1(0) / (p_c1(2)*p_c1(2));
  dz_dpc1(3, 2) = -p_c1(1) / (p_c1(2)*p_c1(2));

  Matrix<double, 3, 6> dpc0_dxc = Matrix<double, 3, 6>::Zero();

  // Original Jacobian.
  dpc0_dxc.leftCols(3) = skewSymmetric(p_c0);
  dpc0_dxc.rightCols(3) = -R_w_c0;

  Matrix<double, 3, 6> dpc1_dxc = Matrix<double, 3, 6>::Zero();
  dpc1_dxc.leftCols(3) = R_c0_c1 * skewSymmetric(p_c0);
  dpc1_dxc.rightCols(3) = -R_w_c1;

  Matrix3d dpc0_dpg = R_w_c0;
  Matrix3d dpc1_dpg = R_w_c1;

  H_x = dz_dpc0*dpc0_dxc + dz_dpc1*dpc1_dxc;
  H_f = dz_dpc0*dpc0_dpg + dz_dpc1*dpc1_dpg;

  // Compute the residual.
  r = z - Vector4d(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2),
      p_c1(0)/p_c1(2), p_c1(1)/p_c1(2));

  return;
}

void MsckfVio::featureJacobian(
    const FeatureIDType& feature_id,
    const std::vector<StateIDType>& cam_state_ids,
    MatrixXd& H_x, VectorXd& r) {

  const auto& feature = map_server[feature_id];

  // Check how many camera states in the provided camera
  // ids have actually seen this feature.
  vector<StateIDType> valid_cam_state_ids(0);
  for (const auto& cam_id : cam_state_ids) {
    if (feature.observations.find(cam_id) ==
        feature.observations.end()) continue;

    valid_cam_state_ids.push_back(cam_id);
  }

  int jacobian_row_size = 0;
  jacobian_row_size = 4 * valid_cam_state_ids.size();

  MatrixXd H_xj = MatrixXd::Zero(jacobian_row_size,
      21+state_server.cam_states.size()*6);
  MatrixXd H_fj = MatrixXd::Zero(jacobian_row_size, 3);
  VectorXd r_j = VectorXd::Zero(jacobian_row_size);
  int stack_cntr = 0;

  for (const auto& cam_id : valid_cam_state_ids) {

    Matrix<double, 4, 6> H_xi = Matrix<double, 4, 6>::Zero();
    Matrix<double, 4, 3> H_fi = Matrix<double, 4, 3>::Zero();
    Vector4d r_i = Vector4d::Zero();
    measurementJacobian(cam_id, feature.id, H_xi, H_fi, r_i);

    auto cam_state_iter = state_server.cam_states.find(cam_id);
    int cam_state_cntr = std::distance(
        state_server.cam_states.begin(), cam_state_iter);

    // Stack the Jacobians.
    H_xj.block<4, 6>(stack_cntr, 21+6*cam_state_cntr) = H_xi;
    H_fj.block<4, 3>(stack_cntr, 0) = H_fi;
    r_j.segment<4>(stack_cntr) = r_i;
    stack_cntr += 4;
  }

  // Project the residual and Jacobians onto the nullspace
  // of H_fj.
  /*
  JacobiSVD<MatrixXd> svd_helper(H_fj, ComputeFullU | ComputeThinV);
  int sv_size = 0;
  Eigen::VectorXd singularValues = svd_helper.singularValues();
  for (int i = 0; i < singularValues.size(); i++)
    if (singularValues[i] > 1e-5)
      sv_size++;
  cout << "sv size: " << sv_size << endl;

  MatrixXd A = svd_helper.matrixU().rightCols(
      jacobian_row_size - 3);
  */
  FullPivLU<MatrixXd> lu(H_fj.transpose());
  MatrixXd A = lu.kernel();

  H_x = A.transpose() * H_xj;
  r = A.transpose() * r_j;

  ofstream myfile;
  myfile.open("/home/raphael/dev/MSCKF_ws/log.txt");
  myfile << "Hx\n" << H_x << "r\n" << r
         << "from residual estimated error state: "
         << H_x.colPivHouseholderQr().solve(r) << endl;
  myfile.close();
  cout << "---------- LOGGED -------- " << endl;

  return;
}

void MsckfVio::measurementUpdate(const MatrixXd& H, const VectorXd& r) {

  if (H.rows() == 0 || r.rows() == 0) return;

  // Decompose the final Jacobian matrix to reduce computational
  // complexity as in Equation (28), (29).
  MatrixXd H_thin;
  VectorXd r_thin;
  int augmentationSize = 6;
  if (PHOTOMETRIC) augmentationSize = 7;

  /*
  if (H.rows() > H.cols()) {
    // Convert H to a sparse matrix.
    SparseMatrix<double> H_sparse = H.sparseView();

    // Perform QR decomposition on H_sparse.
    SPQR<SparseMatrix<double> > spqr_helper;
    spqr_helper.setSPQROrdering(SPQR_ORDERING_NATURAL);
    spqr_helper.compute(H_sparse);

    MatrixXd H_temp;
    VectorXd r_temp;
    (spqr_helper.matrixQ().transpose() * H).evalTo(H_temp);
    (spqr_helper.matrixQ().transpose() * r).evalTo(r_temp);

    H_thin = H_temp.topRows(21+state_server.cam_states.size()*augmentationSize);
    r_thin = r_temp.head(21+state_server.cam_states.size()*augmentationSize);

    //HouseholderQR<MatrixXd> qr_helper(H);
    //MatrixXd Q = qr_helper.householderQ();
    //MatrixXd Q1 = Q.leftCols(21+state_server.cam_states.size()*6);

    //H_thin = Q1.transpose() * H;
    //r_thin = Q1.transpose() * r;
  } else {
    H_thin = H;
    r_thin = r;
  }
  */

  // Compute the Kalman gain.
  const MatrixXd& P = state_server.state_cov;
  MatrixXd S = H*P*H.transpose() +
    Feature::observation_noise*MatrixXd::Identity(
        H.rows(), H.rows());
  //MatrixXd K_transpose = S.fullPivHouseholderQr().solve(H*P);
  MatrixXd K_transpose = S.ldlt().solve(H*P);
  MatrixXd K = K_transpose.transpose();

  // Compute the error of the state.
  VectorXd delta_x = K * r;

  // Update the IMU state.
  const VectorXd& delta_x_imu = delta_x.head<21>();

  if (//delta_x_imu.segment<3>(0).norm() > 0.15 ||
      //delta_x_imu.segment<3>(3).norm() > 0.15 ||
      delta_x_imu.segment<3>(6).norm() > 0.5 ||
      //delta_x_imu.segment<3>(9).norm() > 0.5 ||
      delta_x_imu.segment<3>(12).norm() > 1.0) {
    printf("delta velocity: %f\n", delta_x_imu.segment<3>(6).norm());
    printf("delta position: %f\n", delta_x_imu.segment<3>(12).norm());
    ROS_WARN("Update change is too large.");
    //return;
  }

  const Vector4d dq_imu =
    smallAngleQuaternion(delta_x_imu.head<3>());
  state_server.imu_state.orientation = quaternionMultiplication(
      dq_imu, state_server.imu_state.orientation);
  state_server.imu_state.gyro_bias += delta_x_imu.segment<3>(3);
  state_server.imu_state.velocity += delta_x_imu.segment<3>(6);
  state_server.imu_state.acc_bias += delta_x_imu.segment<3>(9);
  state_server.imu_state.position += delta_x_imu.segment<3>(12);

  const Vector4d dq_extrinsic =
    smallAngleQuaternion(delta_x_imu.segment<3>(15));
  state_server.imu_state.R_imu_cam0 = quaternionToRotation(
      dq_extrinsic) * state_server.imu_state.R_imu_cam0;
  state_server.imu_state.t_cam0_imu += delta_x_imu.segment<3>(18);

  // Update the camera states.
  auto cam_state_iter = state_server.cam_states.begin();
  for (int i = 0; i < state_server.cam_states.size();
      ++i, ++cam_state_iter) {
    const VectorXd& delta_x_cam = delta_x.segment(21+i*augmentationSize, augmentationSize);
    const Vector4d dq_cam = smallAngleQuaternion(delta_x_cam.head<3>());
    cam_state_iter->second.orientation = quaternionMultiplication(
        dq_cam, cam_state_iter->second.orientation);
    cam_state_iter->second.position += delta_x_cam.tail<3>();
  }

  // Update state covariance.
  MatrixXd I_KH = MatrixXd::Identity(K.rows(), H.cols()) - K*H;
  //state_server.state_cov = I_KH*state_server.state_cov*I_KH.transpose() +
  //  K*K.transpose()*Feature::observation_noise;
  state_server.state_cov = I_KH*state_server.state_cov;

  // Fix the covariance to be symmetric.
  MatrixXd state_cov_fixed = (state_server.state_cov +
      state_server.state_cov.transpose()) / 2.0;
  state_server.state_cov = state_cov_fixed;

  return;
}

bool MsckfVio::gatingTest(const MatrixXd& H, const VectorXd& r, const int& dof) {

  // Mahalanobis gating: gamma = r^T (H P H^T + R)^{-1} r is compared
  // against the chi squared table entry for the given degrees of freedom.
  MatrixXd P1 = H * state_server.state_cov * H.transpose();
  MatrixXd P2 = Feature::observation_noise *
    MatrixXd::Identity(H.rows(), H.rows());
  double gamma = r.transpose() * (P1+P2).ldlt().solve(r);

  cout << dof << " " << gamma << " " << chi_squared_test_table[dof] << endl;

  if (gamma < chi_squared_test_table[dof]) {
    //cout << "passed" << endl;
    return true;
  } else {
    //cout << "failed" << endl;
    return false;
  }
}

void MsckfVio::removeLostFeatures() {

  // Remove the features that lost track.
  // BTW, find the size of the final Jacobian matrix and residual vector.
  int jacobian_row_size = 0;
  int augmentationSize = 6;
  if (PHOTOMETRIC) augmentationSize = 7;

  vector<FeatureIDType> invalid_feature_ids(0);
  vector<FeatureIDType> processed_feature_ids(0);

  for (auto iter = map_server.begin();
      iter != map_server.end(); ++iter) {
    // Rename the feature to be checked.
    auto& feature = iter->second;

    // Pass the features that are still being tracked.
    if (feature.observations.find(state_server.imu_state.id) !=
        feature.observations.end()) continue;
    if (feature.observations.size() < 3) {
      invalid_feature_ids.push_back(feature.id);
      continue;
    }

    // Check if the feature can be initialized if it
    // has not been.
    if (!feature.is_initialized) {
      if (!feature.checkMotion(state_server.cam_states)) {
        invalid_feature_ids.push_back(feature.id);
        continue;
      } else {
        if (!feature.initializePosition(state_server.cam_states)) {
          invalid_feature_ids.push_back(feature.id);
          continue;
        }
      }
    }

    if (!feature.is_anchored) {
      if (!feature.initializeAnchor(cam0, N)) {
        invalid_feature_ids.push_back(feature.id);
        continue;
      }
    }

    // Just use the max. size here, as it gets shrunken down
    // afterwards anyway.
    if (PHOTOMETRIC)
      jacobian_row_size += N*N*feature.observations.size();
    else
      jacobian_row_size += 4*feature.observations.size() - 3;

    processed_feature_ids.push_back(feature.id);
  }

  //cout << "invalid/processed feature #: " <<
  //  invalid_feature_ids.size() << "/" <<
  //  processed_feature_ids.size() << endl;
  //cout << "jacobian row #: " << jacobian_row_size << endl;

  // Remove the features that do not have enough measurements.
  for (const auto& feature_id : invalid_feature_ids)
    map_server.erase(feature_id);

  // Return if there is no lost feature to be processed.
  if (processed_feature_ids.size() == 0) return;

  MatrixXd H_x = MatrixXd::Zero(jacobian_row_size,
      21+augmentationSize*state_server.cam_states.size());
  VectorXd r = VectorXd::Zero(jacobian_row_size);
  int stack_cntr = 0;

  // Process the features which lose track.
  for (const auto& feature_id : processed_feature_ids) {
    auto& feature = map_server[feature_id];

    vector<StateIDType> cam_state_ids(0);
    for (const auto& measurement : feature.observations)
      cam_state_ids.push_back(measurement.first);

    MatrixXd H_xj;
    VectorXd r_j;

    if (PHOTOMETRIC)
      PhotometricFeatureJacobian(feature.id, cam_state_ids, H_xj, r_j);
    else
      featureJacobian(feature.id, cam_state_ids, H_xj, r_j);

    if (gatingTest(H_xj, r_j, r_j.size())) { //, cam_state_ids.size()-1)) {
      H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj;
      r.segment(stack_cntr, r_j.rows()) = r_j;
      stack_cntr += H_xj.rows();
      cout << "passed" << endl;
    } else {
      cout << "rejected" << endl;
    }

    // Put an upper bound on the row size of measurement Jacobian,
    // which helps guarantee the execution time.
    if (stack_cntr > 1500) break;
  }

  H_x.conservativeResize(stack_cntr, H_x.cols());
  r.conservativeResize(stack_cntr);

  // Perform the measurement update step.
  measurementUpdate(H_x, r);

  // Remove all processed features from the map.
  for (const auto& feature_id : processed_feature_ids)
    map_server.erase(feature_id);

  return;
}

void MsckfVio::findRedundantCamStates(
    vector<StateIDType>& rm_cam_state_ids) {

  // Move the iterator to the key position.
  auto key_cam_state_iter = state_server.cam_states.end();
  for (int i = 0; i < 4; ++i)
    --key_cam_state_iter;
  auto cam_state_iter = key_cam_state_iter;
  ++cam_state_iter;
  auto first_cam_state_iter = state_server.cam_states.begin();

  // Pose of the key camera state.
  const Vector3d key_position =
      key_cam_state_iter->second.position;
  const Matrix3d key_rotation = quaternionToRotation(
      key_cam_state_iter->second.orientation);

  // Mark the camera states to be removed based on the
  // motion between states.
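  // A candidate next to the key state is considered redundant if
  // the relative motion is small (rotation below 0.2618 rad,
  // roughly 15 degrees, and translation below 0.4 m) while the
  // tracking rate is still high; otherwise the oldest state in
  // the window is removed instead.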
  for (int i = 0; i < 2; ++i) {
    const Vector3d position =
        cam_state_iter->second.position;
    const Matrix3d rotation = quaternionToRotation(
        cam_state_iter->second.orientation);

    double distance = (position-key_position).norm();
    double angle = AngleAxisd(
        rotation*key_rotation.transpose()).angle();

    //if (angle < 0.1745 && distance < 0.2 && tracking_rate > 0.5) {
    if (angle < 0.2618 && distance < 0.4 && tracking_rate > 0.5) {
      rm_cam_state_ids.push_back(cam_state_iter->first);
      ++cam_state_iter;
    } else {
      rm_cam_state_ids.push_back(first_cam_state_iter->first);
      ++first_cam_state_iter;
    }
  }

  // Sort the elements in the output vector.
  sort(rm_cam_state_ids.begin(), rm_cam_state_ids.end());

  return;
}

void MsckfVio::pruneCamStateBuffer() {

  if (state_server.cam_states.size() < max_cam_state_size)
    return;

  // Find two camera states to be removed.
  vector<StateIDType> rm_cam_state_ids(0);
  findRedundantCamStates(rm_cam_state_ids);

  // Find the size of the Jacobian matrix.
  int jacobian_row_size = 0;
  int augmentationSize = 6;
  if (PHOTOMETRIC)
    augmentationSize = 7;

  for (auto& item : map_server) {
    auto& feature = item.second;
    // Check how many camera states to be removed are associated
    // with this feature.
    vector<StateIDType> involved_cam_state_ids(0);
    for (const auto& cam_id : rm_cam_state_ids) {
      if (feature.observations.find(cam_id) !=
          feature.observations.end())
        involved_cam_state_ids.push_back(cam_id);
    }

    if (involved_cam_state_ids.size() == 0) continue;
    if (involved_cam_state_ids.size() == 1) {
      feature.observations.erase(involved_cam_state_ids[0]);
      continue;
    }

    if (!feature.is_initialized) {
      // Check if the feature can be initialized.
      if (!feature.checkMotion(state_server.cam_states)) {
        // If the feature cannot be initialized, just remove
        // the observations associated with the camera states
        // to be removed.
        for (const auto& cam_id : involved_cam_state_ids)
          feature.observations.erase(cam_id);
        continue;
      } else {
        if (!feature.initializePosition(state_server.cam_states)) {
          for (const auto& cam_id : involved_cam_state_ids)
            feature.observations.erase(cam_id);
          continue;
        }
      }
    }

    if (!feature.is_anchored) {
      if (!feature.initializeAnchor(cam0, N)) {
        for (const auto& cam_id : involved_cam_state_ids)
          feature.observations.erase(cam_id);
        continue;
      }
    }

    if (PHOTOMETRIC)
      jacobian_row_size += N*N*involved_cam_state_ids.size();
    else
      jacobian_row_size += 4*involved_cam_state_ids.size() - 3;
  }

  //cout << "jacobian row #: " << jacobian_row_size << endl;

  // Compute the Jacobian and residual.
  MatrixXd H_xj;
  VectorXd r_j;
  MatrixXd H_x = MatrixXd::Zero(jacobian_row_size,
      21+augmentationSize*state_server.cam_states.size());
  VectorXd r = VectorXd::Zero(jacobian_row_size);
  int stack_cntr = 0;

  for (auto& item : map_server) {
    auto& feature = item.second;
    // Check how many camera states to be removed are associated
    // with this feature.
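    // This second pass builds the actual Jacobians: the
    // information carried by the observations from the states
    // being removed is absorbed into the filter here, before
    // those observations are erased below.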
    vector<StateIDType> involved_cam_state_ids(0);
    for (const auto& cam_id : rm_cam_state_ids) {
      if (feature.observations.find(cam_id) !=
          feature.observations.end())
        involved_cam_state_ids.push_back(cam_id);
    }

    if (involved_cam_state_ids.size() == 0) continue;

    if (PHOTOMETRIC)
      PhotometricFeatureJacobian(
          feature.id, involved_cam_state_ids, H_xj, r_j);
    else
      featureJacobian(feature.id, involved_cam_state_ids, H_xj, r_j);

    if (gatingTest(H_xj, r_j, r_j.size())) { // involved_cam_state_ids.size())) {
      H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj;
      r.segment(stack_cntr, r_j.rows()) = r_j;
      stack_cntr += H_xj.rows();
      cout << "passed" << endl;
    } else {
      cout << "rejected" << endl;
    }

    for (const auto& cam_id : involved_cam_state_ids)
      feature.observations.erase(cam_id);
  }

  H_x.conservativeResize(stack_cntr, H_x.cols());
  r.conservativeResize(stack_cntr);

  // Perform measurement update.
  measurementUpdate(H_x, r);

  // Remove the pruned camera states from the covariance matrix
  // and the state vector.
  for (const auto& cam_id : rm_cam_state_ids) {
    int cam_sequence = std::distance(state_server.cam_states.begin(),
        state_server.cam_states.find(cam_id));
    int cam_state_start = 21 + augmentationSize*cam_sequence;
    int cam_state_end = cam_state_start + augmentationSize;

    // Remove the corresponding rows and columns in the state
    // covariance matrix.
    if (cam_state_end < state_server.state_cov.rows()) {
      state_server.state_cov.block(cam_state_start, 0,
          state_server.state_cov.rows()-cam_state_end,
          state_server.state_cov.cols()) =
        state_server.state_cov.block(cam_state_end, 0,
            state_server.state_cov.rows()-cam_state_end,
            state_server.state_cov.cols());

      state_server.state_cov.block(0, cam_state_start,
          state_server.state_cov.rows(),
          state_server.state_cov.cols()-cam_state_end) =
        state_server.state_cov.block(0, cam_state_end,
            state_server.state_cov.rows(),
            state_server.state_cov.cols()-cam_state_end);

      state_server.state_cov.conservativeResize(
          state_server.state_cov.rows()-augmentationSize,
          state_server.state_cov.cols()-augmentationSize);
    } else {
      state_server.state_cov.conservativeResize(
          state_server.state_cov.rows()-augmentationSize,
          state_server.state_cov.cols()-augmentationSize);
    }

    // Remove this camera state in the state vector.
    state_server.cam_states.erase(cam_id);
    cam0.moving_window.erase(cam_id);
    cam1.moving_window.erase(cam_id);

    // Remove this camera state in the true state vector.
    true_state_server.cam_states.erase(cam_id);
    err_state_server.cam_states.erase(cam_id);
  }

  return;
}

void MsckfVio::onlineReset() {

  // Never perform online reset if position std threshold
  // is non-positive.
  if (position_std_threshold <= 0) return;
  static long long int online_reset_counter = 0;

  // Check the uncertainty of positions to determine if
  // the system can be reset.
  double position_x_std = std::sqrt(state_server.state_cov(12, 12));
  double position_y_std = std::sqrt(state_server.state_cov(13, 13));
  double position_z_std = std::sqrt(state_server.state_cov(14, 14));

  if (position_x_std < position_std_threshold &&
      position_y_std < position_std_threshold &&
      position_z_std < position_std_threshold) return;

  ROS_WARN("Start %lld online reset procedure...",
      ++online_reset_counter);
  ROS_INFO("Standard deviation in xyz: %f, %f, %f",
      position_x_std, position_y_std, position_z_std);

  // Remove all existing camera states.
  state_server.cam_states.clear();

  // Clear all existing features in the map.
  map_server.clear();

  // Reset the state covariance.
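  // Re-read the initial covariance parameters from the parameter
  // server so the reset restores the same uncertainty the filter
  // started with.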
  double gyro_bias_cov, acc_bias_cov, velocity_cov;
  nh.param("initial_covariance/velocity", velocity_cov, 0.25);
  nh.param("initial_covariance/gyro_bias", gyro_bias_cov, 1e-4);
  nh.param("initial_covariance/acc_bias", acc_bias_cov, 1e-2);

  double extrinsic_rotation_cov, extrinsic_translation_cov;
  nh.param("initial_covariance/extrinsic_rotation_cov",
      extrinsic_rotation_cov, 3.0462e-4);
  nh.param("initial_covariance/extrinsic_translation_cov",
      extrinsic_translation_cov, 1e-4);

  // Reset the irradiance covariance.
  nh.param("initial_covariance/irradiance_frame_bias",
      irradiance_frame_bias, 0.1);

  state_server.state_cov = MatrixXd::Zero(21, 21);
  for (int i = 3; i < 6; ++i)
    state_server.state_cov(i, i) = gyro_bias_cov;
  for (int i = 6; i < 9; ++i)
    state_server.state_cov(i, i) = velocity_cov;
  for (int i = 9; i < 12; ++i)
    state_server.state_cov(i, i) = acc_bias_cov;
  for (int i = 15; i < 18; ++i)
    state_server.state_cov(i, i) = extrinsic_rotation_cov;
  for (int i = 18; i < 21; ++i)
    state_server.state_cov(i, i) = extrinsic_translation_cov;

  ROS_WARN("%lld online reset complete...", online_reset_counter);
  return;
}

void MsckfVio::publish(const ros::Time& time) {

  // Convert the IMU frame to the body frame.
  const IMUState& imu_state = state_server.imu_state;
  Eigen::Isometry3d T_i_w = Eigen::Isometry3d::Identity();
  T_i_w.linear() = quaternionToRotation(
      imu_state.orientation).transpose();
  T_i_w.translation() = imu_state.position;

  Eigen::Isometry3d T_b_w = IMUState::T_imu_body * T_i_w *
      IMUState::T_imu_body.inverse();
  Eigen::Vector3d body_velocity =
      IMUState::T_imu_body.linear() * imu_state.velocity;

  // Generate the camera frame from the IMU frame.
  Eigen::Isometry3d T_i_c = Eigen::Isometry3d::Identity();
  T_i_c.linear() = imu_state.R_imu_cam0;
  T_i_c.translation() = imu_state.t_cam0_imu;

  // Publish tf.
  if (publish_tf) {
    tf::Transform T_b_w_tf;
    tf::transformEigenToTF(T_b_w, T_b_w_tf);
    tf_pub.sendTransform(tf::StampedTransform(
        T_b_w_tf, time, fixed_frame_id, child_frame_id));

    tf::Transform T_i_c_tf;
    tf::transformEigenToTF(T_i_c, T_i_c_tf);
    tf_pub.sendTransform(tf::StampedTransform(
        T_i_c_tf, time, child_frame_id, "camera"));
  }

  // Publish the odometry.
  nav_msgs::Odometry odom_msg;
  odom_msg.header.stamp = time;
  odom_msg.header.frame_id = fixed_frame_id;
  odom_msg.child_frame_id = child_frame_id;

  tf::poseEigenToMsg(T_b_w, odom_msg.pose.pose);
  tf::vectorEigenToMsg(body_velocity, odom_msg.twist.twist.linear);

  // Convert the covariance.
  Matrix3d P_oo = state_server.state_cov.block<3, 3>(0, 0);
  Matrix3d P_op = state_server.state_cov.block<3, 3>(0, 12);
  Matrix3d P_po = state_server.state_cov.block<3, 3>(12, 0);
  Matrix3d P_pp = state_server.state_cov.block<3, 3>(12, 12);
  Matrix<double, 6, 6> P_imu_pose = Matrix<double, 6, 6>::Zero();
  P_imu_pose << P_pp, P_po, P_op, P_oo;

  Matrix<double, 6, 6> H_pose = Matrix<double, 6, 6>::Zero();
  H_pose.block<3, 3>(0, 0) = IMUState::T_imu_body.linear();
  H_pose.block<3, 3>(3, 3) = IMUState::T_imu_body.linear();
  Matrix<double, 6, 6> P_body_pose = H_pose *
      P_imu_pose * H_pose.transpose();

  for (int i = 0; i < 6; ++i)
    for (int j = 0; j < 6; ++j)
      odom_msg.pose.covariance[6*i+j] = P_body_pose(i, j);

  // Construct the covariance for the velocity.
  Matrix3d P_imu_vel = state_server.state_cov.block<3, 3>(6, 6);
  Matrix3d H_vel = IMUState::T_imu_body.linear();
  Matrix3d P_body_vel = H_vel * P_imu_vel * H_vel.transpose();
  for (int i = 0; i < 3; ++i)
    for (int j = 0; j < 3; ++j)
      odom_msg.twist.covariance[i*6+j] = P_body_vel(i, j);

  odom_pub.publish(odom_msg);

  // Publish the 3D positions of the features that
  // have been initialized.
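  // The feature map is published as an unorganized PCL point
  // cloud (height 1, width equal to the number of points),
  // expressed in the fixed frame.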
  pcl::PointCloud<pcl::PointXYZ>::Ptr feature_msg_ptr(
      new pcl::PointCloud<pcl::PointXYZ>());
  feature_msg_ptr->header.frame_id = fixed_frame_id;
  feature_msg_ptr->height = 1;
  for (const auto& item : map_server) {
    const auto& feature = item.second;
    if (feature.is_initialized) {
      Vector3d feature_position =
          IMUState::T_imu_body.linear() * feature.position;
      feature_msg_ptr->points.push_back(pcl::PointXYZ(
          feature_position(0), feature_position(1), feature_position(2)));
    }
  }
  feature_msg_ptr->width = feature_msg_ptr->points.size();

  feature_pub.publish(feature_msg_ptr);

  return;
}

} // namespace msckf_vio