/*
 * COPYRIGHT AND PERMISSION NOTICE
 * Penn Software MSCKF_VIO
 * Copyright (C) 2017 The Trustees of the University of Pennsylvania
 * All rights reserved.
 */

// NOTE: the include targets below were reconstructed from the
// identifiers used in this file.
#include <iostream>
#include <iomanip>
#include <cmath>
#include <iterator>
#include <algorithm>

#include <Eigen/SVD>
#include <Eigen/QR>
#include <Eigen/SparseCore>
#include <Eigen/SPQRSupport>
#include <boost/math/distributions/chi_squared.hpp>

#include <eigen_conversions/eigen_msg.h>
#include <tf_conversions/tf_eigen.h>
#include <std_srvs/Trigger.h>
#include <sensor_msgs/PointCloud2.h>
#include <cv_bridge/cv_bridge.h>

#include <msckf_vio/msckf_vio.h>
#include <msckf_vio/math_utils.hpp>
#include <msckf_vio/utils.h>

using namespace std;
using namespace Eigen;

namespace msckf_vio{

// Static member variables in IMUState class.
StateIDType IMUState::next_id = 0;
double IMUState::gyro_noise = 0.001;
double IMUState::acc_noise = 0.01;
double IMUState::gyro_bias_noise = 0.001;
double IMUState::acc_bias_noise = 0.01;
Vector3d IMUState::gravity = Vector3d(0, 0, -GRAVITY_ACCELERATION);
Isometry3d IMUState::T_imu_body = Isometry3d::Identity();

// Static member variables in CAMState class.
Isometry3d CAMState::T_cam0_cam1 = Isometry3d::Identity();

// Static member variables in Feature class.
FeatureIDType Feature::next_id = 0;
double Feature::observation_noise = 0.01;
Feature::OptimizationConfig Feature::optimization_config;

map<int, double> MsckfVio::chi_squared_test_table;

MsckfVio::MsckfVio(ros::NodeHandle& pnh):
  is_gravity_set(false),
  is_first_img(true),
  image_sub(10),
  nh(pnh) {
  return;
}

bool MsckfVio::loadParameters() {
  // Frame id
  nh.param<string>("fixed_frame_id", fixed_frame_id, "world");
  nh.param<string>("child_frame_id", child_frame_id, "robot");
  nh.param<bool>("publish_tf", publish_tf, true);
  nh.param<double>("frame_rate", frame_rate, 40.0);
  nh.param<double>("position_std_threshold", position_std_threshold, 8.0);

  nh.param<double>("rotation_threshold", rotation_threshold, 0.2618);
  nh.param<double>("translation_threshold", translation_threshold, 0.4);
  nh.param<double>("tracking_rate_threshold", tracking_rate_threshold, 0.5);

  // Feature optimization parameters
  nh.param<double>("feature/config/translation_threshold",
      Feature::optimization_config.translation_threshold, 0.2);

  // Noise related parameters
  nh.param<double>("noise/gyro", IMUState::gyro_noise, 0.001);
  nh.param<double>("noise/acc", IMUState::acc_noise, 0.01);
  nh.param<double>("noise/gyro_bias", IMUState::gyro_bias_noise, 0.001);
  nh.param<double>("noise/acc_bias", IMUState::acc_bias_noise, 0.01);
  nh.param<double>("noise/feature", Feature::observation_noise, 0.01);

  // Use variance instead of standard deviation.
  IMUState::gyro_noise *= IMUState::gyro_noise;
  IMUState::acc_noise *= IMUState::acc_noise;
  IMUState::gyro_bias_noise *= IMUState::gyro_bias_noise;
  IMUState::acc_bias_noise *= IMUState::acc_bias_noise;
  Feature::observation_noise *= Feature::observation_noise;

  // Set the initial IMU state.
  // The initial orientation and position will be set to the origin
  // implicitly. But the initial velocity and bias can be
  // set by parameters.
  // TODO: is it reasonable to set the initial bias to 0?
  nh.param<double>("initial_state/velocity/x",
      state_server.imu_state.velocity(0), 0.0);
  nh.param<double>("initial_state/velocity/y",
      state_server.imu_state.velocity(1), 0.0);
  nh.param<double>("initial_state/velocity/z",
      state_server.imu_state.velocity(2), 0.0);

  // The initial covariance of orientation and position can be
  // set to 0. But for velocity, bias and extrinsic parameters,
  // there should be nontrivial uncertainty.
  double gyro_bias_cov, acc_bias_cov, velocity_cov;
  nh.param<double>("initial_covariance/velocity", velocity_cov, 0.25);
  nh.param<double>("initial_covariance/gyro_bias", gyro_bias_cov, 1e-4);
  nh.param<double>("initial_covariance/acc_bias", acc_bias_cov, 1e-2);

  double extrinsic_rotation_cov, extrinsic_translation_cov;
  nh.param<double>("initial_covariance/extrinsic_rotation_cov",
      extrinsic_rotation_cov, 3.0462e-4);
  nh.param<double>("initial_covariance/extrinsic_translation_cov",
      extrinsic_translation_cov, 1e-4);

  // Get the camera information (used for back projection).
  nh.param<string>("cam0/distortion_model",
      cam0.distortion_model, string("radtan"));
  nh.param<string>("cam1/distortion_model",
      cam1.distortion_model, string("radtan"));

  vector<int> cam0_resolution_temp(2);
  nh.getParam("cam0/resolution", cam0_resolution_temp);
  cam0.resolution[0] = cam0_resolution_temp[0];
  cam0.resolution[1] = cam0_resolution_temp[1];

  vector<int> cam1_resolution_temp(2);
  nh.getParam("cam1/resolution", cam1_resolution_temp);
  cam1.resolution[0] = cam1_resolution_temp[0];
  cam1.resolution[1] = cam1_resolution_temp[1];

  vector<double> cam0_intrinsics_temp(4);
  nh.getParam("cam0/intrinsics", cam0_intrinsics_temp);
  cam0.intrinsics[0] = cam0_intrinsics_temp[0];
  cam0.intrinsics[1] = cam0_intrinsics_temp[1];
  cam0.intrinsics[2] = cam0_intrinsics_temp[2];
  cam0.intrinsics[3] = cam0_intrinsics_temp[3];

  vector<double> cam1_intrinsics_temp(4);
  nh.getParam("cam1/intrinsics", cam1_intrinsics_temp);
  cam1.intrinsics[0] = cam1_intrinsics_temp[0];
  cam1.intrinsics[1] = cam1_intrinsics_temp[1];
  cam1.intrinsics[2] = cam1_intrinsics_temp[2];
  cam1.intrinsics[3] = cam1_intrinsics_temp[3];

  vector<double> cam0_distortion_coeffs_temp(4);
  nh.getParam("cam0/distortion_coeffs", cam0_distortion_coeffs_temp);
  cam0.distortion_coeffs[0] = cam0_distortion_coeffs_temp[0];
  cam0.distortion_coeffs[1] = cam0_distortion_coeffs_temp[1];
  cam0.distortion_coeffs[2] = cam0_distortion_coeffs_temp[2];
  cam0.distortion_coeffs[3] = cam0_distortion_coeffs_temp[3];

  vector<double> cam1_distortion_coeffs_temp(4);
  nh.getParam("cam1/distortion_coeffs", cam1_distortion_coeffs_temp);
  cam1.distortion_coeffs[0] = cam1_distortion_coeffs_temp[0];
  cam1.distortion_coeffs[1] = cam1_distortion_coeffs_temp[1];
  cam1.distortion_coeffs[2] = cam1_distortion_coeffs_temp[2];
  cam1.distortion_coeffs[3] = cam1_distortion_coeffs_temp[3];

  state_server.state_cov = MatrixXd::Zero(21, 21);
  for (int i = 3; i < 6; ++i)
    state_server.state_cov(i, i) = gyro_bias_cov;
  for (int i = 6; i < 9; ++i)
    state_server.state_cov(i, i) = velocity_cov;
  for (int i = 9; i < 12; ++i)
    state_server.state_cov(i, i) = acc_bias_cov;
  for (int i = 15; i < 18; ++i)
    state_server.state_cov(i, i) = extrinsic_rotation_cov;
  for (int i = 18; i < 21; ++i)
    state_server.state_cov(i, i) = extrinsic_translation_cov;
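  // For reference, the 21-dimensional IMU error state is ordered as
  // follows (this matches the index ranges used above and the blocks
  // filled in processModel() and measurementUpdate()):
  //   [0-2]   orientation error (delta theta)
  //   [3-5]   gyro bias error
  //   [6-8]   velocity error
  //   [9-11]  acc bias error
  //   [12-14] position error
  //   [15-17] extrinsic rotation error (IMU-cam0)
  //   [18-20] extrinsic translation error (IMU-cam0)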
  // Transformation offsets between the frames involved.
  Isometry3d T_imu_cam0 = utils::getTransformEigen(nh, "cam0/T_cam_imu");
  Isometry3d T_cam0_imu = T_imu_cam0.inverse();
  state_server.imu_state.R_imu_cam0 = T_cam0_imu.linear().transpose();
  state_server.imu_state.t_cam0_imu = T_cam0_imu.translation();

  CAMState::T_cam0_cam1 =
    utils::getTransformEigen(nh, "cam1/T_cn_cnm1");
  IMUState::T_imu_body =
    utils::getTransformEigen(nh, "T_imu_body").inverse();

  // Maximum number of camera states to be stored.
  nh.param<int>("max_cam_state_size", max_cam_state_size, 30);
  //cam_state_size = max_cam_state_size;

  ROS_INFO("===========================================");
  ROS_INFO("fixed frame id: %s", fixed_frame_id.c_str());
  ROS_INFO("child frame id: %s", child_frame_id.c_str());
  ROS_INFO("publish tf: %d", publish_tf);
  ROS_INFO("frame rate: %f", frame_rate);
  ROS_INFO("position std threshold: %f", position_std_threshold);
  ROS_INFO("Keyframe rotation threshold: %f", rotation_threshold);
  ROS_INFO("Keyframe translation threshold: %f", translation_threshold);
  ROS_INFO("Keyframe tracking rate threshold: %f", tracking_rate_threshold);
  ROS_INFO("gyro noise: %.10f", IMUState::gyro_noise);
  ROS_INFO("gyro bias noise: %.10f", IMUState::gyro_bias_noise);
  ROS_INFO("acc noise: %.10f", IMUState::acc_noise);
  ROS_INFO("acc bias noise: %.10f", IMUState::acc_bias_noise);
  ROS_INFO("observation noise: %.10f", Feature::observation_noise);
  ROS_INFO("initial velocity: %f, %f, %f",
      state_server.imu_state.velocity(0),
      state_server.imu_state.velocity(1),
      state_server.imu_state.velocity(2));
  ROS_INFO("initial gyro bias cov: %f", gyro_bias_cov);
  ROS_INFO("initial acc bias cov: %f", acc_bias_cov);
  ROS_INFO("initial velocity cov: %f", velocity_cov);
  ROS_INFO("initial extrinsic rotation cov: %f", extrinsic_rotation_cov);
  ROS_INFO("initial extrinsic translation cov: %f", extrinsic_translation_cov);

  cout << T_imu_cam0.linear() << endl;
  cout << T_imu_cam0.translation().transpose() << endl;

  cout << "OpenCV version : " << CV_VERSION << endl;
  cout << "Major version : " << CV_MAJOR_VERSION << endl;
  cout << "Minor version : " << CV_MINOR_VERSION << endl;
  cout << "Subminor version : " << CV_SUBMINOR_VERSION << endl;

  ROS_INFO("max camera state #: %d", max_cam_state_size);
  ROS_INFO("===========================================");
  return true;
}

bool MsckfVio::createRosIO() {
  odom_pub = nh.advertise<nav_msgs::Odometry>("odom", 10);
  feature_pub = nh.advertise<sensor_msgs::PointCloud2>(
      "feature_point_cloud", 10);

  reset_srv = nh.advertiseService("reset",
      &MsckfVio::resetCallback, this);

  imu_sub = nh.subscribe("imu", 100,
      &MsckfVio::imuCallback, this);

  cam0_img_sub.subscribe(nh, "cam0_image", 10);
  cam1_img_sub.subscribe(nh, "cam1_image", 10);
  feature_sub.subscribe(nh, "features", 10);

  image_sub.connectInput(cam0_img_sub, cam1_img_sub, feature_sub);
  image_sub.registerCallback(&MsckfVio::imageCallback, this);

  mocap_odom_sub = nh.subscribe("mocap_odom", 10,
      &MsckfVio::mocapOdomCallback, this);
  mocap_odom_pub = nh.advertise<nav_msgs::Odometry>("gt_odom", 1);

  return true;
}

bool MsckfVio::initialize() {
  if (!loadParameters()) return false;
  ROS_INFO("Finish loading ROS parameters...");

  // Initialize state server
  state_server.continuous_noise_cov =
    Matrix<double, 12, 12>::Zero();
  state_server.continuous_noise_cov.block<3, 3>(0, 0) =
    Matrix3d::Identity()*IMUState::gyro_noise;
  state_server.continuous_noise_cov.block<3, 3>(3, 3) =
    Matrix3d::Identity()*IMUState::gyro_bias_noise;
  state_server.continuous_noise_cov.block<3, 3>(6, 6) =
    Matrix3d::Identity()*IMUState::acc_noise;
  state_server.continuous_noise_cov.block<3, 3>(9, 9) =
    Matrix3d::Identity()*IMUState::acc_bias_noise;
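  // Note on the table built below: boost::math::quantile(dist, 0.05)
  // returns the 0.05 quantile of the chi-squared distribution with
  // i degrees of freedom, i.e. the value below which 5% of the
  // probability mass lies. gatingTest() later accepts a measurement
  // only if its Mahalanobis distance stays below this threshold.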
  // Initialize the chi squared test table with confidence
  // level 0.95.
  for (int i = 1; i < 100; ++i) {
    boost::math::chi_squared chi_squared_dist(i);
    chi_squared_test_table[i] =
      boost::math::quantile(chi_squared_dist, 0.05);
  }

  if (!createRosIO()) return false;
  ROS_INFO("Finish creating ROS IO...");

  return true;
}

void MsckfVio::imuCallback(
    const sensor_msgs::ImuConstPtr& msg) {

  // IMU msgs are pushed into a buffer instead of being processed
  // immediately. They are consumed when the next image arrives,
  // which makes it easy to handle the transfer delay between the
  // IMU and the images.
  imu_msg_buffer.push_back(*msg);

  if (!is_gravity_set) {
    if (imu_msg_buffer.size() < 200) return;
    //if (imu_msg_buffer.size() < 10) return;
    initializeGravityAndBias();
    is_gravity_set = true;
  }

  return;
}
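// Per-image processing pipeline: every synchronized stereo frame
// triggers, in this order, IMU propagation up to the image stamp,
// state augmentation with a new camera pose, bookkeeping of feature
// observations and the image moving window, an EKF update with
// features that lost track, pruning of redundant camera states, and
// finally publishing. See the timing instrumentation below.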
void MsckfVio::imageCallback(
    const sensor_msgs::ImageConstPtr& cam0_img,
    const sensor_msgs::ImageConstPtr& cam1_img,
    const CameraMeasurementConstPtr& feature_msg) {

  // Return if the gravity vector has not been set.
  if (!is_gravity_set) return;

  // Start the system if the first image is received.
  // The frame where the first image is received will be
  // the origin.
  if (is_first_img) {
    is_first_img = false;
    state_server.imu_state.time = feature_msg->header.stamp.toSec();
  }

  static double max_processing_time = 0.0;
  static int critical_time_cntr = 0;
  double processing_start_time = ros::Time::now().toSec();

  // Propagate the IMU state using the IMU msgs
  // that are received before the image feature_msg.
  ros::Time start_time = ros::Time::now();
  batchImuProcessing(feature_msg->header.stamp.toSec());
  double imu_processing_time = (
      ros::Time::now()-start_time).toSec();

  // Augment the state vector.
  start_time = ros::Time::now();
  stateAugmentation(feature_msg->header.stamp.toSec());
  double state_augmentation_time = (
      ros::Time::now()-start_time).toSec();

  // Add new observations for existing features or new
  // features in the map server.
  start_time = ros::Time::now();
  addFeatureObservations(feature_msg);
  double add_observations_time = (
      ros::Time::now()-start_time).toSec();

  // Add new images to the moving window.
  start_time = ros::Time::now();
  manageMovingWindow(cam0_img, cam1_img, feature_msg);
  double manage_moving_window_time = (
      ros::Time::now()-start_time).toSec();

  // Perform measurement update if necessary.
  start_time = ros::Time::now();
  removeLostFeatures();
  double remove_lost_features_time = (
      ros::Time::now()-start_time).toSec();

  start_time = ros::Time::now();
  pruneCamStateBuffer();
  double prune_cam_states_time = (
      ros::Time::now()-start_time).toSec();

  // Publish the odometry.
  start_time = ros::Time::now();
  publish(feature_msg->header.stamp);
  double publish_time = (
      ros::Time::now()-start_time).toSec();

  // Reset the system if necessary.
  onlineReset();

  double processing_end_time = ros::Time::now().toSec();
  double processing_time =
    processing_end_time - processing_start_time;
  if (processing_time > 1.0/frame_rate) {
    ++critical_time_cntr;
    ROS_INFO("\033[1;31mTotal processing time %f/%d...\033[0m",
        processing_time, critical_time_cntr);
    printf("IMU processing time: %f/%f\n",
        imu_processing_time, imu_processing_time/processing_time);
    printf("State augmentation time: %f/%f\n",
        state_augmentation_time, state_augmentation_time/processing_time);
    printf("Add observations time: %f/%f\n",
        add_observations_time, add_observations_time/processing_time);
    printf("Remove lost features time: %f/%f\n",
        remove_lost_features_time, remove_lost_features_time/processing_time);
    printf("Remove camera states time: %f/%f\n",
        prune_cam_states_time, prune_cam_states_time/processing_time);
    printf("Publish time: %f/%f\n",
        publish_time, publish_time/processing_time);
  }

  return;
}

void MsckfVio::manageMovingWindow(
    const sensor_msgs::ImageConstPtr& cam0_img,
    const sensor_msgs::ImageConstPtr& cam1_img,
    const CameraMeasurementConstPtr& feature_msg) {

  // Save the exposure time into the moving window. It is encoded in
  // the image header's frame_id in nanoseconds; clamp to [1, 100] ms.
  cam0.moving_window[state_server.imu_state.id].exposureTime_ms =
    strtod(cam0_img->header.frame_id.data(), NULL) / 1000000;
  cam1.moving_window[state_server.imu_state.id].exposureTime_ms =
    strtod(cam1_img->header.frame_id.data(), NULL) / 1000000;
  if (cam0.moving_window[state_server.imu_state.id].exposureTime_ms < 1)
    cam0.moving_window[state_server.imu_state.id].exposureTime_ms = 1;
  if (cam1.moving_window[state_server.imu_state.id].exposureTime_ms < 1)
    cam1.moving_window[state_server.imu_state.id].exposureTime_ms = 1;
  if (cam0.moving_window[state_server.imu_state.id].exposureTime_ms > 100)
    cam0.moving_window[state_server.imu_state.id].exposureTime_ms = 100;
  if (cam1.moving_window[state_server.imu_state.id].exposureTime_ms > 100)
    cam1.moving_window[state_server.imu_state.id].exposureTime_ms = 100;

  // Get the current image.
  cv_bridge::CvImageConstPtr cam0_img_ptr = cv_bridge::toCvShare(cam0_img,
      sensor_msgs::image_encodings::MONO8);
  cv_bridge::CvImageConstPtr cam1_img_ptr = cv_bridge::toCvShare(cam1_img,
      sensor_msgs::image_encodings::MONO8);

  // Save the image information into the moving window.
  cam0.moving_window[state_server.imu_state.id].image =
    cam0_img_ptr->image.clone();
  cam1.moving_window[state_server.imu_state.id].image =
    cam1_img_ptr->image.clone();

  // TODO: handle any massive overflow correctly (the window should be
  // pruned before this ever triggers).
  while (cam0.moving_window.size() > 100) {
    cam1.moving_window.erase(cam1.moving_window.begin());
    cam0.moving_window.erase(cam0.moving_window.begin());
  }
}
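// Gravity and gyro-bias initialization: this assumes the sensor is
// held (roughly) static while the first ~200 IMU messages are
// collected, so the averaged angular velocity is taken as the gyro
// bias and the averaged specific force points opposite to gravity.
// The initial orientation is then chosen so that the measured gravity
// maps onto the world -z axis.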
void MsckfVio::initializeGravityAndBias() {

  // Initialize gravity and gyro bias.
  Vector3d sum_angular_vel = Vector3d::Zero();
  Vector3d sum_linear_acc = Vector3d::Zero();

  for (const auto& imu_msg : imu_msg_buffer) {
    Vector3d angular_vel = Vector3d::Zero();
    Vector3d linear_acc = Vector3d::Zero();

    tf::vectorMsgToEigen(imu_msg.angular_velocity, angular_vel);
    tf::vectorMsgToEigen(imu_msg.linear_acceleration, linear_acc);

    sum_angular_vel += angular_vel;
    sum_linear_acc += linear_acc;
  }

  state_server.imu_state.gyro_bias =
    sum_angular_vel / imu_msg_buffer.size();
  //IMUState::gravity =
  //  -sum_linear_acc / imu_msg_buffer.size();

  // This is the gravity in the IMU frame.
  Vector3d gravity_imu =
    sum_linear_acc / imu_msg_buffer.size();

  // Initialize the initial orientation, so that the estimation
  // is consistent with the inertial frame.
  double gravity_norm = gravity_imu.norm();
  IMUState::gravity = Vector3d(0.0, 0.0, -gravity_norm);

  Quaterniond q0_i_w = Quaterniond::FromTwoVectors(
      gravity_imu, -IMUState::gravity);
  state_server.imu_state.orientation =
    rotationToQuaternion(q0_i_w.toRotationMatrix().transpose());

  // printf("gravity Norm %f\n", gravity_norm);
  // printf("linear_acc %f, %f, %f\n", gravity_imu(0), gravity_imu(1), gravity_imu(2));
  // printf("quaterniond: %f, %f, %f, %f\n", q0_i_w.w(), q0_i_w.x(), q0_i_w.y(), q0_i_w.z());

  return;
}

bool MsckfVio::resetCallback(
    std_srvs::Trigger::Request& req,
    std_srvs::Trigger::Response& res) {

  ROS_WARN("Start resetting msckf vio...");
  // Temporarily shutdown the subscribers to prevent the
  // state from updating.
  imu_sub.shutdown();

  // Reset the IMU state.
  IMUState& imu_state = state_server.imu_state;
  imu_state.time = 0.0;
  imu_state.orientation = Vector4d(0.0, 0.0, 0.0, 1.0);
  imu_state.position = Vector3d::Zero();
  imu_state.velocity = Vector3d::Zero();
  imu_state.gyro_bias = Vector3d::Zero();
  imu_state.acc_bias = Vector3d::Zero();
  imu_state.orientation_null = Vector4d(0.0, 0.0, 0.0, 1.0);
  imu_state.position_null = Vector3d::Zero();
  imu_state.velocity_null = Vector3d::Zero();

  // Remove all existing camera states.
  state_server.cam_states.clear();

  // Reset the state covariance.
  double gyro_bias_cov, acc_bias_cov, velocity_cov;
  nh.param<double>("initial_covariance/velocity", velocity_cov, 0.25);
  nh.param<double>("initial_covariance/gyro_bias", gyro_bias_cov, 1e-4);
  nh.param<double>("initial_covariance/acc_bias", acc_bias_cov, 1e-2);

  double extrinsic_rotation_cov, extrinsic_translation_cov;
  nh.param<double>("initial_covariance/extrinsic_rotation_cov",
      extrinsic_rotation_cov, 3.0462e-4);
  nh.param<double>("initial_covariance/extrinsic_translation_cov",
      extrinsic_translation_cov, 1e-4);

  state_server.state_cov = MatrixXd::Zero(21, 21);
  for (int i = 3; i < 6; ++i)
    state_server.state_cov(i, i) = gyro_bias_cov;
  for (int i = 6; i < 9; ++i)
    state_server.state_cov(i, i) = velocity_cov;
  for (int i = 9; i < 12; ++i)
    state_server.state_cov(i, i) = acc_bias_cov;
  for (int i = 15; i < 18; ++i)
    state_server.state_cov(i, i) = extrinsic_rotation_cov;
  for (int i = 18; i < 21; ++i)
    state_server.state_cov(i, i) = extrinsic_translation_cov;

  // Clear all existing features in the map.
  map_server.clear();

  // Clear the IMU msg buffer.
  imu_msg_buffer.clear();

  // Reset the starting flags.
  is_gravity_set = false;
  is_first_img = true;

  // Restart the subscribers.
  imu_sub = nh.subscribe("imu", 100,
      &MsckfVio::imuCallback, this);
  // feature_sub = nh.subscribe("features", 40,
  //     &MsckfVio::featureCallback, this);

  // TODO: When can the reset fail?
  res.success = true;
  ROS_WARN("Resetting msckf vio completed...");
  return true;
}
void MsckfVio::mocapOdomCallback(
    const nav_msgs::OdometryConstPtr& msg) {
  static bool first_mocap_odom_msg = true;

  // If this is the first mocap odometry message, set
  // the initial frame.
  if (first_mocap_odom_msg) {
    Quaterniond orientation;
    Vector3d translation;
    tf::pointMsgToEigen(
        msg->pose.pose.position, translation);
    tf::quaternionMsgToEigen(
        msg->pose.pose.orientation, orientation);
    //tf::vectorMsgToEigen(
    //    msg->transform.translation, translation);
    //tf::quaternionMsgToEigen(
    //    msg->transform.rotation, orientation);
    mocap_initial_frame.linear() = orientation.toRotationMatrix();
    mocap_initial_frame.translation() = translation;
    first_mocap_odom_msg = false;
  }

  // Transform the ground truth.
  Quaterniond orientation;
  Vector3d translation;
  //tf::vectorMsgToEigen(
  //    msg->transform.translation, translation);
  //tf::quaternionMsgToEigen(
  //    msg->transform.rotation, orientation);
  tf::pointMsgToEigen(
      msg->pose.pose.position, translation);
  tf::quaternionMsgToEigen(
      msg->pose.pose.orientation, orientation);

  Eigen::Isometry3d T_b_v_gt;
  T_b_v_gt.linear() = orientation.toRotationMatrix();
  T_b_v_gt.translation() = translation;
  Eigen::Isometry3d T_b_w_gt = mocap_initial_frame.inverse() * T_b_v_gt;

  //Eigen::Vector3d body_velocity_gt;
  //tf::vectorMsgToEigen(msg->twist.twist.linear, body_velocity_gt);
  //body_velocity_gt = mocap_initial_frame.linear().transpose() *
  //  body_velocity_gt;

  // Ground truth tf.
  if (publish_tf) {
    tf::Transform T_b_w_gt_tf;
    tf::transformEigenToTF(T_b_w_gt, T_b_w_gt_tf);
    tf_pub.sendTransform(tf::StampedTransform(
          T_b_w_gt_tf, msg->header.stamp, fixed_frame_id,
          child_frame_id+"_mocap"));
  }

  // Ground truth odometry.
  nav_msgs::Odometry mocap_odom_msg;
  mocap_odom_msg.header.stamp = msg->header.stamp;
  mocap_odom_msg.header.frame_id = fixed_frame_id;
  mocap_odom_msg.child_frame_id = child_frame_id+"_mocap";

  tf::poseEigenToMsg(T_b_w_gt, mocap_odom_msg.pose.pose);
  //tf::vectorEigenToMsg(body_velocity_gt,
  //    mocap_odom_msg.twist.twist.linear);

  mocap_odom_pub.publish(mocap_odom_msg);
  return;
}

void MsckfVio::batchImuProcessing(const double& time_bound) {
  // Count how many IMU msgs in the buffer are used.
  int used_imu_msg_cntr = 0;

  for (const auto& imu_msg : imu_msg_buffer) {
    double imu_time = imu_msg.header.stamp.toSec();
    if (imu_time < state_server.imu_state.time) {
      ++used_imu_msg_cntr;
      continue;
    }
    if (imu_time > time_bound) break;

    // Convert the msgs.
    Vector3d m_gyro, m_acc;
    tf::vectorMsgToEigen(imu_msg.angular_velocity, m_gyro);
    tf::vectorMsgToEigen(imu_msg.linear_acceleration, m_acc);

    // Execute the process model.
    processModel(imu_time, m_gyro, m_acc);
    ++used_imu_msg_cntr;
  }

  // Set the state ID for the new IMU state.
  state_server.imu_state.id = IMUState::next_id++;

  // Remove all used IMU msgs.
  imu_msg_buffer.erase(imu_msg_buffer.begin(),
      imu_msg_buffer.begin()+used_imu_msg_cntr);

  return;
}
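// Error-state propagation: processModel() below linearizes the IMU
// dynamics around the current estimate. With the continuous-time model
// dx_dot = F*dx + G*n, the discrete transition matrix is approximated
// by the truncated series
//   Phi = I + F*dt + 0.5*(F*dt)^2 + (1/6)*(F*dt)^3,
// which is accurate enough for IMU periods around 0.01 s.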
void MsckfVio::processModel(const double& time,
    const Vector3d& m_gyro,
    const Vector3d& m_acc) {

  // Remove the bias from the measured gyro and acceleration.
  IMUState& imu_state = state_server.imu_state;
  Vector3d gyro = m_gyro - imu_state.gyro_bias;
  Vector3d acc = m_acc - imu_state.acc_bias;
  double dtime = time - imu_state.time;

  // Compute the discrete transition and noise covariance matrices.
  Matrix<double, 21, 21> F = Matrix<double, 21, 21>::Zero();
  Matrix<double, 21, 12> G = Matrix<double, 21, 12>::Zero();

  F.block<3, 3>(0, 0) = -skewSymmetric(gyro);
  F.block<3, 3>(0, 3) = -Matrix3d::Identity();
  F.block<3, 3>(6, 0) = -quaternionToRotation(
      imu_state.orientation).transpose()*skewSymmetric(acc);
  F.block<3, 3>(6, 9) = -quaternionToRotation(
      imu_state.orientation).transpose();
  F.block<3, 3>(12, 6) = Matrix3d::Identity();

  G.block<3, 3>(0, 0) = -Matrix3d::Identity();
  G.block<3, 3>(3, 3) = Matrix3d::Identity();
  G.block<3, 3>(6, 6) = -quaternionToRotation(
      imu_state.orientation).transpose();
  G.block<3, 3>(9, 9) = Matrix3d::Identity();

  // Approximate the matrix exponential to the 3rd order,
  // which can be considered accurate enough assuming
  // dtime is within 0.01s.
  Matrix<double, 21, 21> Fdt = F * dtime;
  Matrix<double, 21, 21> Fdt_square = Fdt * Fdt;
  Matrix<double, 21, 21> Fdt_cube = Fdt_square * Fdt;
  Matrix<double, 21, 21> Phi = Matrix<double, 21, 21>::Identity() +
    Fdt + 0.5*Fdt_square + (1.0/6.0)*Fdt_cube;

  // Propagate the state using 4th order Runge-Kutta.
  predictNewState(dtime, gyro, acc);

  // Propagate the state covariance matrix.
  Matrix<double, 21, 21> Q = Phi*G*state_server.continuous_noise_cov*
    G.transpose()*Phi.transpose()*dtime;
  state_server.state_cov.block<21, 21>(0, 0) =
    Phi*state_server.state_cov.block<21, 21>(0, 0)*Phi.transpose() + Q;

  if (state_server.cam_states.size() > 0) {
    state_server.state_cov.block(
        0, 21, 21, state_server.state_cov.cols()-21) =
      Phi * state_server.state_cov.block(
          0, 21, 21, state_server.state_cov.cols()-21);
    state_server.state_cov.block(
        21, 0, state_server.state_cov.rows()-21, 21) =
      state_server.state_cov.block(
          21, 0, state_server.state_cov.rows()-21, 21) * Phi.transpose();
  }

  MatrixXd state_cov_fixed = (state_server.state_cov +
      state_server.state_cov.transpose()) / 2.0;
  state_server.state_cov = state_cov_fixed;

  // Update the state corresponding to the null space.
  imu_state.orientation_null = imu_state.orientation;
  imu_state.position_null = imu_state.position;
  imu_state.velocity_null = imu_state.velocity;

  // Update the state info.
  state_server.imu_state.time = time;
  return;
}
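// State prediction: the quaternion is integrated in closed form with
// the zeroth-order integrator
//   q(t+dt) = ( cos(|w|dt/2)*I + sin(|w|dt/2)/|w| * Omega(w) ) * q(t),
// where Omega(w) is the 4x4 matrix built below, and velocity/position
// are integrated with a classic 4th-order Runge-Kutta scheme that
// reuses the half-step and full-step orientations for the k2..k4 terms.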
void MsckfVio::predictNewState(const double& dt,
    const Vector3d& gyro,
    const Vector3d& acc) {

  // TODO: Will performing the forward integration using
  // the inverse of the quaternion give better accuracy?
  double gyro_norm = gyro.norm();
  Matrix4d Omega = Matrix4d::Zero();
  Omega.block<3, 3>(0, 0) = -skewSymmetric(gyro);
  Omega.block<3, 1>(0, 3) = gyro;
  Omega.block<1, 3>(3, 0) = -gyro;

  Vector4d& q = state_server.imu_state.orientation;
  Vector3d& v = state_server.imu_state.velocity;
  Vector3d& p = state_server.imu_state.position;

  // Some pre-calculation.
  Vector4d dq_dt, dq_dt2;
  if (gyro_norm > 1e-5) {
    dq_dt = (cos(gyro_norm*dt*0.5)*Matrix4d::Identity() +
        1/gyro_norm*sin(gyro_norm*dt*0.5)*Omega) * q;
    dq_dt2 = (cos(gyro_norm*dt*0.25)*Matrix4d::Identity() +
        1/gyro_norm*sin(gyro_norm*dt*0.25)*Omega) * q;
  } else {
    dq_dt = (Matrix4d::Identity()+0.5*dt*Omega) *
      cos(gyro_norm*dt*0.5) * q;
    dq_dt2 = (Matrix4d::Identity()+0.25*dt*Omega) *
      cos(gyro_norm*dt*0.25) * q;
  }
  Matrix3d dR_dt_transpose = quaternionToRotation(dq_dt).transpose();
  Matrix3d dR_dt2_transpose = quaternionToRotation(dq_dt2).transpose();

  // k1 = f(tn, yn)
  Vector3d k1_v_dot = quaternionToRotation(q).transpose()*acc +
    IMUState::gravity;
  Vector3d k1_p_dot = v;

  // k2 = f(tn+dt/2, yn+k1*dt/2)
  Vector3d k1_v = v + k1_v_dot*dt/2;
  Vector3d k2_v_dot = dR_dt2_transpose*acc + IMUState::gravity;
  Vector3d k2_p_dot = k1_v;

  // k3 = f(tn+dt/2, yn+k2*dt/2)
  Vector3d k2_v = v + k2_v_dot*dt/2;
  Vector3d k3_v_dot = dR_dt2_transpose*acc + IMUState::gravity;
  Vector3d k3_p_dot = k2_v;

  // k4 = f(tn+dt, yn+k3*dt)
  Vector3d k3_v = v + k3_v_dot*dt;
  Vector3d k4_v_dot = dR_dt_transpose*acc + IMUState::gravity;
  Vector3d k4_p_dot = k3_v;

  // yn+1 = yn + dt/6*(k1+2*k2+2*k3+k4)
  q = dq_dt;
  quaternionNormalize(q);
  v = v + dt/6*(k1_v_dot+2*k2_v_dot+2*k3_v_dot+k4_v_dot);
  p = p + dt/6*(k1_p_dot+2*k2_p_dot+2*k3_p_dot+k4_p_dot);

  return;
}

void MsckfVio::stateAugmentation(const double& time) {

  const Matrix3d& R_i_c = state_server.imu_state.R_imu_cam0;
  const Vector3d& t_c_i = state_server.imu_state.t_cam0_imu;

  // Add a new camera state to the state server.
  Matrix3d R_w_i = quaternionToRotation(
      state_server.imu_state.orientation);
  Matrix3d R_w_c = R_i_c * R_w_i;
  Vector3d t_c_w = state_server.imu_state.position +
    R_w_i.transpose()*t_c_i;

  state_server.cam_states[state_server.imu_state.id] =
    CAMState(state_server.imu_state.id);
  CAMState& cam_state = state_server.cam_states[
    state_server.imu_state.id];

  cam_state.time = time;
  cam_state.orientation = rotationToQuaternion(R_w_c);
  cam_state.position = t_c_w;

  cam_state.orientation_null = cam_state.orientation;
  cam_state.position_null = cam_state.position;

  // Update the covariance matrix of the state.
  // To simplify computation, the matrix J below is the nontrivial block
  // in Equation (16) of "A Multi-State Constraint Kalman Filter for
  // Vision-aided Inertial Navigation".
  Matrix<double, 6, 21> J = Matrix<double, 6, 21>::Zero();
  J.block<3, 3>(0, 0) = R_i_c;
  J.block<3, 3>(0, 15) = Matrix3d::Identity();
  J.block<3, 3>(3, 0) = skewSymmetric(R_w_i.transpose()*t_c_i);
  //J.block<3, 3>(3, 0) = -R_w_i.transpose()*skewSymmetric(t_c_i);
  J.block<3, 3>(3, 12) = Matrix3d::Identity();
  J.block<3, 3>(3, 18) = Matrix3d::Identity();

  // Resize the state covariance matrix.
  size_t old_rows = state_server.state_cov.rows();
  size_t old_cols = state_server.state_cov.cols();
  state_server.state_cov.conservativeResize(old_rows+6, old_cols+6);

  // Rename some matrix blocks for convenience.
  const Matrix<double, 21, 21>& P11 =
    state_server.state_cov.block<21, 21>(0, 0);
  const MatrixXd& P12 =
    state_server.state_cov.block(0, 21, 21, old_cols-21);

  // Fill in the augmented state covariance.
  state_server.state_cov.block(old_rows, 0, 6, old_cols) << J*P11, J*P12;
  state_server.state_cov.block(0, old_cols, old_rows, 6) =
    state_server.state_cov.block(old_rows, 0, 6, old_cols).transpose();
  state_server.state_cov.block<6, 6>(old_rows, old_cols) =
    J * P11 * J.transpose();

  // Fix the covariance to be symmetric.
  MatrixXd state_cov_fixed = (state_server.state_cov +
      state_server.state_cov.transpose()) / 2.0;
  state_server.state_cov = state_cov_fixed;

  return;
}

void MsckfVio::addFeatureObservations(
    const CameraMeasurementConstPtr& msg) {

  StateIDType state_id = state_server.imu_state.id;
  int curr_feature_num = map_server.size();
  int tracked_feature_num = 0;

  // Add new observations for existing features or new
  // features in the map server.
  for (const auto& feature : msg->features) {
    if (map_server.find(feature.id) == map_server.end()) {
      // This is a new feature.
      map_server[feature.id] = Feature(feature.id);
      map_server[feature.id].observations[state_id] =
        Vector4d(feature.u0, feature.v0,
            feature.u1, feature.v1);
    } else {
      // This is an old feature.
      map_server[feature.id].observations[state_id] =
        Vector4d(feature.u0, feature.v0,
            feature.u1, feature.v1);
      ++tracked_feature_num;
    }
  }

  tracking_rate =
    static_cast<double>(tracked_feature_num) /
    static_cast<double>(curr_feature_num);

  return;
}
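// Photometric measurement model (this fork's extension): in addition
// to the geometric reprojection error, each feature carries an NxN
// anchor patch whose pixels are projected into the current frame. Per
// patch pixel the residual is the irradiance difference
//   r_I = I_j(pi(p)) - (G_frame * G_feature * I_est + b_frame + b_feature),
// where the G/b terms are the estimated frame and feature gain/bias
// illumination parameters. The Jacobians chain the image gradient
// dI/dh with the projection and feature-position derivatives below.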
void MsckfVio::PhotometricMeasurementJacobian(
    const StateIDType& cam_state_id,
    const FeatureIDType& feature_id,
    Matrix<double, 4, 6>& H_x, Matrix<double, 4, 3>& H_f, Vector4d& r) {

  // Prepare all the required data.
  const CAMState& cam_state = state_server.cam_states[cam_state_id];
  const Feature& feature = map_server[feature_id];

  // Cam0 pose.
  Matrix3d R_w_c0 = quaternionToRotation(cam_state.orientation);
  const Vector3d& t_c0_w = cam_state.position;

  // temp N
  const int N = 3;

  // Cam1 pose.
  Matrix3d R_c0_c1 = CAMState::T_cam0_cam1.linear();
  Matrix3d R_w_c1 = CAMState::T_cam0_cam1.linear() * R_w_c0;
  Vector3d t_c1_w = t_c0_w -
    R_w_c1.transpose()*CAMState::T_cam0_cam1.translation();

  // 3d feature position in the world frame.
  // And its observation with the stereo cameras.
  const Vector3d& p_w = feature.position;

  // Photometric observation.
  std::vector<double> photo_z;

  // Individual Jacobians.
  Matrix<double, 1, 2> dI_dhj = Matrix<double, 1, 2>::Zero();
  Matrix<double, 2, 3> dh_dCpij = Matrix<double, 2, 3>::Zero();
  Matrix<double, 2, 3> dh_dGpij = Matrix<double, 2, 3>::Zero();
  Matrix<double, 2, 6> dh_dXplj = Matrix<double, 2, 6>::Zero();
  Matrix<double, 3, 1> dGpi_drhoj = Matrix<double, 3, 1>::Zero();
  Matrix<double, 3, 6> dGpi_XpAj = Matrix<double, 3, 6>::Zero();

  // One line of the NxN Jacobians.
  Eigen::Matrix<double, 1, 1> H_rhoj;
  Eigen::Matrix<double, 1, 6> H_plj;
  Eigen::Matrix<double, 1, 6> H_pAj;

  // Combined Jacobians.
  Eigen::MatrixXd H_rho(N*N, 1);
  Eigen::MatrixXd H_pl(N*N, 6);
  Eigen::MatrixXd H_pA(N*N, 6);

  auto frame = cam0.moving_window.find(cam_state_id)->second.image;

  int count = 0;
  double dx, dy;

  for (auto point : feature.anchorPatch_3d) {
    Eigen::Vector3d p_c0 = R_w_c0 * (p_w-t_c0_w);
    cv::Point2f p_in_c0 = feature.projectPositionToCamera(
        cam_state, cam_state_id, cam0, point);

    // Add the observation.
    photo_z.push_back(feature.PixelIrradiance(p_in_c0, frame));

    // Add the Jacobian:
    // frame derivative, computed by convolving with the kernel [-1, 0, 1].
    dx = feature.PixelIrradiance(cv::Point2f(p_in_c0.x+1, p_in_c0.y), frame) -
      feature.PixelIrradiance(cv::Point2f(p_in_c0.x-1, p_in_c0.y), frame);
    dy = feature.PixelIrradiance(cv::Point2f(p_in_c0.x, p_in_c0.y+1), frame) -
      feature.PixelIrradiance(cv::Point2f(p_in_c0.x, p_in_c0.y-1), frame);
    dI_dhj(0, 0) = dx;
    dI_dhj(0, 1) = dy;

    // dh / d{}^Cp_{ij}
    dh_dCpij.block<2, 2>(0, 0) = Eigen::Matrix<double, 2, 2>::Identity();
    dh_dCpij(0, 2) = -(p_c0(0))/(p_c0(2)*p_c0(2));
    dh_dCpij(1, 2) = -(p_c0(1))/(p_c0(2)*p_c0(2));
    dh_dGpij = dh_dCpij *
      quaternionToRotation(cam_state.orientation).transpose();

    // dh / dX_{pl}
    dh_dXplj.block<2, 3>(0, 0) = dh_dCpij * skewSymmetric(point);
    dh_dXplj.block<2, 3>(0, 3) = dh_dCpij *
      -quaternionToRotation(cam_state.orientation).transpose();

    // d{}^Gp_{ij} / drho_i
    double rho = feature.anchor_rho;
    dGpi_drhoj = feature.T_anchor_w.linear() *
      Eigen::Vector3d(-feature.anchorPatch_ideal[count].x/(rho*rho),
          -feature.anchorPatch_ideal[count].y/(rho*rho),
          1/(rho*rho));

    dGpi_XpAj.block<3, 3>(0, 0) = skewSymmetric(
        Eigen::Vector3d(feature.anchorPatch_ideal[count].x/(rho),
          feature.anchorPatch_ideal[count].y/(rho),
          1/(rho)));
    dGpi_XpAj.block<3, 3>(0, 3) = Matrix3d::Identity();

    // Intermediate Jacobians.
    H_rhoj = dI_dhj * dh_dGpij * dGpi_drhoj;  // 1 x 1
    H_plj = dI_dhj * dh_dXplj;                // 1 x 6
    H_pAj = dI_dhj * dh_dGpij * dGpi_XpAj;    // 1 x 6

    H_rho.block<1, 1>(count, 0) = H_rhoj;
    H_pl.block<1, 6>(count, 0) = H_plj;
    H_pA.block<1, 6>(count, 0) = H_pAj;

    count++;
  }

  // Calculate the residual.

  // visu - residual
  //printf("-----\n");

  // Observation.
  const Vector4d& z = feature.observations.find(cam_state_id)->second;

  // Estimated photometric measurement.
  std::vector<double> estimate_irradiance;
  std::vector<double> estimate_photo_z;
  IlluminationParameter estimated_illumination;
  feature.estimate_FrameIrradiance(cam_state, cam_state_id, cam0,
      estimate_irradiance, estimated_illumination);

  for (auto& estimate_irradiance_j : estimate_irradiance)
    estimate_photo_z.push_back(estimate_irradiance_j *
        estimated_illumination.frame_gain * estimated_illumination.feature_gain +
        estimated_illumination.frame_bias + estimated_illumination.feature_bias);

  std::vector<double> photo_r;
  // Calculate the photometric residual.
  for (int i = 0; i < photo_z.size(); i++)
    photo_r.push_back(photo_z[i] - estimate_photo_z[i]);

  // visu - residual
  //for(int i = 0; i < photo_z.size(); i++)
  //  printf("%.4f = %.4f - %.4f\n",photo_r[i], photo_z[i], estimate_photo_z[i]);

  // Final Jacobians.
  // cout << "------------------------" << endl;
  // cout << "rho" << H_rho.rows() << "x" << H_rho.cols() << "\n" << H_rho << endl;
  // cout << "l" << H_pl.rows() << "x" << H_pl.cols() << "\n" << H_pl << endl;
  // cout << "A" << H_pA.rows() << "x" << H_pA.cols() << "\n" << H_pA << endl;

  MatrixXd H_xl = MatrixXd::Zero(N*N, 21+state_server.cam_states.size()*7);
  MatrixXd H_yl = MatrixXd::Zero(N*N, N*N+state_server.cam_states.size()+1);

  // Set the anchor Jacobian.
  // Get the position of the anchor in the cam states.
  auto cam_state_anchor = state_server.cam_states.find(
      feature.observations.begin()->first);
  int cam_state_cntr_anchor = std::distance(
      state_server.cam_states.begin(), cam_state_anchor);
  H_xl.block(0, 21+cam_state_cntr_anchor*7, N*N, 6) = -H_pA;

  // Set the frame Jacobian.
  // Get the position of the current frame in the cam states.
  auto cam_state_iter = state_server.cam_states.find(cam_state_id);
  int cam_state_cntr = std::distance(
      state_server.cam_states.begin(), cam_state_iter);

  // Set the Jacobian of the state.
  H_xl.block(0, 21+cam_state_cntr*7, N*N, 6) = -H_pl;

  // Set ones for the irradiance bias.
  H_xl.block(0, 21+cam_state_cntr*7+6, N*N, 1) = Eigen::ArrayXd::Ones(N*N);

  // Set the irradiance error block.
  H_yl.block(0, 0, N*N, N*N) = estimated_illumination.feature_gain *
    estimated_illumination.frame_gain * Eigen::MatrixXd::Identity(N*N, N*N);

  // TODO: make this calculation more fluent.
  for (int i = 0; i < N*N; i++)
    H_yl(i, N*N+cam_state_cntr) = estimate_irradiance[i];
  H_yl.block(0, N*N+state_server.cam_states.size(), N*N, 1) = -H_rho;

  // Original calculation:
  // Convert the feature position from the world frame to
  // the cam0 and cam1 frames.
  Vector3d p_c0 = R_w_c0 * (p_w-t_c0_w);
  Vector3d p_c1 = R_w_c1 * (p_w-t_c1_w);

  // Compute the residual.
  r = z - Vector4d(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2),
      p_c1(0)/p_c1(2), p_c1(1)/p_c1(2));

  // Compute the Jacobians.
  Matrix<double, 4, 3> dz_dpc0 = Matrix<double, 4, 3>::Zero();
  dz_dpc0(0, 0) = 1 / p_c0(2);
  dz_dpc0(1, 1) = 1 / p_c0(2);
  dz_dpc0(0, 2) = -p_c0(0) / (p_c0(2)*p_c0(2));
  dz_dpc0(1, 2) = -p_c0(1) / (p_c0(2)*p_c0(2));

  Matrix<double, 4, 3> dz_dpc1 = Matrix<double, 4, 3>::Zero();
  dz_dpc1(2, 0) = 1 / p_c1(2);
  dz_dpc1(3, 1) = 1 / p_c1(2);
  dz_dpc1(2, 2) = -p_c1(0) / (p_c1(2)*p_c1(2));
  dz_dpc1(3, 2) = -p_c1(1) / (p_c1(2)*p_c1(2));

  Matrix<double, 3, 6> dpc0_dxc = Matrix<double, 3, 6>::Zero();
  dpc0_dxc.leftCols(3) = skewSymmetric(p_c0);
  dpc0_dxc.rightCols(3) = -R_w_c0;

  Matrix<double, 3, 6> dpc1_dxc = Matrix<double, 3, 6>::Zero();
  dpc1_dxc.leftCols(3) = R_c0_c1 * skewSymmetric(p_c0);
  dpc1_dxc.rightCols(3) = -R_w_c1;

  Matrix3d dpc0_dpg = R_w_c0;
  Matrix3d dpc1_dpg = R_w_c1;

  H_x = dz_dpc0*dpc0_dxc + dz_dpc1*dpc1_dxc;
  H_f = dz_dpc0*dpc0_dpg + dz_dpc1*dpc1_dpg;

  photo_z.clear();
  return;
}
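// Per-feature Jacobian assembly: the per-camera Jacobians are stacked,
// and the result is projected onto the left nullspace of H_f so that
// the feature position error drops out of the residual model. With
// r = H_x*dx + H_f*dp + n, choosing A such that A^T*H_f = 0 gives
//   A^T*r = (A^T*H_x)*dx + A^T*n,
// which is a valid EKF measurement model in the state error alone.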
void MsckfVio::PhotometricFeatureJacobian(
    const FeatureIDType& feature_id,
    const std::vector<StateIDType>& cam_state_ids,
    MatrixXd& H_x, VectorXd& r) {

  const auto& feature = map_server[feature_id];

  // Check how many camera states in the provided camera
  // id list have actually seen this feature.
  vector<StateIDType> valid_cam_state_ids(0);
  for (const auto& cam_id : cam_state_ids) {
    if (feature.observations.find(cam_id) ==
        feature.observations.end()) continue;
    valid_cam_state_ids.push_back(cam_id);
  }

  int jacobian_row_size = 0;
  jacobian_row_size = 4 * valid_cam_state_ids.size();

  MatrixXd H_xj = MatrixXd::Zero(jacobian_row_size,
      21+state_server.cam_states.size()*6);
  MatrixXd H_fj = MatrixXd::Zero(jacobian_row_size, 3);
  VectorXd r_j = VectorXd::Zero(jacobian_row_size);
  int stack_cntr = 0;

  // visu - residual
  printf("_____FEATURE:_____\n");
  // visu - feature
  //cam0.featureVisu.release();

  for (const auto& cam_id : valid_cam_state_ids) {

    Matrix<double, 4, 6> H_xi = Matrix<double, 4, 6>::Zero();
    Matrix<double, 4, 3> H_fi = Matrix<double, 4, 3>::Zero();
    Vector4d r_i = Vector4d::Zero();
    PhotometricMeasurementJacobian(cam_id, feature.id, H_xi, H_fi, r_i);

    auto cam_state_iter = state_server.cam_states.find(cam_id);
    int cam_state_cntr = std::distance(
        state_server.cam_states.begin(), cam_state_iter);

    // Stack the Jacobians.
    H_xj.block<4, 6>(stack_cntr, 21+6*cam_state_cntr) = H_xi;
    H_fj.block<4, 3>(stack_cntr, 0) = H_fi;
    r_j.segment<4>(stack_cntr) = r_i;
    stack_cntr += 4;
  }

  // visu - feature
  /*
  if(!cam0.featureVisu.empty() && cam0.featureVisu.size().width > 3000)
    imshow("feature", cam0.featureVisu);
  cvWaitKey(1);

  if((ros::Time::now() - image_save_time).toSec() > 1)
  {
    std::stringstream ss;
    ss << "/home/raphael/dev/MSCKF_ws/img/feature_" << std::to_string(ros::Time::now().toSec()) << ".jpg";
    imwrite(ss.str(), cam0.featureVisu);
    image_save_time = ros::Time::now();
  }
  */

  // Project the residual and Jacobians onto the nullspace
  // of H_fj.
  JacobiSVD<MatrixXd> svd_helper(H_fj, ComputeFullU | ComputeThinV);
  MatrixXd A = svd_helper.matrixU().rightCols(
      jacobian_row_size - 3);

  H_x = A.transpose() * H_xj;
  r = A.transpose() * r_j;

  return;
}
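// Geometric (stereo reprojection) measurement model: each observation
// is z = [u0, v0, u1, v1]^T in normalized image coordinates, predicted
// from the feature position in the two camera frames as
//   z_hat = [x0/z0, y0/z0, x1/z1, y1/z1]^T.
// The Jacobians below follow from the chain rule through the camera
// poses and the world-frame feature position.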
void MsckfVio::measurementJacobian(
    const StateIDType& cam_state_id,
    const FeatureIDType& feature_id,
    Matrix<double, 4, 6>& H_x, Matrix<double, 4, 3>& H_f, Vector4d& r) {

  // Prepare all the required data.
  const CAMState& cam_state = state_server.cam_states[cam_state_id];
  const Feature& feature = map_server[feature_id];

  // Cam0 pose.
  Matrix3d R_w_c0 = quaternionToRotation(cam_state.orientation);
  const Vector3d& t_c0_w = cam_state.position;

  // Cam1 pose.
  Matrix3d R_c0_c1 = CAMState::T_cam0_cam1.linear();
  Matrix3d R_w_c1 = CAMState::T_cam0_cam1.linear() * R_w_c0;
  Vector3d t_c1_w = t_c0_w -
    R_w_c1.transpose()*CAMState::T_cam0_cam1.translation();

  // 3d feature position in the world frame.
  // And its observation with the stereo cameras.
  const Vector3d& p_w = feature.position;
  const Vector4d& z = feature.observations.find(cam_state_id)->second;

  // Convert the feature position from the world frame to
  // the cam0 and cam1 frames.
  Vector3d p_c0 = R_w_c0 * (p_w-t_c0_w);
  Vector3d p_c1 = R_w_c1 * (p_w-t_c1_w);

  // Compute the Jacobians.
  Matrix<double, 4, 3> dz_dpc0 = Matrix<double, 4, 3>::Zero();
  dz_dpc0(0, 0) = 1 / p_c0(2);
  dz_dpc0(1, 1) = 1 / p_c0(2);
  dz_dpc0(0, 2) = -p_c0(0) / (p_c0(2)*p_c0(2));
  dz_dpc0(1, 2) = -p_c0(1) / (p_c0(2)*p_c0(2));

  Matrix<double, 4, 3> dz_dpc1 = Matrix<double, 4, 3>::Zero();
  dz_dpc1(2, 0) = 1 / p_c1(2);
  dz_dpc1(3, 1) = 1 / p_c1(2);
  dz_dpc1(2, 2) = -p_c1(0) / (p_c1(2)*p_c1(2));
  dz_dpc1(3, 2) = -p_c1(1) / (p_c1(2)*p_c1(2));

  Matrix<double, 3, 6> dpc0_dxc = Matrix<double, 3, 6>::Zero();
  dpc0_dxc.leftCols(3) = skewSymmetric(p_c0);
  dpc0_dxc.rightCols(3) = -R_w_c0;

  Matrix<double, 3, 6> dpc1_dxc = Matrix<double, 3, 6>::Zero();
  dpc1_dxc.leftCols(3) = R_c0_c1 * skewSymmetric(p_c0);
  dpc1_dxc.rightCols(3) = -R_w_c1;

  Matrix3d dpc0_dpg = R_w_c0;
  Matrix3d dpc1_dpg = R_w_c1;

  H_x = dz_dpc0*dpc0_dxc + dz_dpc1*dpc1_dxc;
  H_f = dz_dpc0*dpc0_dpg + dz_dpc1*dpc1_dpg;

  // Modify the measurement Jacobian to ensure the
  // observability constraint.
  Matrix<double, 4, 6> A = H_x;
  Matrix<double, 6, 1> u = Matrix<double, 6, 1>::Zero();
  u.block<3, 1>(0, 0) = quaternionToRotation(
      cam_state.orientation_null) * IMUState::gravity;
  u.block<3, 1>(3, 0) = skewSymmetric(
      p_w-cam_state.position_null) * IMUState::gravity;
  H_x = A - A*u*(u.transpose()*u).inverse()*u.transpose();
  H_f = -H_x.block<4, 3>(0, 3);

  // Compute the residual.
  r = z - Vector4d(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2),
      p_c1(0)/p_c1(2), p_c1(1)/p_c1(2));

  return;
}

void MsckfVio::featureJacobian(
    const FeatureIDType& feature_id,
    const std::vector<StateIDType>& cam_state_ids,
    MatrixXd& H_x, VectorXd& r) {

  const auto& feature = map_server[feature_id];

  // Check how many camera states in the provided camera
  // id list have actually seen this feature.
  vector<StateIDType> valid_cam_state_ids(0);
  for (const auto& cam_id : cam_state_ids) {
    if (feature.observations.find(cam_id) ==
        feature.observations.end()) continue;
    valid_cam_state_ids.push_back(cam_id);
  }

  int jacobian_row_size = 0;
  jacobian_row_size = 4 * valid_cam_state_ids.size();

  MatrixXd H_xj = MatrixXd::Zero(jacobian_row_size,
      21+state_server.cam_states.size()*6);
  MatrixXd H_fj = MatrixXd::Zero(jacobian_row_size, 3);
  VectorXd r_j = VectorXd::Zero(jacobian_row_size);
  int stack_cntr = 0;

  for (const auto& cam_id : valid_cam_state_ids) {

    Matrix<double, 4, 6> H_xi = Matrix<double, 4, 6>::Zero();
    Matrix<double, 4, 3> H_fi = Matrix<double, 4, 3>::Zero();
    Vector4d r_i = Vector4d::Zero();
    measurementJacobian(cam_id, feature.id, H_xi, H_fi, r_i);

    auto cam_state_iter = state_server.cam_states.find(cam_id);
    int cam_state_cntr = std::distance(
        state_server.cam_states.begin(), cam_state_iter);

    // Stack the Jacobians.
    H_xj.block<4, 6>(stack_cntr, 21+6*cam_state_cntr) = H_xi;
    H_fj.block<4, 3>(stack_cntr, 0) = H_fi;
    r_j.segment<4>(stack_cntr) = r_i;
    stack_cntr += 4;
  }

  // Project the residual and Jacobians onto the nullspace
  // of H_fj.
  JacobiSVD<MatrixXd> svd_helper(H_fj, ComputeFullU | ComputeThinV);
  MatrixXd A = svd_helper.matrixU().rightCols(
      jacobian_row_size - 3);

  H_x = A.transpose() * H_xj;
  r = A.transpose() * r_j;

  return;
}
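// EKF update: when the stacked Jacobian has more rows than columns it
// is first compressed with a sparse QR decomposition, so the update
// works on the thin factor and Q^T*r instead, as in Equations (28)
// and (29) of the MSCKF paper. The Kalman gain then solves
//   K = P*H_thin^T * (H_thin*P*H_thin^T + R)^-1
// via an LDLT factorization of the innovation covariance.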
void MsckfVio::measurementUpdate(
    const MatrixXd& H, const VectorXd& r) {

  if (H.rows() == 0 || r.rows() == 0) return;

  // Decompose the final Jacobian matrix to reduce computational
  // complexity as in Equation (28), (29).
  MatrixXd H_thin;
  VectorXd r_thin;

  if (H.rows() > H.cols()) {
    // Convert H to a sparse matrix.
    SparseMatrix<double> H_sparse = H.sparseView();

    // Perform QR decomposition on H_sparse.
    SPQR<SparseMatrix<double> > spqr_helper;
    spqr_helper.setSPQROrdering(SPQR_ORDERING_NATURAL);
    spqr_helper.compute(H_sparse);

    MatrixXd H_temp;
    VectorXd r_temp;
    (spqr_helper.matrixQ().transpose() * H).evalTo(H_temp);
    (spqr_helper.matrixQ().transpose() * r).evalTo(r_temp);

    H_thin = H_temp.topRows(21+state_server.cam_states.size()*6);
    r_thin = r_temp.head(21+state_server.cam_states.size()*6);

    //HouseholderQR<MatrixXd> qr_helper(H);
    //MatrixXd Q = qr_helper.householderQ();
    //MatrixXd Q1 = Q.leftCols(21+state_server.cam_states.size()*6);

    //H_thin = Q1.transpose() * H;
    //r_thin = Q1.transpose() * r;
  } else {
    H_thin = H;
    r_thin = r;
  }

  // Compute the Kalman gain.
  const MatrixXd& P = state_server.state_cov;
  MatrixXd S = H_thin*P*H_thin.transpose() +
      Feature::observation_noise*MatrixXd::Identity(
        H_thin.rows(), H_thin.rows());
  //MatrixXd K_transpose = S.fullPivHouseholderQr().solve(H_thin*P);
  MatrixXd K_transpose = S.ldlt().solve(H_thin*P);
  MatrixXd K = K_transpose.transpose();

  // Compute the error of the state.
  VectorXd delta_x = K * r_thin;

  // Update the IMU state.
  const VectorXd& delta_x_imu = delta_x.head<21>();

  if (//delta_x_imu.segment<3>(0).norm() > 0.15 ||
      //delta_x_imu.segment<3>(3).norm() > 0.15 ||
      delta_x_imu.segment<3>(6).norm() > 0.5 ||
      //delta_x_imu.segment<3>(9).norm() > 0.5 ||
      delta_x_imu.segment<3>(12).norm() > 1.0) {
    printf("delta velocity: %f\n", delta_x_imu.segment<3>(6).norm());
    printf("delta position: %f\n", delta_x_imu.segment<3>(12).norm());
    ROS_WARN("Update change is too large.");
    //return;
  }

  const Vector4d dq_imu =
    smallAngleQuaternion(delta_x_imu.head<3>());
  state_server.imu_state.orientation = quaternionMultiplication(
      dq_imu, state_server.imu_state.orientation);
  state_server.imu_state.gyro_bias += delta_x_imu.segment<3>(3);
  state_server.imu_state.velocity += delta_x_imu.segment<3>(6);
  state_server.imu_state.acc_bias += delta_x_imu.segment<3>(9);
  state_server.imu_state.position += delta_x_imu.segment<3>(12);

  const Vector4d dq_extrinsic =
    smallAngleQuaternion(delta_x_imu.segment<3>(15));
  state_server.imu_state.R_imu_cam0 = quaternionToRotation(
      dq_extrinsic) * state_server.imu_state.R_imu_cam0;
  state_server.imu_state.t_cam0_imu += delta_x_imu.segment<3>(18);

  // Update the camera states.
  auto cam_state_iter = state_server.cam_states.begin();
  for (int i = 0; i < state_server.cam_states.size();
      ++i, ++cam_state_iter) {
    const VectorXd& delta_x_cam = delta_x.segment<6>(21+i*6);
    const Vector4d dq_cam = smallAngleQuaternion(delta_x_cam.head<3>());
    cam_state_iter->second.orientation = quaternionMultiplication(
        dq_cam, cam_state_iter->second.orientation);
    cam_state_iter->second.position += delta_x_cam.tail<3>();
  }

  // Update the state covariance.
  MatrixXd I_KH = MatrixXd::Identity(K.rows(), H_thin.cols()) - K*H_thin;
  //state_server.state_cov = I_KH*state_server.state_cov*I_KH.transpose() +
  //  K*K.transpose()*Feature::observation_noise;
  state_server.state_cov = I_KH*state_server.state_cov;

  // Fix the covariance to be symmetric.
  MatrixXd state_cov_fixed = (state_server.state_cov +
      state_server.state_cov.transpose()) / 2.0;
  state_server.state_cov = state_cov_fixed;

  return;
}
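// Mahalanobis gating: a candidate measurement is accepted only if
//   gamma = r^T * (H*P*H^T + R)^-1 * r
// stays below the chi-squared threshold for the given degrees of
// freedom, which rejects residuals that are inconsistent with the
// current state uncertainty.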
bool MsckfVio::gatingTest(
    const MatrixXd& H, const VectorXd& r, const int& dof) {

  MatrixXd P1 = H * state_server.state_cov * H.transpose();
  MatrixXd P2 = Feature::observation_noise *
    MatrixXd::Identity(H.rows(), H.rows());
  double gamma = r.transpose() * (P1+P2).ldlt().solve(r);

  //cout << dof << " " << gamma << " " <<
  //  chi_squared_test_table[dof] << " ";

  if (gamma < chi_squared_test_table[dof]) {
    //cout << "passed" << endl;
    return true;
  } else {
    //cout << "failed" << endl;
    return false;
  }
}

void MsckfVio::removeLostFeatures() {
  // Remove the features that lost track.
  // Also find the size of the final Jacobian matrix
  // and residual vector.
  int jacobian_row_size = 0;
  vector<FeatureIDType> invalid_feature_ids(0);
  vector<FeatureIDType> processed_feature_ids(0);

  for (auto iter = map_server.begin();
      iter != map_server.end(); ++iter) {
    // Rename the feature to be checked.
    auto& feature = iter->second;

    // Pass the features that are still being tracked.
    if (feature.observations.find(state_server.imu_state.id) !=
        feature.observations.end()) continue;
    if (feature.observations.size() < 3) {
      invalid_feature_ids.push_back(feature.id);
      continue;
    }

    // Check if the feature can be initialized if it
    // has not been.
    if (!feature.is_initialized) {
      if (!feature.checkMotion(state_server.cam_states)) {
        invalid_feature_ids.push_back(feature.id);
        continue;
      } else {
        if(!feature.initializePosition(state_server.cam_states)) {
          invalid_feature_ids.push_back(feature.id);
          continue;
        }
      }
    }
    if(!feature.is_anchored) {
      if(!feature.initializeAnchor(cam0)) {
        invalid_feature_ids.push_back(feature.id);
        continue;
      }
    }

    jacobian_row_size += 4*feature.observations.size() - 3;
    processed_feature_ids.push_back(feature.id);
  }

  //cout << "invalid/processed feature #: " <<
  //  invalid_feature_ids.size() << "/" <<
  //  processed_feature_ids.size() << endl;
  //cout << "jacobian row #: " << jacobian_row_size << endl;

  // Remove the features that do not have enough measurements.
  for (const auto& feature_id : invalid_feature_ids)
    map_server.erase(feature_id);

  // Return if there is no lost feature to be processed.
  if (processed_feature_ids.size() == 0) return;

  MatrixXd H_x = MatrixXd::Zero(jacobian_row_size,
      21+6*state_server.cam_states.size());
  VectorXd r = VectorXd::Zero(jacobian_row_size);
  int stack_cntr = 0;

  // Process the features which lost track.
  for (const auto& feature_id : processed_feature_ids) {
    auto& feature = map_server[feature_id];

    vector<StateIDType> cam_state_ids(0);
    for (const auto& measurement : feature.observations)
      cam_state_ids.push_back(measurement.first);

    MatrixXd H_xj;
    VectorXd r_j;
    PhotometricFeatureJacobian(feature.id, cam_state_ids, H_xj, r_j);

    if (gatingTest(H_xj, r_j, cam_state_ids.size()-1)) {
      H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj;
      r.segment(stack_cntr, r_j.rows()) = r_j;
      stack_cntr += H_xj.rows();
    }

    // Put an upper bound on the row size of the measurement Jacobian,
    // which helps guarantee the execution time.
    if (stack_cntr > 1500) break;
  }

  H_x.conservativeResize(stack_cntr, H_x.cols());
  r.conservativeResize(stack_cntr);

  // Perform the measurement update step.
  measurementUpdate(H_x, r);

  // Remove all processed features from the map.
  for (const auto& feature_id : processed_feature_ids)
    map_server.erase(feature_id);

  return;
}
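// Camera state selection for pruning: a recent state is used as the
// key state; the states examined next to it are marked redundant if
// they barely moved relative to it (small relative rotation and
// translation while the tracking rate is high); otherwise the oldest
// states are dropped instead.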
void MsckfVio::findRedundantCamStates(
    vector<StateIDType>& rm_cam_state_ids) {

  // Move the iterator to the key position.
  auto key_cam_state_iter = state_server.cam_states.end();
  for (int i = 0; i < 4; ++i)
    --key_cam_state_iter;
  auto cam_state_iter = key_cam_state_iter;
  ++cam_state_iter;
  auto first_cam_state_iter = state_server.cam_states.begin();

  // Pose of the key camera state.
  const Vector3d key_position =
    key_cam_state_iter->second.position;
  const Matrix3d key_rotation = quaternionToRotation(
      key_cam_state_iter->second.orientation);

  // Mark the camera states to be removed based on the
  // motion between states.
  for (int i = 0; i < 2; ++i) {
    const Vector3d position =
      cam_state_iter->second.position;
    const Matrix3d rotation = quaternionToRotation(
        cam_state_iter->second.orientation);

    double distance = (position-key_position).norm();
    double angle = AngleAxisd(
        rotation*key_rotation.transpose()).angle();

    //if (angle < 0.1745 && distance < 0.2 && tracking_rate > 0.5) {
    if (angle < 0.2618 && distance < 0.4 && tracking_rate > 0.5) {
      rm_cam_state_ids.push_back(cam_state_iter->first);
      ++cam_state_iter;
    } else {
      rm_cam_state_ids.push_back(first_cam_state_iter->first);
      ++first_cam_state_iter;
    }
  }

  // Sort the elements in the output vector.
  sort(rm_cam_state_ids.begin(), rm_cam_state_ids.end());

  return;
}
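// When the sliding window is full, two camera states are selected for
// removal; every feature still observed by them contributes one last
// measurement update before those observations, and the corresponding
// rows and columns of the covariance, are deleted.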
void MsckfVio::pruneCamStateBuffer() {

  if (state_server.cam_states.size() < max_cam_state_size)
    return;

  // Find two camera states to be removed.
  vector<StateIDType> rm_cam_state_ids(0);
  findRedundantCamStates(rm_cam_state_ids);

  // Find the size of the Jacobian matrix.
  int jacobian_row_size = 0;
  for (auto& item : map_server) {
    auto& feature = item.second;
    // Check how many of the camera states to be removed are
    // associated with this feature.
    vector<StateIDType> involved_cam_state_ids(0);
    for (const auto& cam_id : rm_cam_state_ids) {
      if (feature.observations.find(cam_id) !=
          feature.observations.end())
        involved_cam_state_ids.push_back(cam_id);
    }

    if (involved_cam_state_ids.size() == 0) continue;
    if (involved_cam_state_ids.size() == 1) {
      feature.observations.erase(involved_cam_state_ids[0]);
      continue;
    }

    if (!feature.is_initialized) {
      // Check if the feature can be initialized.
      if (!feature.checkMotion(state_server.cam_states)) {
        // If the feature cannot be initialized, just remove
        // the observations associated with the camera states
        // to be removed.
        for (const auto& cam_id : involved_cam_state_ids)
          feature.observations.erase(cam_id);
        continue;
      } else {
        if(!feature.initializePosition(state_server.cam_states)) {
          for (const auto& cam_id : involved_cam_state_ids)
            feature.observations.erase(cam_id);
          continue;
        }
      }
    }
    if(!feature.is_anchored) {
      if(!feature.initializeAnchor(cam0)) {
        for (const auto& cam_id : involved_cam_state_ids)
          feature.observations.erase(cam_id);
        continue;
      }
    }

    jacobian_row_size += 4*involved_cam_state_ids.size() - 3;
  }

  //cout << "jacobian row #: " << jacobian_row_size << endl;

  // Compute the Jacobian and residual.
  MatrixXd H_x = MatrixXd::Zero(jacobian_row_size,
      21+6*state_server.cam_states.size());
  VectorXd r = VectorXd::Zero(jacobian_row_size);
  int stack_cntr = 0;

  for (auto& item : map_server) {
    auto& feature = item.second;
    // Check how many of the camera states to be removed are
    // associated with this feature.
    vector<StateIDType> involved_cam_state_ids(0);
    for (const auto& cam_id : rm_cam_state_ids) {
      if (feature.observations.find(cam_id) !=
          feature.observations.end())
        involved_cam_state_ids.push_back(cam_id);
    }

    if (involved_cam_state_ids.size() == 0) continue;

    MatrixXd H_xj;
    VectorXd r_j;
    PhotometricFeatureJacobian(feature.id, involved_cam_state_ids, H_xj, r_j);

    if (gatingTest(H_xj, r_j, involved_cam_state_ids.size())) {
      H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj;
      r.segment(stack_cntr, r_j.rows()) = r_j;
      stack_cntr += H_xj.rows();
    }

    for (const auto& cam_id : involved_cam_state_ids)
      feature.observations.erase(cam_id);
  }

  H_x.conservativeResize(stack_cntr, H_x.cols());
  r.conservativeResize(stack_cntr);

  // Perform the measurement update.
  measurementUpdate(H_x, r);

  for (const auto& cam_id : rm_cam_state_ids) {
    int cam_sequence = std::distance(state_server.cam_states.begin(),
        state_server.cam_states.find(cam_id));
    int cam_state_start = 21 + 6*cam_sequence;
    int cam_state_end = cam_state_start + 6;

    // Remove the corresponding rows and columns in the state
    // covariance matrix.
    if (cam_state_end < state_server.state_cov.rows()) {
      state_server.state_cov.block(cam_state_start, 0,
          state_server.state_cov.rows()-cam_state_end,
          state_server.state_cov.cols()) =
        state_server.state_cov.block(cam_state_end, 0,
            state_server.state_cov.rows()-cam_state_end,
            state_server.state_cov.cols());

      state_server.state_cov.block(0, cam_state_start,
          state_server.state_cov.rows(),
          state_server.state_cov.cols()-cam_state_end) =
        state_server.state_cov.block(0, cam_state_end,
            state_server.state_cov.rows(),
            state_server.state_cov.cols()-cam_state_end);

      state_server.state_cov.conservativeResize(
          state_server.state_cov.rows()-6, state_server.state_cov.cols()-6);
    } else {
      state_server.state_cov.conservativeResize(
          state_server.state_cov.rows()-6, state_server.state_cov.cols()-6);
    }

    // Remove this camera state from the state vector.
    state_server.cam_states.erase(cam_id);
    cam0.moving_window.erase(cam_id);
    cam1.moving_window.erase(cam_id);
  }

  return;
}

void MsckfVio::onlineReset() {

  // Never perform online reset if the position std threshold
  // is non-positive.
  if (position_std_threshold <= 0) return;
  static long long int online_reset_counter = 0;

  // Check the uncertainty of positions to determine if
  // the system should be reset.
  double position_x_std = std::sqrt(state_server.state_cov(12, 12));
  double position_y_std = std::sqrt(state_server.state_cov(13, 13));
  double position_z_std = std::sqrt(state_server.state_cov(14, 14));

  if (position_x_std < position_std_threshold &&
      position_y_std < position_std_threshold &&
      position_z_std < position_std_threshold) return;

  ROS_WARN("Start %lld online reset procedure...",
      ++online_reset_counter);
  ROS_INFO("Standard deviation in xyz: %f, %f, %f",
      position_x_std, position_y_std, position_z_std);

  // Remove all existing camera states.
  state_server.cam_states.clear();

  // Clear all existing features in the map.
  map_server.clear();

  // Reset the state covariance.
  double gyro_bias_cov, acc_bias_cov, velocity_cov;
  nh.param<double>("initial_covariance/velocity", velocity_cov, 0.25);
  nh.param<double>("initial_covariance/gyro_bias", gyro_bias_cov, 1e-4);
  nh.param<double>("initial_covariance/acc_bias", acc_bias_cov, 1e-2);

  double extrinsic_rotation_cov, extrinsic_translation_cov;
  nh.param<double>("initial_covariance/extrinsic_rotation_cov",
      extrinsic_rotation_cov, 3.0462e-4);
  nh.param<double>("initial_covariance/extrinsic_translation_cov",
      extrinsic_translation_cov, 1e-4);

  state_server.state_cov = MatrixXd::Zero(21, 21);
  for (int i = 3; i < 6; ++i)
    state_server.state_cov(i, i) = gyro_bias_cov;
  for (int i = 6; i < 9; ++i)
    state_server.state_cov(i, i) = velocity_cov;
  for (int i = 9; i < 12; ++i)
    state_server.state_cov(i, i) = acc_bias_cov;
  for (int i = 15; i < 18; ++i)
    state_server.state_cov(i, i) = extrinsic_rotation_cov;
  for (int i = 18; i < 21; ++i)
    state_server.state_cov(i, i) = extrinsic_translation_cov;

  ROS_WARN("%lld online reset complete...", online_reset_counter);
  return;
}

void MsckfVio::publish(const ros::Time& time) {

  // Convert the IMU frame to the body frame.
  const IMUState& imu_state = state_server.imu_state;
  Eigen::Isometry3d T_i_w = Eigen::Isometry3d::Identity();
  T_i_w.linear() = quaternionToRotation(
      imu_state.orientation).transpose();
  T_i_w.translation() = imu_state.position;

  Eigen::Isometry3d T_b_w = IMUState::T_imu_body * T_i_w *
    IMUState::T_imu_body.inverse();
  Eigen::Vector3d body_velocity =
    IMUState::T_imu_body.linear() * imu_state.velocity;

  // Publish tf
  if (publish_tf) {
    tf::Transform T_b_w_tf;
    tf::transformEigenToTF(T_b_w, T_b_w_tf);
    tf_pub.sendTransform(tf::StampedTransform(
          T_b_w_tf, time, fixed_frame_id, child_frame_id));
  }

  // Publish the odometry
  nav_msgs::Odometry odom_msg;
  odom_msg.header.stamp = time;
  odom_msg.header.frame_id = fixed_frame_id;
  odom_msg.child_frame_id = child_frame_id;

  tf::poseEigenToMsg(T_b_w, odom_msg.pose.pose);
  tf::vectorEigenToMsg(body_velocity, odom_msg.twist.twist.linear);

  // Convert the covariance.
  Matrix3d P_oo = state_server.state_cov.block<3, 3>(0, 0);
  Matrix3d P_op = state_server.state_cov.block<3, 3>(0, 12);
  Matrix3d P_po = state_server.state_cov.block<3, 3>(12, 0);
  Matrix3d P_pp = state_server.state_cov.block<3, 3>(12, 12);
  Matrix<double, 6, 6> P_imu_pose = Matrix<double, 6, 6>::Zero();
  P_imu_pose << P_pp, P_po, P_op, P_oo;

  Matrix<double, 6, 6> H_pose = Matrix<double, 6, 6>::Zero();
  H_pose.block<3, 3>(0, 0) = IMUState::T_imu_body.linear();
  H_pose.block<3, 3>(3, 3) = IMUState::T_imu_body.linear();
  Matrix<double, 6, 6> P_body_pose = H_pose *
    P_imu_pose * H_pose.transpose();

  for (int i = 0; i < 6; ++i)
    for (int j = 0; j < 6; ++j)
      odom_msg.pose.covariance[6*i+j] = P_body_pose(i, j);

  // Construct the covariance for the velocity.
  Matrix3d P_imu_vel = state_server.state_cov.block<3, 3>(6, 6);
  Matrix3d H_vel = IMUState::T_imu_body.linear();
  Matrix3d P_body_vel = H_vel * P_imu_vel * H_vel.transpose();
  for (int i = 0; i < 3; ++i)
    for (int j = 0; j < 3; ++j)
      odom_msg.twist.covariance[i*6+j] = P_body_vel(i, j);

  odom_pub.publish(odom_msg);
  // Publish the 3D positions of the features that
  // have been initialized.
  pcl::PointCloud<pcl::PointXYZ>::Ptr feature_msg_ptr(
      new pcl::PointCloud<pcl::PointXYZ>());
  feature_msg_ptr->header.frame_id = fixed_frame_id;
  feature_msg_ptr->height = 1;
  for (const auto& item : map_server) {
    const auto& feature = item.second;
    if (feature.is_initialized) {
      Vector3d feature_position =
        IMUState::T_imu_body.linear() * feature.position;
      feature_msg_ptr->points.push_back(pcl::PointXYZ(
            feature_position(0), feature_position(1), feature_position(2)));
    }
  }
  feature_msg_ptr->width = feature_msg_ptr->points.size();

  feature_pub.publish(feature_msg_ptr);

  return;
}

} // namespace msckf_vio