minor changes to visualization, jacobian tests

Raphael Maenle 2019-05-03 16:42:27 +02:00
parent ee40dff740
commit 53b26f7613
4 changed files with 172 additions and 17 deletions

View File

@@ -172,7 +172,8 @@ struct Feature {
       const CAMState& cam_state,
       const StateIDType& cam_state_id,
       CameraCalibration& cam0,
-      const std::vector<double> photo_r) const;
+      const std::vector<double> photo_r,
+      std::stringstream& ss) const;
   /*
    * @brief projectPixelToPosition uses the calculated pixels
    * of the anchor patch to generate 3D positions of all of them
@@ -410,7 +411,8 @@ bool Feature::VisualizePatch(
     const CAMState& cam_state,
     const StateIDType& cam_state_id,
     CameraCalibration& cam0,
-    const std::vector<double> photo_r) const
+    const std::vector<double> photo_r,
+    std::stringstream& ss) const
 {
   double rescale = 1;
@@ -421,14 +423,15 @@ bool Feature::VisualizePatch(
   cv::Mat dottedFrame(anchorImage.size(), CV_8UC3);
   cv::cvtColor(anchorImage, dottedFrame, CV_GRAY2RGB);

+  // visualize the true anchor points (the surrounding of the original measurements)
   for(auto point : anchorPatch_real)
   {
     // visu - feature
     cv::Point xs(point.x, point.y);
     cv::Point ys(point.x, point.y);
-    cv::rectangle(dottedFrame, xs, ys, cv::Scalar(0,255,0));
+    cv::rectangle(dottedFrame, xs, ys, cv::Scalar(0,255,255));
   }
   cam0.featureVisu = dottedFrame.clone();

   // visu - feature
   cv::Mat current_image = cam0.moving_window.find(cam_state_id)->second.image;
@@ -487,23 +490,52 @@ bool Feature::VisualizePatch(
           cv::Scalar(255, 255 + photo_r[i*N+j]*255, 255 + photo_r[i*N+j]*255),
           CV_FILLED);
+
+  // visualize position of used observations and resulting feature position
+  cv::Mat positionFrame(anchorImage.size(), CV_8UC3, cv::Scalar(255, 240, 255));
+  cv::resize(positionFrame, positionFrame, cv::Size(), rescale, rescale);
+
+  // draw world zero
+  cv::line(positionFrame,
+           cv::Point(20,20),
+           cv::Point(20,30),
+           cv::Scalar(0,0,255),
+           CV_FILLED);
+  cv::line(positionFrame,
+           cv::Point(20,20),
+           cv::Point(30,20),
+           cv::Scalar(255,0,0),
+           CV_FILLED);
+
+  // for every observation, get cam state
+  for(auto obs : observations)
+  {
+    cam_state.find(obs->first);
+    cv::line(positionFrame,
+             cv::Point(20,20),
+             cv::Point(30,20),
+             cv::Scalar(255,0,0),
+             CV_FILLED);
+  }
+
+  // draw, x y position and arrow with direction - write z next to it
+
   cv::resize(cam0.featureVisu, cam0.featureVisu, cv::Size(), rescale, rescale);
   cv::hconcat(cam0.featureVisu, irradianceFrame, cam0.featureVisu);
+  cv::hconcat(cam0.featureVisu, positionFrame, cam0.featureVisu);

   // write feature position
   std::stringstream pos_s;
   pos_s << "u: " << observations.begin()->second(0) << " v: " << observations.begin()->second(1);
-  cv::putText(cam0.featureVisu, pos_s.str() , cvPoint(anchorImage.rows + 100, anchorImage.cols - 30),
+  cv::putText(cam0.featureVisu, ss.str() , cvPoint(anchorImage.rows + 100, anchorImage.cols - 30),
       cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(200,200,250), 1, CV_AA);
   // create line?

   //save image
-  std::stringstream ss;
-  ss << "/home/raphael/dev/MSCKF_ws/img/feature_" << std::to_string(ros::Time::now().toSec()) << ".jpg";
-  cv::imwrite(ss.str(), cam0.featureVisu);
+  std::stringstream loc;
+  loc << "/home/raphael/dev/MSCKF_ws/img/feature_" << std::to_string(ros::Time::now().toSec()) << ".jpg";
+  cv::imwrite(loc.str(), cam0.featureVisu);

   //cv::imshow("patch", cam0.featureVisu);
   //cvWaitKey(1);
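Note: the new positionFrame pane so far only draws the world-origin axis marker; the loop over observations discards the lookup result, and the final comment marks the per-camera drawing as still to do. One possible shape for that drawing, as a sketch only (the 10 px-per-metre scale, the id-to-position map, and the helper name are assumptions, not part of the commit):

#include <map>
#include <string>
#include <opencv2/opencv.hpp>
#include <Eigen/Dense>

// Sketch only, not part of the commit: draw every observing camera position into
// the positionFrame pane, relative to the world-origin marker at (20,20).
// The 10 px-per-metre scale and the id -> {}^G p_C map are assumptions.
void drawObservationPositions(cv::Mat& positionFrame,
                              const std::map<long long, Eigen::Vector3d>& cam_positions)
{
  for (const auto& entry : cam_positions) {
    const Eigen::Vector3d& p = entry.second;
    cv::Point px(20 + cvRound(10.0 * p.x()), 20 + cvRound(10.0 * p.y()));
    cv::circle(positionFrame, px, 2, cv::Scalar(0, 0, 255), -1);
    // write z next to the marker, as the TODO comment above suggests
    cv::putText(positionFrame, std::to_string(p.z()), px + cv::Point(3, 3),
                cv::FONT_HERSHEY_COMPLEX_SMALL, 0.5, cv::Scalar(50, 50, 50), 1);
  }
}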

View File

@@ -107,6 +107,15 @@ class MsckfVio {
      */
     void imuCallback(const sensor_msgs::ImuConstPtr& msg);

+    /*
+     * @brief truthCallback
+     *    Callback function for ground truth navigation information
+     * @param TransformStamped msg
+     */
+    void truthCallback(
+        const geometry_msgs::TransformStampedPtr& msg);
+
     /*
      * @brief imageCallback
      *    Callback function for feature measurements
@ -144,11 +153,23 @@ class MsckfVio {
bool resetCallback(std_srvs::Trigger::Request& req, bool resetCallback(std_srvs::Trigger::Request& req,
std_srvs::Trigger::Response& res); std_srvs::Trigger::Response& res);
// Saves the exposure time and the camera measurementes
void manageMovingWindow( void manageMovingWindow(
const sensor_msgs::ImageConstPtr& cam0_img, const sensor_msgs::ImageConstPtr& cam0_img,
const sensor_msgs::ImageConstPtr& cam1_img, const sensor_msgs::ImageConstPtr& cam1_img,
const CameraMeasurementConstPtr& feature_msg); const CameraMeasurementConstPtr& feature_msg);
// Debug related Functions
// Propagate the true state
void batchTruthProcessing(
const double& time_bound);
void processTruthtoIMU(const double& time,
const Eigen::Vector4d& m_rot,
const Eigen::Vector3d& m_trans);
// Filter related functions // Filter related functions
// Propogate the state // Propogate the state
void batchImuProcessing( void batchImuProcessing(
@ -202,12 +223,16 @@ class MsckfVio {
void onlineReset(); void onlineReset();
// Photometry flag // Photometry flag
// visualization flag
bool PHOTOMETRIC; bool PHOTOMETRIC;
// debug flag
bool PRINTIMAGES; bool PRINTIMAGES;
bool GROUNDTRUTH;
bool nan_flag; bool nan_flag;
double last_time_bound;
// Patch size for Photometry // Patch size for Photometry
int N; int N;
@@ -227,6 +252,8 @@ class MsckfVio {
     // transfer delay between IMU and Image messages.
     std::vector<sensor_msgs::Imu> imu_msg_buffer;

+    // Ground Truth message data
+    std::vector<geometry_msgs::TransformStamped> truth_msg_buffer;
     // Moving Window buffer
     movingWindow cam0_moving_window;
     movingWindow cam1_moving_window;
@@ -268,6 +295,7 @@ class MsckfVio {
     // Subscribers and publishers
     ros::Subscriber imu_sub;
+    ros::Subscriber truth_sub;
     ros::Publisher odom_pub;
     ros::Publisher feature_pub;
     tf::TransformBroadcaster tf_pub;

View File

@@ -19,7 +19,10 @@
     <!-- Photometry Flag-->
     <param name="PHOTOMETRIC" value="true"/>

+    <!-- Debugging Flags -->
     <param name="PrintImages" value="true"/>
+    <param name="GroundTruth" value="false"/>
+
     <param name="patch_size_n" value="7"/>

     <!-- Calibration parameters -->
@@ -59,6 +62,7 @@
     <param name="initial_covariance/irradiance_frame_bias" value="0.1"/>

     <remap from="~imu" to="/imu0"/>
+    <remap from="~ground_truth" to="/vrpn_client/raw_transform"/>
     <remap from="~cam0_image" to="/cam0/image_raw"/>
     <remap from="~cam1_image" to="/cam1/image_raw"/>

View File

@@ -56,6 +56,7 @@ MsckfVio::MsckfVio(ros::NodeHandle& pnh):
     is_first_img(true),
     image_sub(10),
     nan_flag(false),
+    last_time_bound(0),
     nh(pnh) {
   return;
 }
@@ -64,6 +65,7 @@ bool MsckfVio::loadParameters() {
   //Photometry Flag
   nh.param<bool>("PHOTOMETRIC", PHOTOMETRIC, false);
   nh.param<bool>("PrintImages", PRINTIMAGES, false);
+  nh.param<bool>("GroundTruth", GROUNDTRUTH, false);

   // Frame id
   nh.param<string>("fixed_frame_id", fixed_frame_id, "world");
@@ -255,6 +257,8 @@ bool MsckfVio::createRosIO() {
   imu_sub = nh.subscribe("imu", 100,
       &MsckfVio::imuCallback, this);
+  truth_sub = nh.subscribe("ground_truth", 100,
+      &MsckfVio::truthCallback, this);
   cam0_img_sub.subscribe(nh, "cam0_image", 10);
   cam1_img_sub.subscribe(nh, "cam1_image", 10);
@@ -263,7 +267,6 @@ bool MsckfVio::createRosIO() {
   image_sub.connectInput(cam0_img_sub, cam1_img_sub, feature_sub);
   image_sub.registerCallback(&MsckfVio::imageCallback, this);
-
   mocap_odom_sub = nh.subscribe("mocap_odom", 10,
       &MsckfVio::mocapOdomCallback, this);
   mocap_odom_pub = nh.advertise<nav_msgs::Odometry>("gt_odom", 1);
@@ -292,7 +295,7 @@ bool MsckfVio::initialize() {
   for (int i = 1; i < 100; ++i) {
     boost::math::chi_squared chi_squared_dist(i);
     chi_squared_test_table[i] =
-        boost::math::quantile(chi_squared_dist, 0.05);
+        boost::math::quantile(chi_squared_dist, 0.2);
   }

   if (!createRosIO()) return false;
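Note: this table feeds the Mahalanobis gating test on each feature's stacked residual, so raising the quantile from 0.05 to 0.2 raises the per-DOF threshold and lets more residuals pass the gate. A minimal sketch of how the table is consumed, assuming the upstream msckf_vio gatingTest structure (parameters passed explicitly here for a self-contained example):

#include <map>
#include <Eigen/Dense>

// Sketch, assuming the upstream msckf_vio gating test: accept a feature's stacked
// residual r with Jacobian H only if its Mahalanobis distance stays below the
// chi-squared threshold for its degrees of freedom. With quantile 0.05 the
// threshold is small (strict gate); 0.2 makes it larger (looser gate).
bool gatingTest(const Eigen::MatrixXd& H, const Eigen::VectorXd& r, int dof,
                const Eigen::MatrixXd& state_cov, double observation_noise,
                const std::map<int, double>& chi_squared_test_table)
{
  Eigen::MatrixXd P1 = H * state_cov * H.transpose();
  Eigen::MatrixXd P2 = observation_noise *
      Eigen::MatrixXd::Identity(H.rows(), H.rows());
  Eigen::VectorXd tmp = (P1 + P2).ldlt().solve(r);
  double gamma = r.dot(tmp);  // Mahalanobis distance of the residual
  return gamma < chi_squared_test_table.at(dof);
}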
@@ -319,6 +322,10 @@ void MsckfVio::imuCallback(const sensor_msgs::ImuConstPtr& msg){
   return;
 }

+void MsckfVio::truthCallback(const geometry_msgs::TransformStampedPtr& msg){
+  truth_msg_buffer.push_back(*msg);
+}
+
 void MsckfVio::imageCallback(
     const sensor_msgs::ImageConstPtr& cam0_img,
     const sensor_msgs::ImageConstPtr& cam1_img,
@ -342,7 +349,10 @@ void MsckfVio::imageCallback(
// Propogate the IMU state. // Propogate the IMU state.
// that are received before the image feature_msg. // that are received before the image feature_msg.
ros::Time start_time = ros::Time::now(); ros::Time start_time = ros::Time::now();
batchImuProcessing(feature_msg->header.stamp.toSec()); if(!GROUNDTRUTH)
batchImuProcessing(feature_msg->header.stamp.toSec());
else
batchTruthProcessing(feature_msg->header.stamp.toSec());
double imu_processing_time = ( double imu_processing_time = (
ros::Time::now()-start_time).toSec(); ros::Time::now()-start_time).toSec();
@ -558,6 +568,9 @@ bool MsckfVio::resetCallback(
imu_sub = nh.subscribe("imu", 100, imu_sub = nh.subscribe("imu", 100,
&MsckfVio::imuCallback, this); &MsckfVio::imuCallback, this);
truth_sub = nh.subscribe("ground_truth", 100,
&MsckfVio::truthCallback, this);
// feature_sub = nh.subscribe("features", 40, // feature_sub = nh.subscribe("features", 40,
// &MsckfVio::featureCallback, this); // &MsckfVio::featureCallback, this);
@@ -632,6 +645,71 @@ void MsckfVio::mocapOdomCallback(const nav_msgs::OdometryConstPtr& msg) {
   return;
 }

+void MsckfVio::batchTruthProcessing(const double& time_bound) {
+
+  // Count how many truth msgs in the buffer are used.
+  int used_truth_msg_cntr = 0;
+
+  for (const auto& truth_msg : truth_msg_buffer) {
+    double truth_time = truth_msg.header.stamp.toSec();
+    if (truth_time < state_server.imu_state.time) {
+      ++used_truth_msg_cntr;
+      continue;
+    }
+    if (truth_time > time_bound) break;
+
+    // Convert the msgs.
+    Eigen::Vector3d m_translation;
+    Eigen::Vector4d m_rotation;
+    tf::vectorMsgToEigen(truth_msg.transform.translation, m_translation);
+    m_rotation[0] = truth_msg.transform.rotation.x;
+    m_rotation[1] = truth_msg.transform.rotation.y;
+    m_rotation[2] = truth_msg.transform.rotation.z;
+    m_rotation[3] = truth_msg.transform.rotation.w;
+
+    // Execute the process model.
+    processTruthtoIMU(truth_time, m_rotation, m_translation);
+    ++used_truth_msg_cntr;
+  }
+  last_time_bound = time_bound;
+
+  // Set the state ID for the new IMU state.
+  state_server.imu_state.id = IMUState::next_id++;
+
+  // Remove all used truth msgs.
+  truth_msg_buffer.erase(truth_msg_buffer.begin(),
+      truth_msg_buffer.begin() + used_truth_msg_cntr);
+}
+
+void MsckfVio::processTruthtoIMU(const double& time,
+    const Vector4d& m_rot,
+    const Vector3d& m_trans){
+
+  IMUState& imu_state = state_server.imu_state;
+  double dt = time - imu_state.time;
+
+  Vector4d& q = imu_state.orientation;
+  Vector3d& v = imu_state.velocity;
+  Vector3d& p = imu_state.position;
+
+  // finite-difference velocity from the previous and the current truth position
+  v = (m_trans - p) / dt;
+  p = m_trans;
+  q = m_rot;
+
+  // Update the state that corresponds to the null space.
+  imu_state.orientation_null = imu_state.orientation;
+  imu_state.position_null = imu_state.position;
+  imu_state.velocity_null = imu_state.velocity;
+
+  // Update the state info
+  state_server.imu_state.time = time;
+}
+
 void MsckfVio::batchImuProcessing(const double& time_bound) {
   // Counter how many IMU msgs in the buffer are used.
   int used_imu_msg_cntr = 0;
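Note: batchTruthProcessing replaces IMU propagation by copying the mocap pose straight into the state and finite-differencing consecutive positions for velocity. A standalone sketch of that step (same [x, y, z, w] quaternion ordering; the struct and function names are illustrative only, not part of the commit):

#include <Eigen/Dense>

// Standalone sketch: pose is copied from the mocap sample; velocity is the
// first-order difference of consecutive positions, so noisy or unevenly spaced
// mocap stamps show up in the velocity estimate directly.
struct TruthState {
  double time = 0;
  Eigen::Vector4d orientation = Eigen::Vector4d(0, 0, 0, 1);  // [x, y, z, w]
  Eigen::Vector3d position = Eigen::Vector3d::Zero();
  Eigen::Vector3d velocity = Eigen::Vector3d::Zero();
};

void propagateFromTruth(TruthState& s, double time,
                        const Eigen::Vector4d& m_rot,
                        const Eigen::Vector3d& m_trans)
{
  const double dt = time - s.time;
  s.velocity = (m_trans - s.position) / dt;  // finite-difference velocity
  s.position = m_trans;
  s.orientation = m_rot;
  s.time = time;
}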
@@ -1039,17 +1117,22 @@ void MsckfVio::PhotometricMeasurementJacobian(
   dh_dCpij.block<2, 2>(0, 0) = Eigen::Matrix<double, 2, 2>::Identity();
   dh_dCpij(0, 2) = -(p_c0(0))/(p_c0(2)*p_c0(2));
   dh_dCpij(1, 2) = -(p_c0(1))/(p_c0(2)*p_c0(2));

+  //orientation takes camera frame to world frame, we wa
   dh_dGpij = dh_dCpij * quaternionToRotation(cam_state.orientation).transpose();

   //dh / d X_{pl}
   dh_dXplj.block<2, 3>(0, 0) = dh_dCpij * skewSymmetric(point);
-  dh_dXplj.block<2, 3>(0, 3) = dh_dCpij * -quaternionToRotation(cam_state.orientation);//.transpose();
+  dh_dXplj.block<2, 3>(0, 3) = dh_dCpij * -quaternionToRotation(cam_state.orientation).transpose();

   //d{}^Gp_P{ij} / \rho_i
   double rho = feature.anchor_rho;
   dGpi_drhoj = feature.T_anchor_w.linear() * Eigen::Vector3d(-feature.anchorPatch_ideal[count].x/(rho*rho), -feature.anchorPatch_ideal[count].y/(rho*rho), -1/(rho*rho));

-  dGpi_XpAj.block<3, 3>(0, 0) = skewSymmetric(Eigen::Vector3d(feature.anchorPatch_ideal[count].x/(rho), feature.anchorPatch_ideal[count].y/(rho), 1/(rho)));
+  dGpi_XpAj.block<3, 3>(0, 0) = - skewSymmetric(feature.T_anchor_w.linear()
+      * Eigen::Vector3d(feature.anchorPatch_ideal[count].x/(rho),
+                        feature.anchorPatch_ideal[count].y/(rho),
+                        1/(rho)));
   dGpi_XpAj.block<3, 3>(0, 3) = Matrix<double, 3, 3>::Identity();

   // Intermediate Jacobians
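Note: the rewritten dGpi_XpAj block follows from a first-order perturbation of the anchor rotation; a sketch of the derivation, assuming a left (world-frame) small-angle perturbation of R_A, which is an assumption about the error-state convention used here:

\[
{}^G p_{ij} = R_A\, v + {}^G t_A, \qquad
v = \frac{1}{\rho}\begin{pmatrix} x_{ij} & y_{ij} & 1 \end{pmatrix}^{T}
\]
\[
R_A \rightarrow \left(I + [\delta\theta]_\times\right) R_A
\;\Rightarrow\;
\delta\, {}^G p_{ij} \approx [\delta\theta]_\times R_A v = -[R_A v]_\times\, \delta\theta
\;\Rightarrow\;
\frac{\partial\, {}^G p_{ij}}{\partial \delta\theta_A} = -\operatorname{skew}(R_A v),
\qquad
\frac{\partial\, {}^G p_{ij}}{\partial\, {}^G t_A} = I_3 .
\]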
@@ -1091,6 +1174,7 @@ void MsckfVio::PhotometricMeasurementJacobian(
   // set anchor Jacobian
   // get position of anchor in cam states
   auto cam_state_anchor = state_server.cam_states.find(feature.observations.begin()->first);
   int cam_state_cntr_anchor = std::distance(state_server.cam_states.begin(), cam_state_anchor);
   H_xl.block(0, 21+cam_state_cntr_anchor*7, N*N, 6) = -H_pA;
@ -1122,8 +1206,10 @@ void MsckfVio::PhotometricMeasurementJacobian(
for(auto data : photo_r) for(auto data : photo_r)
r[count++] = data; r[count++] = data;
std::stringstream ss;
ss << "INFO:" << " anchor: " << cam_state_cntr_anchor << " frame: " << cam_state_cntr;
if(PRINTIMAGES) if(PRINTIMAGES)
feature.VisualizePatch(cam_state, cam_state_id, cam0, photo_r); feature.VisualizePatch(cam_state, cam_state_id, cam0, photo_r, ss);
return; return;
} }
@@ -1238,7 +1324,12 @@ void MsckfVio::measurementJacobian(
   dz_dpc1(3, 2) = -p_c1(1) / (p_c1(2)*p_c1(2));

   Matrix<double, 3, 6> dpc0_dxc = Matrix<double, 3, 6>::Zero();
-  dpc0_dxc.leftCols(3) = skewSymmetric(p_c0);
+
+  // original Jacobian
+  //dpc0_dxc.leftCols(3) = skewSymmetric(p_c0);
+  // my version of the calculation
+  dpc0_dxc.leftCols(3) = skewSymmetric(R_w_c0 * p_w) - skewSymmetric(R_w_c0 * t_c0_w);
+  //dpc0_dxc.leftCols(3) = - skewSymmetric(R_w_c0.transpose() * (t_c0_w - p_w)) * R_w_c0;
   dpc0_dxc.rightCols(3) = -R_w_c0;

   Matrix<double, 3, 6> dpc1_dxc = Matrix<double, 3, 6>::Zero();
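Note: because skewSymmetric() is linear in its argument, the rewritten rotation block equals the original one whenever p_c0 = R_w_c0 * (p_w - t_c0_w), as computed earlier in measurementJacobian. A small self-contained check of that identity (the numeric values are arbitrary):

#include <iostream>
#include <Eigen/Dense>

// skew(a) - skew(b) == skew(a - b), so
// skew(R_w_c0 * p_w) - skew(R_w_c0 * t_c0_w) == skew(R_w_c0 * (p_w - t_c0_w)) == skew(p_c0).
static Eigen::Matrix3d skewSymmetric(const Eigen::Vector3d& v)
{
  Eigen::Matrix3d m;
  m <<      0, -v.z(),  v.y(),
       v.z(),       0, -v.x(),
      -v.y(),  v.x(),       0;
  return m;
}

int main()
{
  Eigen::Matrix3d R_w_c0 = Eigen::AngleAxisd(0.3, Eigen::Vector3d::UnitY()).toRotationMatrix();
  Eigen::Vector3d p_w(1.0, 2.0, 3.0), t_c0_w(0.5, -0.2, 0.1);
  Eigen::Vector3d p_c0 = R_w_c0 * (p_w - t_c0_w);
  Eigen::Matrix3d a = skewSymmetric(R_w_c0 * p_w) - skewSymmetric(R_w_c0 * t_c0_w);
  Eigen::Matrix3d b = skewSymmetric(p_c0);
  std::cout << "difference: " << (a - b).norm() << std::endl;  // ~0 up to rounding
  return 0;
}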