added Jacobian x calculation; y calculation (of photometric update) still missing

Raphael Maenle 2019-04-23 19:16:46 +02:00
parent 8defb20c8e
commit 5958adb57c
3 changed files with 109 additions and 91 deletions
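For orientation: the change below linearizes a per-patch photometric residual. Read from the variable names H_xl and H_yl in the diff (a reconstruction, not wording from the commit), the intended measurement model is

    r = z - \hat{z} \approx H_x \, \delta x + H_y \, \delta y + n

where z stacks the N*N measured patch irradiances, \delta x is the filter error state (IMU plus augmented camera poses) that H_xl covers, and \delta y holds the irradiance-side error terms reserved for H_yl, the "y calculation" the commit message marks as missing.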


@@ -167,11 +167,10 @@ struct Feature {
       CameraCalibration& cam0,
       std::vector<float>& anchorPatch_estimate) const;
 
-  bool FrameIrradiance(
+  bool VisualizePatch(
       const CAMState& cam_state,
       const StateIDType& cam_state_id,
-      CameraCalibration& cam0,
-      std::vector<float>& anchorPatch_measurement) const;
+      CameraCalibration& cam0) const;
 
 /*
  * @brief projectPixelToPosition uses the calculated pixels
@@ -396,48 +395,41 @@ bool Feature::estimate_FrameIrradiance(
 }
 
-bool Feature::FrameIrradiance(
+bool Feature::VisualizePatch(
     const CAMState& cam_state,
     const StateIDType& cam_state_id,
-    CameraCalibration& cam0,
-    std::vector<float>& anchorPatch_measurement) const
+    CameraCalibration& cam0) const
 {
   // visu - feature
-  /*cv::Mat current_image = cam0.moving_window.find(cam_state_id)->second.image;
+  cv::Mat current_image = cam0.moving_window.find(cam_state_id)->second.image;
   cv::Mat dottedFrame(current_image.size(), CV_8UC3);
   cv::cvtColor(current_image, dottedFrame, CV_GRAY2RGB);
-  */
 
   // project every point in anchorPatch_3d.
-  auto frame = cam0.moving_window.find(cam_state_id)->second.image;
   for(auto point : anchorPatch_3d)
   {
     cv::Point2f p_in_c0 = projectPositionToCamera(cam_state, cam_state_id, cam0, point);
 
     // visu - feature
-    /*cv::Point xs(p_in_c0.x, p_in_c0.y);
+    cv::Point xs(p_in_c0.x, p_in_c0.y);
     cv::Point ys(p_in_c0.x, p_in_c0.y);
     cv::rectangle(dottedFrame, xs, ys, cv::Scalar(0,255,0));
-    */
-
-    float irradiance = PixelIrradiance(p_in_c0, cam0.moving_window.find(cam_state_id)->second.image);
-    anchorPatch_measurement.push_back(irradiance);
-
-    // testing
-    //if(cam_state_id == observations.begin()->first)
-    //if(count++ == 4)
-    //printf("dist:\n \tpos: %f, %f\n\ttrue pos: %f, %f\n\n", p_in_c0.x, p_in_c0.y, anchor_center_pos.x, anchor_center_pos.y);
   }
 
   // visu - feature
-  //cv::resize(dottedFrame, dottedFrame, cv::Size(dottedFrame.cols*0.2, dottedFrame.rows*0.2));
-  /*if(cam0.featureVisu.empty())
+  cv::resize(dottedFrame, dottedFrame, cv::Size(dottedFrame.cols*0.2, dottedFrame.rows*0.2));
+  if(cam0.featureVisu.empty())
     cam0.featureVisu = dottedFrame.clone();
   else
     cv::hconcat(cam0.featureVisu, dottedFrame, cam0.featureVisu);
-  */
 }
 
 float Feature::PixelIrradiance(cv::Point2f pose, cv::Mat image) const
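With the irradiance measurement moved into PhotometricMeasurementJacobian (next file), VisualizePatch remains as a pure debugging aid: it draws every projected anchor-patch point into the current frame and appends the frame to cam0.featureVisu. A hypothetical caller, sketched against the observations map and moving_window used above (not code from this commit):

    // Draw one feature's patch into every frame still held in cam0's
    // moving window, then show the concatenated strip.
    for (const auto& obs : feature.observations) {
      const auto& id = obs.first;
      if (cam0.moving_window.find(id) == cam0.moving_window.end())
        continue;  // image already dropped from the window
      feature.VisualizePatch(state_server.cam_states[id], id, cam0);
    }
    cv::imshow("feature patch", cam0.featureVisu);
    cv::waitKey(1);
    cam0.featureVisu.release();  // start a fresh strip for the next feature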


@@ -190,7 +190,7 @@ bool MsckfVio::loadParameters() {
   // Maximum number of camera states to be stored
   nh.param<int>("max_cam_state_size", max_cam_state_size, 30);
-
+  //cam_state_size = max_cam_state_size;
   ROS_INFO("===========================================");
   ROS_INFO("fixed frame id: %s", fixed_frame_id.c_str());
   ROS_INFO("child frame id: %s", child_frame_id.c_str());
@@ -684,25 +684,6 @@ void MsckfVio::processModel(const double& time,
   // Propagate the state using 4th order Runge-Kutta
   predictNewState(dtime, gyro, acc);
 
-  // Modify the transition matrix
-  Matrix3d R_kk_1 = quaternionToRotation(imu_state.orientation_null);
-  Phi.block<3, 3>(0, 0) =
-    quaternionToRotation(imu_state.orientation) * R_kk_1.transpose();
-
-  Vector3d u = R_kk_1 * IMUState::gravity;
-  RowVector3d s = (u.transpose()*u).inverse() * u.transpose();
-
-  Matrix3d A1 = Phi.block<3, 3>(6, 0);
-  Vector3d w1 = skewSymmetric(
-    imu_state.velocity_null-imu_state.velocity) * IMUState::gravity;
-  Phi.block<3, 3>(6, 0) = A1 - (A1*u-w1)*s;
-
-  Matrix3d A2 = Phi.block<3, 3>(12, 0);
-  Vector3d w2 = skewSymmetric(
-    dtime*imu_state.velocity_null+imu_state.position_null-
-    imu_state.position) * IMUState::gravity;
-  Phi.block<3, 3>(12, 0) = A2 - (A2*u-w2)*s;
-
   // Propagate the state covariance matrix.
   Matrix<double, 21, 21> Q = Phi*G*state_server.continuous_noise_cov*
     G.transpose()*Phi.transpose()*dtime;
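The block deleted above is the observability-constrained modification of the state-transition matrix used by upstream msckf_vio. In equation form, each affected 3x3 block A of \Phi is replaced by

    A^* = A - (A u - w)(u^\top u)^{-1} u^\top, \qquad u = R_{k|k-1} \, g

which enforces A^* u = w exactly and so keeps the gravity-related unobservable directions of the linearized system intact; with the block removed, the integrated \Phi is used as-is.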
@@ -908,6 +889,9 @@ void MsckfVio::PhotometricMeasurementJacobian(
   Matrix3d R_w_c0 = quaternionToRotation(cam_state.orientation);
   const Vector3d& t_c0_w = cam_state.position;
 
+  //temp N
+  const int N = 3;
+
   // Cam1 pose.
   Matrix3d R_c0_c1 = CAMState::T_cam0_cam1.linear();
   Matrix3d R_w_c1 = CAMState::T_cam0_cam1.linear() * R_w_c0;
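The next hunk assembles the photometric Jacobians pixel by pixel via the chain rule. Written out with symbols matching the variable names (reconstructed from the code, not quoted from the author):

    H_{\rho,j} = \frac{\partial I}{\partial h_j}
                 \frac{\partial h_j}{\partial {}^G p_{ij}}
                 \frac{\partial {}^G p_{ij}}{\partial \rho}    % 1 x 1
    H_{pl,j}   = \frac{\partial I}{\partial h_j}
                 \frac{\partial h_j}{\partial x_{pl}}          % 1 x 6
    H_{pA,j}   = \frac{\partial I}{\partial h_j}
                 \frac{\partial h_j}{\partial {}^G p_{ij}}
                 \frac{\partial {}^G p_{ij}}{\partial x_{pA}}  % 1 x 6

Here I is the image irradiance, h_j the pixel location of patch point j, {}^G p_{ij} its world-frame position, \rho the anchor inverse depth, and x_{pl}, x_{pA} the error states of the current and the anchor camera pose; one such row per patch pixel is stacked into the N*N-row matrices H_rho, H_pl and H_pA.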
@@ -922,57 +906,99 @@ void MsckfVio::PhotometricMeasurementJacobian(
   //photometric observation
   std::vector<float> photo_z;
-  feature.FrameIrradiance(cam_state, cam_state_id, cam0, photo_z);
 
+  // individual Jacobians
+  Matrix<double, 1, 2> dI_dhj = Matrix<double, 1, 2>::Zero();
+  Matrix<double, 2, 3> dh_dCpij = Matrix<double, 2, 3>::Zero();
+  Matrix<double, 2, 3> dh_dGpij = Matrix<double, 2, 3>::Zero();
+  Matrix<double, 2, 6> dh_dXplj = Matrix<double, 2, 6>::Zero();
+  Matrix<double, 3, 1> dGpi_drhoj = Matrix<double, 3, 1>::Zero();
+  Matrix<double, 3, 6> dGpi_XpAj = Matrix<double, 3, 6>::Zero();
+
+  // one row of the NxN Jacobians
+  Eigen::Matrix<double, 1, 1> H_rhoj;
+  Eigen::Matrix<double, 1, 6> H_plj;
+  Eigen::Matrix<double, 1, 6> H_pAj;
+
+  // combined Jacobians
+  Eigen::MatrixXd H_rho(N*N, 1);
+  Eigen::MatrixXd H_pl(N*N, 6);
+  Eigen::MatrixXd H_pA(N*N, 6);
+
+  auto frame = cam0.moving_window.find(cam_state_id)->second.image;
+
+  int count = 0;
+  float dx, dy;
+  for (auto point : feature.anchorPatch_3d)
+  {
+    cv::Point2f p_in_c0 = feature.projectPositionToCamera(cam_state, cam_state_id, cam0, point);
+
+    //add observation
+    photo_z.push_back(feature.PixelIrradiance(p_in_c0, frame));
+
+    //add jacobian
+
+    // frame derivative calculated convoluting with kernel [-1, 0, 1]
+    dx = feature.PixelIrradiance(cv::Point2f(p_in_c0.x+1, p_in_c0.y), frame) - feature.PixelIrradiance(cv::Point2f(p_in_c0.x-1, p_in_c0.y), frame);
+    dy = feature.PixelIrradiance(cv::Point2f(p_in_c0.x, p_in_c0.y+1), frame) - feature.PixelIrradiance(cv::Point2f(p_in_c0.x, p_in_c0.y-1), frame);
+    dI_dhj(0, 0) = dx;
+    dI_dhj(0, 1) = dy;
+
+    //dh / d{}^Cp_{ij}
+    dh_dCpij.block<2, 2>(0, 0) = Eigen::Matrix<double, 2, 2>::Identity();
+    dh_dCpij(0, 2) = -(point(0))/(point(2)*point(2));
+    dh_dCpij(1, 2) = -(point(1))/(point(2)*point(2));
+    dh_dGpij = dh_dCpij * quaternionToRotation(cam_state.orientation).transpose();
+
+    //dh / d X_{pl}
+    dh_dXplj.block<2, 3>(0, 0) = dh_dCpij * skewSymmetric(point);
+    dh_dXplj.block<2, 3>(0, 3) = dh_dCpij * -quaternionToRotation(cam_state.orientation).transpose();
+
+    //d{}^Gp_{ij} / d\rho_i
+    double rho = feature.anchor_rho;
+    dGpi_drhoj = feature.T_anchor_w.linear() * Eigen::Vector3d(p_in_c0.x/(rho*rho), p_in_c0.y/(rho*rho), 1/(rho*rho));
+
+    dGpi_XpAj.block<3, 3>(0, 0) = skewSymmetric(Eigen::Vector3d(p_in_c0.x/(rho), p_in_c0.y/(rho), 1/(rho)));
+    dGpi_XpAj.block<3, 3>(0, 3) = Matrix<double, 3, 3>::Identity();
+
+    // intermediate Jacobians
+    H_rhoj = dI_dhj * dh_dGpij * dGpi_drhoj; // 1 x 1
+    H_plj = dI_dhj * dh_dXplj; // 1 x 6
+    H_pAj = dI_dhj * dh_dGpij * dGpi_XpAj; // 1 x 6
+
+    H_rho.block<1, 1>(count, 0) = H_rhoj;
+    H_pl.block<1, 6>(count, 0) = H_plj;
+    H_pA.block<1, 6>(count, 0) = H_pAj;
+
+    count++;
+  }
+
+  // final Jacobians
+  MatrixXd H_xl = MatrixXd::Zero(N*N, 21+state_server.cam_states.size()*7);
+  MatrixXd H_yl = MatrixXd::Zero(N*N, N*N+2);
+
+  auto cam_state_iter = state_server.cam_states.find(feature.observations.begin()->first);
+  int cam_state_cntr = std::distance(state_server.cam_states.begin(), cam_state_iter);
+
+  // set anchor Jacobian
+  H_xl.block<N*N, 6>(0, 21+cam_state_cntr*7) = -H_pA;
+
+  //H_yl
+
+  cam_state_iter = state_server.cam_states.find(cam_state_id);
+  cam_state_cntr = std::distance(state_server.cam_states.begin(), cam_state_iter);
+
+  // set frame Jacobian
+  H_xl.block<N*N, 6>(0, 21+cam_state_cntr*7) = -H_pl;
+  H_xl.block<N*N, 1>(0, 21+cam_state_cntr*7+6) = Eigen::VectorXd::Ones(N*N);
 
   // Convert the feature position from the world frame to
   // the cam0 and cam1 frame.
   Vector3d p_c0 = R_w_c0 * (p_w-t_c0_w);
   Vector3d p_c1 = R_w_c1 * (p_w-t_c1_w);
 
   //compute resulting estimated position in image
   cv::Point2f out_p = cv::Point2f(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2));
   std::vector<cv::Point2f> out_v;
   out_v.push_back(out_p);
 
   // Compute the Jacobians.
   Matrix<double, 4, 3> dz_dpc0 = Matrix<double, 4, 3>::Zero();
   dz_dpc0(0, 0) = 1 / p_c0(2);
   dz_dpc0(1, 1) = 1 / p_c0(2);
   dz_dpc0(0, 2) = -p_c0(0) / (p_c0(2)*p_c0(2));
   dz_dpc0(1, 2) = -p_c0(1) / (p_c0(2)*p_c0(2));
 
   Matrix<double, 4, 3> dz_dpc1 = Matrix<double, 4, 3>::Zero();
   dz_dpc1(2, 0) = 1 / p_c1(2);
   dz_dpc1(3, 1) = 1 / p_c1(2);
   dz_dpc1(2, 2) = -p_c1(0) / (p_c1(2)*p_c1(2));
   dz_dpc1(3, 2) = -p_c1(1) / (p_c1(2)*p_c1(2));
 
   Matrix<double, 3, 6> dpc0_dxc = Matrix<double, 3, 6>::Zero();
   dpc0_dxc.leftCols(3) = skewSymmetric(p_c0);
   dpc0_dxc.rightCols(3) = -R_w_c0;
 
   Matrix<double, 3, 6> dpc1_dxc = Matrix<double, 3, 6>::Zero();
   dpc1_dxc.leftCols(3) = R_c0_c1 * skewSymmetric(p_c0);
   dpc1_dxc.rightCols(3) = -R_w_c1;
 
   Matrix3d dpc0_dpg = R_w_c0;
   Matrix3d dpc1_dpg = R_w_c1;
 
   H_x = dz_dpc0*dpc0_dxc + dz_dpc1*dpc1_dxc;
   H_f = dz_dpc0*dpc0_dpg + dz_dpc1*dpc1_dpg;
 
   // Modify the measurement Jacobian to ensure
   // the observability constraint.
   Matrix<double, 4, 6> A = H_x;
   Matrix<double, 6, 1> u = Matrix<double, 6, 1>::Zero();
   u.block<3, 1>(0, 0) = quaternionToRotation(
       cam_state.orientation_null) * IMUState::gravity;
   u.block<3, 1>(3, 0) = skewSymmetric(
       p_w-cam_state.position_null) * IMUState::gravity;
 
   H_x = A - A*u*(u.transpose()*u).inverse()*u.transpose();
   H_f = -H_x.block<4, 3>(0, 3);
 
   // Compute the residual.
   r = z - Vector4d(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2),
       p_c1(0)/p_c1(2), p_c1(1)/p_c1(2));
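A note on the image gradient in the loop above: the [-1, 0, 1] kernel is a central difference,

    \frac{\partial I}{\partial x}\Big|_{(x,y)} \approx \frac{I(x+1,y) - I(x-1,y)}{2}

but the code keeps the raw difference without the factor 1/2, so dI_dhj, and every Jacobian row built from it, carries an extra factor of 2 that should be normalized away before the filter update.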
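Assuming the 21-dimensional IMU error state and the 7 columns per augmented camera state implied by the 21 + 7*i indexing (6 for the pose plus one extra slot filled with ones), H_xl then has the block layout

    H_xl = [ 0_{N^2 x 21}  ...  -H_pA  ...  -H_pl  1_{N^2 x 1}  ... ]

with -H_pA in the anchor state's columns and -H_pl plus the ones-column in the current state's columns. H_yl, sized N^2 x (N^2 + 2), is allocated but never written, consistent with the commit message: the y part of the photometric update is still missing.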