calculate average of Jacobians and residual to reduce to a single point in calculation - does not produce desired result

Raphael Maenle 2019-05-29 10:38:40 +02:00
parent 8bebf99c37
commit 6e151510cf
3 changed files with 271 additions and 50 deletions
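
The change collapses the N*N per-pixel photometric rows into one averaged row per observation. A minimal standalone sketch of that averaging step, with illustrative helper names that are not part of the codebase (the diff below does the same thing in place via H_xl, H_xAl, H_rhol and residual_v_sum):

#include <Eigen/Dense>
#include <vector>

typedef Eigen::Matrix<double, 1, 6> RowJacobian;

// Sum the N*N per-pixel 1x6 Jacobian rows and divide by the patch size,
// matching H_xl += H_xi; ... H_xl /= (N*N) in the diff.
RowJacobian averagedJacobian(
    const std::vector<RowJacobian,
        Eigen::aligned_allocator<RowJacobian> >& per_pixel_J)
{
  RowJacobian J_avg = RowJacobian::Zero();
  for (const auto& J : per_pixel_J)
    J_avg += J;
  return J_avg / static_cast<double>(per_pixel_J.size());
}

// Average the per-pixel irradiance residuals into a single scalar,
// matching r_l(0) = residual_v_sum / (N*N).
double averagedResidual(const std::vector<double>& per_pixel_r)
{
  double sum = 0.0;
  for (double r : per_pixel_r)
    sum += r;
  return sum / static_cast<double>(per_pixel_r.size());
}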

View File

@@ -915,7 +915,157 @@ bool Feature::initializeAnchor(const CameraCalibration& cam, int N)
is_anchored = true;
return true;
}
/*
bool Feature::initializeRho(const CamStateServer& cam_states) {
// Organize camera poses and feature observations properly.
std::vector<Eigen::Isometry3d,
Eigen::aligned_allocator<Eigen::Isometry3d> > cam_poses(0);
std::vector<Eigen::Vector2d,
Eigen::aligned_allocator<Eigen::Vector2d> > measurements(0);
for (auto& m : observations) {
// TODO: This should be handled properly. Normally, the
// required camera states should all be available in
// the input cam_states buffer.
auto cam_state_iter = cam_states.find(m.first);
if (cam_state_iter == cam_states.end()) continue;
// Add the measurement.
measurements.push_back(m.second.head<2>());
measurements.push_back(m.second.tail<2>());
// This camera pose will take a vector from this camera frame
// to the world frame.
Eigen::Isometry3d cam0_pose;
cam0_pose.linear() = quaternionToRotation(
cam_state_iter->second.orientation).transpose();
cam0_pose.translation() = cam_state_iter->second.position;
Eigen::Isometry3d cam1_pose;
cam1_pose = cam0_pose * CAMState::T_cam0_cam1.inverse();
cam_poses.push_back(cam0_pose);
cam_poses.push_back(cam1_pose);
}
// All camera poses should be modified such that each takes a
// vector from the first camera frame in the buffer to this
// camera frame.
Eigen::Isometry3d T_c0_w = cam_poses[0];
T_anchor_w = T_c0_w;
for (auto& pose : cam_poses)
pose = pose.inverse() * T_c0_w;
// Generate initial guess
Eigen::Vector3d initial_position(0.0, 0.0, 0.0);
generateInitialGuess(cam_poses[cam_poses.size()-1], measurements[0],
measurements[measurements.size()-1], initial_position);
Eigen::Vector3d solution(
initial_position(0)/initial_position(2),
initial_position(1)/initial_position(2),
1.0/initial_position(2));
// Apply the Levenberg-Marquardt method to solve for the 3d position.
double lambda = optimization_config.initial_damping;
int inner_loop_cntr = 0;
int outer_loop_cntr = 0;
bool is_cost_reduced = false;
double delta_norm = 0;
// Compute the initial cost.
double total_cost = 0.0;
for (int i = 0; i < cam_poses.size(); ++i) {
double this_cost = 0.0;
cost(cam_poses[i], solution, measurements[i], this_cost);
total_cost += this_cost;
}
// Outer loop.
do {
Eigen::Matrix3d A = Eigen::Matrix3d::Zero();
Eigen::Vector3d b = Eigen::Vector3d::Zero();
for (int i = 0; i < cam_poses.size(); ++i) {
Eigen::Matrix<double, 2, 3> J;
Eigen::Vector2d r;
double w;
jacobian(cam_poses[i], solution, measurements[i], J, r, w);
if (w == 1) {
A += J.transpose() * J;
b += J.transpose() * r;
} else {
double w_square = w * w;
A += w_square * J.transpose() * J;
b += w_square * J.transpose() * r;
}
}
// Inner loop.
// Solve for the delta that can reduce the total cost.
do {
Eigen::Matrix3d damper = lambda * Eigen::Matrix3d::Identity();
Eigen::Vector3d delta = (A+damper).ldlt().solve(b);
Eigen::Vector3d new_solution = solution - delta;
delta_norm = delta.norm();
double new_cost = 0.0;
for (int i = 0; i < cam_poses.size(); ++i) {
double this_cost = 0.0;
cost(cam_poses[i], new_solution, measurements[i], this_cost);
new_cost += this_cost;
}
if (new_cost < total_cost) {
is_cost_reduced = true;
solution = new_solution;
total_cost = new_cost;
lambda = lambda/10 > 1e-10 ? lambda/10 : 1e-10;
} else {
is_cost_reduced = false;
lambda = lambda*10 < 1e12 ? lambda*10 : 1e12;
}
} while (inner_loop_cntr++ <
optimization_config.inner_loop_max_iteration && !is_cost_reduced);
inner_loop_cntr = 0;
} while (outer_loop_cntr++ <
optimization_config.outer_loop_max_iteration &&
delta_norm > optimization_config.estimation_precision);
// Convert the feature position from inverse depth
// representation to its 3d coordinate.
Eigen::Vector3d final_position(solution(0)/solution(2),
solution(1)/solution(2), 1.0/solution(2));
// Check if the solution is valid. Make sure the feature
// is in front of every camera frame observing it.
bool is_valid_solution = true;
for (const auto& pose : cam_poses) {
Eigen::Vector3d position =
pose.linear()*final_position + pose.translation();
if (position(2) <= 0) {
is_valid_solution = false;
break;
}
}
//save inverse depth distance from camera
anchor_rho = solution(2);
// Convert the feature position to the world frame.
position = T_c0_w.linear()*final_position + T_c0_w.translation();
if (is_valid_solution)
is_initialized = true;
return is_valid_solution;
}
*/
bool Feature::initializePosition(const CamStateServer& cam_states) {
// Organize camera poses and feature observations properly.

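The commented-out initializeRho above optimizes the anchor point in inverse-depth coordinates. As a reference for the conversions it relies on, a small self-contained sketch (helper names are mine, not the project's):

#include <Eigen/Dense>

// (alpha, beta, rho) = (X/Z, Y/Z, 1/Z) in the anchor camera frame;
// anchor_rho in the code above stores rho = 1/Z.
Eigen::Vector3d toInverseDepth(const Eigen::Vector3d& p_anchor)
{
  return Eigen::Vector3d(p_anchor.x() / p_anchor.z(),
                         p_anchor.y() / p_anchor.z(),
                         1.0 / p_anchor.z());
}

// Mirrors final_position(solution(0)/solution(2), solution(1)/solution(2),
// 1.0/solution(2)) in initializeRho.
Eigen::Vector3d fromInverseDepth(const Eigen::Vector3d& s)
{
  return Eigen::Vector3d(s(0) / s(2), s(1) / s(2), 1.0 / s(2));
}
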
View File

@@ -21,10 +21,10 @@
<param name="PHOTOMETRIC" value="true"/>
<!-- Debugging Flags -->
<param name="PrintImages" value="false"/>
<param name="PrintImages" value="true"/>
<param name="GroundTruth" value="false"/>
<param name="patch_size_n" value="3"/>
<param name="patch_size_n" value="7"/>
<!-- Calibration parameters -->
<rosparam command="load" file="$(arg calibration_file)"/>

View File

@@ -1233,11 +1233,17 @@ void MsckfVio::PhotometricMeasurementJacobian(
const Vector3d& t_c0_w = cam_state.position;
// one line of the NxN Jacobians
Matrix<double, 1, 1> H_rho;
Matrix<double, 1, 6> H_xi;
Matrix<double, 1, 6> H_xA;
/*
MatrixXd H_xl = MatrixXd::Zero(feature.anchorPatch_3d.size(), 6);
MatrixXd H_xAl = MatrixXd::Zero(feature.anchorPatch_3d.size(), 6);
MatrixXd H_rhol = MatrixXd::Zero(feature.anchorPatch_3d.size(), 1);
*/
MatrixXd H_xl = MatrixXd::Zero(1, 6);
MatrixXd H_xAl = MatrixXd::Zero(1, 6);
MatrixXd H_rhol = MatrixXd::Zero(1, 1);
auto frame = cam0.moving_window.find(cam_state_id)->second.image;
@@ -1252,25 +1258,35 @@ void MsckfVio::PhotometricMeasurementJacobian(
cv::Point2f residualVector(0,0);
double res_sum = 0;
// get patch around feature in current image
cv::Point2f p_f(feature.observations.find(cam_state_id)->second(0),feature.observations.find(cam_state_id)->second(1));
// move to real pixels
p_f = image_handler::distortPoint(p_f, cam0.intrinsics, cam0.distortion_model, cam0.distortion_coeffs);
std::vector<double> cur_image_irr;
std::vector<cv::Point2f> cur_image_p;
for(int i = 0; i<N; i++)
{
for(int j = 0; j<N ; j++)
{
cur_image_p.push_back(cv::Point2f(p_f.x + (i-(N-1)/2), p_f.y + (j-(N-1)/2)));
cur_image_irr.push_back(feature.PixelIrradiance(cv::Point2f(p_f.x + (i-(N-1)/2), p_f.y + (j-(N-1)/2)), frame));
}
}
for (auto point : feature.anchorPatch_3d)
{
Eigen::Vector3d p_c0 = R_w_c0 * (point-t_c0_w);
cv::Point2f p_in_c0 = feature.projectPositionToCamera(cam_state, cam_state_id, cam0, point);
cv::Point2f p_in_anchor = feature.projectPositionToCamera(anchor_state, anchor_state_id, cam0, point);
Matrix<double, 1, 2> dI_dhj = Matrix<double, 1, 2>::Zero();
// add jacobians
cv::Point2f pixelDistance = feature.pixelDistanceAt(anchor_state, anchor_state_id, cam0, point);
std::cout << "pixelDistance: \n" << pixelDistance << std::endl;
cv::Point2f pixelDistance = feature.pixelDistanceAt(cam_state, cam_state_id, cam0, point);
// calculate derivative for anchor frame, use position for derivative calculation
// frame derivative calculated by convolving with kernel [-1, 0, 1]
dx = feature.PixelIrradiance(cv::Point2f(p_in_anchor.x+1, p_in_anchor.y), frame) - feature.PixelIrradiance(cv::Point2f(p_in_anchor.x-1, p_in_anchor.y), frame);
dy = feature.PixelIrradiance(cv::Point2f(p_in_anchor.x, p_in_anchor.y+1), frame) - feature.PixelIrradiance(cv::Point2f(p_in_anchor.x, p_in_anchor.y-1), frame);
dI_dhj(0, 0) = dx/pixelDistance.x;
dI_dhj(0, 1) = dy/pixelDistance.y;
Matrix<double, 1, 2> dI_dhj = Matrix<double, 1, 2>::Zero();
dx = feature.PixelIrradiance(cv::Point2f(cur_image_p[count].x+1, cur_image_p[count].y), frame) - feature.PixelIrradiance(cv::Point2f(cur_image_p[count].x-1, cur_image_p[count].y), frame);
dy = feature.PixelIrradiance(cv::Point2f(cur_image_p[count].x, cur_image_p[count].y+1), frame) - feature.PixelIrradiance(cv::Point2f(cur_image_p[count].x, cur_image_p[count].y-1), frame);
dI_dhj(0, 0) = (double)dx/pixelDistance.x;
dI_dhj(0, 1) = (double)dy/pixelDistance.y;
// Compute the Jacobians.
Matrix<double, 2, 3> dz_dpc0 = Matrix<double, 2, 3>::Zero();
@@ -1285,8 +1301,6 @@ void MsckfVio::PhotometricMeasurementJacobian(
dpc0_dxc.leftCols(3) = skewSymmetric(p_c0);
dpc0_dxc.rightCols(3) = -R_w_c0;
H_xi = dI_dhj*dz_dpc0*dpc0_dxc;
Matrix3d dCpij_dGpij = quaternionToRotation(cam_state.orientation);
//orientation takes camera frame to world frame, we wa
@@ -1302,11 +1316,24 @@ void MsckfVio::PhotometricMeasurementJacobian(
1/(rho)));
dGpj_XpAj.block<3, 3>(0, 3) = Matrix<double, 3, 3>::Identity();
H_xA = dI_dhj*dh_dGpij*dGpj_XpAj;
// Isometry T_anchor_w takes a vector in anchor frame to world frame
Matrix<double, 3, 1> dGpj_drhoj = Matrix<double, 3, 1>::Zero();
dGpj_drhoj = -feature.T_anchor_w.linear() * Eigen::Vector3d(feature.anchorPatch_ideal[count].x/(rho*rho), feature.anchorPatch_ideal[count].y/(rho*rho), 1/(rho*rho));
H_xi = dI_dhj * dz_dpc0 * dpc0_dxc;
H_xA = dI_dhj * dh_dGpij * dGpj_XpAj;
H_rho = dI_dhj * dh_dGpij * dGpj_drhoj; // 1 x 1
/*
H_xl.block<1, 6>(count, 0) = H_xi;
H_xAl.block<1, 6>(count, 0) = H_xA;
H_rhol.block<1, 1>(count, 0) = H_rho;
*/
H_xl += H_xi;
H_xAl += H_xA;
H_rhol += H_rho;
count++;
}
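For orientation, the single row accumulated per pixel in the loop above is the chain of the partials computed in the code; writing out the dimensions (informal notation, matching the variable names, where H_x here is the per-pixel H_xi):

\[
H_{x}    = \frac{\partial I}{\partial h}\,\frac{\partial z}{\partial p_{c0}}\,\frac{\partial p_{c0}}{\partial x_{c}}
           \;\;(1\times 2)(2\times 3)(3\times 6),\qquad
H_{xA}   = \frac{\partial I}{\partial h}\,\frac{\partial h}{\partial {}^{G}p}\,\frac{\partial {}^{G}p}{\partial x_{A}}
           \;\;(1\times 2)(2\times 3)(3\times 6),\qquad
H_{\rho} = \frac{\partial I}{\partial h}\,\frac{\partial h}{\partial {}^{G}p}\,\frac{\partial {}^{G}p}{\partial \rho}
           \;\;(1\times 2)(2\times 3)(3\times 1).
\]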
@@ -1317,21 +1344,50 @@ void MsckfVio::PhotometricMeasurementJacobian(
cv::Point2f p_in_c0 = feature.projectPositionToCamera(cam_state, cam_state_id, cam0, point);
projectionPatch.push_back(feature.PixelIrradiance(p_in_c0, frame));
}
Eigen::VectorXd r_l = Eigen::VectorXd::Zero(count);
//Eigen::VectorXd r_l = Eigen::VectorXd::Zero(count);
Eigen::VectorXd r_l = Eigen::VectorXd::Zero(1);
std::vector<double> residual_v;
for(int i = 0; i < r_l.size(); i++)
double residual_v_sum = 0;
for(int i = 0; i < N*N; i++)
{
residual_v.push_back(projectionPatch[i]- feature.anchorPatch[i]);
r_l(i) = projectionPatch[i] - feature.anchorPatch[i];
residual_v_sum += cur_image_irr[i] - projectionPatch[i];
residual_v.push_back(cur_image_irr[i] - projectionPatch[i]);
//r_l(i) = cur_image_irr[i] - projectionPatch[i];
}
r = r_l;
H_x = H_xl;
H_y = H_xAl;
r_l(0) = residual_v_sum/(N*N);
H_xl /=(N*N);
H_xAl /= (N*N);
H_rhol /= (N*N);
cout << "dI/dxl\n" << H_xl << endl;
cout << "dI/dAl\n" << H_xAl << endl;
cout << "dI/drhol\n" << H_rhol << endl;
r = r_l;
MatrixXd H_xt = MatrixXd::Zero(1, 21+state_server.cam_states.size()*7);
// set anchor Jacobian
// get position of anchor in cam states
auto cam_state_anchor = state_server.cam_states.find(feature.observations.begin()->first);
int cam_state_cntr_anchor = std::distance(state_server.cam_states.begin(), cam_state_anchor);
//H_xt.block(0, 21+cam_state_cntr_anchor*7, N*N, 6) = H_xAl;
H_xt.block(0, 21+cam_state_cntr_anchor*7, 1, 6) = H_xAl;
// set frame Jacobian
//get position of current frame in cam states
auto cam_state_iter = state_server.cam_states.find(cam_state_id);
int cam_state_cntr = std::distance(state_server.cam_states.begin(), cam_state_iter);
//H_xt.block(0, 21+cam_state_cntr*7, N*N, 6) = H_xl;
H_xt.block(0, 21+cam_state_cntr*7, 1, 6) = H_xl;
H_x = H_xt;
H_y = H_rhol;
std::stringstream ss;
ss << "INFO:"; // << " anchor: " << cam_state_cntr_anchor << " frame: " << cam_state_cntr;
@@ -1384,15 +1440,18 @@ void MsckfVio::PhotometricFeatureJacobian(
if (feature.observations.find(cam_id) ==
feature.observations.end()) continue;
valid_cam_state_ids.push_back(cam_id);
// if not anchor frame
if (cam_id != feature.observations.begin()->first)
valid_cam_state_ids.push_back(cam_id);
}
int jacobian_row_size = 0;
jacobian_row_size = N * N * valid_cam_state_ids.size();
//jacobian_row_size = N * N * valid_cam_state_ids.size();
jacobian_row_size = valid_cam_state_ids.size();
MatrixXd H_xi = MatrixXd::Zero(jacobian_row_size,
21+state_server.cam_states.size()*7);
MatrixXd H_yi = MatrixXd::Zero(jacobian_row_size, N*N+state_server.cam_states.size()+1);
MatrixXd H_yi = MatrixXd::Zero(jacobian_row_size, 1);
VectorXd r_i = VectorXd::Zero(jacobian_row_size);
int stack_cntr = 0;
@@ -1413,24 +1472,29 @@ void MsckfVio::PhotometricFeatureJacobian(
// Stack the Jacobians.
H_xi.block(stack_cntr, 21+cam_state_cntr_anchor*7, H_yl.rows(), H_yl.cols()) = H_yl;
H_xi.block(stack_cntr, 21+cam_state_cntr*7, H_xl.rows(), H_xl.cols()) = -H_xl;
H_xi.block(stack_cntr, 0, H_xl.rows(), H_xl.cols()) = H_xl;
H_yi.block(stack_cntr,0, H_yl.rows(), H_yl.cols()) = H_yl;
r_i.segment(stack_cntr, N*N) = r_l;
stack_cntr += N*N;
//r_i.segment(stack_cntr, N*N) = r_l;
//stack_cntr += N*N;
r_i.segment(stack_cntr, 1) = r_l;
stack_cntr += 1;
}
H_x = H_xi;
r = r_i;
ofstream myfile;
myfile.open ("/home/raphael/dev/MSCKF_ws/log.txt");
myfile << "Hx\n" << H_x << "r\n" << r << "\n x\n" << H_x.colPivHouseholderQr().solve(r) << endl;
myfile.close();
cout << "---------- LOGGED -------- " << endl;
FullPivLU<MatrixXd> lu(H_yi.transpose());
MatrixXd A = lu.kernel();
H_x = A.transpose() * H_xi;
r = A.transpose() * r_i;
if(PRINTIMAGES)
{
ofstream myfile;
myfile.open ("/home/raphael/dev/MSCKF_ws/log.txt");
myfile << "Hyi\n" << H_yi << "Hxi\n" << H_xi << "Hx\n" << H_x << "r\n" << r << "\n x\n" << H_x.colPivHouseholderQr().solve(r) << endl;
myfile.close();
cout << "---------- LOGGED -------- " << endl;
std::cout << "resume playback" << std::endl;
nh.setParam("/play_bag", true);
}
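The FullPivLU block above is the standard MSCKF left null-space projection that removes the feature-dependent part H_yi before the update. Restated as a self-contained helper (the function name is mine; the Eigen calls are the same as in the diff):

#include <Eigen/Dense>

void projectOutFeature(const Eigen::MatrixXd& H_xi,
                       const Eigen::MatrixXd& H_yi,
                       const Eigen::VectorXd& r_i,
                       Eigen::MatrixXd& H_x,
                       Eigen::VectorXd& r)
{
  // Columns of kernel(H_yi^T) form a basis A of the left null space of H_yi,
  // so A^T * H_yi = 0 and the feature term drops out of A^T * (H_xi*x + H_yi*y + n).
  Eigen::FullPivLU<Eigen::MatrixXd> lu(H_yi.transpose());
  Eigen::MatrixXd A = lu.kernel();
  H_x = A.transpose() * H_xi;
  r   = A.transpose() * r_i;
}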
@@ -1516,6 +1580,8 @@ void MsckfVio::featureJacobian(
const std::vector<StateIDType>& cam_state_ids,
MatrixXd& H_x, VectorXd& r)
{
if(PRINTIMAGES)
nh.setParam("/play_bag", false);
const auto& feature = map_server[feature_id];
@@ -1578,11 +1644,17 @@ void MsckfVio::featureJacobian(
r = A.transpose() * r_j;
ofstream myfile;
myfile.open ("/home/raphael/dev/MSCKF_ws/log.txt");
myfile << "Hx\n" << H_x << "r\n" << r << "from residual estimated error state: " << H_x.ldlt().solve(r) << endl;
myfile.close();
cout << "---------- LOGGED -------- " << endl;
if(PRINTIMAGES)
{
ofstream myfile;
myfile.open ("/home/raphael/dev/MSCKF_ws/log.txt");
myfile << H_x.colPivHouseholderQr().solve(r) << endl;
myfile.close();
cout << "---------- LOGGED -------- " << endl;
std::cout << "resume playback" << std::endl;
nh.setParam("/play_bag", true);
}
return;
}
@@ -1696,7 +1768,6 @@ void MsckfVio::measurementUpdate(const MatrixXd& H, const VectorXd& r) {
}
bool MsckfVio::gatingTest(const MatrixXd& H, const VectorXd& r, const int& dof) {
return true;
MatrixXd P1 = H * state_server.state_cov * H.transpose();
@@ -1806,7 +1877,7 @@ void MsckfVio::removeLostFeatures() {
featureJacobian(feature.id, cam_state_ids, H_xj, r_j);
if (gatingTest(H_xj, r_j, r_j.size()-1)){ //cam_state_ids.size()-1)) {
if (gatingTest(H_xj, r_j, state_server.cam_states.size() - 1)){ //cam_state_ids.size()-1)) {
H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj;
r.segment(stack_cntr, r_j.rows()) = r_j;
stack_cntr += H_xj.rows();
@@ -1968,7 +2039,7 @@ void MsckfVio::pruneCamStateBuffer() {
else
featureJacobian(feature.id, involved_cam_state_ids, H_xj, r_j);
if (gatingTest(H_xj, r_j, r_j.size()-1)) { //involved_cam_state_ids.size())) {
if (gatingTest(H_xj, r_j, involved_cam_state_ids.size() - 1)) { //involved_cam_state_ids.size())) {
H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj;
r.segment(stack_cntr, r_j.rows()) = r_j;
stack_cntr += H_xj.rows();
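
For context on the changed gatingTest arguments above: the test compares the Mahalanobis distance of the stacked residual against a chi-square threshold for the given degrees of freedom. A hedged sketch of that check, assuming an observation-noise variance sigma2 and a precomputed chi-square table (both are assumptions here, not taken from this diff):

#include <Eigen/Dense>
#include <map>

bool passesChiSquareGate(const Eigen::MatrixXd& H,
                         const Eigen::VectorXd& r,
                         const Eigen::MatrixXd& P,   // state covariance
                         double sigma2,              // observation noise variance
                         const std::map<int, double>& chi2_table,
                         int dof)
{
  Eigen::MatrixXd S = H * P * H.transpose() +
      sigma2 * Eigen::MatrixXd::Identity(H.rows(), H.rows());
  Eigen::VectorXd Sr = S.ldlt().solve(r);
  double gamma = r.dot(Sr);                // Mahalanobis distance
  return gamma < chi2_table.at(dof);
}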