deactivated most to find reason for slowdown

parent 6ae83f0db7
commit f4a17f8512
@@ -157,12 +157,19 @@ struct Feature {
   *
   */
 
-  bool IrradianceOfAnchorPatch(
+  bool estimate_FrameIrradiance(
     const CAMState& cam_state,
     const StateIDType& cam_state_id,
-    const CameraCalibration& cam,
+    const CameraCalibration& cam0,
     const movingWindow& cam0_moving_window,
-    std::vector<uint8_t>& anchorPatch_measurement) const;
+    std::vector<float>& anchorPatch_estimate) const;
 
+  bool FrameIrradiance(
+    const CAMState& cam_state,
+    const StateIDType& cam_state_id,
+    const CameraCalibration& cam0,
+    const movingWindow& cam0_moving_window,
+    std::vector<float>& anchorPatch_measurement) const;
+
 /*
  * @brief projectPixelToPosition uses the calcualted pixels
@@ -175,7 +182,7 @@ inline Eigen::Vector3d projectPixelToPosition(cv::Point2f in_p,
  * @brief Irradiance returns irradiance value of a pixel
  */
 
-  inline uint8_t Irradiance(cv::Point2f pose, cv::Mat image) const;
+  inline float PixelIrradiance(cv::Point2f pose, cv::Mat image) const;
 
   // An unique identifier for the feature.
   // In case of long time running, the variable
@@ -350,25 +357,56 @@ bool Feature::checkMotion(
   else return false;
 }
 
-bool Feature::IrradianceOfAnchorPatch(
+bool Feature::estimate_FrameIrradiance(
     const CAMState& cam_state,
     const StateIDType& cam_state_id,
-    const CameraCalibration& cam,
+    const CameraCalibration& cam0,
     const movingWindow& cam0_moving_window,
-    std::vector<uint8_t>& anchorPatch_measurement) const
+    std::vector<float>& anchorPatch_estimate) const
+{
+  // get irradiance of patch in anchor frame
+  // subtract estimated b and divide by a of anchor frame
+  // muliply by a and add b of this frame
+
+  auto anchor = observations.begin();
+  if(cam0_moving_window.find(anchor->first) == cam0_moving_window.end())
+    return false;
+
+  double anchorExposureTime_ms = cam0_moving_window.find(anchor->first)->second.exposureTime_ms;
+  double frameExposureTime_ms = cam0_moving_window.find(cam_state_id)->second.exposureTime_ms;
+
+
+  double a_A = anchorExposureTime_ms;
+  double b_A = 0;
+  double a_l = frameExposureTime_ms;
+  double b_l = 0;
+  for (double anchorPixel : anchorPatch)
+  {
+    float irradiance = ((anchorPixel - b_A) / a_A ) * a_l - b_l;
+    anchorPatch_estimate.push_back(irradiance);
+  }
+
+}
+
+bool Feature::FrameIrradiance(
+    const CAMState& cam_state,
+    const StateIDType& cam_state_id,
+    const CameraCalibration& cam0,
+    const movingWindow& cam0_moving_window,
+    std::vector<float>& anchorPatch_measurement) const
 {
   //project every point in anchorPatch_3d.
   for (auto point : anchorPatch_3d)
   {
-    cv::Point2f p_in_c0 = projectPositionToCamera(cam_state, cam_state_id, cam, point);
-    uint8_t irradiance = Irradiance(p_in_c0 , cam0_moving_window.find(cam_state_id)->second.image);
+    cv::Point2f p_in_c0 = projectPositionToCamera(cam_state, cam_state_id, cam0, point);
+    float irradiance = PixelIrradiance(p_in_c0, cam0_moving_window.find(cam_state_id)->second.image);
     anchorPatch_measurement.push_back(irradiance);
   }
 }
 
-uint8_t Feature::Irradiance(cv::Point2f pose, cv::Mat image) const
+float Feature::PixelIrradiance(cv::Point2f pose, cv::Mat image) const
 {
-  return image.at<uint8_t>(pose.x, pose.y);
+  return (float)image.at<uint8_t>(pose.x, pose.y);
 }
 
 cv::Point2f Feature::projectPositionToCamera(
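The new Feature::estimate_FrameIrradiance transfers the stored anchor-patch intensities into the current frame with an affine brightness model, using the two exposure times as the gain terms and zero offsets. A minimal standalone sketch of that transfer, assuming hypothetical names (transferIrradiance, exposure_anchor_ms, exposure_frame_ms) that are not part of the commit:

    #include <vector>

    // Sketch only: affine brightness transfer I_est = ((I_A - b_A) / a_A) * a_l - b_l,
    // with the exposure times standing in for the gains and both offsets set to zero,
    // mirroring estimate_FrameIrradiance above.
    std::vector<float> transferIrradiance(const std::vector<double>& anchor_patch,
                                          double exposure_anchor_ms,
                                          double exposure_frame_ms)
    {
      const double a_A = exposure_anchor_ms, b_A = 0.0;  // anchor frame gain/offset
      const double a_l = exposure_frame_ms,  b_l = 0.0;  // current frame gain/offset
      std::vector<float> estimate;
      estimate.reserve(anchor_patch.size());
      for (double pixel : anchor_patch)
        estimate.push_back(static_cast<float>(((pixel - b_A) / a_A) * a_l - b_l));
      return estimate;
    }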
@@ -389,7 +427,10 @@ cv::Point2f Feature::projectPositionToCamera(
   out_p = cv::Point2f(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2));
   std::vector<cv::Point2f> out_v;
   out_v.push_back(out_p);
-  std::vector<cv::Point2f> my_p = image_handler::distortPoints(out_v, cam.intrinsics, cam.distortion_model, cam.distortion_coeffs);
+  std::vector<cv::Point2f> my_p = image_handler::distortPoints(out_v,
+                                                               cam.intrinsics,
+                                                               cam.distortion_model,
+                                                               cam.distortion_coeffs);
 
   // printf("truPosition: %f, %f, %f\n", position.x(), position.y(), position.z());
   // printf("camPosition: %f, %f, %f\n", p_c0(0), p_c0(1), p_c0(2));
@@ -418,6 +459,8 @@ bool Feature::initializeAnchor(
     const CameraCalibration& cam)
 {
 
+  //initialize patch Size
+  //TODO make N size a ros parameter
   int N = 3;
   int n = (int)(N-1)/2;
 
@@ -428,25 +471,32 @@ bool Feature::initializeAnchor(
   cv::Mat anchorImage = cam0_moving_window.find(anchor->first)->second.image;
   auto u = anchor->second(0)*cam.intrinsics[0] + cam.intrinsics[2];
   auto v = anchor->second(1)*cam.intrinsics[1] + cam.intrinsics[3];
-  int count = 0;
 
-  //go through surrounding pixels
+  //for NxN patch pixels around feature
   for(double u_run = u - n; u_run <= u + n; u_run = u_run + 1)
   {
     for(double v_run = v - n; v_run <= v + n; v_run = v_run + 1)
     {
-      anchorPatch.push_back(anchorImage.at<uint8_t>((int)u_run,(int)v_run));
-      cv::Point2f currentPoint((u_run-cam.intrinsics[2])/cam.intrinsics[0], (v_run-cam.intrinsics[3])/cam.intrinsics[1]);
+      // add irradiance information
+      anchorPatch.push_back((double)anchorImage.at<uint8_t>((int)u_run,(int)v_run));
+
+      // project patch pixel to 3D space
+      auto intr = cam.intrinsics;
+      cv::Point2f currentPoint((u_run-intr[2])/intr[0], (v_run-intr[3])/intr[1]);
       Eigen::Vector3d Npose = projectPixelToPosition(currentPoint, cam);
+
+      //save position
       anchorPatch_3d.push_back(Npose);
     }
   }
 
+  //TODO test if NxN patch can be selected
   return true;
 }
 
 bool Feature::initializePosition(
     const CamStateServer& cam_states) {
 
   // Organize camera poses and feature observations properly.
   std::vector<Eigen::Isometry3d,
       Eigen::aligned_allocator<Eigen::Isometry3d> > cam_poses(0);
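The anchor initialization above walks an NxN window around the projected anchor observation, stores the raw intensities, and back-projects each normalized patch coordinate to a 3D position. A compact standalone sketch of that traversal, with a caller-supplied back-projection callback in place of projectPixelToPosition (names here are illustrative, not from the commit):

    #include <opencv2/core.hpp>
    #include <functional>
    #include <vector>

    // Sketch only: collect an N x N patch of intensities around pixel (u, v) and
    // hand each normalized patch coordinate to a back-projection callback.
    // Indexing mirrors the commit; note that cv::Mat::at expects (row, col).
    void collectAnchorPatch(const cv::Mat& image, double u, double v, int N,
                            const std::vector<double>& intrinsics,  // fx, fy, cx, cy
                            std::vector<double>& patch,
                            const std::function<void(const cv::Point2f&)>& backProject)
    {
      const int n = (N - 1) / 2;
      for (double u_run = u - n; u_run <= u + n; u_run += 1.0) {
        for (double v_run = v - n; v_run <= v + n; v_run += 1.0) {
          patch.push_back(static_cast<double>(image.at<uint8_t>((int)u_run, (int)v_run)));
          cv::Point2f normalized((u_run - intrinsics[2]) / intrinsics[0],
                                 (v_run - intrinsics[3]) / intrinsics[1]);
          backProject(normalized);  // e.g. projectPixelToPosition in the commit
        }
      }
    }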
@@ -344,7 +344,7 @@ void MsckfVio::imageCallback(
 
   // Add new images to moving window
   start_time = ros::Time::now();
-  manageMovingWindow(cam0_img, cam1_img, feature_msg);
+  //manageMovingWindow(cam0_img, cam1_img, feature_msg);
   double manage_moving_window_time = (
       ros::Time::now()-start_time).toSec();
 
@@ -373,20 +373,20 @@ void MsckfVio::imageCallback(
       processing_end_time - processing_start_time;
   if (processing_time > 1.0/frame_rate) {
     ++critical_time_cntr;
-    //ROS_INFO("\033[1;31mTotal processing time %f/%d...\033[0m",
-    //    processing_time, critical_time_cntr);
-    //printf("IMU processing time: %f/%f\n",
-    //    imu_processing_time, imu_processing_time/processing_time);
-    //printf("State augmentation time: %f/%f\n",
-    //    state_augmentation_time, state_augmentation_time/processing_time);
-    //printf("Add observations time: %f/%f\n",
-    //    add_observations_time, add_observations_time/processing_time);
-    //printf("Remove lost features time: %f/%f\n",
-    //    remove_lost_features_time, remove_lost_features_time/processing_time);
-    //printf("Remove camera states time: %f/%f\n",
-    //    prune_cam_states_time, prune_cam_states_time/processing_time);
-    //printf("Publish time: %f/%f\n",
-    //    publish_time, publish_time/processing_time);
+    ROS_INFO("\033[1;31mTotal processing time %f/%d...\033[0m",
+        processing_time, critical_time_cntr);
+    printf("IMU processing time: %f/%f\n",
+        imu_processing_time, imu_processing_time/processing_time);
+    printf("State augmentation time: %f/%f\n",
+        state_augmentation_time, state_augmentation_time/processing_time);
+    printf("Add observations time: %f/%f\n",
+        add_observations_time, add_observations_time/processing_time);
+    printf("Remove lost features time: %f/%f\n",
+        remove_lost_features_time, remove_lost_features_time/processing_time);
+    printf("Remove camera states time: %f/%f\n",
+        prune_cam_states_time, prune_cam_states_time/processing_time);
+    printf("Publish time: %f/%f\n",
+        publish_time, publish_time/processing_time);
   }
 
   return;
@@ -400,7 +400,15 @@ void MsckfVio::manageMovingWindow(
   //save exposure Time into moving window
   cam0_moving_window[state_server.imu_state.id].exposureTime_ms = strtod(cam0_img->header.frame_id.data(), NULL) / 1000;
   cam1_moving_window[state_server.imu_state.id].exposureTime_ms = strtod(cam1_img->header.frame_id.data(), NULL) / 1000;
-  printf("exposure: %f\n", cam0_moving_window[state_server.imu_state.id].exposureTime_ms);
+  if(cam0_moving_window[state_server.imu_state.id].exposureTime_ms < 1)
+    cam0_moving_window[state_server.imu_state.id].exposureTime_ms = 1;
+  if(cam1_moving_window[state_server.imu_state.id].exposureTime_ms < 1)
+    cam1_moving_window[state_server.imu_state.id].exposureTime_ms = 1;
+  if(cam0_moving_window[state_server.imu_state.id].exposureTime_ms > 500)
+    cam0_moving_window[state_server.imu_state.id].exposureTime_ms = 500;
+  if(cam1_moving_window[state_server.imu_state.id].exposureTime_ms > 500)
+    cam1_moving_window[state_server.imu_state.id].exposureTime_ms = 500;
+
   // Get the current image.
   cv_bridge::CvImageConstPtr cam0_img_ptr = cv_bridge::toCvShare(cam0_img,
       sensor_msgs::image_encodings::MONO8);
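The added bounds checks pin the parsed exposure time into [1, 500] ms for both cameras before it is stored in the moving window. The same guard can be written once as a small helper; a hedged sketch (std::clamp requires C++17, and clampExposure is a hypothetical name, not from the commit):

    #include <algorithm>  // std::clamp, C++17

    // Sketch only: keep a parsed exposure time within the bounds the commit enforces
    // with explicit if-statements for each camera.
    inline double clampExposure(double exposure_ms)
    {
      return std::clamp(exposure_ms, 1.0, 500.0);
    }

    // hypothetical usage: window.exposureTime_ms = clampExposure(window.exposureTime_ms);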
@@ -908,8 +916,8 @@ void MsckfVio::PhotometricMeasurementJacobian(
   const Vector4d& z = feature.observations.find(cam_state_id)->second;
 
   //photometric observation
-  std::vector<uint8_t> photo_z;
-  feature.IrradianceOfAnchorPatch(cam_state, cam_state_id, cam0, cam0_moving_window, photo_z);
+  std::vector<float> photo_z;
+  //feature.FrameIrradiance(cam_state, cam_state_id, cam0, cam0_moving_window, photo_z);
 
   // Convert the feature position from the world frame to
   // the cam0 and cam1 frame.
@@ -973,6 +981,20 @@ void MsckfVio::PhotometricMeasurementJacobian(
   r = z - Vector4d(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2),
       p_c1(0)/p_c1(2), p_c1(1)/p_c1(2));
 
+  //estimate photometric measurement
+  std::vector<float> estimate_photo_z;
+  //feature.estimate_FrameIrradiance(cam_state, cam_state_id, cam0, cam0_moving_window, estimate_photo_z);
+  std::vector<float> photo_r;
+
+  //calculate photom. residual
+  //for(int i = 0; i < photo_z.size(); i++)
+  //  photo_r.push_back(photo_z[i] - estimate_photo_z[i]);
+
+  // printf("-----\n");
+  // for(int i = 0; i < photo_z.size(); i++)
+  //   printf("%.4f - %.4f\n", photo_z[i], estimate_photo_z[i]);
+
+  photo_z.clear();
   return;
 }
 
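The block added here stays commented out, but it sketches the intended photometric residual: an element-wise difference between the measured patch irradiance photo_z and the estimate estimate_photo_z. A minimal standalone version of that step, assuming both vectors have the same length (the function name is illustrative):

    #include <cstddef>
    #include <vector>

    // Sketch only: photo_r[i] = photo_z[i] - estimate_photo_z[i], matching the loop
    // the commit leaves commented out.
    std::vector<float> photometricResidual(const std::vector<float>& photo_z,
                                           const std::vector<float>& estimate_photo_z)
    {
      std::vector<float> photo_r;
      photo_r.reserve(photo_z.size());
      for (std::size_t i = 0; i < photo_z.size(); ++i)
        photo_r.push_back(photo_z[i] - estimate_photo_z[i]);
      return photo_r;
    }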
@@ -1002,6 +1024,7 @@ void MsckfVio::PhotometricFeatureJacobian(
   VectorXd r_j = VectorXd::Zero(jacobian_row_size);
   int stack_cntr = 0;
 
+
   for (const auto& cam_id : valid_cam_state_ids) {
 
     Matrix<double, 4, 6> H_xi = Matrix<double, 4, 6>::Zero();
@@ -1318,12 +1341,11 @@ void MsckfVio::removeLostFeatures() {
         continue;
       }
     }
-    }
-
-    if(!feature.initializeAnchor(cam0_moving_window, cam0))
-    {
-      invalid_feature_ids.push_back(feature.id);
-      continue;
+    /*if(!feature.initializeAnchor(cam0_moving_window, cam0))
+    {
+      invalid_feature_ids.push_back(feature.id);
+      continue;
+    }*/
     }
 
     jacobian_row_size += 4*feature.observations.size() - 3;
@@ -1472,13 +1494,12 @@ void MsckfVio::pruneCamStateBuffer() {
         continue;
      }
     }
-    }
-
-    if(!feature.initializeAnchor(cam0_moving_window, cam0))
-    {
-      for (const auto& cam_id : involved_cam_state_ids)
-        feature.observations.erase(cam_id);
-      continue;
+    /*if(!feature.initializeAnchor(cam0_moving_window, cam0))
+    {
+      for (const auto& cam_id : involved_cam_state_ids)
+        feature.observations.erase(cam_id);
+      continue;
+    }*/
     }
 
     jacobian_row_size += 4*involved_cam_state_ids.size() - 3;
@@ -1491,7 +1512,7 @@ void MsckfVio::pruneCamStateBuffer() {
       21+6*state_server.cam_states.size());
   VectorXd r = VectorXd::Zero(jacobian_row_size);
   int stack_cntr = 0;
-
+  ros::Time start_time = ros::Time::now();
   for (auto& item : map_server) {
     auto& feature = item.second;
     // Check how many camera states to be removed are associated
@@ -1507,8 +1528,8 @@ void MsckfVio::pruneCamStateBuffer() {
 
     MatrixXd H_xj;
     VectorXd r_j;
 
     PhotometricFeatureJacobian(feature.id, involved_cam_state_ids, H_xj, r_j);
 
     if (gatingTest(H_xj, r_j, involved_cam_state_ids.size())) {
       H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj;
       r.segment(stack_cntr, r_j.rows()) = r_j;
@@ -1518,13 +1539,15 @@ void MsckfVio::pruneCamStateBuffer() {
       for (const auto& cam_id : involved_cam_state_ids)
         feature.observations.erase(cam_id);
   }
+  double anchorPrune_processing_time = (ros::Time::now()-start_time).toSec();
+  printf("FeatureJacobian Time: %f\n", anchorPrune_processing_time);
 
   H_x.conservativeResize(stack_cntr, H_x.cols());
   r.conservativeResize(stack_cntr);
 
   // Perform measurement update.
   measurementUpdate(H_x, r);
 
   for (const auto& cam_id : rm_cam_state_ids) {
     int cam_sequence = std::distance(state_server.cam_states.begin(),
         state_server.cam_states.find(cam_id));
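This last hunk times the feature-Jacobian loop with the same stopwatch pattern the commit message is about: take ros::Time::now() before the block, convert the difference to seconds afterwards, and print it to see which stage causes the slowdown. A small isolated sketch of that pattern (function name and label are illustrative, not from the commit):

    #include <ros/ros.h>
    #include <cstdio>

    // Sketch only: the stopwatch pattern used throughout the commit to narrow down
    // which processing stage is slow.
    void timedSection()
    {
      ros::Time start_time = ros::Time::now();
      // ... work to be measured, e.g. the PhotometricFeatureJacobian loop ...
      double processing_time = (ros::Time::now() - start_time).toSec();
      printf("timed section: %f s\n", processing_time);
    }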