added anchor information generation

This commit is contained in:
Raphael Maenle 2019-04-12 11:02:58 +02:00
parent a6af82a269
commit a85a4745f2
4 changed files with 61 additions and 102 deletions

View File

@@ -62,6 +62,10 @@ struct CAMState {
typedef std::map<StateIDType, CAMState, std::less<int>,
Eigen::aligned_allocator<
std::pair<const StateIDType, CAMState> > > CamStateServer;
typedef std::map<StateIDType, cv::Mat, std::less<StateIDType>,
Eigen::aligned_allocator<
std::pair<StateIDType, cv::Mat> > > movingWindow;
} // namespace msckf_vio
#endif // MSCKF_VIO_CAM_STATE_H
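The movingWindow typedef added above maps a camera state ID to the image recorded at that state, so an anchor patch can later be extracted from the exact frame in which a feature was first observed. A minimal usage sketch (the StateIDType stand-in and the frame contents are illustrative assumptions; only the typedef mirrors this commit):

#include <map>
#include <opencv2/core.hpp>
#include <Eigen/Dense>

typedef long long int StateIDType;  // assumed stand-in for msckf_vio's state ID type
typedef std::map<StateIDType, cv::Mat, std::less<StateIDType>,
    Eigen::aligned_allocator<
    std::pair<StateIDType, cv::Mat> > > movingWindow;

int main() {
  movingWindow cam0_moving_window;
  StateIDType state_id = 42;
  // Each incoming cam0 frame is stored under the camera state it belongs to.
  cv::Mat frame = cv::Mat::zeros(480, 640, CV_8UC1);
  cam0_moving_window[state_id] = frame.clone();
  // A feature's anchor frame is later looked up by that same state ID.
  auto it = cam0_moving_window.find(state_id);
  return (it != cam0_moving_window.end()) ? 0 : 1;
}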

View File

@@ -114,6 +114,19 @@ struct Feature {
inline bool checkMotion(
const CamStateServer& cam_states) const;
/*
* @brief InitializeAnchor generates the NxN patch of irradiance
* values around the feature in the anchor image
* @param cam0_moving_window: A map of all recorded cam0 images
* currently present in the camera state vector
* @return True if the anchor patch could be generated; the
* irradiance values are stored in anchorPatch
*/
inline bool initializeAnchor(
const movingWindow& cam0_moving_window);
/*
* @brief InitializePosition Initialize the feature position
* based on all currently available measurements.
@@ -145,7 +158,7 @@ struct Feature {
std::pair<const StateIDType, Eigen::Vector4d> > > observations;
// NxN Patch of Anchor Image
std::vector<double> patch;
std::vector<double> anchorPatch;
// 3d position of the feature in the world frame.
Eigen::Vector3d position;
@@ -292,6 +305,29 @@ bool Feature::checkMotion(
else return false;
}
bool Feature::initializeAnchor(
const movingWindow& cam0_moving_window)
{
// Patch size: N x N pixels around the anchor observation
int N = 5;
int n = (int)(N-1)/2;
// The anchor is the first (oldest) observation of this feature
auto anchor = observations.begin();
if(cam0_moving_window.find(anchor->first) == cam0_moving_window.end())
return false;
cv::Mat anchorImage = cam0_moving_window.find(anchor->first)->second;
// Map the normalized feature coordinates into pixel coordinates
auto u = anchor->second(0)*anchorImage.rows/2 + anchorImage.rows/2;
auto v = anchor->second(1)*anchorImage.cols/2 + anchorImage.cols/2;
// Collect the irradiance values of the N x N patch around (u, v)
for(int u_run = (int)u - n; u_run <= (int)u + n; u_run++)
for(int v_run = (int)v - n; v_run <= (int)v + n; v_run++)
anchorPatch.push_back(anchorImage.at<uint8_t>(u_run,v_run));
return true;
}
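A note on the patch extraction above: the loop reads anchorImage.at<uint8_t>(u_run, v_run) without checking that the NxN window stays inside the image. A standalone sketch of the same idea with an explicit bounds check (the helper name and the check are illustrative additions, not part of this commit):

#include <opencv2/core.hpp>
#include <vector>

// Illustrative helper: collect the N x N intensity patch centred on (u, v),
// where u indexes rows and v indexes columns as in the code above.
// Returns false if the window would leave the image.
bool extractPatch(const cv::Mat& image, double u, double v, int N,
                  std::vector<double>& patch)
{
  int n = (N - 1) / 2;
  int u0 = static_cast<int>(u);
  int v0 = static_cast<int>(v);
  if (u0 - n < 0 || v0 - n < 0 ||
      u0 + n >= image.rows || v0 + n >= image.cols)
    return false;
  for (int r = u0 - n; r <= u0 + n; r++)
    for (int c = v0 - n; c <= v0 + n; c++)
      patch.push_back(image.at<uint8_t>(r, c));
  return true;
}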
bool Feature::initializePosition(
const CamStateServer& cam_states) {
// Organize camera poses and feature observations properly.

View File

@@ -23,6 +23,7 @@
#include <sensor_msgs/Imu.h>
#include <sensor_msgs/Image.h>
#include <nav_msgs/Odometry.h>
#include <std_msgs/Float64.h>
#include <tf/transform_broadcaster.h>
#include <std_srvs/Trigger.h>
@@ -106,13 +107,14 @@ class MsckfVio {
void imuCallback(const sensor_msgs::ImuConstPtr& msg);
/*
* @brief featureCallback
* Callback function for feature measurements.
* @param msg Stereo feature measurements.
* @brief imageCallback
* Callback function for feature measurements
* Triggers measurement update
* @param msg
* Camera 0 Image
* Camera 1 Image
* Stereo feature measurements.
*/
void featureCallback(const CameraMeasurementConstPtr& msg);
void imageCallback (
const sensor_msgs::ImageConstPtr& cam0_img,
const sensor_msgs::ImageConstPtr& cam1_img,
@@ -200,13 +202,8 @@ class MsckfVio {
std::vector<sensor_msgs::Imu> imu_msg_buffer;
// Moving Window buffer
std::map<StateIDType, cv::Mat, std::less<StateIDType>,
Eigen::aligned_allocator<
std::pair<StateIDType, cv::Mat> > > cam0_moving_window;
std::map<StateIDType, cv::Mat, std::less<StateIDType>,
Eigen::aligned_allocator<
std::pair<StateIDType, cv::Mat> > > cam1_moving_window;
movingWindow cam0_moving_window;
movingWindow cam1_moving_window;
// Indicate if the gravity vector is set.
bool is_gravity_set;

View File

@@ -187,8 +187,6 @@ bool MsckfVio::createRosIO() {
imu_sub = nh.subscribe("imu", 100,
&MsckfVio::imuCallback, this);
//feature_sub = nh.subscribe("features", 40,
// &MsckfVio::featureCallback, this);
cam0_img_sub.subscribe(nh, "cam0_image", 10);
cam1_img_sub.subscribe(nh, "cam1_image", 10);
@@ -236,7 +234,8 @@ bool MsckfVio::initialize() {
}
void MsckfVio::imuCallback(
const sensor_msgs::ImuConstPtr& msg) {
const sensor_msgs::ImuConstPtr& msg)
{
// IMU msgs are pushed back into a buffer instead of
// being processed immediately. The IMU msgs are processed
@@ -254,7 +253,7 @@ void MsckfVio::imuCallback(
return;
}
void MsckfVio::imageCallback (
void MsckfVio::imageCallback(
const sensor_msgs::ImageConstPtr& cam0_img,
const sensor_msgs::ImageConstPtr& cam1_img,
const CameraMeasurementConstPtr& feature_msg)
@@ -358,9 +357,8 @@ void MsckfVio::manageMovingWindow(
cam0_moving_window[state_server.imu_state.id] = cam0_image_ptr->image.clone();
cam1_moving_window[state_server.imu_state.id] = cam1_image_ptr->image.clone();
std::cout << cam0_moving_window.begin()->second << std::endl;
//TODO handle massive overflow correctly (should be pruned, before this ever triggers)
//TODO handle any massive overflow correctly (should be pruned, before this ever triggers)
while(cam0_moving_window.size() > 100)
{
cam1_moving_window.erase(cam1_moving_window.begin());
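The hunk above is truncated after the cam1 erase; a hedged sketch of the full pruning idea flagged by the TODO, assuming the oldest entry of both windows is dropped together (std::map keeps state IDs ordered, so begin() is the oldest frame):

// Assumed completion, not verbatim from this commit:
while (cam0_moving_window.size() > 100)
{
  cam1_moving_window.erase(cam1_moving_window.begin());
  cam0_moving_window.erase(cam0_moving_window.begin());
}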
@@ -480,88 +478,6 @@ bool MsckfVio::resetCallback(
return true;
}
void MsckfVio::featureCallback(
const CameraMeasurementConstPtr& msg) {
// Return if the gravity vector has not been set.
if (!is_gravity_set) return;
// Start the system if the first image is received.
// The frame where the first image is received will be
// the origin.
if (is_first_img) {
is_first_img = false;
state_server.imu_state.time = msg->header.stamp.toSec();
}
static double max_processing_time = 0.0;
static int critical_time_cntr = 0;
double processing_start_time = ros::Time::now().toSec();
// Propagate the IMU state using the IMU msgs
// that are received before the image msg.
ros::Time start_time = ros::Time::now();
batchImuProcessing(msg->header.stamp.toSec());
double imu_processing_time = (
ros::Time::now()-start_time).toSec();
// Augment the state vector.
start_time = ros::Time::now();
stateAugmentation(msg->header.stamp.toSec());
double state_augmentation_time = (
ros::Time::now()-start_time).toSec();
// Add new observations for existing features or new
// features in the map server.
start_time = ros::Time::now();
addFeatureObservations(msg);
double add_observations_time = (
ros::Time::now()-start_time).toSec();
// Perform measurement update if necessary.
start_time = ros::Time::now();
removeLostFeatures();
double remove_lost_features_time = (
ros::Time::now()-start_time).toSec();
start_time = ros::Time::now();
pruneCamStateBuffer();
double prune_cam_states_time = (
ros::Time::now()-start_time).toSec();
// Publish the odometry.
start_time = ros::Time::now();
publish(msg->header.stamp);
double publish_time = (
ros::Time::now()-start_time).toSec();
// Reset the system if necessary.
onlineReset();
double processing_end_time = ros::Time::now().toSec();
double processing_time =
processing_end_time - processing_start_time;
if (processing_time > 1.0/frame_rate) {
++critical_time_cntr;
ROS_INFO("\033[1;31mTotal processing time %f/%d...\033[0m",
processing_time, critical_time_cntr);
//printf("IMU processing time: %f/%f\n",
// imu_processing_time, imu_processing_time/processing_time);
//printf("State augmentation time: %f/%f\n",
// state_augmentation_time, state_augmentation_time/processing_time);
//printf("Add observations time: %f/%f\n",
// add_observations_time, add_observations_time/processing_time);
printf("Remove lost features time: %f/%f\n",
remove_lost_features_time, remove_lost_features_time/processing_time);
printf("Remove camera states time: %f/%f\n",
prune_cam_states_time, prune_cam_states_time/processing_time);
//printf("Publish time: %f/%f\n",
// publish_time, publish_time/processing_time);
}
return;
}
void MsckfVio::mocapOdomCallback(
const nav_msgs::OdometryConstPtr& msg) {
static bool first_mocap_odom_msg = true;
@@ -1198,6 +1114,12 @@ void MsckfVio::removeLostFeatures() {
}
}
if(!feature.initializeAnchor(cam0_moving_window))
{
invalid_feature_ids.push_back(feature.id);
continue;
}
jacobian_row_size += 4*feature.observations.size() - 3;
processed_feature_ids.push_back(feature.id);
}
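For context, features pushed into invalid_feature_ids are erased from the map after this loop in upstream msckf_vio's removeLostFeatures(); a hedged sketch of that pattern (based on the surrounding upstream code, not part of this diff):

// Remove the features that could not be initialized or anchored.
for (const auto& feature_id : invalid_feature_ids)
  map_server.erase(feature_id);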