Added 3D visualization and stepping through the bag file - minor edits in the Jacobian
@@ -16,12 +16,17 @@
#include <Eigen/Geometry>
#include <Eigen/StdVector>

#include <visualization_msgs/Marker.h>
#include <visualization_msgs/MarkerArray.h>
#include <geometry_msgs/Point.h>

#include "image_handler.h"

#include "math_utils.hpp"
#include "imu_state.h"
#include "cam_state.h"


namespace msckf_vio {

/*
@@ -168,6 +173,10 @@ struct Feature {
std::vector<double>& anchorPatch_estimate,
IlluminationParameter& estimatedIllumination) const;

bool MarkerGeneration(
ros::Publisher& marker_pub,
const CamStateServer& cam_states) const;

bool VisualizePatch(
const CAMState& cam_state,
const StateIDType& cam_state_id,
@@ -207,7 +216,7 @@ inline Eigen::Vector3d projectPixelToPosition(cv::Point2f in_p,
std::vector<cv::Point2f> anchorPatch_ideal;
std::vector<cv::Point2f> anchorPatch_real;

// Position of NxN Patch in 3D space
// Position of NxN Patch in 3D space in anchor camera frame
std::vector<Eigen::Vector3d> anchorPatch_3d;

// Anchor Isometry
@@ -406,6 +415,118 @@ bool Feature::estimate_FrameIrradiance(
}
}

// generates markers for every camera position/observation
// and estimated feature/path position
bool Feature::MarkerGeneration(
ros::Publisher& marker_pub,
const CamStateServer& cam_states) const
{
visualization_msgs::MarkerArray ma;

// add all camera states used for estimation
int count = 0;

for(auto observation : observations)
{
visualization_msgs::Marker marker;
marker.header.frame_id = "world";
marker.header.stamp = ros::Time::now();

marker.ns = "cameras";
marker.id = count++;
marker.type = visualization_msgs::Marker::ARROW;
marker.action = visualization_msgs::Marker::ADD;

marker.pose.position.x = cam_states.find(observation.first)->second.position(0);
marker.pose.position.y = cam_states.find(observation.first)->second.position(1);
marker.pose.position.z = cam_states.find(observation.first)->second.position(2);

// rotate from x to z axis
Eigen::Vector4d q = quaternionMultiplication(Eigen::Vector4d(0, -0.707, 0, 0.707), cam_states.find(observation.first)->second.orientation);

marker.pose.orientation.x = q(0);
marker.pose.orientation.y = q(1);
marker.pose.orientation.z = q(2);
marker.pose.orientation.w = q(3);

marker.scale.x = 0.15;
marker.scale.y = 0.05;
marker.scale.z = 0.05;

if(count == 1)
{
marker.color.r = 0.0f;
marker.color.g = 0.0f;
marker.color.b = 1.0f;
}
else
{
marker.color.r = 0.0f;
marker.color.g = 1.0f;
marker.color.b = 0.0f;
}
marker.color.a = 1.0;

marker.lifetime = ros::Duration(0);

ma.markers.push_back(marker);
}

// 'delete' any existing cameras (make invisible)
for(int i = count; i < 20; i++)
{
visualization_msgs::Marker marker;

marker.header.frame_id = "world";
marker.header.stamp = ros::Time::now();

marker.ns = "cameras";
marker.id = i;
marker.type = visualization_msgs::Marker::ARROW;
marker.action = visualization_msgs::Marker::ADD;

marker.pose.orientation.w = 1;

marker.color.a = 0.0;

marker.lifetime = ros::Duration(1);

ma.markers.push_back(marker);
}

// generate feature patch point positions
visualization_msgs::Marker marker;

marker.header.frame_id = "world";
marker.header.stamp = ros::Time::now();

marker.ns = "patch";
marker.id = 0;
marker.type = visualization_msgs::Marker::POINTS;
marker.action = visualization_msgs::Marker::ADD;

marker.pose.orientation.w = 1;

marker.scale.x = 0.02;
marker.scale.y = 0.02;

marker.color.r = 1.0f;
marker.color.g = 0.0f;
marker.color.b = 0.0f;
marker.color.a = 1.0;

for(auto point : anchorPatch_3d)
{
geometry_msgs::Point p;
p.x = point(0);
p.y = point(1);
p.z = point(2);
marker.points.push_back(p);
}
ma.markers.push_back(marker);

marker_pub.publish(ma);
return true;
}
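
A minimal sketch of how MarkerGeneration could be driven from the node side, assuming the marker_pub member added to MsckfVio further down is advertised as a visualization_msgs::MarkerArray publisher; the topic name and call site below are illustrative, not taken from this commit:

// Hypothetical setup, e.g. in MsckfVio::createRosIO(); topic name is an assumption:
marker_pub = nh.advertise<visualization_msgs::MarkerArray>("/msckf_vio/feature_markers", 10);

// Hypothetical call while processing a feature, using the current camera states:
feature.MarkerGeneration(marker_pub, state_server.cam_states);
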

bool Feature::VisualizePatch(
const CAMState& cam_state,
@@ -489,12 +610,15 @@ bool Feature::VisualizePatch(
cv::Point(20+scale*(N+i), 15+scale*(N/2+j+1)),
cv::Scalar(255, 255 + photo_r[i*N+j]*255, 255 + photo_r[i*N+j]*255),
CV_FILLED);


cv::hconcat(cam0.featureVisu, irradianceFrame, cam0.featureVisu);

/*
// visualize position of used observations and resulting feature position
cv::Mat positionFrame(anchorImage.size(), CV_8UC3, cv::Scalar(255, 240, 255));
cv::resize(positionFrame, positionFrame, cv::Size(), rescale, rescale);


// draw world zero
cv::line(positionFrame,
cv::Point(20,20),
@@ -510,20 +634,20 @@ bool Feature::VisualizePatch(
// for every observation, get cam state
for(auto obs : observations)
{
cam_state.find(obs->first);
cv::line(positionFrame,
cv::Point(20,20),
cv::Point(30,20),
cv::Scalar(255,0,0),
CV_FILLED);
}

// draw x, y position and an arrow with direction - write z next to it

cv::resize(cam0.featureVisu, cam0.featureVisu, cv::Size(), rescale, rescale);

cv::hconcat(cam0.featureVisu, irradianceFrame, cam0.featureVisu);
cv::hconcat(cam0.featureVisu, positionFrame, cam0.featureVisu);

cv::hconcat(cam0.featureVisu, positionFrame, cam0.featureVisu);
*/
// write feature position
std::stringstream pos_s;
pos_s << "u: " << observations.begin()->second(0) << " v: " << observations.begin()->second(1);
@@ -534,11 +658,11 @@ bool Feature::VisualizePatch(
//save image

std::stringstream loc;
loc << "/home/raphael/dev/MSCKF_ws/img/feature_" << std::to_string(ros::Time::now().toSec()) << ".jpg";
cv::imwrite(loc.str(), cam0.featureVisu);
// loc << "/home/raphael/dev/MSCKF_ws/img/feature_" << std::to_string(ros::Time::now().toSec()) << ".jpg";
//cv::imwrite(loc.str(), cam0.featureVisu);

//cv::imshow("patch", cam0.featureVisu);
//cvWaitKey(1);
cv::imshow("patch", cam0.featureVisu);
cvWaitKey(0);
}

float Feature::PixelIrradiance(cv::Point2f pose, cv::Mat image) const
@@ -586,7 +710,7 @@ Eigen::Vector3d Feature::projectPixelToPosition(cv::Point2f in_p, const CameraCa
// save resulting NxN positions for this feature

Eigen::Vector3d PositionInCamera(in_p.x/anchor_rho, in_p.y/anchor_rho, 1/anchor_rho);
Eigen::Vector3d PositionInWorld= T_anchor_w.linear()*PositionInCamera + T_anchor_w.translation();
Eigen::Vector3d PositionInWorld = T_anchor_w.linear()*PositionInCamera + T_anchor_w.translation();
return PositionInWorld;
//printf("%f, %f, %f\n",PositionInWorld[0], PositionInWorld[1], PositionInWorld[2]);
}
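
For orientation, a small worked example of the inverse-depth back-projection above (numbers are illustrative only): an ideal pixel in_p = (0.2, -0.1) with inverse depth anchor_rho = 0.5 gives the anchor-camera-frame point (0.4, -0.2, 2.0), which T_anchor_w then maps into the world frame.

// Illustrative values, not from this commit:
cv::Point2f in_p(0.2f, -0.1f);                        // ideal (undistorted, normalized) pixel
double anchor_rho = 0.5;                              // inverse depth of the anchor feature
Eigen::Vector3d PositionInCamera(in_p.x/anchor_rho,   // 0.4
                                 in_p.y/anchor_rho,   // -0.2
                                 1/anchor_rho);       // 2.0 (depth along the optical axis)
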
@@ -653,7 +777,7 @@ bool Feature::initializeAnchor(const CameraCalibration& cam, int N)
for(auto point : anchorPatch_real)
anchorPatch.push_back(PixelIrradiance(point, anchorImage));

// project patch pixel to 3D space
// project patch pixel to 3D space in camera coordinate system
for(auto point : und_v)
{
anchorPatch_ideal.push_back(point);
@@ -43,6 +43,21 @@ inline void quaternionNormalize(Eigen::Vector4d& q) {
return;
}

/*
 * @brief Invert the rotation of the passed quaternion by conjugation
 *        and return the conjugate.
 */
inline Eigen::Vector4d quaternionConjugate(Eigen::Vector4d& q)
{
Eigen::Vector4d p;
p(0) = -q(0);
p(1) = -q(1);
p(2) = -q(2);
p(3) = q(3);

return p;
}
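
A usage sketch (not part of this commit): with the (x, y, z, w) layout used here, the conjugate is the inverse rotation, so a relative or error quaternion between two orientations can be built together with quaternionMultiplication below. The names and values are illustrative.

// Hypothetical example; q_est and q_true are illustrative names.
Eigen::Vector4d q_est(0.0, 0.0, 0.0, 1.0);    // identity, stored as (x, y, z, w)
Eigen::Vector4d q_true(0.0, 0.0, 0.1, 0.995);
Eigen::Vector4d dq = quaternionMultiplication(q_est, quaternionConjugate(q_true));
quaternionNormalize(dq);                      // keep the result unit length
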

/*
 * @brief Perform q1 * q2
 */
@@ -181,6 +181,9 @@ class MsckfVio {
const Eigen::Vector3d& gyro,
const Eigen::Vector3d& acc);

// groundtruth state augmentation
void truePhotometricStateAugmentation(const double& time);

// Measurement update
void stateAugmentation(const double& time);
void PhotometricStateAugmentation(const double& time);
@@ -230,7 +233,7 @@ class MsckfVio {
bool GROUNDTRUTH;

bool nan_flag;

bool play;
double last_time_bound;

// Patch size for Photometry
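
The commit title mentions stepping through the bag file, which is plausibly what the new play flag and last_time_bound member support; their actual use is not shown in this diff. A purely hypothetical sketch of such gating logic, with assumed semantics:

// Hypothetical helper (assumed semantics): process a measurement only while
// playback is running, or while it is still inside the last advanced time window.
inline bool withinPlaybackWindow(bool play, double msg_time, double last_time_bound) {
  return play || msg_time <= last_time_bound;
}
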
@@ -241,6 +244,13 @@ class MsckfVio {

// State vector
StateServer state_server;

// Ground truth state vector
StateServer true_state_server;

// Error state based on ground truth
StateServer err_state_server;

// Maximum number of camera states
int max_cam_state_size;

@@ -297,6 +307,7 @@ class MsckfVio {
ros::Subscriber imu_sub;
ros::Subscriber truth_sub;
ros::Publisher odom_pub;
ros::Publisher marker_pub;
ros::Publisher feature_pub;
tf::TransformBroadcaster tf_pub;
ros::ServiceServer reset_srv;
