diff --git a/CMakeLists.txt b/CMakeLists.txt
index d81aed6..1a77616 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required(VERSION 2.8.12)
-project(msckf_vio)
+project(msckf)
 
 add_compile_options(-std=c++11)
 
diff --git a/package.xml b/package.xml
index 66d6b26..df884ce 100644
--- a/package.xml
+++ b/package.xml
@@ -3,13 +3,12 @@
 
   msckf_vio
   0.0.1
-  Multi-State Constraint Kalman Filter for Vision-aided Inertial Navigation
+  Multi-State Constraint Kalman Filter - Photometric expansion
 
-  Ke Sun
+  Raphael Maenle
   Penn Software License
 
-  Ke Sun
-  Kartik Mohta
+  Raphael Maenle
 
   catkin
 
diff --git a/pseudocode/pseudocode_image_processing b/pseudocode/pseudocode_image_processing
new file mode 100644
index 0000000..d1472f0
--- /dev/null
+++ b/pseudocode/pseudocode_image_processing
@@ -0,0 +1,97 @@
+
+
+stereo callback()
+
+	create image pyramids
+		_Constructs the image pyramid which can be passed to calcOpticalFlowPyrLK._
+
+	if first Frame:
+		*initialize first Frame ()
+
+	else:
+		*track Features ()
+
+		*addnewFeatures ()
+
+		*pruneGridFeatures()
+			_removes worst features from any overflowing grid_
+
+	publish features (u1, v1, u2, v2)
+		_undistorts them beforehand_
+
+addnewFeatures()
+	*mask existing features
+	*detect new fast features
+	*collect in a grid, keep only best n per grid
+	*stereomatch()
+	*save inliers into a new feature with u,v on cam0 and cam1
+
+
+track Features()
+	*integrateIMUData ()
+		_uses existing IMU data between two frames to calc. rotation between the two frames_
+
+	*predictFeatureTracking()
+		_compensates the rotation between consecutive frames - rotates previous camera frame features to current camera rotation_
+
+	*calcOpticalFlowPyrLK()
+		_measures the change between the features in the previous frame and in the current frame (using the predicted features)_
+
+	*remove points outside of image region
+		_how does this even happen?_
+
+	*stereo match()
+		_find tracked features from optical flow in the camera 1 image_
+		_remove all features that could not be matched_
+
+	*twoPointRansac(cam0)
+	*twoPointRansac(cam1)
+		_remove any features outside best found ransac model_
+
+
+
+
+twoPointRansac()
+	*mark all points as inliers
+	*compensate rotation between frames
+	*normalize points
+	*calculate difference between previous and current points
+	*mark large distances (over 50 pixels currently)
+	*calculate mean points distance
+	*return if inliers (non marked) < 3
+	*return if motion smaller than norm pixel unit
+	*ransac
+		*optimize with found inlier after random sample
+
+	*set inlier markers
+
+initialize first Frame()
+
+	features = FastFeatureDetector detect ()
+
+	*stereo match ()
+
+	group features into grid
+		- according to position in the image
+		- sorting them by response
+		- save the top responses
+
+
+
+stereo match ()
+
+	*undistort cam0 Points
+	*project cam0 Points to cam1 to initialize points in cam1
+
+	*calculate lk optical flow
+		_used because camera calibrations not perfect enough_
+		_also, calculation more efficient, because LK calculated anyway_
+
+	*compute relative trans/rot between cam0 and cam1*
+
+	*remove outliers based on essential matrix
+		_essential matrix relates points in stereo image (pinhole model)_
+		for every point:
+			- calculate epipolar line of point in cam0
+			- calculate error of cam1 to epipolar line 
+			- remove if too big
diff --git a/pseudocode/pseudocode_msckf b/pseudocode/pseudocode_msckf
new file mode 100644
index 0000000..9dc2ecd
--- /dev/null
+++ b/pseudocode/pseudocode_msckf
@@ -0,0 +1,82 @@
+featureCallback
+	propagate IMU state()
+	state Augmentation()
+	add Feature Observations()
+
+	#the following possibly trigger ekf update step:
+	remove Lost Features ()
+	prune Camera State Buffer ()
+
+
+remove Lost Features()
+	every feature that does not have a current observation:
+		*just delete if not enough features
+
+		check Motion of Feature ()
+			_calculation here seems questionable - it uses the pixel position as a direction vector for the feature?_
+		*initialize Position ()
+
+		calculate feature Jacobian and Residual()
+			*for every observation in this feature
+				- calculate u and v in camera frames, based on estimated feature position
+				- input results into Jacobian d(measurement)/d(camera 0/1)
+				- input results into Jacobian d(camera 0/1)/d(state) and Jacobian d(camera 0/1)/d(feature position)
+				- project both Jacobians to nullspace of feature position Jacobian
+				- calculate residual: measurement - u and v of camera frames
+				- project residual onto nullspace of feature position Jacobian
+
+			- stack residual and Jacobians
+
+		gating Test()
+		
+		*measurementUpdate()
+			_use calculated residuals and jakobians to calculate change in error_
+measurementUpdate():
+	- QR reduce the stacked Measurement Jacobians
+	- calculate Kalman Gain based on Measurement Jacobian, Error-State Covariance and Noise
+		_the derivation here is non-trivial_
+	- calculate estimated error after observation: delta_x = KalmanGain * residual 
+	- add estimated error to state (imu states and cam states)
+
+initialize Position ():
+	* create initial guess for global feature position ()
+		_uses first feature measurement on left camera and last feature measurement of right camera_
+		- transform first measurement to plane of last measurement
+		- calculate least square point between rays
+	* get position approximation using measured feature positions
+		_using Levenberg-Marquardt iterative search_
+
+
+
+add Feature Observations()
+	* if feature not in map, add feature to map
+		_and add u0, v0, u1, v1 as first observation_
+	* if feature in map, add new observation u0,v0,u1,v1
+
+
+
+state Augmentation()
+	* Add estimated cam position to state
+	* Update P with Jacobian of cam Position
+
+
+propagate IMU state ()
+	_uses IMU process model for every saved IMU state_
+	
+	for every buffered imu state:
+		
+		*remove bias
+		
+		*Compute F and G matrix (continuous transition and noise cov.)
+			_using current orientation, gyro and acc. reading_
+		* approximate phi: phi = 1 + Fdt + ...
+
+		* calculate new state propagating through runge kutta
+		* modify transition matrix to have a proper null space?
+
+		* calculate Q = Phi*G*Q_noise*GT*PhiT
+		* calculate P = Phi*P*PhiT + Q
+
+
+	stateAugmentation ()
+		_save current IMU state as camera position_