diff --git a/applications/registration/src/aruco.cpp b/applications/registration/src/aruco.cpp
index 6e8c1bbc9c2a463cd1f241c0e69f4e804a38cefb..28cddabea859cdef754c80d9303099eea03dbc45 100644
--- a/applications/registration/src/aruco.cpp
+++ b/applications/registration/src/aruco.cpp
@@ -15,6 +15,8 @@ using std::vector;
 using ftl::Configurable;
 using ftl::net::Universe;
 using ftl::rgbd::Source;
+using std::pair;
+using std::make_pair;
 
 void ftl::registration::aruco(ftl::Configurable *root) {
 	using namespace cv;
@@ -29,6 +31,7 @@ void ftl::registration::aruco(ftl::Configurable *root) {
 	int curtarget = 0;
 	bool freeze = false;
 	bool depthtoggle = false;
+	double lastScore = 1000.0;
 
 	if (sources.size() == 0) return;
 
@@ -44,6 +47,11 @@ void ftl::registration::aruco(ftl::Configurable *root) {
 
 	Correspondances *current = corrs[sources[curtarget]->getURI()];
 
+	map<int, vector<Point2f>> targ_corners;
+	map<int, vector<Point2f>> src_corners;
+	map<int, pair<Vec3d,Vec3d>> targ_trans;
+	map<int, pair<Vec3d,Vec3d>> src_trans;
+
 	while (ftl::running) {
 		if (!freeze) {
 			// Grab the latest frames from sources
@@ -77,23 +85,54 @@ void ftl::registration::aruco(ftl::Configurable *root) {
 			tsdepth.convertTo(sdepth, CV_8U, 255.0f / 10.0f);
 			applyColorMap(sdepth, sdepth, cv::COLORMAP_JET);
 
+			vector<int> mids_targ, mids_src;
+			vector<vector<Point2f>> mpoints_targ, mpoints_src;
+			Ptr<aruco::DetectorParameters> parameters(new aruco::DetectorParameters);
+			auto dictionary = aruco::getPredefinedDictionary(aruco::DICT_4X4_50);
+
+			aruco::detectMarkers(trgb, dictionary, mpoints_targ, mids_targ, parameters);
+			aruco::drawDetectedMarkers(trgb, mpoints_targ, mids_targ);
+
+			// Cache the marker positions
+			for (size_t i=0; i<mids_targ.size(); i++) {
+				targ_corners[mids_targ[i]] = mpoints_targ[i];
+			}
+
+			vector<Vec3d> rvecs, tvecs;
+			cv::Mat distCoef(cv::Size(14,1), CV_64F, cv::Scalar(0.0));
+			cv::Mat targ_cam = current->target()->cameraMatrix();
+			aruco::estimatePoseSingleMarkers(mpoints_targ, 0.1, targ_cam, distCoef, rvecs, tvecs);
+			for (size_t i=0; i<rvecs.size(); i++) {
+				targ_trans[mids_targ[i]] = make_pair(tvecs[i], rvecs[i]);
+				aruco::drawAxis(trgb, targ_cam, distCoef, rvecs[i], tvecs[i], 0.1);
+			}
+
+			aruco::detectMarkers(srgb, dictionary, mpoints_src, mids_src, parameters);
+			aruco::drawDetectedMarkers(srgb, mpoints_src, mids_src);
+
+			rvecs.clear();
+			tvecs.clear();
+			cv::Mat src_cam = current->source()->cameraMatrix();
+			aruco::estimatePoseSingleMarkers(mpoints_src, 0.1, src_cam, distCoef, rvecs, tvecs);
+			for (size_t i=0; i<rvecs.size(); i++) {
+				src_trans[mids_src[i]] = make_pair(tvecs[i], rvecs[i]);
+				aruco::drawAxis(srgb, src_cam, distCoef, rvecs[i], tvecs[i], 0.1);
+			}
+
+			// Cache the marker positions
+			for (size_t i=0; i<mids_src.size(); i++) {
+				src_corners[mids_src[i]] = mpoints_src[i];
+			}
+
 			current->drawTarget(trgb);
 			current->drawTarget(tdepth);
 			current->drawSource(srgb);
 			current->drawSource(sdepth);
 
-			vector<int> mids;
-			vector<vector<Point2f>> mpoints;
-			Ptr<aruco::DetectorParameters> parameters(new aruco::DetectorParameters);
-			auto dictionary = aruco::getPredefinedDictionary(aruco::DICT_4X4_50);
-			aruco::detectMarkers(trgb, dictionary, mpoints, mids, parameters);
-
-			aruco::drawDetectedMarkers(trgb, mpoints, mids);
-
 			putText(srgb, string("Source: ") + current->source()->getURI(), Point(10,20), FONT_HERSHEY_PLAIN, 1.0, Scalar(0,0,255), 1);
 			putText(trgb, string("Target: ") + current->target()->getURI(), Point(10,20), FONT_HERSHEY_PLAIN, 1.0, Scalar(0,0,255), 1);
-			//putText(srgb, string("Score: ") + std::to_string(lastScore), Point(10,40), FONT_HERSHEY_PLAIN, 1.0, Scalar(0,0,255), 1);
-			//putText(trgb, string("Score: ") + std::to_string(lastScore), Point(10,40), FONT_HERSHEY_PLAIN, 1.0, Scalar(0,0,255), 1);
+			putText(srgb, string("Score: ") + std::to_string(lastScore), Point(10,40), FONT_HERSHEY_PLAIN, 1.0, Scalar(0,0,255), 1);
+			putText(trgb, string("Score: ") + std::to_string(lastScore), Point(10,40), FONT_HERSHEY_PLAIN, 1.0, Scalar(0,0,255), 1);
 			if (freeze) putText(srgb, string("Paused"), Point(10,50), FONT_HERSHEY_PLAIN, 1.0, Scalar(0,0,255), 1);
 			if (freeze) putText(trgb, string("Paused"), Point(10,50), FONT_HERSHEY_PLAIN, 1.0, Scalar(0,0,255), 1);
 
@@ -110,6 +149,46 @@ void ftl::registration::aruco(ftl::Configurable *root) {
 			//lastScore = 1000.0;
 		} else if (key == 'd') {
 			depthtoggle = !depthtoggle;
+		} else if (key == 'i') {
+			current->icp();
+		} else if (key == 'e') {
+			vector<Vec3d> targfeat;
+			vector<Vec3d> srcfeat;
+
+			for (auto i : targ_trans) {
+				auto si = src_trans.find(i.first);
+				if (si != src_trans.end()) {
+					//Vec3d dt = std::get<0>(si->second) - std::get<0>(i.second);
+					targfeat.push_back(std::get<0>(i.second));
+					srcfeat.push_back(std::get<0>(si->second));
+				}
+			}
+
+			Eigen::Matrix4d t;
+			lastScore = current->estimateTransform(t, srcfeat, targfeat);
+			current->setTransform(t);
+		} else if (key == 'f') {
+			Mat f1,f2;
+			current->capture(f1,f2);
+
+			LOG(INFO) << "Captured frames";
+
+			// Convert matching marker corners into correspondence points
+			for (auto i : targ_corners) {
+				auto si = src_corners.find(i.first);
+				if (si != src_corners.end()) {
+					for (size_t j=0; j<4; ++j) {
+						current->add(i.second[j].x, i.second[j].y, si->second[j].x, si->second[j].y);
+					}
+				}
+			}
+
+			LOG(INFO) << "Estimating transform...";
+
+			Eigen::Matrix4d t;
+			lastScore = current->estimateTransform(t);
+			current->setTransform(t);
+			lastScore = current->icp();
 		} else if (key == 's') {
 			// Save
 			map<string, Eigen::Matrix4d> transforms;
diff --git a/applications/registration/src/correspondances.cpp b/applications/registration/src/correspondances.cpp
index 1c3cbda3e9debb86c6d32b14d9829ac59290f2dd..f40eaca97242d51ada14428b2293e562a4df10c4 100644
--- a/applications/registration/src/correspondances.cpp
+++ b/applications/registration/src/correspondances.cpp
@@ -198,8 +198,8 @@ bool Correspondances::capture(cv::Mat &rgb1, cv::Mat &rgb2) {
 
 		std::this_thread::sleep_for(std::chrono::milliseconds(50));
 	}
-	averageDepth(buffer[0], d1, 0.01f);
-	averageDepth(buffer[1], d2, 0.01f);
+	averageDepth(buffer[0], d1, 0.02f);
+	averageDepth(buffer[1], d2, 0.02f);
 	Mat d1_v, d2_v;
 	d1.convertTo(d1_v, CV_8U, 255.0f / 10.0f);
 	d2.convertTo(d2_v, CV_8U, 255.0f / 10.0f);
@@ -230,7 +230,7 @@ bool Correspondances::capture(cv::Mat &rgb1, cv::Mat &rgb2) {
 			float d1_value = d1ptr[x];
 			float d2_value = d2ptr[x];
 
-			if (d1_value < 39.0f) {
+			if (d1_value > 0.1f && d1_value < 39.0f) {
 				// Make PCL points with specific depth value
 				pcl::PointXYZ p1;
 				Eigen::Vector4d p1e = src_->point(x,y,d1_value);
@@ -242,7 +242,7 @@ bool Correspondances::capture(cv::Mat &rgb1, cv::Mat &rgb2) {
 				six++;
 			}
 
-			if (d2_value < 39.0f) {
+			if (d2_value > 0.1f && d2_value < 39.0f) {
 				// Make PCL points with specific depth value
 				pcl::PointXYZ p2;
 				Eigen::Vector4d p2e = targ_->point(x,y,d2_value);
@@ -290,10 +290,55 @@ double Correspondances::estimateTransform(Eigen::Matrix4d &T) {
 	pcl::registration::TransformationValidationEuclidean<PointXYZ, PointXYZ, double> validate;
 	pcl::registration::TransformationEstimationSVD<PointXYZ,PointXYZ, double> svd;
 
-	validate.setMaxRange(0.1);
+	//validate.setMaxRange(0.1);
+
+	pcl::PointCloud<pcl::PointXYZ>::Ptr targ_cloud(new pcl::PointCloud<pcl::PointXYZ>);
+	pcl::PointCloud<pcl::PointXYZ>::Ptr src_cloud(new pcl::PointCloud<pcl::PointXYZ>);
+	pcl::PointCloud<pcl::PointXYZ>::Ptr tsrc_cloud(new pcl::PointCloud<pcl::PointXYZ>);
+
+	vector<int> idx;
+
+	for (size_t i=0; i<src_feat_.size(); i++) {
+		idx.push_back(i);
+		src_cloud->push_back(src_cloud_->at(src_feat_[i]));
+		targ_cloud->push_back(targ_cloud_->at(targ_feat_[i]));
+	}
+
+	pcl::transformPointCloud(*src_cloud, *tsrc_cloud, transform_);
+
+	svd.estimateRigidTransformation(*src_cloud, idx, *targ_cloud, idx, T);
+	return validate.validateTransformation(src_cloud, targ_cloud, T);
+}
+
+double Correspondances::estimateTransform(Eigen::Matrix4d &T, const std::vector<cv::Vec3d> &src_feat, const std::vector<cv::Vec3d> &targ_feat) {
+	pcl::registration::TransformationValidationEuclidean<PointXYZ, PointXYZ, double> validate;
+	pcl::registration::TransformationEstimationSVD<PointXYZ,PointXYZ, double> svd;
+
+	pcl::PointCloud<pcl::PointXYZ>::Ptr targ_cloud(new pcl::PointCloud<pcl::PointXYZ>);
+	pcl::PointCloud<pcl::PointXYZ>::Ptr src_cloud(new pcl::PointCloud<pcl::PointXYZ>);
+	pcl::PointCloud<pcl::PointXYZ>::Ptr tsrc_cloud(new pcl::PointCloud<pcl::PointXYZ>);
+
+	vector<int> idx;
+
+	for (size_t i=0; i<src_feat.size(); i++) {
+		pcl::PointXYZ ps,pt;
+
+		ps.x = src_feat[i][0];
+		ps.y = src_feat[i][1];
+		ps.z = src_feat[i][2];
+		pt.x = targ_feat[i][0];
+		pt.y = targ_feat[i][1];
+		pt.z = targ_feat[i][2];
+
+		idx.push_back(i);
+		src_cloud->push_back(ps);
+		targ_cloud->push_back(pt);
+	}
+
+	pcl::transformPointCloud(*src_cloud, *tsrc_cloud, transform_);
 
-	svd.estimateRigidTransformation(*src_cloud_, src_feat_, *targ_cloud_, targ_feat_, T);
-	return validate.validateTransformation(src_cloud_, targ_cloud_, T);
+	svd.estimateRigidTransformation(*src_cloud, idx, *targ_cloud, idx, T);
+	return validate.validateTransformation(src_cloud, targ_cloud, T);
 }
 
 double Correspondances::estimateTransform(Eigen::Matrix4d &T, const vector<int> &src_feat, const vector<int> &targ_feat) {
diff --git a/applications/registration/src/correspondances.hpp b/applications/registration/src/correspondances.hpp
index d1ed095cef4f42152b2fc9ec0b4162107217e1a5..53f50a155ba4921c4b14f65571ee775473d738fb 100644
--- a/applications/registration/src/correspondances.hpp
+++ b/applications/registration/src/correspondances.hpp
@@ -65,6 +65,7 @@ class Correspondances {
 	 */
 	double estimateTransform(Eigen::Matrix4d &);
 	double estimateTransform(Eigen::Matrix4d &T, const std::vector<int> &src_feat, const std::vector<int> &targ_feat);
+	double estimateTransform(Eigen::Matrix4d &T, const std::vector<cv::Vec3d> &src_feat, const std::vector<cv::Vec3d> &targ_feat);
 
 	double findBestSubset(Eigen::Matrix4d &tr, int K, int N);
 
diff --git a/components/rgbd-sources/include/ftl/rgbd/source.hpp b/components/rgbd-sources/include/ftl/rgbd/source.hpp
index d5b57a72c34f145c83dfa6acb09d00aef6b4e826..d90385f50aa0a61c9c79c80a307cbf224ad75ca1 100644
--- a/components/rgbd-sources/include/ftl/rgbd/source.hpp
+++ b/components/rgbd-sources/include/ftl/rgbd/source.hpp
@@ -113,6 +113,8 @@ class Source : public ftl::Configurable {
 		else return params_;
 	}
 
+	cv::Mat cameraMatrix() const;
+
 	/**
 	 * Change the camera extrinsics by providing a new pose matrix. For virtual
 	 * cameras this will move the camera, for physical cameras it is set by the
diff --git a/components/rgbd-sources/src/source.cpp b/components/rgbd-sources/src/source.cpp
index 06caebb232f64fd2b244c914cb8e01a518a2f6bc..7beea75193d4ee0110cfb6a03a5373505d357a3a 100644
--- a/components/rgbd-sources/src/source.cpp
+++ b/components/rgbd-sources/src/source.cpp
@@ -53,6 +53,11 @@ Source::~Source() {
 
 }
 
+cv::Mat Source::cameraMatrix() const {
+	cv::Mat m = (cv::Mat_<float>(3,3) << parameters().fx, 0.0, -parameters().cx, 0.0, parameters().fy, -parameters().cy, 0.0, 0.0, 1.0);
+	return m;
+}
+
 void Source::customImplementation(ftl::rgbd::detail::Source *impl) {
 	if (impl_) delete impl_;
 	impl_ = impl;