From 858fbbf4ca6fced52aac91e528a832625510dbc6 Mon Sep 17 00:00:00 2001
From: Nicolas Pope <nicolas.pope@utu.fi>
Date: Sun, 27 Oct 2019 11:47:57 +0200
Subject: [PATCH] Add configuration to stream, H264 support, codec selection

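Adds an ftl/codecs/h264.hpp header with H.264 NAL unit utilities, allows the
NvPipe decoder to accept H264 packets, moves pose, calibration and
configuration handling into dedicated _process* helpers in the file and
network sources, replaces NetFrame's channel1/channel2 members with a
channel[2] array, and injects the source configuration into the stream when
a client is added.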
---
 components/codecs/include/ftl/codecs/h264.hpp |  70 ++++++
 .../codecs/include/ftl/codecs/reader.hpp      |   1 +
 components/codecs/src/decoder.cpp             |   1 +
 components/codecs/src/nvpipe_decoder.cpp      |  37 +--
 components/codecs/src/opencv_decoder.cpp      |   2 +
 components/codecs/src/reader.cpp              |   3 +-
 .../include/ftl/rgbd/detail/netframe.hpp      |   3 +-
 components/rgbd-sources/src/source.cpp        |   1 +
 .../src/sources/ftlfile/file_source.cpp       |  93 ++++++--
 .../src/sources/ftlfile/file_source.hpp       |   2 +
 .../rgbd-sources/src/sources/net/net.cpp      | 215 ++++++------------
 .../rgbd-sources/src/sources/net/net.hpp      |   7 +-
 components/rgbd-sources/src/streamer.cpp      |   5 +-
 13 files changed, 243 insertions(+), 197 deletions(-)
 create mode 100644 components/codecs/include/ftl/codecs/h264.hpp

diff --git a/components/codecs/include/ftl/codecs/h264.hpp b/components/codecs/include/ftl/codecs/h264.hpp
new file mode 100644
index 000000000..17f649c52
--- /dev/null
+++ b/components/codecs/include/ftl/codecs/h264.hpp
@@ -0,0 +1,70 @@
+#ifndef _FTL_CODECS_H264_HPP_
+#define _FTL_CODECS_H264_HPP_
+
+#include <cstdint>
+#include <vector>
+
+namespace ftl {
+namespace codecs {
+
+/**
+ * H.264 codec utility functions.
+ */
+namespace h264 {
+
+/**
+ * H.264 Network Abstraction Layer (NAL) unit types.
+ */
+enum class NALType : int {
+	UNSPECIFIED_0 = 0,
+	CODED_SLICE_NON_IDR = 1,
+	CODED_SLICE_PART_A = 2,
+	CODED_SLICE_PART_B = 3,
+	CODED_SLICE_PART_C = 4,
+	CODED_SLICE_IDR = 5,
+	SEI = 6,
+	SPS = 7,
+	PPS = 8,
+	ACCESS_DELIMITER = 9,
+	EO_SEQ = 10,
+	EO_STREAM = 11,
+	FILLER_DATA = 12,
+	SPS_EXT = 13,
+	PREFIX_NAL_UNIT = 14,
+	SUBSET_SPS = 15,
+	RESERVED_16 = 16,
+	RESERVED_17 = 17,
+	RESERVED_18 = 18,
+	CODED_SLICE_AUX = 19,
+	CODED_SLICE_EXT = 20,
+	CODED_SLICE_DEPTH = 21,
+	RESERVED_22 = 22,
+	RESERVED_23 = 23,
+	UNSPECIFIED_24 = 24,
+	UNSPECIFIED_25,
+	UNSPECIFIED_26,
+	UNSPECIFIED_27,
+	UNSPECIFIED_28,
+	UNSPECIFIED_29,
+	UNSPECIFIED_30,
+	UNSPECIFIED_31
+};
+
+/**
+ * Extract the NAL unit type from the first NAL header. With NvPipe, the
+ * 5th byte (after the 4-byte start code) contains the NAL unit header.
+ */
+inline NALType getNALType(const std::vector<uint8_t> &data) {
+	return static_cast<NALType>(data[4] & 0x1F);
+}
+
+/**
+ * Check the H264 bitstream for an I-Frame. With NvPipe, all I-Frames start
+ * with an SPS NAL unit, so just check for that.
+ */
+inline bool isIFrame(const std::vector<uint8_t> &data) {
+	return getNALType(data) == NALType::SPS;
+}
+
+}  // namespace h264
+}  // namespace codecs
+}  // namespace ftl
+
+#endif  // _FTL_CODECS_H264_HPP_
diff --git a/components/codecs/include/ftl/codecs/reader.hpp b/components/codecs/include/ftl/codecs/reader.hpp
index 69e19f0a5..c3ea0adc6 100644
--- a/components/codecs/include/ftl/codecs/reader.hpp
+++ b/components/codecs/include/ftl/codecs/reader.hpp
@@ -41,6 +41,7 @@ class Reader {
 	bool end();
 
 	inline int64_t getStartTime() const { return timestart_; };
+	inline int version() const { return version_; }
 
 	private:
 	std::istream *stream_;
diff --git a/components/codecs/src/decoder.cpp b/components/codecs/src/decoder.cpp
index 4cd5437d0..a1809b615 100644
--- a/components/codecs/src/decoder.cpp
+++ b/components/codecs/src/decoder.cpp
@@ -10,6 +10,7 @@ Decoder *ftl::codecs::allocateDecoder(const ftl::codecs::Packet &pkt) {
 	switch(pkt.codec) {
 	case codec_t::JPG		:
 	case codec_t::PNG		: return new ftl::codecs::OpenCVDecoder;
+	case codec_t::H264		:
 	case codec_t::HEVC		: return new ftl::codecs::NvPipeDecoder;
 	}
 
diff --git a/components/codecs/src/nvpipe_decoder.cpp b/components/codecs/src/nvpipe_decoder.cpp
index 97985cd20..4c9f51506 100644
--- a/components/codecs/src/nvpipe_decoder.cpp
+++ b/components/codecs/src/nvpipe_decoder.cpp
@@ -4,6 +4,7 @@
 
 #include <ftl/cuda_util.hpp>
 #include <ftl/codecs/hevc.hpp>
+#include <ftl/codecs/h264.hpp>
 //#include <cuda_runtime.h>
 
 #include <opencv2/core/cuda/common.hpp>
@@ -21,32 +22,6 @@ NvPipeDecoder::~NvPipeDecoder() {
 	}
 }
 
-void cropAndScaleUp(cv::Mat &in, cv::Mat &out) {
-	CHECK(in.type() == out.type());
-
-	auto isize = in.size();
-	auto osize = out.size();
-	cv::Mat tmp;
-	
-	if (isize != osize) {
-		double x_scale = ((double) isize.width) / osize.width;
-		double y_scale = ((double) isize.height) / osize.height;
-		double x_scalei = 1.0 / x_scale;
-		double y_scalei = 1.0 / y_scale;
-		cv::Size sz_crop;
-
-		// assume downscaled image
-		if (x_scalei > y_scalei) {
-			sz_crop = cv::Size(isize.width, isize.height * x_scale);
-		} else {
-			sz_crop = cv::Size(isize.width * y_scale, isize.height);
-		}
-
-		tmp = in(cv::Rect(cv::Point2i(0, 0), sz_crop));
-		cv::resize(tmp, out, osize);
-	}
-}
-
 bool NvPipeDecoder::decode(const ftl::codecs::Packet &pkt, cv::Mat &out) {
 	cudaSetDevice(0);
 	UNIQUE_LOCK(mutex_,lk);
@@ -84,12 +59,14 @@ bool NvPipeDecoder::decode(const ftl::codecs::Packet &pkt, cv::Mat &out) {
 	// TODO: (Nick) Move to member variable to prevent re-creation
 	cv::Mat tmp(cv::Size(ftl::codecs::getWidth(pkt.definition),ftl::codecs::getHeight(pkt.definition)), (is_float_frame) ? CV_16U : CV_8UC4);
 
+	// Check for an I-Frame
 	if (pkt.codec == ftl::codecs::codec_t::HEVC) {
-		// Obtain NAL unit type
 		if (ftl::codecs::hevc::isIFrame(pkt.data)) seen_iframe_ = true;
+	} else if (pkt.codec == ftl::codecs::codec_t::H264) {
+		if (ftl::codecs::h264::isIFrame(pkt.data)) seen_iframe_ = true;
 	}
-	// TODO: Parse H264 for i-frame check
 
+	// No I-Frame yet, so don't attempt to decode P-Frames.
 	if (!seen_iframe_) return false;
 
 	int rc = NvPipe_Decode(nv_decoder_, pkt.data.data(), pkt.data.size(), tmp.data, tmp.cols, tmp.rows);
@@ -107,6 +84,7 @@ bool NvPipeDecoder::decode(const ftl::codecs::Packet &pkt, cv::Mat &out) {
 	} else {
 		// Is the received frame the same size as requested output?
 		if (out.rows == ftl::codecs::getHeight(pkt.definition)) {
+			// Flag 0x1 means the frame is RGBA, so it needs conversion to BGR
 			if (pkt.flags & 0x1) {
 				cv::cvtColor(tmp, out, cv::COLOR_RGBA2BGR);
 			} else {
@@ -114,6 +92,7 @@ bool NvPipeDecoder::decode(const ftl::codecs::Packet &pkt, cv::Mat &out) {
 			}
 		} else {
 			LOG(WARNING) << "Resizing decoded frame from " << tmp.size() << " to " << out.size();
+			// Flag 0x1 means the frame is RGBA, so it needs conversion to BGR
 			if (pkt.flags & 0x1) {
 				cv::cvtColor(tmp, tmp, cv::COLOR_RGBA2BGR);
 			} else {
@@ -127,5 +106,5 @@ bool NvPipeDecoder::decode(const ftl::codecs::Packet &pkt, cv::Mat &out) {
 }
 
 bool NvPipeDecoder::accepts(const ftl::codecs::Packet &pkt) {
-	return pkt.codec == codec_t::HEVC;
+	return pkt.codec == codec_t::HEVC || pkt.codec == codec_t::H264;
 }
diff --git a/components/codecs/src/opencv_decoder.cpp b/components/codecs/src/opencv_decoder.cpp
index 3bbd82fa2..c7e54531a 100644
--- a/components/codecs/src/opencv_decoder.cpp
+++ b/components/codecs/src/opencv_decoder.cpp
@@ -30,6 +30,8 @@ bool OpenCVDecoder::decode(const ftl::codecs::Packet &pkt, cv::Mat &out) {
 	cv::Rect roi(cx,cy,chunk_width,chunk_height);
 	cv::Mat chunkHead = out(roi);
 
+	//LOG(INFO) << "DECODE JPEG " << (int)pkt.block_number << "/" << chunk_dim;
+
 	// Decode in temporary buffers to prevent long locks
 	cv::imdecode(pkt.data, cv::IMREAD_UNCHANGED, &tmp);
 
diff --git a/components/codecs/src/reader.cpp b/components/codecs/src/reader.cpp
index 3727bdd38..97a74acd1 100644
--- a/components/codecs/src/reader.cpp
+++ b/components/codecs/src/reader.cpp
@@ -28,6 +28,7 @@ bool Reader::begin() {
 	}
 
 	version_ = h.version;
+	LOG(INFO) << "FTL format version " << version_;
 
 	// Capture current time to adjust timestamps
 	timestart_ = (ftl::timer::get_time() / ftl::timer::getInterval()) * ftl::timer::getInterval();
@@ -89,7 +90,7 @@ bool Reader::read(int64_t ts, const std::function<void(const ftl::codecs::Stream
 		get<0>(data).timestamp += timestart_;
 
 		// Fix to clear flags for version 2.
-		if (version_ == 2) {
+		if (version_ <= 2) {
 			get<1>(data).flags = 0;
 		}
 
diff --git a/components/rgbd-sources/include/ftl/rgbd/detail/netframe.hpp b/components/rgbd-sources/include/ftl/rgbd/detail/netframe.hpp
index 1abb624c9..eb9c64b99 100644
--- a/components/rgbd-sources/include/ftl/rgbd/detail/netframe.hpp
+++ b/components/rgbd-sources/include/ftl/rgbd/detail/netframe.hpp
@@ -14,8 +14,7 @@ namespace detail {
  * Also maintains statistics about the frame transmission for later analysis.
  */
 struct NetFrame {
-	cv::Mat channel1;
-	cv::Mat channel2;
+	cv::Mat channel[2];	// [0] = colour, [1] = second (e.g. depth) channel
 	volatile int64_t timestamp;
 	std::atomic<int> chunk_count[2];
 	std::atomic<int> channel_count;
diff --git a/components/rgbd-sources/src/source.cpp b/components/rgbd-sources/src/source.cpp
index 8e402bf33..4c7485460 100644
--- a/components/rgbd-sources/src/source.cpp
+++ b/components/rgbd-sources/src/source.cpp
@@ -271,6 +271,7 @@ void Source::removeRawCallback(const std::function<void(ftl::rgbd::Source*, cons
 
 void Source::notifyRaw(const ftl::codecs::StreamPacket &spkt, const ftl::codecs::Packet &pkt) {
 	SHARED_LOCK(mutex_,lk);
+
 	for (auto &i : rawcallbacks_) {
 		i(this, spkt, pkt);
 	}
diff --git a/components/rgbd-sources/src/sources/ftlfile/file_source.cpp b/components/rgbd-sources/src/sources/ftlfile/file_source.cpp
index d8ebdb399..9597bde79 100644
--- a/components/rgbd-sources/src/sources/ftlfile/file_source.cpp
+++ b/components/rgbd-sources/src/sources/ftlfile/file_source.cpp
@@ -33,25 +33,46 @@ FileSource::FileSource(ftl::rgbd::Source *s, ftl::rgbd::Player *r, int sid) : ft
 
     r->onPacket(sid, [this](const ftl::codecs::StreamPacket &spkt, ftl::codecs::Packet &pkt) {
 		host_->notifyRaw(spkt, pkt);
+
+		// Some channels are handled directly by the source object and do not
+		// proceed to any further processing.
+		// FIXME: Potential problem, these get processed at the wrong time
+		if (spkt.channel == Channel::Configuration) {
+			std::tuple<std::string, std::string> cfg;
+			auto unpacked = msgpack::unpack((const char*)pkt.data.data(), pkt.data.size());
+			unpacked.get().convert(cfg);
+
+			LOG(INFO) << "Config Received: " << std::get<1>(cfg);
+			return;
+		}
+		else if (spkt.channel == Channel::Calibration) {
+			_processCalibration(pkt);
+			return;
+		} else if (spkt.channel == Channel::Pose) {
+			_processPose(pkt);
+			return;
+		}
+
+		// FIXME: For old or malformed FTL files where the wrong channel is used
 		if (pkt.codec == codec_t::POSE) {
-			Eigen::Matrix4d p = Eigen::Map<Eigen::Matrix4d>((double*)pkt.data.data());
-			host_->setPose(p);
+			_processPose(pkt);
+			return;
 		} else if (pkt.codec == codec_t::CALIBRATION) {
-			ftl::rgbd::Camera *camera = (ftl::rgbd::Camera*)pkt.data.data();
-            LOG(INFO) << "Have calibration: " << camera->fx;
-			params_ = *camera;
-			has_calibration_ = true;
-		} else {
-			if (pkt.codec == codec_t::HEVC) {
-				if (ftl::codecs::hevc::isIFrame(pkt.data)) _removeChannel(spkt.channel);
-			}
-			cache_[cache_write_].emplace_back();
-			auto &c = cache_[cache_write_].back();
-
-			// TODO: Attempt to avoid this copy operation
-			c.spkt = spkt;
-			c.pkt = pkt;
+			_processCalibration(pkt);
+			return;
 		}
+
+		// TODO: Check I-Frames for H264
+		if (pkt.codec == codec_t::HEVC) {
+			if (ftl::codecs::hevc::isIFrame(pkt.data)) _removeChannel(spkt.channel);
+		}
+		cache_[cache_write_].emplace_back();
+		auto &c = cache_[cache_write_].back();
+
+		// TODO: Attempt to avoid this copy operation
+		c.spkt = spkt;
+		c.pkt = pkt;
     });
 }
 
@@ -59,6 +80,38 @@ FileSource::~FileSource() {
 
 }
 
+void FileSource::_processPose(ftl::codecs::Packet &pkt) {
+	LOG(INFO) << "Got POSE channel";
+	if (pkt.codec == codec_t::POSE) {
+		Eigen::Matrix4d p = Eigen::Map<Eigen::Matrix4d>((double*)pkt.data.data());
+		host_->setPose(p);
+	} else if (pkt.codec == codec_t::MSGPACK) {
+		// TODO: handle a msgpack-encoded pose here.
+	}
+}
+
+void FileSource::_processCalibration(ftl::codecs::Packet &pkt) {
+	if (pkt.codec == codec_t::CALIBRATION) {
+		ftl::rgbd::Camera *camera = (ftl::rgbd::Camera*)pkt.data.data();
+		params_ = *camera;
+		has_calibration_ = true;
+	} else if (pkt.codec == codec_t::MSGPACK) {
+		std::tuple<ftl::rgbd::Camera, ftl::codecs::Channel, ftl::rgbd::capability_t> params;
+		auto unpacked = msgpack::unpack((const char*)pkt.data.data(), pkt.data.size());
+		unpacked.get().convert(params);
+
+		if (std::get<1>(params) == Channel::Left) {
+			params_ = std::get<0>(params);
+			capabilities_ = std::get<2>(params);
+			has_calibration_ = true;
+
+			LOG(INFO) << "Got Calibration channel: " << params_.width << "x" << params_.height;
+		} else {
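+			// Note: right-channel calibration is currently ignored here.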
+			//params_right_ = std::get<0>(params);
+		}
+	}
+}
+
 void FileSource::_removeChannel(ftl::codecs::Channel channel) {
 	int c = 0;
 	for (auto i=cache_[cache_write_].begin(); i != cache_[cache_write_].end(); ++i) {
@@ -98,9 +151,13 @@ bool FileSource::compute(int n, int b) {
 	int64_t lastts = 0;
 	int lastc = 0;
 
+	// Go through previously read and cached frames in sequence; this must be
+	// done in order because of P-Frames.
 	for (auto i=cache_[cache_read_].begin(); i!=cache_[cache_read_].end(); ++i) {
 		auto &c = *i;
 
+		// Verify that both channels have been received, i.e. two frames
+		// with the same timestamp.
 		if (c.spkt.timestamp > lastts) {
 			lastts = c.spkt.timestamp;
 			lastc = 1;
@@ -123,6 +180,7 @@ bool FileSource::compute(int n, int b) {
 		}
 	}
 
+	// FIXME: Consider case of Channel::None
 	if (lastc != 2) {
 		LOG(ERROR) << "Channels not in sync (" << sourceid_ << "): " << lastts;
 		return false;
@@ -132,8 +190,7 @@ bool FileSource::compute(int n, int b) {
 
 	if (rgb_.empty() || depth_.empty()) return false;
 
-	//auto cb = host_->callback();
-	//if (cb) cb(timestamp_, rgb_, depth_);
+	// Notify the host of the decoded frame pair
 	host_->notify(timestamp_, rgb_, depth_);
     return true;
 }
diff --git a/components/rgbd-sources/src/sources/ftlfile/file_source.hpp b/components/rgbd-sources/src/sources/ftlfile/file_source.hpp
index 80f3b368b..2d59b68cb 100644
--- a/components/rgbd-sources/src/sources/ftlfile/file_source.hpp
+++ b/components/rgbd-sources/src/sources/ftlfile/file_source.hpp
@@ -44,6 +44,8 @@ class FileSource : public detail::Source {
 
 	bool realtime_;
 
+	void _processCalibration(ftl::codecs::Packet &pkt);
+	void _processPose(ftl::codecs::Packet &pkt);
 	void _removeChannel(ftl::codecs::Channel channel);
 	void _createDecoder(int ix, const ftl::codecs::Packet &pkt);
 };
diff --git a/components/rgbd-sources/src/sources/net/net.cpp b/components/rgbd-sources/src/sources/net/net.cpp
index 39227c5e6..16c5994d4 100644
--- a/components/rgbd-sources/src/sources/net/net.cpp
+++ b/components/rgbd-sources/src/sources/net/net.cpp
@@ -52,8 +52,8 @@ NetFrame &NetFrameQueue::getFrame(int64_t ts, const cv::Size &s, int c1type, int
 			f.chunk_total[1] = 0;
 			f.channel_count = 0;
 			f.tx_size = 0;
-			f.channel1.create(s, c1type);
-			f.channel2.create(s, c2type);
+			f.channel[0].create(s, c1type);
+			f.channel[1].create(s, c2type);
 			return f;
 		}
 		oldest = (f.timestamp < oldest) ? f.timestamp : oldest;
@@ -72,8 +72,8 @@ NetFrame &NetFrameQueue::getFrame(int64_t ts, const cv::Size &s, int c1type, int
 			f.chunk_total[1] = 0;
 			f.channel_count = 0;
 			f.tx_size = 0;
-			f.channel1.create(s, c1type);
-			f.channel2.create(s, c2type);
+			f.channel[0].create(s, c1type);
+			f.channel[1].create(s, c2type);
 			return f;
 		}
 	}
@@ -90,55 +90,6 @@ void NetFrameQueue::freeFrame(NetFrame &f) {
 
 // ===== NetSource =============================================================
 
-/*bool NetSource::_getCalibration(Universe &net, const UUID &peer, const string &src, ftl::rgbd::Camera &p, ftl::codecs::Channel chan) {
-	try {
-		while(true) {
-			auto [cap,buf] = net.call<tuple<unsigned int,vector<unsigned char>>>(peer_, "source_details", src, chan);
-
-			capabilities_ = cap;
-
-			if (buf.size() > 0) {
-				memcpy((char*)&p, buf.data(), buf.size());
-				
-				if (sizeof(p) != buf.size()) {
-					LOG(ERROR) << "Corrupted calibration";
-					return false;
-				}
-
-				LOG(INFO) << "Calibration received: " << p.cx << ", " << p.cy << ", " << p.fx << ", " << p.fy;
-
-				if (chan == Channel::Left) {
-					// Put calibration into config manually
-					host_->getConfig()["focal"] = p.fx;
-					host_->getConfig()["centre_x"] = p.cx;
-					host_->getConfig()["centre_y"] = p.cy;
-					host_->getConfig()["baseline"] = p.baseline;
-					host_->getConfig()["doffs"] = p.doffs;
-				} else {
-					host_->getConfig()["focal_right"] = p.fx;
-					host_->getConfig()["centre_x_right"] = p.cx;
-					host_->getConfig()["centre_y_right"] = p.cy;
-					host_->getConfig()["baseline_right"] = p.baseline;
-					host_->getConfig()["doffs_right"] = p.doffs;
-				}
-				
-				return true;
-			} else {
-				LOG(INFO) << "Could not get calibration, retrying";
-				sleep_for(milliseconds(500));
-			}
-		}
-		
-	} catch (const std::exception& ex) {
-		LOG(ERROR) << "Exception: " << ex.what();
-		return false;
-
-	} catch (...) {
-		LOG(ERROR) << "Unknown exception";
-		return false;
-	}
-}*/
-
 NetSource::NetSource(ftl::rgbd::Source *host)
 		: ftl::rgbd::detail::Source(host), active_(false), minB_(9), maxN_(1), adaptive_(0), queue_(3) {
 
@@ -149,8 +100,8 @@ NetSource::NetSource(ftl::rgbd::Source *host)
 	params_right_.width = 0;
 	has_calibration_ = false;
 
-	decoder_c1_ = nullptr;
-	decoder_c2_ = nullptr;
+	decoder_[0] = nullptr;
+	decoder_[1] = nullptr;
 
 	host->on("gamma", [this,host](const ftl::config::Event&) {
 		gamma_ = host->value("gamma", 1.0f);
@@ -222,8 +173,8 @@ NetSource::NetSource(ftl::rgbd::Source *host)
 }
 
 NetSource::~NetSource() {
-	if (decoder_c1_) ftl::codecs::free(decoder_c1_);
-	if (decoder_c2_) ftl::codecs::free(decoder_c2_);
+	if (decoder_[0]) ftl::codecs::free(decoder_[0]);
+	if (decoder_[1]) ftl::codecs::free(decoder_[1]);
 
 	if (uri_.size() > 0) {
 		host_->getNet()->unbind(uri_);
@@ -261,22 +212,47 @@ NetSource::~NetSource() {
 
 void NetSource::_createDecoder(int chan, const ftl::codecs::Packet &pkt) {
 	UNIQUE_LOCK(mutex_,lk);
-	auto *decoder = (chan == 0) ? decoder_c1_ : decoder_c2_;
+	auto *decoder = decoder_[chan];
 	if (decoder) {
 		if (!decoder->accepts(pkt)) {
-			ftl::codecs::free((chan == 0) ? decoder_c1_ : decoder_c2_);
+			ftl::codecs::free(decoder_[chan]);
 		} else {
 			return;
 		}
 	}
 
-	if (chan == 0) {
-		decoder_c1_ = ftl::codecs::allocateDecoder(pkt);
+	decoder_[chan] = ftl::codecs::allocateDecoder(pkt);
+}
+
+void NetSource::_processConfig(const ftl::codecs::Packet &pkt) {
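+	// The payload is a msgpack tuple of (property name, JSON-encoded value),
+	// matching what the streamer injects for Channel::Configuration.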
+	std::tuple<std::string, std::string> cfg;
+	auto unpacked = msgpack::unpack((const char*)pkt.data.data(), pkt.data.size());
+	unpacked.get().convert(cfg);
+
+	LOG(INFO) << "Config Received: " << std::get<1>(cfg);
+	// TODO: This needs to be put in a safer / better location
+	host_->set(std::get<0>(cfg), nlohmann::json::parse(std::get<1>(cfg)));
+}
+
+void NetSource::_processCalibration(const ftl::codecs::Packet &pkt) {
+	std::tuple<ftl::rgbd::Camera, ftl::codecs::Channel, ftl::rgbd::capability_t> params;
+	auto unpacked = msgpack::unpack((const char*)pkt.data.data(), pkt.data.size());
+	unpacked.get().convert(params);
+
+	if (std::get<1>(params) == Channel::Left) {
+		params_ = std::get<0>(params);
+		capabilities_ = std::get<2>(params);
+		has_calibration_ = true;
+		LOG(INFO) << "Got Calibration channel: " << params_.width << "x" << params_.height;
 	} else {
-		decoder_c2_ = ftl::codecs::allocateDecoder(pkt);
+		params_right_ = std::get<0>(params);
 	}
 }
 
+void NetSource::_processPose(const ftl::codecs::Packet &pkt) {
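+	// TODO: the received pose is not yet applied to the host source here.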
+	LOG(INFO) << "Got POSE channel";
+}
+
 void NetSource::_recvPacket(short ttimeoff, const ftl::codecs::StreamPacket &spkt, const ftl::codecs::Packet &pkt) {
 	// Capture time here for better net latency estimate
 	int64_t now = std::chrono::time_point_cast<std::chrono::milliseconds>(std::chrono::high_resolution_clock::now()).time_since_epoch().count();
@@ -289,28 +265,11 @@ void NetSource::_recvPacket(short ttimeoff, const ftl::codecs::StreamPacket &spk
 	const ftl::codecs::Channel rchan = spkt.channel;
 	const int channum = (rchan == Channel::Colour) ? 0 : 1;
 
-	if (rchan == Channel::Calibration) {
-		std::tuple<ftl::rgbd::Camera, ftl::codecs::Channel, ftl::rgbd::capability_t> params;
-		auto unpacked = msgpack::unpack((const char*)pkt.data.data(), pkt.data.size());
-		unpacked.get().convert(params);
-
-		if (std::get<1>(params) == Channel::Left) {
-			params_ = std::get<0>(params);
-			capabilities_ = std::get<2>(params);
-			has_calibration_ = true;
-
-			//rgb_ = cv::Mat(cv::Size(params_.width, params_.height), CV_8UC3, cv::Scalar(0,0,0));
-			//depth_ = cv::Mat(cv::Size(params_.width, params_.height), CV_32FC1, 0.0f);
-
-			LOG(INFO) << "Got Calibration channel: " << params_.width << "x" << params_.height;
-		} else {
-			params_right_ = std::get<0>(params);
-		}
-
-		return;
-	} else if (rchan == Channel::Pose) {
-		LOG(INFO) << "Got POSE channel";
-		return;
+	// Deal with the special channels...
+	switch (rchan) {
+	case Channel::Configuration		: _processConfig(pkt); return;
+	case Channel::Calibration		: _processCalibration(pkt); return;
+	case Channel::Pose				: _processPose(pkt); return;
+	default							: break;  // Other channels continue below.
 	}
 
 	if (!has_calibration_) {
@@ -326,19 +285,19 @@ void NetSource::_recvPacket(short ttimeoff, const ftl::codecs::StreamPacket &spk
 	// Only decode if this channel is wanted.
 	if (rchan == Channel::Colour || rchan == chan) {
 		_createDecoder(channum, pkt);
-		auto *decoder = (rchan == Channel::Colour) ? decoder_c1_ : decoder_c2_;
+		auto *decoder = decoder_[channum];
 		if (!decoder) {
 			LOG(ERROR) << "No frame decoder available";
 			return;
 		}
 
-		decoder->decode(pkt, (rchan == Channel::Colour) ? frame.channel1 : frame.channel2);
+		decoder->decode(pkt, frame.channel[channum]);
 	} else if (chan != Channel::None && rchan != Channel::Colour) {
 		// Didn't receive correct second channel so just clear the images
 		if (isFloatChannel(chan)) {
-			frame.channel2.setTo(cv::Scalar(0.0f));
+			frame.channel[1].setTo(cv::Scalar(0.0f));
 		} else {
-			frame.channel2.setTo(cv::Scalar(0,0,0));
+			frame.channel[1].setTo(cv::Scalar(0,0,0));
 		}
 	}
 
@@ -358,49 +317,39 @@ void NetSource::_recvPacket(short ttimeoff, const ftl::codecs::StreamPacket &spk
 	}		
 
 	++frame.chunk_count[channum];
-	++frame.channel_count;
-
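+	// A channel only counts as complete once all of its chunks have arrived.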
+	if (frame.chunk_count[channum] == frame.chunk_total[channum]) ++frame.channel_count;
 	if (frame.chunk_count[channum] > frame.chunk_total[channum]) LOG(FATAL) << "TOO MANY CHUNKS";
 
 	// Capture tx time of first received chunk
-	if (frame.channel_count == 1 && frame.chunk_count[channum] == 1) {
+	// FIXME: This seems broken
+	if (channum == 1 && frame.chunk_count[channum] == 1) {
 		UNIQUE_LOCK(frame.mtx, flk);
 		if (frame.chunk_count[channum] == 1) {
 			frame.tx_latency = int64_t(ttimeoff);
 		}
 	}
 
-	// Last chunk of both channels now received
-	if (frame.channel_count == spkt.channel_count &&
-			frame.chunk_count[0] == frame.chunk_total[0] &&
-			frame.chunk_count[1] == frame.chunk_total[1]) {
-		UNIQUE_LOCK(frame.mtx, flk);
+	// Last chunk of both channels now received, so we are done.
+	if (frame.channel_count == spkt.channel_count) {
+		_completeFrame(frame, now-(spkt.timestamp+frame.tx_latency));
+	}
+}
 
-		if (frame.timestamp >= 0 && frame.chunk_count[0] == frame.chunk_total[0] && frame.chunk_count[1] == frame.chunk_total[1]) {
-			timestamp_ = frame.timestamp;
-			frame.tx_latency = now-(spkt.timestamp+frame.tx_latency);
-
-			adaptive_ = abr_.selectBitrate(frame);
-			//LOG(INFO) << "Frame finished: " << frame.timestamp;
-			host_->notify(frame.timestamp, frame.channel1, frame.channel2);
-			/*auto cb = host_->callback();
-			if (cb) {
-				try {
-					cb(frame.timestamp, frame.channel1, frame.channel2);
-				} catch (...) {
-					LOG(ERROR) << "Exception in net frame callback";
-				}
-			} else {
-				LOG(ERROR) << "NO FRAME CALLBACK";
-			}*/
-
-			queue_.freeFrame(frame);
-
-			{
-				// Decrement expected frame counter
-				N_--;
-			}
-		}
+void NetSource::_completeFrame(NetFrame &frame, int64_t latency) {
+	UNIQUE_LOCK(frame.mtx, flk);
+
+	// Frame must not have already been freed.
+	if (frame.timestamp >= 0) {
+		timestamp_ = frame.timestamp;
+		frame.tx_latency = latency;
+
+		// Note: Not used currently
+		adaptive_ = abr_.selectBitrate(frame);
+
+		host_->notify(frame.timestamp, frame.channel[0], frame.channel[1]);
+
+		queue_.freeFrame(frame);
+		N_--;
 	}
 }
 
@@ -420,12 +369,6 @@ void NetSource::setPose(const Eigen::Matrix4d &pose) {
 
 ftl::rgbd::Camera NetSource::parameters(ftl::codecs::Channel chan) {
 	if (chan == ftl::codecs::Channel::Right) {
-		/*if (params_right_.width == 0) {
-			auto uri = host_->get<string>("uri");
-			if (!uri) return params_;
-
-			_getCalibration(*host_->getNet(), peer_, *uri, params_right_, chan);
-		}*/
 		return params_right_;
 	} else {
 		return params_;
@@ -450,27 +393,11 @@ void NetSource::_updateURI() {
 		}
 		peer_ = *p;
 
-		//has_calibration_ = _getCalibration(*host_->getNet(), peer_, *uri, params_, ftl::codecs::Channel::Left);
-		//_getCalibration(*host_->getNet(), peer_, *uri, params_right_, ftl::codecs::Channel::Right);
-
 		host_->getNet()->bind(*uri, [this](short ttimeoff, const ftl::codecs::StreamPacket &spkt, const ftl::codecs::Packet &pkt) {
-			//if (chunk == -1) {
-				//#ifdef HAVE_NVPIPE
-				//_recvVideo(frame, ttimeoff, bitrate, jpg, d);
-				//#else
-				//LOG(ERROR) << "Cannot receive HEVC, no NvPipe support";
-				//#endif
-			//} else {
-				//_recvChunk(frame, ttimeoff, bitrate, chunk, jpg, d);
-				_recvPacket(ttimeoff, spkt, pkt);
-			//}
+			_recvPacket(ttimeoff, spkt, pkt);
 		});
 
 		N_ = 0;
-
-		//d_rgb_ = cv::Mat(cv::Size(params_.width, params_.height), CV_8UC3, cv::Scalar(0,0,0));
-		//d_depth_ = cv::Mat(cv::Size(params_.width, params_.height), CV_32FC1, 0.0f);
-
 		uri_ = *uri;
 		active_ = true;
 	} else {
diff --git a/components/rgbd-sources/src/sources/net/net.hpp b/components/rgbd-sources/src/sources/net/net.hpp
index 469902a57..5cef2726d 100644
--- a/components/rgbd-sources/src/sources/net/net.hpp
+++ b/components/rgbd-sources/src/sources/net/net.hpp
@@ -68,8 +68,7 @@ class NetSource : public detail::Source {
 	//NvPipe *nv_channel2_decoder_;
 	//#endif
 
-	ftl::codecs::Decoder *decoder_c1_;
-	ftl::codecs::Decoder *decoder_c2_;
+	ftl::codecs::Decoder *decoder_[2];
 
 	// Adaptive bitrate functionality
 	ftl::rgbd::detail::bitrate_t adaptive_;	 // Current adapted bitrate
@@ -86,6 +85,10 @@ class NetSource : public detail::Source {
 	void _updateURI();
 	//void _checkAdaptive(int64_t);
 	void _createDecoder(int chan, const ftl::codecs::Packet &);
+	void _completeFrame(NetFrame &frame, int64_t latency);
+	void _processCalibration(const ftl::codecs::Packet &pkt);
+	void _processConfig(const ftl::codecs::Packet &pkt);
+	void _processPose(const ftl::codecs::Packet &pkt);
 };
 
 }
diff --git a/components/rgbd-sources/src/streamer.cpp b/components/rgbd-sources/src/streamer.cpp
index 3eff9e24f..8ecda51b7 100644
--- a/components/rgbd-sources/src/streamer.cpp
+++ b/components/rgbd-sources/src/streamer.cpp
@@ -338,7 +338,10 @@ void Streamer::_addClient(const string &source, int N, int rate, const ftl::UUID
 	// Finally, inject calibration and config data
 	s->src->inject(Channel::Calibration, s->src->parameters(Channel::Left), Channel::Left, s->src->getCapabilities());
 	s->src->inject(Channel::Calibration, s->src->parameters(Channel::Right), Channel::Right, s->src->getCapabilities());
-	//s->src->inject(Channel::Pose, s->src->getPose());
+	//s->src->inject(s->src->getPose());
+	//if (!(*s->src->get<nlohmann::json>("meta")).is_null()) {
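+	// Also send the source's full configuration as JSON under "/original".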
+		s->src->inject(Channel::Configuration, "/original", s->src->getConfig().dump());
+	//}
 }
 
 void Streamer::remove(Source *) {
-- 
GitLab