From a8a8bd4069ddd2afac64033dc8931a0d8853a9f4 Mon Sep 17 00:00:00 2001
From: Nicolas Pope <nicolas.pope@utu.fi>
Date: Thu, 30 Jul 2020 17:00:20 +0300
Subject: [PATCH] Reduce latency in device capture and parallel encoding

---
 applications/vision/src/main.cpp              | 50 +++++++------
 .../codecs/include/ftl/codecs/channels.hpp    |  4 +-
 .../codecs/include/ftl/codecs/encoder.hpp     |  4 +-
 components/codecs/src/encoder.cpp             |  4 +-
 components/codecs/src/nvidia_encoder.cpp      |  2 +-
 components/codecs/src/opencv_encoder.cpp      |  4 +-
 components/operators/src/depth.cpp            | 21 ++----
 .../src/sources/stereovideo/opencv.cpp        | 59 +++++++--------
 .../src/sources/stereovideo/opencv.hpp        |  2 +
 .../src/sources/stereovideo/pylon.cpp         | 73 ++++++++++---------
 .../src/sources/stereovideo/pylon.hpp         |  2 +
 .../src/sources/stereovideo/rectification.cpp | 22 ++++--
 .../src/sources/stereovideo/rectification.hpp |  2 +-
 .../streams/include/ftl/streams/sender.hpp    |  2 +
 components/streams/src/sender.cpp             | 21 ++++++
 15 files changed, 157 insertions(+), 115 deletions(-)

diff --git a/applications/vision/src/main.cpp b/applications/vision/src/main.cpp
index 2e9975297..0ae963ae1 100644
--- a/applications/vision/src/main.cpp
+++ b/applications/vision/src/main.cpp
@@ -14,6 +14,7 @@
 #include <vector>
 #include <fstream>
 #include <thread>
+#include <set>
 
 #include <opencv2/opencv.hpp>
 #include <ftl/rgbd.hpp>
@@ -148,7 +149,7 @@ static void run(ftl::Configurable *root) {
 	outstream->begin();
 	sender->setStream(outstream);
 
-	ftl::audio::Source *audioSrc = ftl::create<ftl::audio::Source>(root, "audio_test");
+	ftl::audio::Source *audioSrc = ftl::create<ftl::audio::Source>(root, "audio");
 
 	ftl::data::Pool pool(root->value("mempool_min", 2),root->value("mempool_max", 5));
 	auto *creator = new ftl::streams::IntervalSourceBuilder(&pool, 0, {source, audioSrc});
@@ -159,7 +160,8 @@ static void run(ftl::Configurable *root) {
 	receiver->registerBuilder(creatorptr);
 
 	// Which channels should be encoded
-	std::unordered_set<Channel> encodable;
+	std::set<Channel> encodable;
+	std::set<Channel> previous_encodable;
 
 	// Send channels on flush
 	auto flushhandle = pool.onFlushSet([sender,&encodable](ftl::data::FrameSet &fs, ftl::codecs::Channel c) {
@@ -192,39 +194,43 @@ static void run(ftl::Configurable *root) {
 	pipeline->append<ftl::operators::ArUco>("aruco")->value("enabled", false);
 	pipeline->append<ftl::operators::DepthChannel>("depth");  // Ensure there is a depth channel
 
-	bool busy = false;
-
-	auto h = creator->onFrameSet([sender,outstream,&stats_count,&latency,&frames,&stats_time,pipeline,&busy,&encodable](const ftl::data::FrameSetPtr &fs) {
-		if (busy) {
-			LOG(WARNING) << "Depth pipeline drop: " << fs->timestamp();
-			fs->firstFrame().message(ftl::data::Message::Warning_PIPELINE_DROP, "Depth pipeline drop");
-			return true;
-		}
-		busy = true;
+	std::atomic_flag busy;
+	busy.clear();
 
+	auto h = creator->onFrameSet([sender,outstream,&stats_count,&latency,&frames,&stats_time,pipeline,&busy,&encodable,&previous_encodable](const ftl::data::FrameSetPtr &fs) {
 		encodable.clear();
 		// Decide what to encode here.
 		const auto sel = outstream->selectedNoExcept(fs->frameset());
-		std::vector<Channel> sortedsel(sel.begin(), sel.end());
-		std::sort(sortedsel.begin(),sortedsel.end());
-
-		if (sortedsel.size() > 0) encodable.emplace(sortedsel[0]);
-		if (sortedsel.size() > 1) encodable.emplace(sortedsel[1]);
+		encodable.insert(sel.begin(), sel.end());
 
 		// Only allow the two encoders to exist
+		if (encodable.size() > 2) {
+			auto enciter = encodable.begin();
+			std::advance(enciter, 2);
+			encodable.erase(enciter, encodable.end());
+		}
+
 		// This ensures we cleanup other encoders
-		sender->setActiveEncoders(fs->frameset(), encodable);
+		if (encodable != previous_encodable) sender->resetEncoders(fs->frameset());
+		previous_encodable = encodable;
 
 		// Do all processing in another thread... only if encoding of depth
 		//if (encodable.find(Channel::Depth) != encodable.end()) {
 			ftl::pool.push([sender,&stats_count,&latency,&frames,&stats_time,pipeline,&busy,fs](int id) mutable {
-				// Do pipeline here...
+				// Do pipeline here... if not still busy doing it
+				if (busy.test_and_set()) {
+					LOG(WARNING) << "Depth pipeline drop: " << fs->timestamp();
+					fs->firstFrame().message(ftl::data::Message::Warning_PIPELINE_DROP, "Depth pipeline drop");
+					return;
+				}
 				pipeline->apply(*fs, *fs);
+				busy.clear();
 
 				++frames;
 				latency += float(ftl::timer::get_time() - fs->timestamp());
 
 				// Destruct frameset as soon as possible to send the data...
+				if (fs->hasAnyChanged(Channel::Depth)) fs->flush(Channel::Depth);
 				const_cast<ftl::data::FrameSetPtr&>(fs).reset();
 
 				if (!quiet && --stats_count <= 0) {
@@ -238,8 +244,6 @@ static void run(ftl::Configurable *root) {
 					latency = 0.0f;
 					stats_time = nowtime;
 				}
-
-				busy = false;
 			});
 		//} else {
 			//LOG(INFO) << "NOT DOING DEPTH";
@@ -248,8 +252,10 @@ static void run(ftl::Configurable *root) {
 		//}
 
 		// Lock colour right now to encode in parallel
-		fs->flush(ftl::codecs::Channel::Colour);
-		fs->flush(ftl::codecs::Channel::AudioStereo);
+		ftl::pool.push([fs](int id){ fs->flush(ftl::codecs::Channel::Colour); });
+		if (fs->hasAnyChanged(Channel::Audio)) {
+			ftl::pool.push([fs](int id){ fs->flush(ftl::codecs::Channel::Audio); });
+		}
 
 		return true;
 	});
diff --git a/components/codecs/include/ftl/codecs/channels.hpp b/components/codecs/include/ftl/codecs/channels.hpp
index 71c97dc9f..598a6cf5c 100644
--- a/components/codecs/include/ftl/codecs/channels.hpp
+++ b/components/codecs/include/ftl/codecs/channels.hpp
@@ -40,9 +40,9 @@ enum struct Channel : int {
 	Overlay			= 21,   // 8UC4
 	GroundTruth		= 22,	// 32F
 
-	Audio			= 32,
-	AudioMono		= 32,
+	AudioMono		= 32,	// Deprecated, will always be stereo
 	AudioStereo		= 33,
+	Audio			= 33,
 
 	Configuration	= 64,	// JSON Data
 	Settings1		= 65,
diff --git a/components/codecs/include/ftl/codecs/encoder.hpp b/components/codecs/include/ftl/codecs/encoder.hpp
index 995a8216c..c2ed5ed9f 100644
--- a/components/codecs/include/ftl/codecs/encoder.hpp
+++ b/components/codecs/include/ftl/codecs/encoder.hpp
@@ -75,13 +75,15 @@ class Encoder {
 
 	virtual bool supports(ftl::codecs::codec_t codec)=0;
 
+	inline ftl::codecs::device_t device() const { return device_; };
+
 	cv::cuda::Stream &stream() { return stream_; }
 
 	protected:
 	bool available;
 	const ftl::codecs::definition_t max_definition;
 	const ftl::codecs::definition_t min_definition;
-	const ftl::codecs::device_t device;
+	const ftl::codecs::device_t device_;
 	cv::cuda::Stream stream_;
 };
 
diff --git a/components/codecs/src/encoder.cpp b/components/codecs/src/encoder.cpp
index 4ef9ade5a..3d8f8aac1 100644
--- a/components/codecs/src/encoder.cpp
+++ b/components/codecs/src/encoder.cpp
@@ -37,7 +37,7 @@ Encoder *ftl::codecs::allocateEncoder(ftl::codecs::definition_t maxdef,
 	for (auto i=encoders.begin(); i!=encoders.end(); ++i) {
 		auto *e = *i;
 		if (!e->available) continue;
-		if (dev != device_t::Any && dev != e->device) continue;
+		if (dev != device_t::Any && dev != e->device_) continue;
 		if (maxdef != definition_t::Any && (maxdef < e->max_definition || maxdef > e->min_definition)) continue;
 		if (codec != codec_t::Any && !e->supports(codec)) continue;
 		
@@ -57,7 +57,7 @@ void ftl::codecs::free(Encoder *&enc) {
 }
 
 Encoder::Encoder(definition_t maxdef, definition_t mindef, device_t dev) :
-		available(true), max_definition(maxdef), min_definition(mindef), device(dev) {
+		available(true), max_definition(maxdef), min_definition(mindef), device_(dev) {
 
 }
 
diff --git a/components/codecs/src/nvidia_encoder.cpp b/components/codecs/src/nvidia_encoder.cpp
index 9f9b9e8a3..62ae6b0f1 100644
--- a/components/codecs/src/nvidia_encoder.cpp
+++ b/components/codecs/src/nvidia_encoder.cpp
@@ -61,7 +61,7 @@ static inline std::string EncErrorCodeToString(NVENCSTATUS code)
 }
 
 NvidiaEncoder::NvidiaEncoder(definition_t maxdef,
-			definition_t mindef) : Encoder(maxdef, mindef, ftl::codecs::device_t::Hardware) {
+			definition_t mindef) : Encoder(maxdef, mindef, ftl::codecs::device_t::NVIDIA) {
 	nvenc_ = nullptr;
 	was_reset_ = false;
 }
diff --git a/components/codecs/src/opencv_encoder.cpp b/components/codecs/src/opencv_encoder.cpp
index 83ac4dfa5..aa41b2b4a 100644
--- a/components/codecs/src/opencv_encoder.cpp
+++ b/components/codecs/src/opencv_encoder.cpp
@@ -11,7 +11,7 @@ using ftl::codecs::OpenCVEncoder;
 using std::vector;
 
 OpenCVEncoder::OpenCVEncoder(ftl::codecs::definition_t maxdef,
-			ftl::codecs::definition_t mindef) : Encoder(maxdef, mindef, ftl::codecs::device_t::Software) {
+			ftl::codecs::definition_t mindef) : Encoder(maxdef, mindef, ftl::codecs::device_t::OpenCV) {
 	jobs_ = 0;
 }
 
@@ -38,8 +38,6 @@ bool OpenCVEncoder::encode(const cv::cuda::GpuMat &in, ftl::codecs::Packet &pkt)
 		return false;
 	}
 
-	LOG(WARNING) << "Using Software Encoder!";
-
 	in.download(tmp_);
 
 	if (!is_colour && in.type() == CV_32F) {
diff --git a/components/operators/src/depth.cpp b/components/operators/src/depth.cpp
index cdeac427f..df127ef9f 100644
--- a/components/operators/src/depth.cpp
+++ b/components/operators/src/depth.cpp
@@ -9,6 +9,7 @@
 #include "ftl/operators/depth.hpp"
 #include "ftl/operators/mask.hpp"
 #include "ftl/operators/opticalflow.hpp"
+#include <ftl/calibration/structures.hpp>
 
 #include "./disparity/opencv/disparity_bilateral_filter.hpp"
 
@@ -179,7 +180,14 @@ bool DepthChannel::apply(ftl::rgbd::FrameSet &in, ftl::rgbd::FrameSet &out, cuda
 	for (size_t i=0; i<in.frames.size(); ++i) {
 		if (!in.hasFrame(i)) continue;
 		auto &f = in.frames[i].cast<ftl::rgbd::Frame>();
+
 		if (!f.hasChannel(Channel::Depth) && f.hasChannel(Channel::Right)) {
+
+			if (f.hasChannel(Channel::CalibrationData)) {
+				auto &cdata = f.get<ftl::calibration::CalibrationData>(Channel::CalibrationData);
+				if (!cdata.enabled) continue;
+			}
+
 			_createPipeline(in.frames.size());
 
 			const cv::cuda::GpuMat& left = f.get<cv::cuda::GpuMat>(Channel::Left);
@@ -188,19 +196,6 @@ bool DepthChannel::apply(ftl::rgbd::FrameSet &in, ftl::rgbd::FrameSet &out, cuda
 			depth.create(left.size(), CV_32FC1);
 
 			if (left.empty() || right.empty()) continue;
-
-			/*if (depth_size_ != left.size()) {
-				auto &col2 = f.create<cv::cuda::GpuMat>(Channel::ColourHighRes);
-				cv::cuda::resize(left, col2, depth_size_, 0.0, 0.0, cv::INTER_CUBIC, cvstream);
-				f.createTexture<uchar4>(Channel::ColourHighRes, true);
-				f.swapChannels(Channel::Colour, Channel::ColourHighRes);
-			}
-
-			if (depth_size_ != right.size()) {
-				cv::cuda::resize(right, rbuf_[i], depth_size_, 0.0, 0.0, cv::INTER_CUBIC, cvstream);
-				cv::cuda::swap(right, rbuf_[i]);
-			}*/
-
 			pipe_->apply(f, f, stream);
 		}
 	}
diff --git a/components/rgbd-sources/src/sources/stereovideo/opencv.cpp b/components/rgbd-sources/src/sources/stereovideo/opencv.cpp
index b7ff35953..4ce893c6b 100644
--- a/components/rgbd-sources/src/sources/stereovideo/opencv.cpp
+++ b/components/rgbd-sources/src/sources/stereovideo/opencv.cpp
@@ -149,9 +149,9 @@ OpenCVDevice::OpenCVDevice(nlohmann::json &config, bool stereo)
 	right_hm_ = cv::cuda::HostMem(dheight_, dwidth_, CV_8UC4);
 	hres_hm_ = cv::cuda::HostMem(height_, width_, CV_8UC4);
 
-	interpolation_ = value("inter_cubic", false) ? cv::INTER_CUBIC : cv::INTER_LINEAR;
+	interpolation_ = value("inter_cubic", true) ? cv::INTER_CUBIC : cv::INTER_LINEAR;
 	on("inter_cubic", [this](){
-		interpolation_ = value("inter_cubic_", false) ?
+		interpolation_ = value("inter_cubic", true) ?
 			cv::INTER_CUBIC : cv::INTER_LINEAR;
 	});
 }
@@ -347,32 +347,32 @@ bool OpenCVDevice::get(ftl::rgbd::Frame &frame, cv::cuda::GpuMat &l_out, cv::cud
 
 	if (!camera_a_) return false;
 
-	std::future<bool> future_b;
+	//std::future<bool> future_b;
 	if (camera_b_) {
-		future_b = std::move(ftl::pool.push([this,&rfull,&r,c,&r_out,&r_hres_out,&stream](int id) {
+		//future_b = std::move(ftl::pool.push([this,&rfull,&r,c,&r_out,&r_hres_out,&stream](int id) {
 			if (!camera_b_->retrieve(frame_r_)) {
 				LOG(ERROR) << "Unable to read frame from camera B";
 				return false;
+			} else {
+				cv::cvtColor(frame_r_, rtmp2_, cv::COLOR_BGR2BGRA);
+
+				//if (stereo_) {
+					c->rectify(rtmp2_, rfull, Channel::Right);
+
+					if (hasHigherRes()) {
+						// TODO: Use threads?
+						cv::resize(rfull, r, r.size(), 0.0, 0.0, interpolation_);
+						r_hres_out = rfull;
+					}
+					else {
+						r_hres_out = Mat();
+					}
+				//}
+
+				r_out.upload(r, stream);
 			}
-
-			cv::cvtColor(frame_r_, rfull, cv::COLOR_BGR2BGRA);
-
-			if (stereo_) {
-				c->rectify(rfull, Channel::Right);
-
-				if (hasHigherRes()) {
-					// TODO: Use threads?
-					cv::resize(rfull, r, r.size(), 0.0, 0.0, interpolation_);
-					r_hres_out = rfull;
-				}
-				else {
-					r_hres_out = Mat();
-				}
-			}
-
-			r_out.upload(r, stream);
-			return true;
-		}));
+			//return true;
+		//}));
 	}
 
 	if (camera_b_) {
@@ -394,18 +394,19 @@ bool OpenCVDevice::get(ftl::rgbd::Frame &frame, cv::cuda::GpuMat &l_out, cv::cud
 		}
 	}
 
-	cv::cvtColor(frame_l_, lfull, cv::COLOR_BGR2BGRA);
-
 	if (stereo_) {
+		cv::cvtColor(frame_l_, ltmp_, cv::COLOR_BGR2BGRA);
 		//FTL_Profile("Rectification", 0.01);
 		//c->rectifyStereo(lfull, rfull);
-		c->rectify(lfull, Channel::Left);
+		c->rectify(ltmp_, lfull, Channel::Left);
 
 		// Need to resize
 		//if (hasHigherRes()) {
 			// TODO: Use threads?
 		//	cv::resize(rfull, r, r.size(), 0.0, 0.0, interpolation_);
 		//}
+	} else {
+		cv::cvtColor(frame_l_, lfull, cv::COLOR_BGR2BGRA);
 	}
 
 	if (hasHigherRes()) {
@@ -430,10 +431,10 @@ bool OpenCVDevice::get(ftl::rgbd::Frame &frame, cv::cuda::GpuMat &l_out, cv::cud
 		cv::imencode(".jpg", thumb, thumbdata, params);
 	}
 
-	if (camera_b_) {
+	//if (camera_b_) {
 		//FTL_Profile("WaitCamB", 0.05);
-		future_b.wait();
-	}
+		//future_b.wait();
+	//}
 
 	return true;
 }
diff --git a/components/rgbd-sources/src/sources/stereovideo/opencv.hpp b/components/rgbd-sources/src/sources/stereovideo/opencv.hpp
index acd263a97..37389e63f 100644
--- a/components/rgbd-sources/src/sources/stereovideo/opencv.hpp
+++ b/components/rgbd-sources/src/sources/stereovideo/opencv.hpp
@@ -59,6 +59,8 @@ class OpenCVDevice : public ftl::rgbd::detail::Device {
 	cv::cuda::HostMem right_hm_;
 	cv::cuda::HostMem hres_hm_;
 	cv::Mat rtmp_;
+	cv::Mat rtmp2_;
+	cv::Mat ltmp_;
 
 	cv::Mat frame_l_;
 	cv::Mat frame_r_;
diff --git a/components/rgbd-sources/src/sources/stereovideo/pylon.cpp b/components/rgbd-sources/src/sources/stereovideo/pylon.cpp
index 9a6fc73b9..57fea9555 100644
--- a/components/rgbd-sources/src/sources/stereovideo/pylon.cpp
+++ b/components/rgbd-sources/src/sources/stereovideo/pylon.cpp
@@ -104,6 +104,7 @@ PylonDevice::PylonDevice(nlohmann::json &config)
 	left_hm_ = cv::cuda::HostMem(height_, width_, CV_8UC4);
 	right_hm_ = cv::cuda::HostMem(height_, width_, CV_8UC4);
 	hres_hm_ = cv::cuda::HostMem(fullheight_, fullwidth_, CV_8UC4);
+	rtmp2_.create(fullheight_, fullwidth_, CV_8UC4);
 
 	on("exposure", [this]() {
 		if (lcam_->GetDeviceInfo().GetModelName() != "Emulation") {
@@ -116,9 +117,9 @@ PylonDevice::PylonDevice(nlohmann::json &config)
 
 	on("buffer_size", buffer_size_, 1);
 
-	interpolation_ = value("inter_cubic", false) ? cv::INTER_CUBIC : cv::INTER_LINEAR;
+	interpolation_ = value("inter_cubic", true) ? cv::INTER_CUBIC : cv::INTER_LINEAR;
 	on("inter_cubic", [this](){
-		interpolation_ = value("inter_cubic", false) ?
+		interpolation_ = value("inter_cubic", true) ?
 			cv::INTER_CUBIC : cv::INTER_LINEAR;
 	});
 }
@@ -282,47 +283,49 @@ bool PylonDevice::get(ftl::rgbd::Frame &frame, cv::cuda::GpuMat &l_out, cv::cuda
 
 	try {
 		FTL_Profile("Frame Retrieve", 0.005);
-		std::future<bool> future_b;
+		//std::future<bool> future_b;
+		bool res_r = false;
 		if (rcam_) {
-			future_b = std::move(ftl::pool.push([this,&rfull,&r,&l,c,&r_out,&h_r,&stream](int id) {
+			//future_b = std::move(ftl::pool.push([this,&rfull,&r,&l,c,&r_out,&h_r,&stream](int id) {
 				Pylon::CGrabResultPtr result_right;
 
-				if (!_retrieveFrames(result_right, rcam_)) return false;
+				if (_retrieveFrames(result_right, rcam_)) {
 
-				cv::Mat wrap_right(
-				result_right->GetHeight(),
-				result_right->GetWidth(),
-				CV_8UC1,
-				(uint8_t*)result_right->GetBuffer());
+					cv::Mat wrap_right(
+					result_right->GetHeight(),
+					result_right->GetWidth(),
+					CV_8UC1,
+					(uint8_t*)result_right->GetBuffer());
 
-				{
-					FTL_Profile("Bayer Colour (R)", 0.005);
-					cv::cvtColor(wrap_right, rfull, cv::COLOR_BayerRG2BGRA);
-				}
-
-				if (isStereo()) {
-					FTL_Profile("Rectify and Resize (R)", 0.005);
-					c->rectify(rfull, Channel::Right);
-
-					if (hasHigherRes()) {
-						cv::resize(rfull, r, r.size(), 0.0, 0.0, interpolation_);
-						h_r = rfull;
+					{
+						FTL_Profile("Bayer Colour (R)", 0.005);
+						cv::cvtColor(wrap_right, rtmp2_, cv::COLOR_BayerRG2BGRA);
 					}
-					else {
-						h_r = Mat();
-					}
-				}
 
-				r_out.upload(r, stream);
-				return true;
-			}));
+					//if (isStereo()) {
+						FTL_Profile("Rectify and Resize (R)", 0.005);
+						c->rectify(rtmp2_, rfull, Channel::Right);
+
+						if (hasHigherRes()) {
+							cv::resize(rfull, r, r.size(), 0.0, 0.0, interpolation_);
+							h_r = rfull;
+						}
+						else {
+							h_r = Mat();
+						}
+					//}
+
+					r_out.upload(r, stream);
+					res_r = true;
+				}
+			//}));
 		}
 
 		Pylon::CGrabResultPtr result_left;
 
 		if (!_retrieveFrames(result_left, lcam_)) {
 			if (rcam_) {
-				future_b.wait();
+				//future_b.wait();
 			}
 			return false;
 		}
@@ -335,13 +338,14 @@ bool PylonDevice::get(ftl::rgbd::Frame &frame, cv::cuda::GpuMat &l_out, cv::cuda
 
 		{
 			FTL_Profile("Bayer Colour (L)", 0.005);
-			cv::cvtColor(wrap_left, lfull, cv::COLOR_BayerRG2BGRA);
+			if (isStereo()) cv::cvtColor(wrap_left, ltmp_, cv::COLOR_BayerRG2BGRA);
+			else cv::cvtColor(wrap_left, lfull, cv::COLOR_BayerRG2BGRA);
 		}
 
 		{
 			FTL_Profile("Rectify and Resize (L)", 0.005);
 			if (isStereo()) {
-				c->rectify(lfull, Channel::Left);
+				c->rectify(ltmp_, lfull, Channel::Left);
 			}
 
 			if (hasHigherRes()) {
@@ -355,8 +359,9 @@ bool PylonDevice::get(ftl::rgbd::Frame &frame, cv::cuda::GpuMat &l_out, cv::cuda
 		l_out.upload(l, stream);
 
 		if (rcam_) {
-			future_b.wait();
-			if (!future_b.get()) return false;
+			//future_b.wait();
+			//if (!future_b.get()) return false;
+			if (!res_r) return false;
 		}
 
 	} catch (const GenericException &e) {
diff --git a/components/rgbd-sources/src/sources/stereovideo/pylon.hpp b/components/rgbd-sources/src/sources/stereovideo/pylon.hpp
index 691c54d52..707e8f437 100644
--- a/components/rgbd-sources/src/sources/stereovideo/pylon.hpp
+++ b/components/rgbd-sources/src/sources/stereovideo/pylon.hpp
@@ -57,6 +57,8 @@ class PylonDevice : public ftl::rgbd::detail::Device {
 	cv::cuda::HostMem right_hm_;
 	cv::cuda::HostMem hres_hm_;
 	cv::Mat rtmp_;
+	cv::Mat rtmp2_;
+	cv::Mat ltmp_;
 	int interpolation_;
 
 	void _configureCamera(Pylon::CBaslerUniversalInstantCamera *cam);
diff --git a/components/rgbd-sources/src/sources/stereovideo/rectification.cpp b/components/rgbd-sources/src/sources/stereovideo/rectification.cpp
index 213aada07..f755d5de8 100644
--- a/components/rgbd-sources/src/sources/stereovideo/rectification.cpp
+++ b/components/rgbd-sources/src/sources/stereovideo/rectification.cpp
@@ -109,22 +109,30 @@ void StereoRectification::calculateParameters_() {
 
 }
 
-void StereoRectification::rectify(cv::InputOutputArray im, Channel c) {
+void StereoRectification::rectify(cv::InputArray im, cv::OutputArray im_out, Channel c) {
 
-	if (!enabled_ || !valid_) { return; }
+	if (!enabled_ || !valid_) {
+		im.copyTo(im_out);
+		return;
+	}
 
 	if (im.size() != image_resolution_) {
 		throw ftl::exception("Input has wrong size");
 	}
+
 	if (im.isMat()) {
-		cv::Mat &in = im.getMatRef();
+		if (!im_out.isMat()) {
+			throw ftl::exception(	"Input and Output arrays must have same "
+									"type (cv::Mat expected)");
+		}
+		cv::Mat in = im.getMat();
+		cv::Mat &out = im_out.getMatRef(); // assumes valid size/type
+
 		if (c == Channel::Left) {
-			cv::remap(in, tmp_l_, map_l_.first, map_l_.second, interpolation_);
-			cv::swap(in, tmp_l_);
+			cv::remap(in, out, map_l_.first, map_l_.second, interpolation_);
 		}
 		else if (c == Channel::Right) {
-			cv::remap(in, tmp_r_, map_r_.first, map_r_.second, interpolation_);
-			cv::swap(in, tmp_r_);
+			cv::remap(in, out, map_r_.first, map_r_.second, interpolation_);
 		}
 		else {
 			throw ftl::exception("Bad channel for rectification");
diff --git a/components/rgbd-sources/src/sources/stereovideo/rectification.hpp b/components/rgbd-sources/src/sources/stereovideo/rectification.hpp
index 2e924b7f3..d326d0d16 100644
--- a/components/rgbd-sources/src/sources/stereovideo/rectification.hpp
+++ b/components/rgbd-sources/src/sources/stereovideo/rectification.hpp
@@ -41,7 +41,7 @@ public:
 	void setCalibration(ftl::calibration::CalibrationData &calib);
 	bool calibrated();
 
-	void rectify(cv::InputOutputArray im, ftl::codecs::Channel c);
+	void rectify(cv::InputArray im, cv::OutputArray out, ftl::codecs::Channel c);
 
 	/** Enable/disable rectification. TODO: move outside (to stereovideo)?
 	 */
diff --git a/components/streams/include/ftl/streams/sender.hpp b/components/streams/include/ftl/streams/sender.hpp
index 09ab6e513..66b782b0c 100644
--- a/components/streams/include/ftl/streams/sender.hpp
+++ b/components/streams/include/ftl/streams/sender.hpp
@@ -60,6 +60,8 @@ class Sender : public ftl::Configurable {
 	 */
 	void setActiveEncoders(uint32_t fsid, const std::unordered_set<ftl::codecs::Channel> &);
 
+	void resetEncoders(uint32_t fsid);
+
 	private:
 	ftl::stream::Stream *stream_;
 	int64_t timestamp_;
diff --git a/components/streams/src/sender.cpp b/components/streams/src/sender.cpp
index b658349af..9b7a60816 100644
--- a/components/streams/src/sender.cpp
+++ b/components/streams/src/sender.cpp
@@ -354,6 +354,24 @@ void Sender::post(ftl::data::Frame &frame, ftl::codecs::Channel c) {
 	//do_inject_ = false;
 }
 
+void Sender::resetEncoders(uint32_t fsid) {
+	LOG(INFO) << "Reset encoders for " << fsid;
+	for (auto &t : state_) {
+		if ((t.first >> 8) == static_cast<int>(fsid)) {
+			bool had_encoder = t.second.encoder[0] || t.second.encoder[1];
+			if (t.second.encoder[0]) {
+				// Remove unwanted encoder
+				ftl::codecs::free(t.second.encoder[0]);
+				t.second.encoder[0] = nullptr;
+			}
+			if (t.second.encoder[1]) {
+				ftl::codecs::free(t.second.encoder[1]);
+				t.second.encoder[1] = nullptr;
+			}
+			if (had_encoder) LOG(INFO) << "Removing encoder for channel " << (t.first & 0xFF);
+		}
+	}
+}
+
 void Sender::setActiveEncoders(uint32_t fsid, const std::unordered_set<Channel> &ec) {
 	for (auto &t : state_) {
 		if ((t.first >> 8) == static_cast<int>(fsid)) {
@@ -419,6 +437,9 @@ void Sender::_encodeVideoChannel(ftl::data::FrameSet &fs, Channel c, bool reset,
 			LOG(ERROR) << "No encoder";
 			return;
 		}
+		if (enc->device() == device_t::OpenCV) {
+			LOG(WARNING) << "Software encoder for " << ftl::codecs::name(c);
+		}
 
 		// Upload if in host memory
 		for (auto &f : fs.frames) {
-- 
GitLab