Add configuration to stream, H264 support, codec selection

Merged: Nicolas Pope requested to merge feature/configstream into master
2 files  + 21  − 105
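The title of this MR covers codec selection driven by stream configuration; that part of the change presumably lives in the other changed file and is not visible in the diff below. Purely as a rough, hypothetical sketch of the idea (the VideoCodec enum and selectCodec() helper are illustrative assumptions, not code from this MR), a "codec" configuration entry could be mapped onto an encoder choice like this:

#include <string>

// Hypothetical sketch only: map a configuration string onto a codec choice,
// defaulting to HEVC when the option is missing or unrecognised.
enum class VideoCodec { HEVC, H264, JPG };

static VideoCodec selectCodec(const std::string &name) {
	if (name == "H264") return VideoCodec::H264;
	if (name == "JPG") return VideoCodec::JPG;
	return VideoCodec::HEVC;
}

A caller would then read the option from its configuration, e.g. something along the lines of selectCodec(config["codec"]), before deciding how to encode outgoing packets.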
@@ -90,55 +90,6 @@ void NetFrameQueue::freeFrame(NetFrame &f) {
// ===== NetSource =============================================================
/*bool NetSource::_getCalibration(Universe &net, const UUID &peer, const string &src, ftl::rgbd::Camera &p, ftl::codecs::Channel chan) {
	try {
		while(true) {
			auto [cap,buf] = net.call<tuple<unsigned int,vector<unsigned char>>>(peer_, "source_details", src, chan);
			capabilities_ = cap;
			if (buf.size() > 0) {
				memcpy((char*)&p, buf.data(), buf.size());
				if (sizeof(p) != buf.size()) {
					LOG(ERROR) << "Corrupted calibration";
					return false;
				}
				LOG(INFO) << "Calibration received: " << p.cx << ", " << p.cy << ", " << p.fx << ", " << p.fy;
				if (chan == Channel::Left) {
					// Put calibration into config manually
					host_->getConfig()["focal"] = p.fx;
					host_->getConfig()["centre_x"] = p.cx;
					host_->getConfig()["centre_y"] = p.cy;
					host_->getConfig()["baseline"] = p.baseline;
					host_->getConfig()["doffs"] = p.doffs;
				} else {
					host_->getConfig()["focal_right"] = p.fx;
					host_->getConfig()["centre_x_right"] = p.cx;
					host_->getConfig()["centre_y_right"] = p.cy;
					host_->getConfig()["baseline_right"] = p.baseline;
					host_->getConfig()["doffs_right"] = p.doffs;
				}
				return true;
			} else {
				LOG(INFO) << "Could not get calibration, retrying";
				sleep_for(milliseconds(500));
			}
		}
	} catch (const std::exception& ex) {
		LOG(ERROR) << "Exception: " << ex.what();
		return false;
	} catch (...) {
		LOG(ERROR) << "Unknown exception";
		return false;
	}
}*/
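The removed helper above fetched calibration over the "source_details" RPC and copied the buffer into the Camera struct before validating its size. Purely as a sketch (reusing ftl::rgbd::Camera and LOG from the surrounding code, with unpackCalibration() as a hypothetical name), the safer ordering would check the size first:

#include <cstring>
#include <vector>

// Validate the blob size before copying, so a short or oversized buffer can
// never write past the calibration struct.
static bool unpackCalibration(const std::vector<unsigned char> &buf, ftl::rgbd::Camera &p) {
	if (buf.size() != sizeof(p)) {
		LOG(ERROR) << "Corrupted calibration";
		return false;
	}
	std::memcpy(&p, buf.data(), buf.size());
	return true;
}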
NetSource::NetSource(ftl::rgbd::Source *host)
	: ftl::rgbd::detail::Source(host), active_(false), minB_(9), maxN_(1), adaptive_(0), queue_(3) {
@@ -306,10 +257,6 @@ void NetSource::_recvPacket(short ttimeoff, const ftl::codecs::StreamPacket &spk
		params_ = std::get<0>(params);
		capabilities_ = std::get<2>(params);
		has_calibration_ = true;
		//rgb_ = cv::Mat(cv::Size(params_.width, params_.height), CV_8UC3, cv::Scalar(0,0,0));
		//depth_ = cv::Mat(cv::Size(params_.width, params_.height), CV_32FC1, 0.0f);
		LOG(INFO) << "Got Calibration channel: " << params_.width << "x" << params_.height;
	} else {
		params_right_ = std::get<0>(params);
@@ -380,36 +327,26 @@ void NetSource::_recvPacket(short ttimeoff, const ftl::codecs::StreamPacket &spk
	}
	// Last chunk of both channels now received
	if (frame.channel_count == spkt.channel_count &&
			frame.chunk_count[0] == frame.chunk_total[0] &&
			frame.chunk_count[1] == frame.chunk_total[1]) {
		UNIQUE_LOCK(frame.mtx, flk);
		if (frame.channel_count == spkt.channel_count) {
			_completeFrame(frame, now-(spkt.timestamp+frame.tx_latency));
		}
	}
	if (frame.timestamp >= 0 && frame.chunk_count[0] == frame.chunk_total[0] && frame.chunk_count[1] == frame.chunk_total[1]) {
		timestamp_ = frame.timestamp;
		frame.tx_latency = now-(spkt.timestamp+frame.tx_latency);
		adaptive_ = abr_.selectBitrate(frame);
		//LOG(INFO) << "Frame finished: " << frame.timestamp;
		host_->notify(frame.timestamp, frame.channel1, frame.channel2);
		/*auto cb = host_->callback();
		if (cb) {
			try {
				cb(frame.timestamp, frame.channel1, frame.channel2);
			} catch (...) {
				LOG(ERROR) << "Exception in net frame callback";
			}
		} else {
			LOG(ERROR) << "NO FRAME CALLBACK";
		}*/
		queue_.freeFrame(frame);
		{
			// Decrement expected frame counter
			N_--;
		}
	}
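// Note: the inline completion logic above is what this change factors out
// into _completeFrame() below.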
void NetSource::_completeFrame(NetFrame &frame, int64_t latency) {
	UNIQUE_LOCK(frame.mtx, flk);
	// Frame must not have already been freed.
	if (frame.timestamp >= 0) {
		timestamp_ = frame.timestamp;
		frame.tx_latency = latency;
		// Note: Not used currently
		adaptive_ = abr_.selectBitrate(frame);
		host_->notify(frame.timestamp, frame.channel1, frame.channel2);
		queue_.freeFrame(frame);
		N_--;
	}
}
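Frame completion is now decided in two steps: the chunk-count test in _recvPacket() and the timestamp guard inside _completeFrame(). As a condensed restatement of the first test (a sketch only, using the NetFrame fields exactly as they appear in the diff):

// True once the expected channels are present and every chunk of both
// channels has been received.
static bool allChunksReceived(const NetFrame &frame, int expected_channels) {
	return frame.channel_count == expected_channels &&
	       frame.chunk_count[0] == frame.chunk_total[0] &&
	       frame.chunk_count[1] == frame.chunk_total[1];
}

Because _completeFrame() takes frame.mtx and only proceeds while frame.timestamp >= 0, a frame that has already been completed and freed (its timestamp presumably reset by freeFrame()) is skipped rather than notified twice.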
@@ -429,12 +366,6 @@ void NetSource::setPose(const Eigen::Matrix4d &pose) {
ftl::rgbd::Camera NetSource::parameters(ftl::codecs::Channel chan) {
	if (chan == ftl::codecs::Channel::Right) {
		/*if (params_right_.width == 0) {
			auto uri = host_->get<string>("uri");
			if (!uri) return params_;
			_getCalibration(*host_->getNet(), peer_, *uri, params_right_, chan);
		}*/
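		// params_right_ is now expected to arrive in-band (see the Calibration
		// channel handling in _recvPacket), so the lazy RPC fetch that was
		// commented out above is no longer needed.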
		return params_right_;
	} else {
		return params_;
@@ -459,27 +390,11 @@ void NetSource::_updateURI() {
	}
	peer_ = *p;
	//has_calibration_ = _getCalibration(*host_->getNet(), peer_, *uri, params_, ftl::codecs::Channel::Left);
	//_getCalibration(*host_->getNet(), peer_, *uri, params_right_, ftl::codecs::Channel::Right);
	host_->getNet()->bind(*uri, [this](short ttimeoff, const ftl::codecs::StreamPacket &spkt, const ftl::codecs::Packet &pkt) {
		//if (chunk == -1) {
		//#ifdef HAVE_NVPIPE
		//_recvVideo(frame, ttimeoff, bitrate, jpg, d);
		//#else
		//LOG(ERROR) << "Cannot receive HEVC, no NvPipe support";
		//#endif
		//} else {
		//_recvChunk(frame, ttimeoff, bitrate, chunk, jpg, d);
		_recvPacket(ttimeoff, spkt, pkt);
		//}
		_recvPacket(ttimeoff, spkt, pkt);
	});
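	// All packets for this source now arrive through this single handler; the
	// old HEVC (_recvVideo) and chunked-JPEG (_recvChunk) entry points
	// referenced in the commented-out lines above are no longer used.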
	N_ = 0;
	//d_rgb_ = cv::Mat(cv::Size(params_.width, params_.height), CV_8UC3, cv::Scalar(0,0,0));
	//d_depth_ = cv::Mat(cv::Size(params_.width, params_.height), CV_32FC1, 0.0f);
	uri_ = *uri;
	active_ = true;
} else {