Skip to content
Snippets Groups Projects
Commit 07da9d6e authored by Nicolas Pope's avatar Nicolas Pope
Browse files

Merge branch 'feature/209/camparams' into 'master'

Implements #209 Injecting intrinsics into stream with msgpack

Closes #209

See merge request nicolas.pope/ftl!142
parents 7aa9b4d9 56dd7929
No related branches found
No related tags found
1 merge request: !142 Implements #209 Injecting intrinsics into stream with msgpack
Pipeline #15920 passed
......@@ -349,18 +349,18 @@ bool ftl::gui::Screen::mouseButtonEvent(const nanogui::Vector2i &p, int button,
float sx = ((float)p[0] - positionAfterOffset[0]) / mScale;
float sy = ((float)p[1] - positionAfterOffset[1]) / mScale;
Eigen::Vector4f camPos;
//Eigen::Vector4f camPos;
try {
camPos = camera_->source()->point(sx,sy).cast<float>();
} catch(...) {
return true;
}
//try {
//camPos = camera_->source()->point(sx,sy).cast<float>();
//} catch(...) {
// return true;
//}
camPos *= -1.0f;
//camPos *= -1.0f;
//Eigen::Vector4f worldPos = camera_->source()->getPose().cast<float>() * camPos;
//lookPoint_ = Eigen::Vector3f(worldPos[0],worldPos[1],worldPos[2]);
LOG(INFO) << "Depth at click = " << -camPos[2];
//LOG(INFO) << "Depth at click = " << -camPos[2];
return true;
}
return false;
......
......@@ -174,7 +174,7 @@ static void run(ftl::Configurable *root) {
// Generate virtual camera render when requested by streamer
virt->onRender([splat,virt,&scene_B,align](ftl::rgbd::Frame &out) {
virt->setTimestamp(scene_B.timestamp);
//virt->setTimestamp(scene_B.timestamp);
// Do we need to convert Lab to BGR?
if (align->isLabColour()) {
for (auto &f : scene_B.frames) {
......
......@@ -22,7 +22,9 @@ enum struct codec_t : uint8_t {
JSON = 100, // A JSON string
CALIBRATION, // Camera parameters object
POSE, // 4x4 eigen matrix
RAW // Some unknown binary format (msgpack?)
MSGPACK,
STRING, // Null terminated string
RAW // Some unknown binary format
};
/**
......
......@@ -50,6 +50,16 @@ struct StreamPacket {
MSGPACK_DEFINE(timestamp, streamID, channel_count, channel);
};
/**
* Combine both packet types into a single packet unit. This pair is always
* saved or transmitted in a stream together.
*/
// Non-owning pairing of a stream packet header with its payload packet,
// saved or transmitted together as a single unit (see comment above).
// NOTE(review): both members are const references — a PacketPair must not
// outlive the StreamPacket/Packet it was constructed from (dangling risk).
struct PacketPair {
	// Binds (does not copy) the two packets; the caller retains ownership.
	PacketPair(const StreamPacket &s, const Packet &p) : spkt(s), pkt(p) {}
	const StreamPacket &spkt;  // header (timestamp, streamID, channel info)
	const Packet &pkt;         // payload (codec, block data)
};
}
}
......
......@@ -6,6 +6,10 @@
#include <cuda_runtime.h>
#include <ftl/cuda_util.hpp>
#ifndef __CUDACC__
#include <msgpack.hpp>
#endif
namespace ftl{
namespace rgbd {
......@@ -34,6 +38,10 @@ struct __align__(16) Camera {
* Convert screen plus depth into camera coordinates.
*/
__device__ float3 screenToCam(uint ux, uint uy, float depth) const;
#ifndef __CUDACC__
MSGPACK_DEFINE(fx,fy,cx,cy,width,height,minDepth,maxDepth,baseline,doffs);
#endif
};
};
......
......@@ -3,10 +3,10 @@
#include <utility>

#include <opencv2/opencv.hpp>

#include <ftl/codecs/packet.hpp>
#include <ftl/configuration.hpp>
#include <ftl/cuda_util.hpp>
#include <ftl/net/universe.hpp>
#include <ftl/rgbd/camera.hpp>
#include <ftl/rgbd/camera.hpp>
#include <ftl/rgbd/detail/source.hpp>
#include <ftl/threads.hpp>
#include <ftl/uri.hpp>
......@@ -46,9 +46,6 @@ class Source : public ftl::Configurable {
friend T *ftl::config::create(ftl::config::json_t &, ARGS ...);
friend class VirtualSource;
//template <typename T, typename... ARGS>
//friend T *ftl::config::create(ftl::Configurable *, const std::string &, ARGS ...);
// This class cannot be constructed directly, use ftl::create
Source()=delete;
......@@ -66,13 +63,16 @@ class Source : public ftl::Configurable {
/**
* Is this source valid and ready to grab?.
*/
bool isReady() { return (impl_) ? impl_->isReady() : params_.width != 0; }
bool isReady() { return (impl_) ? impl_->isReady() : false; }
/**
* Change the second channel source.
*/
bool setChannel(ftl::codecs::Channel c);
/**
* Get the channel allocated to the second source.
*/
ftl::codecs::Channel getChannel() const { return channel_; }
/**
......@@ -114,19 +114,7 @@ class Source : public ftl::Configurable {
* swap rather than a copy, so the parameters should be persistent buffers for
* best performance.
*/
void getFrames(cv::Mat &c, cv::Mat &d);
/**
* Get a copy of the colour frame only.
*/
void getColour(cv::Mat &c);
/**
* Get a copy of the depth frame only.
*/
void getDepth(cv::Mat &d);
int64_t timestamp() const { return timestamp_; }
[[deprecated]] void getFrames(cv::Mat &c, cv::Mat &d);
/**
* Directly upload source RGB and Depth to GPU.
......@@ -136,16 +124,20 @@ class Source : public ftl::Configurable {
void uploadColour(cv::cuda::GpuMat&);
void uploadDepth(cv::cuda::GpuMat&);
bool isVirtual() const { return impl_ == nullptr; }
//bool isVirtual() const { return impl_ == nullptr; }
/**
* Get the source's camera intrinsics.
*/
const Camera &parameters() const {
if (impl_) return impl_->params_;
else return params_;
else throw ftl::exception("Cannot get parameters for bad source");
}
/**
* Get camera intrinsics for another channel. For example the right camera
* in a stereo pair.
*/
const Camera parameters(ftl::codecs::Channel) const;
cv::Mat cameraMatrix() const;
......@@ -169,37 +161,16 @@ class Source : public ftl::Configurable {
capability_t getCapabilities() const;
/**
* Get a point in camera coordinates at specified pixel location.
*/
Eigen::Vector4d point(uint x, uint y);
/**
* Get a point in camera coordinates at specified pixel location, with a
* given depth value.
*/
Eigen::Vector4d point(uint x, uint y, double d);
Eigen::Vector2i point(const Eigen::Vector4d &p);
/**
* Force the internal implementation to be reconstructed.
*/
void reset();
void pause(bool);
bool isPaused() { return paused_; }
void bullet(bool);
bool isBullet() { return bullet_; }
bool thumbnail(cv::Mat &t);
ftl::net::Universe *getNet() const { return net_; }
std::string getURI() { return value("uri", std::string("")); }
void customImplementation(detail::Source *);
//void customImplementation(detail::Source *);
SHARED_MUTEX &mutex() { return mutex_; }
......@@ -207,6 +178,8 @@ class Source : public ftl::Configurable {
/**
* Set the callback that receives decoded frames as they are generated.
* There can be only a single such callback as the buffers can be swapped
* by the callback.
*/
void setCallback(std::function<void(int64_t, cv::Mat &, cv::Mat &)> cb);
void removeCallback() { callback_ = nullptr; }
......@@ -218,6 +191,9 @@ class Source : public ftl::Configurable {
*/
void addRawCallback(const std::function<void(ftl::rgbd::Source*, const ftl::codecs::StreamPacket &spkt, const ftl::codecs::Packet &pkt)> &);
/**
* THIS DOES NOT WORK CURRENTLY.
*/
void removeRawCallback(const std::function<void(ftl::rgbd::Source*, const ftl::codecs::StreamPacket &spkt, const ftl::codecs::Packet &pkt)> &);
/**
......@@ -225,20 +201,22 @@ class Source : public ftl::Configurable {
*/
void notifyRaw(const ftl::codecs::StreamPacket &spkt, const ftl::codecs::Packet &pkt);
// ==== Inject Data into stream ============================================
/**
* Generate a stream packet with arbitrary data. The data is packed using
* msgpack and is given the timestamp of the most recent frame.
*/
template <typename... ARGS>
void inject(ftl::codecs::Channel c, ARGS... args);
protected:
detail::Source *impl_;
cv::Mat rgb_;
cv::Mat depth_;
cv::Mat thumb_;
Camera params_; // TODO Find better solution
Eigen::Matrix4d pose_;
ftl::net::Universe *net_;
SHARED_MUTEX mutex_;
bool paused_;
bool bullet_;
ftl::codecs::Channel channel_;
cudaStream_t stream_;
int64_t timestamp_;
std::function<void(int64_t, cv::Mat &, cv::Mat &)> callback_;
std::list<std::function<void(ftl::rgbd::Source*, const ftl::codecs::StreamPacket &spkt, const ftl::codecs::Packet &pkt)>> rawcallbacks_;
......@@ -255,4 +233,40 @@ class Source : public ftl::Configurable {
}
}
/**
 * Minimal msgpack-compatible output sink that appends every written byte to
 * an externally owned std::vector<unsigned char>. Non-owning: the referenced
 * vector must outlive the buffer.
 */
class VectorBuffer {
	public:
	inline explicit VectorBuffer(std::vector<unsigned char> &v) : vector_(v) {}

	/// Append `size` bytes starting at `data` to the backing vector.
	inline void write(const char *data, std::size_t size) {
		const auto *first = reinterpret_cast<const unsigned char*>(data);
		vector_.insert(vector_.end(), first, first + size);
	}

	private:
	std::vector<unsigned char> &vector_;  // backing storage (not owned)
};
/**
 * Pack arbitrary values into a raw MSGPACK stream packet and deliver it to
 * all registered raw callbacks via notifyRaw(). The packet is stamped with
 * the implementation's most recent frame timestamp. Does nothing when no
 * implementation is attached.
 *
 * @param c    Channel the packed data is associated with.
 * @param args Values packed together (as a tuple) using msgpack.
 */
template <typename... ARGS>
void ftl::rgbd::Source::inject(ftl::codecs::Channel c, ARGS... args) {
	if (!impl_) return;

	// Move the by-value arguments into the tuple instead of copying them a
	// second time before packing.
	auto data = std::make_tuple(std::move(args)...);

	ftl::codecs::StreamPacket spkt;
	ftl::codecs::Packet pkt;

	spkt.timestamp = impl_->timestamp_;
	spkt.channel = c;
	spkt.channel_count = 0;  // raw injected packets carry no frame channels
	spkt.streamID = 0;

	pkt.codec = ftl::codecs::codec_t::MSGPACK;
	pkt.block_number = 0;
	pkt.block_total = 1;     // always a single block
	pkt.definition = ftl::codecs::definition_t::Any;
	pkt.flags = 0;

	// Serialize directly into the packet's byte vector.
	VectorBuffer buf(pkt.data);
	msgpack::pack(buf, data);

	notifyRaw(spkt, pkt);
}
#endif // _FTL_RGBD_SOURCE_HPP_
......@@ -13,8 +13,6 @@ class VirtualSource : public ftl::rgbd::Source {
void onRender(const std::function<void(ftl::rgbd::Frame &)> &);
void setTimestamp(int64_t ts) { timestamp_ = ts; }
/**
* Write frames into source buffers from an external renderer. Virtual
* sources do not have an internal generator of frames but instead have
......
......@@ -36,9 +36,8 @@ std::map<std::string, ftl::rgbd::Player*> Source::readers__;
Source::Source(ftl::config::json_t &cfg) : Configurable(cfg), pose_(Eigen::Matrix4d::Identity()), net_(nullptr) {
impl_ = nullptr;
params_ = {};
//params_ = {};
stream_ = 0;
timestamp_ = 0;
reset();
on("uri", [this](const ftl::config::Event &e) {
......@@ -49,9 +48,8 @@ Source::Source(ftl::config::json_t &cfg) : Configurable(cfg), pose_(Eigen::Matri
Source::Source(ftl::config::json_t &cfg, ftl::net::Universe *net) : Configurable(cfg), pose_(Eigen::Matrix4d::Identity()), net_(net) {
impl_ = nullptr;
params_ = {};
//params_ = {};
stream_ = 0;
timestamp_ = 0;
reset();
on("uri", [this](const ftl::config::Event &e) {
......@@ -69,11 +67,6 @@ cv::Mat Source::cameraMatrix() const {
return m;
}
/*void Source::customImplementation(ftl::rgbd::detail::Source *impl) {
if (impl_) delete impl_;
impl_ = impl;
}*/
ftl::rgbd::detail::Source *Source::_createImplementation() {
auto uristr = get<string>("uri");
if (!uristr) {
......@@ -178,7 +171,7 @@ ftl::rgbd::detail::Source *Source::_createDeviceImpl(const ftl::URI &uri) {
LOG(ERROR) << "You do not have 'librealsense2' installed";
#endif
} else {
params_.width = value("width", 1280);
/*params_.width = value("width", 1280);
params_.height = value("height", 720);
params_.fx = value("focal", 700.0f);
params_.fy = params_.fx;
......@@ -187,7 +180,7 @@ ftl::rgbd::detail::Source *Source::_createDeviceImpl(const ftl::URI &uri) {
params_.minDepth = value("minDepth", 0.1f);
params_.maxDepth = value("maxDepth", 20.0f);
params_.doffs = 0;
params_.baseline = value("baseline", 0.0f);
params_.baseline = value("baseline", 0.0f);*/
}
return nullptr;
}
......@@ -195,45 +188,12 @@ ftl::rgbd::detail::Source *Source::_createDeviceImpl(const ftl::URI &uri) {
/**
 * Copy the most recent colour and depth frames into the supplied matrices.
 * Marked [[deprecated]] in the header. Logs a warning if a frame callback is
 * also registered, since the two access modes conflict. The copies happen
 * under a shared (reader) lock on the source mutex.
 */
void Source::getFrames(cv::Mat &rgb, cv::Mat &depth) {
	if (bool(callback_)) LOG(WARNING) << "Cannot use getFrames and callback in source";
	SHARED_LOCK(mutex_,lk);
	rgb_.copyTo(rgb);
	depth_.copyTo(depth);
	// NOTE(review): the commented-out variants below look like earlier
	// aliasing / pointer-swap (zero-copy) experiments — confirm they are
	// dead before deleting.
	//rgb_.copyTo(rgb);
	//depth_.copyTo(depth);
	//rgb = rgb_;
	//depth = depth_;
	/*cv::Mat tmp;
	tmp = rgb;
	rgb = rgb_;
	rgb_ = tmp;
	tmp = depth;
	depth = depth_;
	depth_ = tmp;*/
}
/**
 * Back-project pixel (ux, uy) into a homogeneous camera-space point, using
 * the depth sampled from the current depth frame at that pixel.
 * NOTE(review): cx/cy are ADDED here — elsewhere in this change the centre
 * is stored negated (e.g. cx = -width/2), making this equivalent to the
 * usual (u - cx)/fx form; confirm that convention holds for all sources.
 * No bounds check on (ux, uy) — caller must pass an in-frame pixel.
 */
Eigen::Vector4d Source::point(uint ux, uint uy) {
	const auto &params = parameters();
	const double x = ((double)ux+params.cx) / params.fx;
	const double y = ((double)uy+params.cy) / params.fy;
	// Shared lock covers only the depth-buffer read.
	SHARED_LOCK(mutex_,lk);
	const double depth = depth_.at<float>(uy,ux);
	return Eigen::Vector4d(x*depth,y*depth,depth,1.0);
}
/**
 * Back-project pixel (ux, uy) at a caller-supplied depth d into a
 * homogeneous camera-space point (no depth-buffer access, no locking).
 */
Eigen::Vector4d Source::point(uint ux, uint uy, double d) {
	const auto &cam = parameters();
	const double px = (static_cast<double>(ux) + cam.cx) / cam.fx;
	const double py = (static_cast<double>(uy) + cam.cy) / cam.fy;
	return Eigen::Vector4d(px * d, py * d, d, 1.0);
}
/**
 * Project a homogeneous camera-space point back onto the image plane,
 * returning (truncated) integer pixel coordinates.
 */
Eigen::Vector2i Source::point(const Eigen::Vector4d &p) {
	const auto &cam = parameters();
	const double u = (p[0] / p[2]) * cam.fx - cam.cx;
	const double v = (p[1] / p[2]) * cam.fy - cam.cy;
	return Eigen::Vector2i(static_cast<int>(u), static_cast<int>(v));
}
void Source::setPose(const Eigen::Matrix4d &pose) {
pose_ = pose;
......@@ -273,54 +233,7 @@ bool Source::retrieve() {
bool Source::compute(int N, int B) {
UNIQUE_LOCK(mutex_,lk);
if (!impl_ && stream_ != 0) {
cudaSafeCall(cudaStreamSynchronize(stream_));
if (depth_.type() == CV_32SC1) depth_.convertTo(depth_, CV_32F, 1.0f / 1000.0f);
stream_ = 0;
return true;
} else if (impl_ && impl_->compute(N,B)) {
timestamp_ = impl_->timestamp_;
/*cv::Mat tmp;
rgb_.create(impl_->rgb_.size(), impl_->rgb_.type());
depth_.create(impl_->depth_.size(), impl_->depth_.type());
tmp = rgb_;
rgb_ = impl_->rgb_;
impl_->rgb_ = tmp;
tmp = depth_;
depth_ = impl_->depth_;
impl_->depth_ = tmp;*/
// TODO:(Nick) Reduce buffer copies
impl_->rgb_.copyTo(rgb_);
impl_->depth_.copyTo(depth_);
//rgb_ = impl_->rgb_;
//depth_ = impl_->depth_;
return true;
}
return false;
}
bool Source::thumbnail(cv::Mat &t) {
if (!impl_ && stream_ != 0) {
cudaSafeCall(cudaStreamSynchronize(stream_));
if (depth_.type() == CV_32SC1) depth_.convertTo(depth_, CV_32F, 1.0f / 1000.0f);
stream_ = 0;
return true;
} else if (impl_) {
UNIQUE_LOCK(mutex_,lk);
impl_->capture(0);
impl_->swap();
impl_->compute(1, 9);
impl_->rgb_.copyTo(rgb_);
impl_->depth_.copyTo(depth_);
}
if (!rgb_.empty()) {
SHARED_LOCK(mutex_,lk);
// Downsize and square the rgb_ image
cv::resize(rgb_, thumb_, cv::Size(320,180));
}
t = thumb_;
return !thumb_.empty();
return impl_ && impl_->compute(N,B);
}
bool Source::setChannel(ftl::codecs::Channel c) {
......@@ -340,7 +253,6 @@ void Source::setCallback(std::function<void(int64_t, cv::Mat &, cv::Mat &)> cb)
/**
 * Register a callback invoked for every raw stream packet (see notifyRaw).
 * Takes a unique (writer) lock because it mutates the callback list.
 */
void Source::addRawCallback(const std::function<void(ftl::rgbd::Source*, const ftl::codecs::StreamPacket &spkt, const ftl::codecs::Packet &pkt)> &f) {
	UNIQUE_LOCK(mutex_,lk);
	LOG(INFO) << "ADD RAW";  // NOTE(review): debug logging left in — consider removing
	rawcallbacks_.push_back(f);
}
......
......@@ -90,7 +90,7 @@ void NetFrameQueue::freeFrame(NetFrame &f) {
// ===== NetSource =============================================================
bool NetSource::_getCalibration(Universe &net, const UUID &peer, const string &src, ftl::rgbd::Camera &p, ftl::codecs::Channel chan) {
/*bool NetSource::_getCalibration(Universe &net, const UUID &peer, const string &src, ftl::rgbd::Camera &p, ftl::codecs::Channel chan) {
try {
while(true) {
auto [cap,buf] = net.call<tuple<unsigned int,vector<unsigned char>>>(peer_, "source_details", src, chan);
......@@ -137,7 +137,7 @@ bool NetSource::_getCalibration(Universe &net, const UUID &peer, const string &s
LOG(ERROR) << "Unknown exception";
return false;
}
}
}*/
NetSource::NetSource(ftl::rgbd::Source *host)
: ftl::rgbd::detail::Source(host), active_(false), minB_(9), maxN_(1), adaptive_(0), queue_(3) {
......@@ -147,6 +147,7 @@ NetSource::NetSource(ftl::rgbd::Source *host)
default_quality_ = host->value("quality", 0);
last_bitrate_ = 0;
params_right_.width = 0;
has_calibration_ = false;
decoder_c1_ = nullptr;
decoder_c2_ = nullptr;
......@@ -288,6 +289,35 @@ void NetSource::_recvPacket(short ttimeoff, const ftl::codecs::StreamPacket &spk
const ftl::codecs::Channel rchan = spkt.channel;
const int channum = (rchan == Channel::Colour) ? 0 : 1;
if (rchan == Channel::Calibration) {
std::tuple<ftl::rgbd::Camera, ftl::codecs::Channel, ftl::rgbd::capability_t> params;
auto unpacked = msgpack::unpack((const char*)pkt.data.data(), pkt.data.size());
unpacked.get().convert(params);
if (std::get<1>(params) == Channel::Left) {
params_ = std::get<0>(params);
capabilities_ = std::get<2>(params);
has_calibration_ = true;
//rgb_ = cv::Mat(cv::Size(params_.width, params_.height), CV_8UC3, cv::Scalar(0,0,0));
//depth_ = cv::Mat(cv::Size(params_.width, params_.height), CV_32FC1, 0.0f);
LOG(INFO) << "Got Calibration channel: " << params_.width << "x" << params_.height;
} else {
params_right_ = std::get<0>(params);
}
return;
} else if (rchan == Channel::Pose) {
LOG(INFO) << "Got POSE channel";
return;
}
if (!has_calibration_) {
LOG(WARNING) << "Missing calibration, skipping frame";
return;
}
NetFrame &frame = queue_.getFrame(spkt.timestamp, cv::Size(params_.width, params_.height), CV_8UC3, (isFloatChannel(chan) ? CV_32FC1 : CV_8UC3));
// Update frame statistics
......@@ -389,12 +419,12 @@ void NetSource::setPose(const Eigen::Matrix4d &pose) {
ftl::rgbd::Camera NetSource::parameters(ftl::codecs::Channel chan) {
if (chan == ftl::codecs::Channel::Right) {
if (params_right_.width == 0) {
/*if (params_right_.width == 0) {
auto uri = host_->get<string>("uri");
if (!uri) return params_;
_getCalibration(*host_->getNet(), peer_, *uri, params_right_, chan);
}
}*/
return params_right_;
} else {
return params_;
......@@ -419,8 +449,8 @@ void NetSource::_updateURI() {
}
peer_ = *p;
has_calibration_ = _getCalibration(*host_->getNet(), peer_, *uri, params_, ftl::codecs::Channel::Left);
_getCalibration(*host_->getNet(), peer_, *uri, params_right_, ftl::codecs::Channel::Right);
//has_calibration_ = _getCalibration(*host_->getNet(), peer_, *uri, params_, ftl::codecs::Channel::Left);
//_getCalibration(*host_->getNet(), peer_, *uri, params_right_, ftl::codecs::Channel::Right);
host_->getNet()->bind(*uri, [this](short ttimeoff, const ftl::codecs::StreamPacket &spkt, const ftl::codecs::Packet &pkt) {
//if (chunk == -1) {
......@@ -437,8 +467,6 @@ void NetSource::_updateURI() {
N_ = 0;
rgb_ = cv::Mat(cv::Size(params_.width, params_.height), CV_8UC3, cv::Scalar(0,0,0));
depth_ = cv::Mat(cv::Size(params_.width, params_.height), CV_32FC1, 0.0f);
//d_rgb_ = cv::Mat(cv::Size(params_.width, params_.height), CV_8UC3, cv::Scalar(0,0,0));
//d_depth_ = cv::Mat(cv::Size(params_.width, params_.height), CV_32FC1, 0.0f);
......@@ -491,5 +519,5 @@ bool NetSource::compute(int n, int b) {
}
/**
 * A network source is ready once calibration data has been received and a
 * colour buffer has been allocated.
 */
bool NetSource::isReady() {
	// Removed an unreachable duplicate `return has_calibration_;` that
	// followed this statement (dead code — likely merge/diff residue).
	return has_calibration_ && !rgb_.empty();
}
......@@ -7,8 +7,17 @@ using ftl::rgbd::Camera;
class VirtualImpl : public ftl::rgbd::detail::Source {
public:
explicit VirtualImpl(ftl::rgbd::Source *host, const ftl::rgbd::Camera &params) : ftl::rgbd::detail::Source(host) {
params_ = params;
explicit VirtualImpl(ftl::rgbd::Source *host) : ftl::rgbd::detail::Source(host) {
params_.width = host->value("width", 1280);
params_.height = host->value("height", 720);
params_.fx = host->value("focal", 700.0f);
params_.fy = params_.fx;
params_.cx = -(double)params_.width / 2.0;
params_.cy = -(double)params_.height / 2.0;
params_.minDepth = host->value("minDepth", 0.1f);
params_.maxDepth = host->value("maxDepth", 20.0f);
params_.doffs = 0;
params_.baseline = host->value("baseline", 0.0f);
params_right_.width = host->value("width", 1280);
params_right_.height = host->value("height", 720);
......@@ -113,20 +122,7 @@ class VirtualImpl : public ftl::rgbd::detail::Source {
};
VirtualSource::VirtualSource(ftl::config::json_t &cfg) : Source(cfg) {
auto params = params_;
params_.width = value("width", 1280);
params_.height = value("height", 720);
params_.fx = value("focal", 700.0f);
params_.fy = params_.fx;
params_.cx = value("centre_x", -(double)params_.width / 2.0);
params_.cy = value("centre_y", -(double)params_.height / 2.0);
params_.minDepth = value("minDepth", 0.1f);
params_.maxDepth = value("maxDepth", 20.0f);
params_.doffs = 0;
params_.baseline = value("baseline", 0.0f);
impl_ = new VirtualImpl(this, params);
impl_ = new VirtualImpl(this);
}
VirtualSource::~VirtualSource() {
......
......@@ -187,6 +187,11 @@ void Streamer::add(Source *src) {
sources_[src->getID()] = s;
group_.addSource(src);
src->addRawCallback([this,s](Source *src, const ftl::codecs::StreamPacket &spkt, const ftl::codecs::Packet &pkt) {
//LOG(INFO) << "RAW CALLBACK";
_transmitPacket(s, spkt, pkt, Quality::Any);
});
}
LOG(INFO) << "Streaming: " << src->getID();
......@@ -322,6 +327,11 @@ void Streamer::_addClient(const string &source, int N, int rate, const ftl::UUID
}
++s->clientCount;
// Finally, inject calibration and config data
s->src->inject(Channel::Calibration, s->src->parameters(Channel::Left), Channel::Left, s->src->getCapabilities());
s->src->inject(Channel::Calibration, s->src->parameters(Channel::Right), Channel::Right, s->src->getCapabilities());
//s->src->inject(Channel::Pose, s->src->getPose());
}
void Streamer::remove(Source *) {
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment