diff --git a/components/rgbd-sources/src/sources/stereovideo/opencv.cpp b/components/rgbd-sources/src/sources/stereovideo/opencv.cpp
index 5efd1ba3e6fd2b9f6f8545fef94e36f29b9bb7a6..0dea86f4c3f40590d8373c217ab149eafd4262d2 100644
--- a/components/rgbd-sources/src/sources/stereovideo/opencv.cpp
+++ b/components/rgbd-sources/src/sources/stereovideo/opencv.cpp
@@ -43,10 +43,10 @@ using std::chrono::high_resolution_clock;
 using std::chrono::milliseconds;
 using std::this_thread::sleep_for;
 
-OpenCVDevice::OpenCVDevice(nlohmann::json &config)
+OpenCVDevice::OpenCVDevice(nlohmann::json &config, bool stereo)
 		: ftl::rgbd::detail::Device(config), timestamp_(0.0) {
 
-	std::vector<ftl::rgbd::detail::DeviceDetails> devices = _selectDevices();
+	std::vector<ftl::rgbd::detail::DeviceDetails> devices = getDevices();
 
 	int device_left = 0;
 	int device_right = -1;
@@ -77,7 +77,7 @@ OpenCVDevice::OpenCVDevice(nlohmann::json &config)
 		device_right = value("device_right", (devices.size() > 1) ? devices[1].id : 1);
 	}
 
-	nostereo_ = value("nostereo", false);
+	nostereo_ = value("nostereo", !stereo);
 
 	if (device_left < 0) {
 		LOG(ERROR) << "No available cameras";
@@ -147,7 +147,11 @@ OpenCVDevice::~OpenCVDevice() {
 
 }
 
-std::vector<ftl::rgbd::detail::DeviceDetails> OpenCVDevice::_selectDevices() {
+static std::vector<ftl::rgbd::detail::DeviceDetails> opencv_devices;
+
+std::vector<ftl::rgbd::detail::DeviceDetails> OpenCVDevice::getDevices() {
+	if (opencv_devices.size() > 0) return opencv_devices;
+
 	std::vector<ftl::rgbd::detail::DeviceDetails> devices;
 
 #ifdef WIN32
@@ -291,6 +295,7 @@ std::vector<ftl::rgbd::detail::DeviceDetails> OpenCVDevice::_selectDevices() {
 
 #endif
 
+	opencv_devices = devices;
 	return devices;
 }
 
diff --git a/components/rgbd-sources/src/sources/stereovideo/opencv.hpp b/components/rgbd-sources/src/sources/stereovideo/opencv.hpp
index 9915cf701bb60402f150f39eecf65cab5f848913..5df8eea0f7561953989c157bbfcea9ca79ca602b 100644
--- a/components/rgbd-sources/src/sources/stereovideo/opencv.hpp
+++ b/components/rgbd-sources/src/sources/stereovideo/opencv.hpp
@@ -14,7 +14,7 @@ namespace detail {
 
 class OpenCVDevice : public ftl::rgbd::detail::Device {
 	public:
-	explicit OpenCVDevice(nlohmann::json &config);
+	explicit OpenCVDevice(nlohmann::json &config, bool stereo);
 	~OpenCVDevice();
 
 	static std::vector<DeviceDetails> listDevices();
@@ -32,6 +32,8 @@ class OpenCVDevice : public ftl::rgbd::detail::Device {
 
 	bool isStereo() const override;
 
+	static std::vector<DeviceDetails> getDevices();
+
 	private:
 	double timestamp_;
 	//double tps_;
@@ -55,8 +57,6 @@ class OpenCVDevice : public ftl::rgbd::detail::Device {
 
 	cv::Mat frame_l_;
 	cv::Mat frame_r_;
-
-	std::vector<DeviceDetails> _selectDevices();
 };
 
 }
diff --git a/components/rgbd-sources/src/sources/stereovideo/stereovideo.cpp b/components/rgbd-sources/src/sources/stereovideo/stereovideo.cpp
index c62c23448d839d8b95422a6d1bbc6f7b063468d9..f09820922ba4ff9b5a34c0b82b0e2b5575ce3304 100644
--- a/components/rgbd-sources/src/sources/stereovideo/stereovideo.cpp
+++ b/components/rgbd-sources/src/sources/stereovideo/stereovideo.cpp
@@ -40,6 +40,25 @@ using ftl::codecs::Channel;
 using std::string;
 using ftl::rgbd::Capability;
 
+
+static cv::Mat rmat(const cv::Vec3d &rvec) {
+	cv::Mat R(cv::Size(3, 3), CV_64FC1);
+	cv::Rodrigues(rvec, R);
+	return R;
+}
+
+static Eigen::Matrix4d matrix(const cv::Vec3d &rvec, const cv::Vec3d &tvec) {
+	cv::Mat M = cv::Mat::eye(cv::Size(4, 4), CV_64FC1);
+	rmat(rvec).copyTo(M(cv::Rect(0, 0, 3, 3)));
+	M.at<double>(0, 3) = tvec[0];
+	M.at<double>(1, 3) = tvec[1];
+	M.at<double>(2, 3) = tvec[2];
+	Eigen::Matrix4d r;
+	cv::cv2eigen(M,r);
+	return r;
+}
+
+
 ftl::rgbd::detail::Device::Device(nlohmann::json &config) : Configurable(config) {
 }
 
@@ -80,7 +99,9 @@ bool StereoVideoSource::supported(const std::string &dev) {
 	} else if (dev == "video") {
 		return true;
 	} else if (dev == "camera") {
-		return true; // TODO
+		return ftl::rgbd::detail::OpenCVDevice::getDevices().size() > 0;
+	} else if (dev == "stereo") {
+		return ftl::rgbd::detail::OpenCVDevice::getDevices().size() > 1;
 	}
 	return false;
 }
@@ -102,24 +123,15 @@ void StereoVideoSource::init(const string &file) {
 	} else if (uri.getPathSegment(0) == "opencv") {
 		// Use cameras
 		LOG(INFO) << "Using OpenCV cameras...";
-		lsrc_ = ftl::create<ftl::rgbd::detail::OpenCVDevice>(host_, "feed");
+		lsrc_ = ftl::create<ftl::rgbd::detail::OpenCVDevice>(host_, "feed", true);
 	} else if (uri.getPathSegment(0) == "video" || uri.getPathSegment(0) == "camera") {
-		// Now detect automatically which device to use
-		#ifdef HAVE_PYLON
-		auto pylon_devices = ftl::rgbd::detail::PylonDevice::listDevices();
-		if (pylon_devices.size() > 0) {
-			LOG(INFO) << "Using Pylon...";
-			lsrc_ = ftl::create<ftl::rgbd::detail::PylonDevice>(host_, "feed");
-		} else {
-			// Use cameras
-			LOG(INFO) << "Using OpenCV cameras...";
-			lsrc_ = ftl::create<ftl::rgbd::detail::OpenCVDevice>(host_, "feed");
-		}
-		#else
+		// Use cameras
+		LOG(INFO) << "Using OpenCV camera...";
+		lsrc_ = ftl::create<ftl::rgbd::detail::OpenCVDevice>(host_, "feed", false);
+	} else if (uri.getPathSegment(0) == "stereo") {
 		// Use cameras
 		LOG(INFO) << "Using OpenCV cameras...";
-		lsrc_ = ftl::create<ftl::rgbd::detail::OpenCVDevice>(host_, "feed");
-		#endif
+		lsrc_ = ftl::create<ftl::rgbd::detail::OpenCVDevice>(host_, "feed", true);
 	}
 }
 
@@ -165,11 +177,6 @@ ftl::rgbd::Camera StereoVideoSource::parameters(Channel chan) {
 }
 
 void StereoVideoSource::updateParameters(ftl::rgbd::Frame &frame) {
-	Eigen::Matrix4d pose;
-	cv::cv2eigen(rectification_->getPose(Channel::Left), pose);
-
-	frame.setPose() = pose;
-
 	auto &meta = frame.create<std::map<std::string,std::string>>(Channel::MetaData);
 	meta["name"] = host_->value("name", host_->getID());
 	meta["uri"] = host_->value("uri", std::string(""));
@@ -201,63 +208,96 @@ void StereoVideoSource::updateParameters(ftl::rgbd::Frame &frame) {
 		return true;
 	});
 
-	cv::Mat K;
+	if (lsrc_->isStereo()) {
+		Eigen::Matrix4d pose;
+		cv::cv2eigen(rectification_->getPose(Channel::Left), pose);
 
-	// same for left and right
-	float baseline = static_cast<float>(rectification_->baseline());
-	float doff = rectification_->doff(color_size_);
+		frame.setPose() = pose;
 
-	double d_resolution = this->host_->getConfig().value<double>("depth_resolution", 0.0);
-	float min_depth = this->host_->getConfig().value<double>("min_depth", 0.45);
-	float max_depth = this->host_->getConfig().value<double>("max_depth", 12.0);
+		cv::Mat K;
 
-	// left
+		// same for left and right
+		float baseline = static_cast<float>(rectification_->baseline());
+		float doff = rectification_->doff(color_size_);
 
-	K = rectification_->cameraMatrix(color_size_);
-	float fx = static_cast<float>(K.at<double>(0,0));
+		double d_resolution = this->host_->getConfig().value<double>("depth_resolution", 0.0);
+		float min_depth = this->host_->getConfig().value<double>("min_depth", 0.45);
+		float max_depth = this->host_->getConfig().value<double>("max_depth", (lsrc_->isStereo()) ? 12.0 : 1.0);
 
-	if (d_resolution > 0.0) {
-		// Learning OpenCV p. 442
-		// TODO: remove, should not be used here
-		float max_depth_new = sqrt(d_resolution * fx * baseline);
-		max_depth = (max_depth_new > max_depth) ? max_depth : max_depth_new;
-	}
+		// left
+
+		K = rectification_->cameraMatrix(color_size_);
+		float fx = static_cast<float>(K.at<double>(0,0));
 
-	frame.setLeft() = {
-		fx,
-		static_cast<float>(K.at<double>(1,1)), // Fy
-		static_cast<float>(-K.at<double>(0,2)), // Cx
-		static_cast<float>(-K.at<double>(1,2)), // Cy
-		(unsigned int) color_size_.width,
-		(unsigned int) color_size_.height,
-		min_depth,
-		max_depth,
-		baseline,
-		doff
-	};
-
-	host_->getConfig()["focal"] = params_.fx;
-	host_->getConfig()["centre_x"] = params_.cx;
-	host_->getConfig()["centre_y"] = params_.cy;
-	host_->getConfig()["baseline"] = params_.baseline;
-	host_->getConfig()["doffs"] = params_.doffs;
-
-	// right
-	// Not used anymore?
-
-	K = rectification_->cameraMatrix(color_size_, Channel::Right);
-	frame.setRight() = {
-		static_cast<float>(K.at<double>(0,0)), // Fx
-		static_cast<float>(K.at<double>(1,1)), // Fy
-		static_cast<float>(-K.at<double>(0,2)), // Cx
-		static_cast<float>(-K.at<double>(1,2)), // Cy
-		(unsigned int) color_size_.width,
-		(unsigned int) color_size_.height,
-		min_depth,
-		max_depth,
-		baseline,
-		doff
-	};
+		if (d_resolution > 0.0) {
+			// Learning OpenCV p. 442
+			// TODO: remove, should not be used here
+			float max_depth_new = sqrt(d_resolution * fx * baseline);
+			max_depth = (max_depth_new > max_depth) ? max_depth : max_depth_new;
+		}
+
+		frame.setLeft() = {
+			fx,
+			static_cast<float>(K.at<double>(1,1)), // Fy
+			static_cast<float>(-K.at<double>(0,2)), // Cx
+			static_cast<float>(-K.at<double>(1,2)), // Cy
+			(unsigned int) color_size_.width,
+			(unsigned int) color_size_.height,
+			min_depth,
+			max_depth,
+			baseline,
+			doff
+		};
+
+		host_->getConfig()["focal"] = params_.fx;
+		host_->getConfig()["centre_x"] = params_.cx;
+		host_->getConfig()["centre_y"] = params_.cy;
+		host_->getConfig()["baseline"] = params_.baseline;
+		host_->getConfig()["doffs"] = params_.doffs;
+
+		// right
+		// Not used anymore?
+
+		K = rectification_->cameraMatrix(color_size_, Channel::Right);
+		frame.setRight() = {
+			static_cast<float>(K.at<double>(0,0)), // Fx
+			static_cast<float>(K.at<double>(1,1)), // Fy
+			static_cast<float>(-K.at<double>(0,2)), // Cx
+			static_cast<float>(-K.at<double>(1,2)), // Cy
+			(unsigned int) color_size_.width,
+			(unsigned int) color_size_.height,
+			min_depth,
+			max_depth,
+			baseline,
+			doff
+		};
+	} else {
+		Eigen::Matrix4d pose;
+
+		params_.cx = -(color_size_.width / 2.0);
+		params_.cy = -(color_size_.height / 2.0);
+		params_.fx = 700.0;
+		params_.fy = 700.0;
+		params_.maxDepth = host_->value("size", 1.0f);
+		params_.minDepth = 0.0f;
+		params_.doffs = 0.0;
+		params_.baseline = 0.1f;
+
+		float offsetz = host_->value("offset_z",0.0f);
+		//state_.setPose(matrix(cv::Vec3d(0.0, 3.14159, 0.0), cv::Vec3d(0.0,0.0,params_.maxDepth+offsetz)));
+		pose = matrix(cv::Vec3d(0.0, 3.14159, 0.0), cv::Vec3d(0.0,0.0,params_.maxDepth+offsetz));
+
+		/*host_->on("size", [this](const ftl::config::Event &e) {
+			float offsetz = host_->value("offset_z",0.0f);
+			params_.maxDepth = host_->value("size", 1.0f);
+			//state_.getLeft() = params_;
+			pose_ = matrix(cv::Vec3d(0.0, 3.14159, 0.0), cv::Vec3d(0.0,0.0,params_.maxDepth+offsetz));
+			do_update_params_ = true;
+		});*/
+
+		frame.setLeft() = params_;
+		frame.setPose() = pose;
+	}
 }
 
 bool StereoVideoSource::capture(int64_t ts) {
diff --git a/components/streams/src/feed.cpp b/components/streams/src/feed.cpp
index df05a5e35d6a61d7e19b4463251aa57c87ba0b34..5be03ebfee7c189a9439e8c9f774dde733fb7e0b 100644
--- a/components/streams/src/feed.cpp
+++ b/components/streams/src/feed.cpp
@@ -376,6 +376,7 @@ std::vector<std::string> Feed::availableDeviceSources() {
 
 	if (ftl::rgbd::Source::supports("device:pylon")) results.emplace_back("device:pylon");
 	if (ftl::rgbd::Source::supports("device:camera")) results.emplace_back("device:camera");
+	if (ftl::rgbd::Source::supports("device:stereo")) results.emplace_back("device:stereo");
 	if (ftl::rgbd::Source::supports("device:screen")) results.emplace_back("device:screen");
 	if (ftl::rgbd::Source::supports("device:realsense")) results.emplace_back("device:realsense");
 	results.emplace_back("device:render");