Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision

Target

Select target project
  • nicolaspope/ftl
1 result
Select Git revision
Show changes
Showing
with 3664 additions and 57 deletions
#include <openvr/openvr.h>
#include <Eigen/Eigen>
#include <openvr/openvr.h>
/* @brief Calculate (pinhole camera) intrinsic matrix from OpenVR parameters
* @param Tangent of left half angle (negative) from center view axis
* @param Tangent of right half angle from center view axis
* @param Tangent of top half angle (negative) from center view axis
* @param Tangent of bottom half angle from center view axis
* @param Image width
* @param Image height
*
* Parameters are provided by IVRSystem::GetProjectionRaw and
* IVRSystem::GetRecommendedRenderTargetSize.
*
* tanx1 = x1 / fx (1)
* tanx2 = x2 / fx (2)
* x1 + x2 = size_x (3)
*
* :. fx = size_x / (-tanx1 + tanx2)
*
* fy can be calculated in same way
*/
Eigen::Matrix3d getCameraMatrix(const double tanx1,
const double tanx2,
const double tany1,
const double tany2,
const double size_x,
const double size_y);
/*
* @brief Same as above, but uses given IVRSystem and eye.
*/
Eigen::Matrix3d getCameraMatrix(vr::IVRSystem *vr, const vr::Hmd_Eye &eye);
/**
 * Convert an OpenVR 3x4 row-major pose matrix into a homogeneous 4x4
 * Eigen matrix. The missing bottom row becomes (0, 0, 0, 1).
 */
static inline Eigen::Matrix4d ConvertSteamVRMatrixToMatrix4( const vr::HmdMatrix34_t &matPose )
{
	// Start from identity so the bottom row is already (0, 0, 0, 1),
	// then copy the three rows provided by OpenVR.
	Eigen::Matrix4d result = Eigen::Matrix4d::Identity();
	for (int row = 0; row < 3; ++row) {
		for (int col = 0; col < 4; ++col) {
			result(row, col) = matPose.m[row][col];
		}
	}
	return result;
}
/**
 * Convert an OpenVR 4x4 row-major matrix into an Eigen matrix,
 * copying all sixteen elements.
 */
static inline Eigen::Matrix4d ConvertSteamVRMatrixToMatrix4( const vr::HmdMatrix44_t &matPose )
{
	Eigen::Matrix4d result;
	for (int row = 0; row < 4; ++row) {
		for (int col = 0; col < 4; ++col) {
			result(row, col) = matPose.m[row][col];
		}
	}
	return result;
}
\ No newline at end of file
......@@ -2,23 +2,55 @@
#include_directories(${PROJECT_SOURCE_DIR}/reconstruct/include)
#include_directories(${PROJECT_BINARY_DIR})
set(GUISRC
# Append src/modules/<NAME>.cpp and src/views/<NAME>.cpp to GUI2SRC,
# skipping whichever of the two files does not exist on disk.
function(add_gui_module NAME)
	foreach(SUBDIR modules views)
		get_filename_component(FULLPATH "src/${SUBDIR}/${NAME}.cpp" ABSOLUTE)
		if (EXISTS ${FULLPATH})
			list(APPEND GUI2SRC "src/${SUBDIR}/${NAME}.cpp")
		endif()
	endforeach()
	# Propagate the updated source list back to the caller's scope.
	set(GUI2SRC ${GUI2SRC} PARENT_SCOPE)
endfunction()
set(GUI2SRC
src/main.cpp
#src/ctrl_window.cpp
src/src_window.cpp
src/config_window.cpp
src/pose_window.cpp
src/inputoutput.cpp
src/screen.cpp
src/gltexture.cpp
src/camera.cpp
src/media_panel.cpp
src/thumbview.cpp
src/record_window.cpp
src/frameset_mgr.cpp
src/view.cpp
src/widgets/soundctrl.cpp
src/widgets/popupbutton.cpp
src/widgets/imageview.cpp
src/widgets/combobox.cpp
src/widgets/leftbutton.cpp
)
add_gui_module("themes")
add_gui_module("statistics")
add_gui_module("config")
add_gui_module("camera")
add_gui_module("camera3d")
add_gui_module("thumbnails")
add_gui_module("addsource")
if (WITH_CERES)
list(APPEND GUI2SRC
src/modules/calibration/calibration.cpp
src/modules/calibration/extrinsic.cpp
src/modules/calibration/intrinsic.cpp
src/modules/calibration/stereo.cpp
src/views/calibration/widgets.cpp
src/views/calibration/extrinsicview.cpp
src/views/calibration/intrinsicview.cpp
src/views/calibration/stereoview.cpp
)
endif()
if (HAVE_OPENVR)
list(APPEND GUISRC "src/vr.cpp")
add_gui_module("cameravr")
endif()
# Various preprocessor definitions have been generated by NanoGUI
......@@ -27,21 +59,19 @@ add_definitions(${NANOGUI_EXTRA_DEFS})
# On top of adding the path to nanogui/include, you may need extras
include_directories(${NANOGUI_EXTRA_INCS})
add_executable(ftl-gui ${GUISRC})
add_executable(ftl-gui2 ${GUI2SRC})
target_include_directories(ftl-gui PUBLIC
target_include_directories(ftl-gui2 PUBLIC
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
$<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/ext/nanogui/include>
$<INSTALL_INTERFACE:include>
PRIVATE src)
#if (CUDA_FOUND)
#set_property(TARGET ftl-gui PROPERTY CUDA_SEPARABLE_COMPILATION ON)
#set_property(TARGET ftl-gui2 PROPERTY CUDA_SEPARABLE_COMPILATION ON)
#endif()
#target_include_directories(cv-node PUBLIC ${PROJECT_SOURCE_DIR}/include)
target_link_libraries(ftl-gui ftlcommon ftlctrl ftlrgbd ftlstreams ftlrender Threads::Threads ${OpenCV_LIBS} openvr ftlnet nanogui ${NANOGUI_EXTRA_LIBS})
target_link_libraries(ftl-gui2 ftlcommon ftldata ftlctrl ftlrgbd ftlstreams ftlrender Threads::Threads ${OpenCV_LIBS} openvr ftlnet nanogui ${NANOGUI_EXTRA_LIBS} ceres nvidia-ml)
if (BUILD_TESTS)
add_subdirectory(test)
endif()
target_precompile_headers(ftl-gui2 REUSE_FROM ftldata)
GUI
Nanogui based graphical user interface.
General:
* Do not modify gui outside gui thread (main). Modifications must be done in
GUI callbacks or draw().
* Expensive processing should be moved out of gui thread (draw() and callbacks)
* Module is only required to implement Module. Each module is expected to be
loaded only once.
Classes
Screen
* Implements main screen: toolbar and view
* Interface for registering new modules.
* Interface for adding/removing buttons
* Interface for setting active View. Inactive view is removed and destroyed if
no other references are remaining.
* Note: the toolbar could be a module, but other modules likely assume it is
always available anyway.
* Centralized access to Nanogui::Themes and custom non-theme colors.
Module (controller)
* GUI module class wraps pointers for io, config and net. Initialization should
add necessary buttons to Screen
* Build necessary callbacks to process data from InputOutput to view.
Note: If callback passes data to view, callback handle should be owned by
the view or Module has to keep a nanogui reference to the View. Also note
that View destructor is called when active view is replaced.
View
* Active view will be the main window; only one view can be active at a time
* Button callbacks (eg. those registered by module init) may change active view
* Destroyed when view is changed. Object lifetime can be used to remove
callbacks from InputOutput (TODO: only one active callback supported at the
moment)
* Implementations do not have to inherit from View. Popup/Window/Widget... can
be used to implement UI components available from any mode (config, record).
* Receives all unprocessed keyboard events.
InputOutput
* Contains pointers to all required FTL objects (network/rendering/feed/...).
* Speaker
NanoGUI notes:
* If disposing Window in widget destructor, window->parent() reference count
must be checked and dispose() only called if refCount > 0. (segfault at exit)
* Nanogui does not dispose popup windows automatically. See above point if
using destructor for clean up.
#include <loguru.hpp>
#include <nlohmann/json.hpp>
#include <ftl/codecs/shapes.hpp>
#include <ftl/streams/filestream.hpp>
#include "inputoutput.hpp"
using ftl::gui2::InputOutput;
using ftl::codecs::Channel;
// GUI I/O hub constructor: creates the network controller (Master) and the
// stream Feed, and wires forwarding of remote log messages.
InputOutput::InputOutput(ftl::Configurable *root, ftl::net::Universe *net) :
net_(net) {
master_ = std::unique_ptr<ftl::ctrl::Master>(new ftl::ctrl::Master(root, net));
// Mirror log messages from remote peers into the local log at a matching
// severity (-2 = error, -1 = warning, 0 = info; other levels are dropped).
master_->onLog([](const ftl::ctrl::LogEvent &e){
const int v = e.verbosity;
switch (v) {
case -2: LOG(ERROR) << "Remote log: " << e.message; break;
case -1: LOG(WARNING) << "Remote log: " << e.message; break;
case 0: LOG(INFO) << "Remote log: " << e.message; break;
}
});
//net_->onConnect([this](ftl::net::Peer *p) {
//ftl::pool.push([this](int id) {
// FIXME: Find better option than waiting here.
// Wait to make sure streams have started properly.
//std::this_thread::sleep_for(std::chrono::milliseconds(100));
//_updateCameras(screen_->net()->findAll<string>("list_streams"));
//});
//});
feed_ = std::unique_ptr<ftl::stream::Feed>
(ftl::create<ftl::stream::Feed>(root, "feed", net));
// Raw pointer is non-owning; presumably the speaker is owned by the
// feed — TODO confirm against Feed's implementation.
speaker_ = feed_->speaker();
//auto* f = feed_->filter({ftl::codecs::Channel::Colour, ftl::codecs::Channel::Depth});
//feed_->render(f, Eigen::Matrix4d::Identity());
}
#pragma once
#include <memory>
#include <mutex>
#include <array>
#include <ftl/handle.hpp>
#include <ftl/configuration.hpp>
#include <ftl/net/universe.hpp>
#include <ftl/master.hpp>
#include <ftl/streams/stream.hpp>
#include <ftl/streams/receiver.hpp>
#include <ftl/streams/feed.hpp>
#include <ftl/streams/filestream.hpp>
#include <ftl/audio/speaker.hpp>
#include <ftl/data/new_frame.hpp>
#include <ftl/data/new_frameset.hpp>
#include <ftl/data/framepool.hpp>
namespace ftl {
namespace gui2 {
/**
 * Central access point for the non-GUI objects the GUI needs: networking
 * (Universe), remote control (Master), streams (Feed) and audio (Speaker).
 * Non-copyable; constructed once at startup.
 */
class InputOutput {
public:
InputOutput(ftl::Configurable *config, ftl::net::Universe *net);
InputOutput(const InputOutput&) = delete;
void operator=(const InputOutput&) = delete;
/** Register a frameset callback; the returned handle controls its lifetime. */
ftl::Handle addCallback(const std::function<bool(const ftl::data::FrameSetPtr&)>&);
ftl::net::Universe* net() const;
ftl::ctrl::Master* master() const { return master_.get(); }
ftl::stream::Feed* feed() const { return feed_.get(); }
ftl::audio::Speaker* speaker() const { return speaker_; }
private:
ftl::net::Universe* net_;
std::unique_ptr<ftl::stream::Feed> feed_;
std::unique_ptr<ftl::ctrl::Master> master_;
// Non-owning; obtained from feed_ in the constructor.
ftl::audio::Speaker *speaker_;
};
}
}
#include <memory>
#include <loguru.hpp>
#include <nlohmann/json.hpp>
#include <ftl/configuration.hpp>
#include <ftl/net/universe.hpp>
#include <ftl/net_configurable.hpp>
#include <ftl/rgbd.hpp>
#include <nanogui/nanogui.h>
#include <cuda_gl_interop.h>
#include "inputoutput.hpp"
#include "module.hpp"
#include "screen.hpp"
#include "modules.hpp"
#ifdef HAVE_PYLON
#include <pylon/PylonIncludes.h>
#endif
using std::unique_ptr;
using std::make_unique;
/**
* FTL Graphical User Interface
* Single screen, loads configuration and sets up networking and input/output.
* Loads required modules to gui.
*/
class FTLGui {
public:
FTLGui(int argc, char **argv);
~FTLGui();
/** Create and register a GUI module of type T under the given name. */
template<typename T>
T* loadModule(const std::string &name);
/** Run until the window closes or ftl::running becomes false. */
void mainloop();
private:
std::unique_ptr<ftl::Configurable> root_;   // configuration root
std::unique_ptr<ftl::net::Universe> net_;   // networking
std::unique_ptr<ftl::gui2::InputOutput> io_; // shared I/O hub for modules
nanogui::ref<ftl::gui2::Screen> screen_;    // main screen (ref-counted)
};
// Forwards module creation to the screen, supplying the shared configuration
// root, the screen itself and the I/O hub that every Module expects.
template<typename T>
T* FTLGui::loadModule(const std::string &name) {
return screen_->addModule<T>(name, root_.get(), screen_.get(), io_.get());
}
// Builds the whole application: screen, configuration, networking, I/O hub
// and all GUI modules, then registers sources given on the command line.
FTLGui::FTLGui(int argc, char **argv) {
using namespace ftl::gui2;
// Screen is created before the CUDA query below — presumably because it
// creates the GL context needed for CUDA/GL interop. TODO confirm.
screen_ = new Screen();
int cuda_device;
cudaSafeCall(cudaGetDevice(&cuda_device));
//cudaSafeCall(cudaGLSetGLDevice(cuda_device));
root_ = unique_ptr<ftl::Configurable>(ftl::configure(argc, argv, "gui_default"));
net_ = unique_ptr<ftl::net::Universe>(ftl::create<ftl::net::Universe>(root_.get(), "net"));
io_ = make_unique<ftl::gui2::InputOutput>(root_.get(), net_.get());
net_->start();
net_->waitConnections();
// Core modules; "home" (thumbnails) becomes the initial active view.
loadModule<Themes>("themes");
loadModule<ThumbnailsController>("home")->activate();
loadModule<Camera>("camera");
loadModule<ConfigCtrl>("configwindow");
loadModule<Statistics>("statistics");
#ifdef HAVE_CERES
loadModule<Calibration>("calibration");
#endif
auto *adder = loadModule<AddCtrl>("adder");
// Remaining command-line arguments are treated as source URIs/paths.
for (int c = 1; c < argc; c++) {
std::string path(argv[c]);
try {
io_->feed()->add(path);
LOG(INFO) << "Add: " << path;
}
catch (const ftl::exception&) {
LOG(ERROR) << "Could not add: " << path;
}
}
// Nothing to show yet: prompt the user to add a source.
if (io_->feed()->listSources().size() == 0) {
adder->show();
}
// Surface connection problems to the user as error dialogs.
net_->onDisconnect([this](ftl::net::Peer *p) {
if (p->status() != ftl::net::Peer::kConnected) {
screen_->showError("Connection Failed", std::string("Could not connect to network peer: ") + p->getURI());
} else {
screen_->showError("Disconnection", std::string("Network peer disconnected: ") + p->getURI());
}
});
// NOTE(review): network errors are currently ignored here — confirm intended.
net_->onError([this](ftl::net::Peer *, const ftl::net::Error &err) {
});
}
// Stop networking before the remaining members are destroyed.
FTLGui::~FTLGui() {
net_->shutdown();
}
// Main render/UI loop. Mirrors nanogui::mainloop() but additionally renders
// the feed every iteration while capping GUI redraws at ~25 fps.
void FTLGui::mainloop() {
// implements similar main loop as nanogui::mainloop()
ftl::timer::start();
screen_->setVisible(true);
screen_->drawAll();
float last_draw_time = 0.0f;
while (ftl::running) {
if (!screen_->visible()) {
ftl::running = false;
}
else if (glfwWindowShouldClose(screen_->glfwWindow())) {
screen_->setVisible(false);
ftl::running = false;
}
else {
float now = float(glfwGetTime());
float delta = now - last_draw_time;
// Generate poses and render and virtual frame here
// at full FPS (25 without VR and 90 with VR currently)
//screen_->render();
io_->feed()->render();
// Only draw the GUI at 25fps
if (delta >= 0.04f) {
last_draw_time = now;
screen_->drawAll();
}
}
// Wait for mouse/keyboard or empty refresh events
glfwWaitEventsTimeout(0.02); // VR headset issues
//glfwPollEvents();
}
// Process events once more
glfwPollEvents();
ftl::config::save();
// Stop everything before deleting feed etc
LOG(INFO) << "Stopping...";
ftl::timer::stop(true);
ftl::pool.stop(true);
LOG(INFO) << "All threads stopped.";
}
////////////////////////////////////////////////////////////////////////////////
// Entry point: initializes nanogui, runs the GUI, and performs cleanup.
int main(int argc, char **argv) {
#ifdef HAVE_PYLON
// Pylon SDK must be initialized for the lifetime of the process.
Pylon::PylonAutoInitTerm autoInitTerm;
#endif
// Note: This causes 100% CPU use but prevents the random frame drops.
ftl::timer::setHighPrecision(true);
{
nanogui::init();
FTLGui gui(argc, argv);
try {
gui.mainloop();
}
catch (const ftl::exception &e) {
// NOTE(review): unlike the runtime_error handler below, this branch
// falls through to normal shutdown and exits 0 — confirm intended.
#ifdef WIN32
std::string error_msg = std::string("Caught a fatal error: ") + std::string(e.what()) + std::string("\r\n") + std::string(e.trace());
MessageBoxA(nullptr, error_msg.c_str(), NULL, MB_ICONERROR | MB_OK);
#else
LOG(ERROR) << "Fatal error: " << e.what();
LOG(ERROR) << e.trace();
#endif
}
catch (const std::runtime_error &e) {
std::string error_msg = std::string("Caught a fatal error: ") + std::string(e.what());
#ifdef WIN32
MessageBoxA(nullptr, error_msg.c_str(), NULL, MB_ICONERROR | MB_OK);
LOG(ERROR) << error_msg;
#else
LOG(ERROR) << error_msg;
#endif
// NOTE(review): this early return skips nanogui::shutdown() and
// ftl::config::cleanup() below — confirm intended.
return -1;
}
}
// Must be after ~FTLGui since it destroys GL context.
nanogui::shutdown();
// Save config changes and delete final objects
ftl::config::cleanup();
return 0;
}
#pragma once
#include "view.hpp"
#include "inputoutput.hpp"
#include <ftl/configurable.hpp>
#include <nanogui/entypo.h>
#include <nanogui/button.h>
namespace ftl {
namespace gui2 {
class Screen;
/**
 * Base class for GUI controller modules. Modules are registered through
 * Screen's addModule and are expected to be loaded only once.
 */
class Module : public ftl::Configurable {
public:
Module(nlohmann::json &config, Screen *screen, InputOutput *io) :
Configurable(config), screen(screen), io(io) {}
/** called by constructor */
virtual void init() {};
/** called before draw */
virtual void update(double) {};
virtual ~Module() {};
// Non-owning pointers, valid for the module's lifetime.
ftl::gui2::Screen* const screen;
protected:
ftl::gui2::InputOutput* const io;
};
}
}
#pragma once
#include "modules/thumbnails.hpp"
#include "modules/camera.hpp"
#include "modules/config.hpp"
#include "modules/themes.hpp"
#include "modules/statistics.hpp"
#ifdef HAVE_CERES
#include "modules/calibration/calibration.hpp"
#endif
#include "modules/addsource.hpp"
#include "addsource.hpp"
using ftl::gui2::AddCtrl;
// Add the "+" toolbar button that opens the add-source window.
void AddCtrl::init() {
button = screen->addButton(ENTYPO_ICON_PLUS);
button->setTooltip("Add New Source");
button->setCallback([this](){
// Un-push immediately so the button acts as a trigger, not a toggle.
button->setPushed(false);
show();
});
button->setVisible(true);
}
// Show the add-source window, creating it first if it does not exist or
// if the previous one has already been removed from the screen.
void AddCtrl::show() {
// Note: By chance, the pointer can in fact pass this test as another
// widget gets allocated to the exact same address
if (!window || screen->childIndex(window) == -1) {
window = new ftl::gui2::AddSourceWindow(screen, this);
}
window->setVisible(true);
window->requestFocus();
screen->performLayout();
}
/**
 * Dispose the add-source window (if one is open) and drop our pointer to
 * it. The original unconditionally dereferenced `window`, crashing when
 * called before show() or after a previous dispose; guard against null.
 */
void AddCtrl::disposeWindow() {
	if (window) {
		window->dispose();
		window = nullptr;
	}
}
// Toggle a source: if the URI is already active it is removed, otherwise it
// is added to the feed. Errors are reported to the user via a dialog.
// NOTE(review): always returns nullptr despite the Configurable* return
// type — confirm whether callers expect the created source back.
ftl::Configurable *AddCtrl::add(const std::string &uri) {
try {
if (io->feed()->sourceActive(uri)) {
io->feed()->remove(uri);
} else {
io->feed()->add(uri);
}
} catch (const ftl::exception &e) {
screen->showError("Exception", e.what());
}
return nullptr;
}
// The feed accessors below return their results directly. The original
// wrapped each return in std::move(), which is redundant on a returned
// temporary and can inhibit copy elision (clang-tidy:
// readability-redundant-move / performance-no-automatic-move).

/** Known network hosts the feed can connect to. */
std::vector<std::string> AddCtrl::getHosts() {
	return io->feed()->knownHosts();
}
/** Available source groups. */
std::vector<std::string> AddCtrl::getGroups() {
	return io->feed()->availableGroups();
}
/** Recently used sources. */
std::set<ftl::stream::SourceInfo> AddCtrl::getRecent() {
	return io->feed()->recentSources();
}
/** Sources currently visible on the network. */
std::vector<std::string> AddCtrl::getNetSources() {
	return io->feed()->availableNetworkSources();
}
/** File-backed sources. */
std::vector<std::string> AddCtrl::getFileSources() {
	return io->feed()->availableFileSources();
}
/** Locally attached device sources. */
std::vector<std::string> AddCtrl::getDeviceSources() {
	return io->feed()->availableDeviceSources();
}
/** Human-readable name for the given source URI. */
std::string AddCtrl::getSourceName(const std::string &uri) {
return io->feed()->getName(uri);
}
/** True if the source is currently active in the feed. */
bool AddCtrl::isSourceActive(const std::string &uri) {
return io->feed()->sourceActive(uri);
}
AddCtrl::~AddCtrl() {
// remove window?
}
#pragma once
#include "../module.hpp"
#include "../screen.hpp"
#include "../views/addsource.hpp"
namespace ftl {
namespace gui2 {
/**
* Controller for adding sources etc.
*/
class AddCtrl : public Module {
public:
using Module::Module;
virtual ~AddCtrl();
/** Adds the toolbar button. */
virtual void init() override;
/** Open (creating if necessary) the add-source window. */
virtual void show();
/** Dispose the window and clear the pointer. */
void disposeWindow();
/** Toggle the source at the given URI (add if inactive, remove if active). */
ftl::Configurable *add(const std::string &uri);
std::vector<std::string> getHosts();
std::set<ftl::stream::SourceInfo> getRecent();
std::vector<std::string> getNetSources();
std::vector<std::string> getFileSources();
std::vector<std::string> getDeviceSources();
std::vector<std::string> getGroups();
std::string getSourceName(const std::string &uri);
bool isSourceActive(const std::string &uri);
inline ftl::stream::Feed *feed() { return io->feed(); }
private:
// Toolbar button; presumably owned by the screen widget tree — verify.
nanogui::ToolButton *button;
// May outlive the actual widget; show() re-checks screen membership.
ftl::gui2::AddSourceWindow *window = nullptr;
};
}
}
#include <loguru.hpp>
#include "calibration.hpp"
#include "../../screen.hpp"
#include "../../widgets/popupbutton.hpp"
#include "../../views/calibration/intrinsicview.hpp"
#include "../../views/calibration/extrinsicview.hpp"
#include "../../views/calibration/stereoview.hpp"
#include <opencv2/aruco.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/calib3d.hpp>
#include <ftl/calibration/optimize.hpp>
#include <ftl/calibration/structures.hpp>
#include <ftl/threads.hpp>
#include <nanogui/entypo.h>
#include <nanogui/layout.h>
using ftl::gui2::Calibration;
using ftl::calibration::CalibrationData;
using ftl::codecs::Channel;
using ftl::data::FrameID;
using ftl::data::FrameSetPtr;
// ==== OpenCVCalibrateFlags ===================================================
using ftl::gui2::OpenCVCalibrateFlags;
using ftl::gui2::OpenCVCalibrateFlagsStereo;
// Default flags for monocular (intrinsic) calibration.
int OpenCVCalibrateFlags::defaultFlags() const {
// For finding distortion coefficients fix focal length and principal point.
// Otherwise results might be unreliable.
return (cv::CALIB_FIX_FOCAL_LENGTH |
cv::CALIB_FIX_PRINCIPAL_POINT |
cv::CALIB_FIX_ASPECT_RATIO);
}
// All flags selectable in the GUI for monocular calibration.
std::vector<int> OpenCVCalibrateFlags::list() const {
return {
cv::CALIB_USE_INTRINSIC_GUESS,
cv::CALIB_FIX_FOCAL_LENGTH,
cv::CALIB_FIX_PRINCIPAL_POINT,
cv::CALIB_FIX_ASPECT_RATIO,
cv::CALIB_ZERO_TANGENT_DIST,
cv::CALIB_FIX_K1,
cv::CALIB_FIX_K2,
cv::CALIB_FIX_K3,
cv::CALIB_FIX_K4,
cv::CALIB_FIX_K5,
cv::CALIB_FIX_K6,
cv::CALIB_RATIONAL_MODEL,
cv::CALIB_THIN_PRISM_MODEL,
cv::CALIB_FIX_S1_S2_S3_S4,
cv::CALIB_TILTED_MODEL,
cv::CALIB_FIX_TAUX_TAUY
};
}
// Map an OpenCV calibration flag value to its symbolic name; returns an
// empty string for unknown values.
std::string OpenCVCalibrateFlags::name(int i) const {
using namespace cv;
switch(i) {
case CALIB_FIX_INTRINSIC:
return "CALIB_FIX_INTRINSIC";
case CALIB_FIX_FOCAL_LENGTH:
return "CALIB_FIX_FOCAL_LENGTH";
case CALIB_USE_INTRINSIC_GUESS:
return "CALIB_USE_INTRINSIC_GUESS";
case CALIB_USE_EXTRINSIC_GUESS:
return "CALIB_USE_EXTRINSIC_GUESS";
case CALIB_FIX_PRINCIPAL_POINT:
return "CALIB_FIX_PRINCIPAL_POINT";
case CALIB_FIX_ASPECT_RATIO:
return "CALIB_FIX_ASPECT_RATIO";
case CALIB_SAME_FOCAL_LENGTH:
return "CALIB_SAME_FOCAL_LENGTH";
case CALIB_ZERO_TANGENT_DIST:
return "CALIB_ZERO_TANGENT_DIST";
case CALIB_FIX_K1:
return "CALIB_FIX_K1";
case CALIB_FIX_K2:
return "CALIB_FIX_K2";
case CALIB_FIX_K3:
return "CALIB_FIX_K3";
case CALIB_FIX_K4:
return "CALIB_FIX_K4";
case CALIB_FIX_K5:
return "CALIB_FIX_K5";
case CALIB_FIX_K6:
return "CALIB_FIX_K6";
case CALIB_RATIONAL_MODEL:
return "CALIB_RATIONAL_MODEL";
case CALIB_THIN_PRISM_MODEL:
return "CALIB_THIN_PRISM_MODEL";
case CALIB_FIX_S1_S2_S3_S4:
return "CALIB_FIX_S1_S2_S3_S4";
case CALIB_TILTED_MODEL:
return "CALIB_TILTED_MODEL";
case CALIB_FIX_TAUX_TAUY:
return "CALIB_FIX_TAUX_TAUY";
};
return "";
}
// Human-readable description of each flag (shown as help text in the GUI);
// returns an empty string for unknown values. Fixes the user-visible typo
// "paramters" -> "parameters" in the CALIB_FIX_INTRINSIC text.
std::string OpenCVCalibrateFlags::explain(int i) const {
using namespace cv;
switch(i) {
case CALIB_FIX_INTRINSIC:
return "Fix all intrinsic parameters.";
case CALIB_FIX_FOCAL_LENGTH:
return "Fix focal length (fx and fy).";
case CALIB_USE_INTRINSIC_GUESS:
return "Use valid initial values of fx, fy, cx, cy that are "
"optimized further. Otherwise, (cx, cy) is initially set "
"to the image center and focal distances are computed in "
"a least-squares fashion.";
case CALIB_USE_EXTRINSIC_GUESS:
return "";
case CALIB_FIX_PRINCIPAL_POINT:
return "The principal point is not changed during the global "
"optimization. It stays at the center or at a location "
"specified in initial parameters.";
case CALIB_FIX_ASPECT_RATIO:
return "Consider only fy as a free parameter. The ratio fx/fy "
"stays the same. When CALIB_USE_INTRINSIC_GUESS is not "
"set, the actual input values of fx and fy are ignored, "
"only their ratio is computed and used further.";
case CALIB_ZERO_TANGENT_DIST:
return "Tangential distortion coefficients (p1,p2) are set to "
"zeros and stay zero.";
case CALIB_FIX_K1:
case CALIB_FIX_K2:
case CALIB_FIX_K3:
case CALIB_FIX_K4:
case CALIB_FIX_K5:
case CALIB_FIX_K6:
return "The radial distortion coefficient is not changed during "
"the optimization. If CALIB_USE_INTRINSIC_GUESS is set, "
"the coefficient from initial values is used. Otherwise, "
"it is set to 0.";
case CALIB_RATIONAL_MODEL:
return "Coefficients k4, k5, and k6 are enabled.";
case CALIB_THIN_PRISM_MODEL:
return " Coefficients s1, s2, s3 and s4 are enabled.";
case CALIB_FIX_S1_S2_S3_S4:
return "The thin prism distortion coefficients are not changed "
"during the optimization. If CALIB_USE_INTRINSIC_GUESS is "
"set, the supplied coefficients are used. Otherwise, they "
"are set to 0.";
case CALIB_TILTED_MODEL:
return "Coefficients tauX and tauY are enabled";
case CALIB_FIX_TAUX_TAUY:
return "The coefficients of the tilted sensor model are not "
"changed during the optimization. If "
"CALIB_USE_INTRINSIC_GUESS is set, the supplied "
"coefficients are used. Otherwise, they are set to 0.";
};
return "";
}
// Stereo flag list: the monocular flags plus stereo-specific ones. After
// the three inserts the order is: FIX_INTRINSIC, USE_EXTRINSIC_GUESS,
// SAME_FOCAL_LENGTH, then the base list.
std::vector<int> OpenCVCalibrateFlagsStereo::list() const {
auto ls = OpenCVCalibrateFlags::list();
ls.insert(ls.begin(), cv::CALIB_FIX_INTRINSIC);
ls.insert(ls.begin() + 1, cv::CALIB_SAME_FOCAL_LENGTH);
ls.insert(ls.begin() + 1, cv::CALIB_USE_EXTRINSIC_GUESS);
return ls;
}
// Stereo-specific flag descriptions; anything else defers to the base
// class. Fixes the user-visible typo "paramters" -> "parameters".
std::string OpenCVCalibrateFlagsStereo::explain(int i) const {
using namespace cv;
switch(i) {
case CALIB_FIX_INTRINSIC:
return "Fix intrinsic camera parameters (focal length, aspect "
"ratio, principal point and distortion coefficients)";
case CALIB_USE_INTRINSIC_GUESS:
return "Optimize some or all of the intrinsic parameters according "
"to the specified flags";
case CALIB_USE_EXTRINSIC_GUESS:
return "Rotation and translation have valid initial values that "
"are optimized further. Otherwise rotation and translation "
"are initialized to the median value of the pattern views ";
case CALIB_SAME_FOCAL_LENGTH:
return "Enforce fx_l == fx_r && fy_l == fy_r";
default:
return OpenCVCalibrateFlags::explain(i);
};
}
// By default keep per-camera intrinsics fixed during stereo calibration.
int OpenCVCalibrateFlagsStereo::defaultFlags() const {
return cv::CALIB_FIX_INTRINSIC;
}
// ==== Calibration module =====================================================
// Loads sub-modules and adds buttons to main screen.
/**
 * Register the three calibration sub-modules and add a toolbar popup with
 * one entry per calibration mode. Each entry swaps in the matching start
 * view and releases the pressed state of the buttons involved.
 *
 * BUG FIX: the stereo button's callback previously captured and reset
 * button_extrinsic (copy-paste error); it now operates on button_stereo.
 */
void Calibration::init() {
	screen->addModule<IntrinsicCalibration>("calib_intrinsic", this, screen, io);
	screen->addModule<ExtrinsicCalibration>("calib_extrinsic", this, screen, io);
	screen->addModule<StereoCalibration>("calib_stereo", this, screen, io);

	// NOTE: If more GUI code is added, consider moving the GUI code to a new
	// file in ../views/
	// Should implement PopupMenu widget which would abstract building steps
	// and provide common feel&look. (TODO)

	auto button = screen->addButton<ftl::gui2::PopupButton>("", ENTYPO_ICON_CAMERA);
	button->setChevronIcon(0);
	button->setTooltip("Calibrate Cameras");

	auto* popup = button->popup();
	popup->setLayout(new nanogui::BoxLayout
		(nanogui::Orientation::Vertical, nanogui::Alignment::Fill, 10, 6));

	auto* button_intrinsic = new nanogui::Button(popup, "Intrinsic Calibration");
	button_intrinsic->setCallback([this, button, button_intrinsic, popup](){
		button->setPushed(false);
		button_intrinsic->setPushed(false);
		button_intrinsic->setFocused(false);
		auto* calib = screen->getModule<IntrinsicCalibration>();
		auto* view = new ftl::gui2::IntrinsicCalibrationStart(screen, calib);
		screen->setView(view);
	});

	auto* button_extrinsic = new nanogui::Button(popup, "Extrinsic Calibration");
	button_extrinsic->setCallback([this, button, button_extrinsic, popup](){
		button->setPushed(false);
		button_extrinsic->setPushed(false);
		button_extrinsic->setFocused(false);
		auto* calib = screen->getModule<ExtrinsicCalibration>();
		auto* view = new ftl::gui2::ExtrinsicCalibrationStart(screen, calib);
		screen->setView(view);
	});

	auto* button_stereo = new nanogui::Button(popup, "Stereo Calibration");
	button_stereo->setCallback([this, button, button_stereo, popup](){
		button->setPushed(false);
		button_stereo->setPushed(false);
		button_stereo->setFocused(false);
		auto* calib = screen->getModule<StereoCalibration>();
		auto* view = new ftl::gui2::StereoCalibrationStart(screen, calib);
		screen->setView(view);
	});

	button->setVisible(true);
}
// TODO: the toolbar button added in init() is never removed here.
Calibration::~Calibration() {
// remove button
}
// ==== CalibrationModule ======================================================
using ftl::gui2::CalibrationModule;
// Gate-keeper for frame processing: returns true only when the frame has a
// CalibrationData channel, no calibration update is pending, and the frame's
// enabled state matches the locally requested one (otherwise the local state
// is pushed to the frame). Call at the start of every FrameSet callback.
bool CalibrationModule::checkFrame(ftl::data::Frame& frame) {
if (wait_update_) {
return false;
}
if (frame.hasChannel(Channel::CalibrationData)) {
if (calibration_enabled_ != calibrationEnabled(frame)) {
LOG(INFO) << std::string(calibration_enabled_ ? "Enabling" : "Disabling") +
" calibration (changed outside)";
setCalibrationMode(frame, calibration_enabled_);
return false;
}
}
else {
// Warn only once to avoid log spam on every frame.
static bool logged_once__ = false;
if (!logged_once__) {
LOG(WARNING) << "No CalibrationData channel, is this a valid camera?";
logged_once__ = true;
}
return false;
}
return true;
}
// Read the enabled flag from the frame's calibration data.
bool CalibrationModule::calibrationEnabled(ftl::data::Frame& frame) {
auto& calib_data = frame.get<CalibrationData>(Channel::CalibrationData);
return calib_data.enabled;
}
// Send new calibration data back to the source via the frame's response
// channel. Sets wait_update_ so checkFrame() rejects frames until the
// change is observed coming back on the CalibrationData channel.
void CalibrationModule::setCalibration(ftl::data::Frame& frame, CalibrationData data) {
// previous callbacks are cancelled!
wait_update_ = true;
// updates enabled_ status with given calibration data
auto response = frame.response();
response.create<CalibrationData>(Channel::CalibrationData) = data;
update_handle_ = frame.onChange(Channel::CalibrationData,
[&wait_update = wait_update_,
&enabled = calibration_enabled_,
value = data.enabled]
(ftl::data::Frame& frame, ftl::codecs::Channel){
// Change confirmed: record the new enabled state and resume processing.
enabled = value;
wait_update = false;
return true;
});
}
// Enable/disable calibration on the given frame by rewriting its
// CalibrationData with the flag flipped.
void CalibrationModule::setCalibrationMode(ftl::data::Frame& frame, bool value) {
if (!frame.hasChannel(Channel::CalibrationData)) {
LOG(ERROR) << "Trying to change calibration status of frame which does "
"not contain CalibrationData";
return;
}
auto data = CalibrationData(frame.get<CalibrationData>(Channel::CalibrationData));
data.enabled = value;
setCalibration(frame, data);
}
// Record the desired mode only; the frame update itself is performed by
// checkFrame() when the next frame arrives.
void CalibrationModule::setCalibrationMode(bool value) {
calibration_enabled_ = value;
}
/// Fetch the given channel as a host cv::Mat, downloading from the GPU
/// when the frame's data lives there.
cv::Mat CalibrationModule::getMat(ftl::rgbd::Frame& frame, Channel c) {
	auto& video = frame.get<ftl::rgbd::VideoFrame>(c);
	cv::Mat result;
	if (!video.isGPU()) {
		result = video.getCPU();
	} else {
		video.getGPU().download(result);
	}
	return result;
}
/// Fetch the given channel as a GpuMat, uploading from host memory when
/// the frame's data is CPU-resident.
cv::cuda::GpuMat CalibrationModule::getGpuMat(ftl::rgbd::Frame& frame, Channel c) {
	auto& video = frame.get<ftl::rgbd::VideoFrame>(c);
	cv::cuda::GpuMat result;
	if (video.isGPU()) {
		result = video.getGPU();
	} else {
		result.upload(video.getCPU());
	}
	return result;
}
#pragma once
#include "../../module.hpp"
#include <ftl/calibration/object.hpp>
#include <ftl/calibration/extrinsic.hpp>
#include <ftl/calibration/structures.hpp>
#include <opencv2/core/types.hpp>
namespace ftl
{
namespace gui2
{
/** OpenCV calibration flags */
class OpenCVCalibrateFlags {
public:
	/** True if the given flag bit is set. */
	bool has(unsigned int flag) const { return (flags_ & flag) != 0; }
	void set(unsigned int flag) { flags_ |= flag; }
	void unset(unsigned int flag) { flags_ &= ~flag; }
	void reset() { flags_ = 0; }
	std::string name(int) const;
	/** Implicit conversion to the raw flag bitmask (now const-qualified so
	 * it works on const instances; the original was non-const). */
	operator int() const { return flags_; }
	virtual int defaultFlags() const;
	virtual std::vector<int> list() const;
	virtual std::string explain(int) const;
	// This is a polymorphic base (OpenCVCalibrateFlagsStereo derives from
	// it); a virtual destructor is required for safe deletion through a
	// base pointer — the original had none.
	virtual ~OpenCVCalibrateFlags() = default;
private:
	int flags_ = 0;
};
/** Flag set for stereo calibration; overrides the default flags and
 * prepends stereo-only flags to the selectable list. */
class OpenCVCalibrateFlagsStereo : public OpenCVCalibrateFlags {
public:
int defaultFlags() const override;
std::vector<int> list() const override;
std::string explain(int) const override;
};
/**
* Calibration. Loads Intrinsic and Extrinsic calibration modules and
* adds buttons to main screen.
*/
class Calibration : public Module {
public:
using Module::Module;
virtual ~Calibration();
/** Registers the sub-modules and creates the toolbar popup button. */
virtual void init() override;
};
/**
* Calibration base module. Implements methods to loading/saving calibration.
* Also manages enabling/disabling calibration.
*/
class CalibrationModule : public Module {
public:
using Module::Module;
virtual void init() = 0;
protected:
/** Set new calibration. */
void setCalibration(ftl::data::Frame& frame, ftl::calibration::CalibrationData data);
/** Activate/deactivate calibration (rectification, distortion corrections,
 * ...). See also StereoVideo */
/** set mode, update performed by checkFrame() when next called */
void setCalibrationMode(bool value);
/** set mode directly to frame */
void setCalibrationMode(ftl::data::Frame& frame, bool value);
/** Check everything is in expected state. If returns true, processing can
 * continue. Use this in frameset callback. Also sets calibration mode if
 * it doesn't match with stored state. Should always be called in FrameSet
 * callback.
 */
bool checkFrame(ftl::data::Frame& frame);
cv::cuda::GpuMat getGpuMat(ftl::rgbd::Frame&, ftl::codecs::Channel);
cv::Mat getMat(ftl::rgbd::Frame&, ftl::codecs::Channel);
private:
bool calibrationEnabled(ftl::data::Frame& frame);
// True while a setCalibration() round-trip is pending; checkFrame()
// returns false until the change is observed on the channel.
std::atomic_bool wait_update_ = false;
// Locally requested enabled state; pushed to frames by checkFrame().
std::atomic_bool calibration_enabled_ = false;
// Keeps the onChange callback registered in setCalibration() alive.
ftl::Handle update_handle_;
};
/**
 * GUI for camera intrinsic calibration. Only sources which have CalibrationData
 * channel can be calibrated (StereoVideo receives updates and saves them).
 *
 * TODO: Catch exceptions in future and report back to GUI. At the moment
 *       errors are printed with logging.
 * TODO: View: add button to get back to chessboard/capture parameters.
 * TODO: Saving calibration should give more feedback: "saved" just tells it was
 *       sent but it does not verify it was received (or know if it was
 *       successfully saved; if saving fails do not write file/changes; how
 *       to inform GUI/client about the error?)
 *
 * TODO: FEATURE: Add timer to calibration window showing remaining time until
 *       next picture is captured.
 * TODO: FEATURE: Add calibration image window for browsing calibration images
 *       and discarding bad images manually. Also should support visualization
 *       of calibration results; draw detected points and re-projected points
 *       using OpenGL (reproject points implemented in calibration:: using
 *       OpenCV).
 * TODO: FEATURE: Visualize lens distortion. Plot regular grid and apply
 *       distortion model.
 */
class IntrinsicCalibration : public CalibrationModule {
public:
	using CalibrationModule::CalibrationModule;
	virtual void init() override;
	virtual ~IntrinsicCalibration();

	/** Start calibration process; replaces active view. */
	void start(ftl::data::FrameID id);

	/** Does the selected source provide channel c? */
	bool hasChannel(ftl::codecs::Channel c);
	/** Select channel to calibrate. */
	void setChannel(ftl::codecs::Channel c);
	ftl::codecs::Channel channel() { return state_->channel; }

	/** Number of captured calibration patterns so far. */
	int count() { return state_->count; }
	/** Non-zero when a calibration result/previous calibration is available. */
	int calibrated() { return state_->calibrated; }

	/** OpenCV calibration flags (modifiable reference). */
	OpenCVCalibrateFlags& flags() { return state_->flags; };
	int defaultFlags();

	/** Reset calibration instance; discards all state. */
	void reset();

	void setChessboard(cv::Size, double);
	cv::Size chessboardSize();
	double squareSize();

	/** Returns true if capture/calibration is still processing in background.
	 * calib() instance must not be modified while isBusy() is true.
	 */
	bool isBusy();

	/** Start/stop capture. After stopping, use isBusy() to check when last
	 * frame is finished.
	 */
	void setCapture(bool v) { state_->capture = v; }
	bool capturing() { return state_->capture; }

	/** Get/set capture frequency: interval between processed frames in
	 * chessboard detection.
	 */
	void setFrequency(float v) { state_->frequency = v; }
	float frequency() { return state_->frequency; }

	/** Solver iteration limit used by calibration (cv::TermCriteria). */
	int maxIter() { return state_->max_iter; }
	void setMaxIter(int v) { state_->max_iter = v; }

	/** Run calibration in another thread. Check status with isBusy(). */
	void run();

	/** Save calibration back to the source. */
	void saveCalibration();

	/** Current intrinsic calibration result. */
	ftl::calibration::CalibrationData::Intrinsic calibration();

	float reprojectionError() { return state_->reprojection_error; }

	/** Get sensor size from config/previous calibration (in mm). */
	cv::Size2d sensorSize();
	void setSensorSize(cv::Size2d size);

	/** Set/get focal length in mm. */
	double focalLength();
	void setFocalLength(double value, cv::Size2d sensor_size);

	/** Set principal point at image center. */
	void resetPrincipalPoint();
	void resetDistortion();

	/** Get current frame. */
	cv::cuda::GpuMat getFrame();
	bool hasFrame();

	cv::cuda::GpuMat getFrameUndistort();

	/** Get previous points (visualization). */
	std::vector<cv::Point2f> previousPoints();
	// must not be running_
	//std::vector<cv::Point2f> getPoints(int n);
	//std::vector<cv::Point2f> getProjectedPoints(int n);

	/** List sources which can be calibrated. */
	std::vector<std::pair<std::string, ftl::data::FrameID>> listSources(bool all=false);

private:
	bool onFrame_(const ftl::data::FrameSetPtr& fs);
	/** Set actual channel (channel_alt_) to high res if found in fs */
	void setChannel_(ftl::data::FrameSetPtr fs);

	std::future<void> future_;           // pending background task (detection or calibration)
	std::mutex mtx_;                     // guards point buffers in State
	ftl::data::FrameSetPtr fs_current_;  // frameset currently displayed
	ftl::data::FrameSetPtr fs_update_;   // most recent frameset delivered by callback

	struct State {
		cv::Mat gray;                        // grayscale work buffer for detection
		ftl::codecs::Channel channel;        // channel selected by the user
		ftl::codecs::Channel channel_alt;    // actual channel used (may be high-res variant)
		ftl::data::FrameID id;               // selected source
		std::atomic_bool capture = false;    // capture enabled?
		std::atomic_bool running = false;    // background task in flight?
		float last = 0.0f;                   // time of last processed frame
		float frequency = 0.5f;              // capture interval
		bool calibrated = false;             // calibration result available?
		int count = 0;                       // number of captured patterns
		int max_iter = 50;                   // solver iteration limit
		float reprojection_error = NAN;
		std::vector<std::vector<cv::Point2f>> points;        // image points, one vector per capture
		std::vector<std::vector<cv::Point3f>> points_object; // object points, one vector per capture
		std::unique_ptr<ftl::calibration::ChessboardObject> object;
		OpenCVCalibrateFlags flags;
		ftl::calibration::CalibrationData::Intrinsic calib;
	};
	std::unique_ptr<State> state_;
};
////////////////////////////////////////////////////////////////////////////////
/**
 * GUI for camera extrinsic calibration. Sources must be in same FrameSet
 * (synchronization) and have CalibrationData channel. Provided extrinsic
 * parameters can be used to calculate parameters for stereo rectification.
 */
class ExtrinsicCalibration : public CalibrationModule {
public:
	using CalibrationModule::CalibrationModule;
	virtual void init() override;
	virtual ~ExtrinsicCalibration();

	/** List framesets and calibrateable sources */
	std::vector<std::pair<std::string, unsigned int>> listFrameSets();
	std::vector<std::pair<std::string, ftl::data::FrameID>> listSources(unsigned int fsid, bool all);

	/** Start calibration process for given frames. Assumes stereo
	 * calibration: left and right channels are used. */
	void start(unsigned int fsid, std::vector<ftl::data::FrameID> sources);

	/** Discard current state and load defaults. */
	void reset();

	int cameraCount();
	std::string cameraName(int camera);
	std::vector<std::string> cameraNames();

	ftl::calibration::ExtrinsicCalibration& calib() { return state_.calib; } // should avoid

	/** hasFrame(int) must be true before calling getFrame() **/
	bool hasFrame(int camera);
	const cv::cuda::GpuMat getFrame(int camera);
	const cv::cuda::GpuMat getFrameRectified(int camera);

	/** Next FrameSet; returns true if new FrameSet is available */
	bool next();

	bool capturing();
	void setCapture(bool value);

	/** Set callback for point detection. Callback returns number of points
	 * found, takes input frame, channel and output points as arguments.
	 */
	//void setCallback(const std::function<int(cv::InputArray, const cv::Mat&, const cv::Mat&, std::vector<cv::Point2f>&)>& cb) { cb_detect_ = cb; }

	/** Identifies a physical camera: frameset+source plus Left/Right channel. */
	struct CameraID : ftl::data::FrameID {
		CameraID(unsigned int fs, unsigned int s, ftl::codecs::Channel channel) :
			ftl::data::FrameID::FrameID(fs, s), channel(channel) {}
		const ftl::codecs::Channel channel;
	};

	/** List selected (active) cameras */
	std::vector<CameraID> cameras();

	/** Run calibration in another thread. Check status with isBusy(). */
	void run();
	/** Returns true if capture/calibration is still processing in background.
	 * calib() instance must not be modified while isBusy() is true.
	 */
	bool isBusy();

	/** Status message from the calibration backend. */
	std::string status() { return state_.calib.status(); }

	/** Get previous points (for visualization) */
	const std::vector<cv::Point2d>& previousPoints(int camera);

	/** Get number of frames captured by a camera */
	int getFrameCount(int c);

	void updateCalibration(int c);
	void updateCalibration();

	/** Debug helpers: save/load captured calibration input (msgpack). */
	void saveInput(const std::string& filename);
	void loadInput(const std::string& filename);

	ftl::calibration::CalibrationData::Calibration calibration(int camera);

	double reprojectionError(int camera=-1);

	/** Bitmask controlling bundle adjustment options; see setFlags()/run(). */
	enum Flags {
		ZERO_DISTORTION = 1,
		RATIONAL_MODEL = 2,
		FIX_INTRINSIC = 4,
		FIX_FOCAL = 8,
		FIX_PRINCIPAL_POINT = 16,
		FIX_DISTORTION = 32,
		LOSS_CAUCHY = 64,
		NONMONOTONIC_STEP = 128,
	};
	void setFlags(int flags);
	int flags() const;

protected:
	ftl::calibration::CalibrationData::Calibration getCalibration(CameraID id);

	/** Calculate stereo rectification maps for two cameras; state_.maps[1,2]
	 * must already be initialized at correct size */
	void stereoRectify(int cl, int cr,
		const ftl::calibration::CalibrationData::Calibration& l,
		const ftl::calibration::CalibrationData::Calibration& r);

private:
	// map frameid+channel to int. used by ExtrinsicCalibration
	bool onFrameSet_(const ftl::data::FrameSetPtr& fs);

	std::future<void> future_;           // pending background task
	std::atomic_bool running_;           // true while a background task runs
	ftl::data::FrameSetPtr fs_current_;  // frameset currently displayed
	ftl::data::FrameSetPtr fs_update_;   // most recent frameset from callback

	struct State {
		bool capture = false;
		int min_cameras = 2;             // minimum cameras that must see the target per observation
		int flags = 0;                   // Flags bitmask
		std::vector<CameraID> cameras;
		std::unique_ptr<ftl::calibration::CalibrationObject> calib_object;
		ftl::calibration::ExtrinsicCalibration calib;
		std::vector<std::vector<cv::Point2d>> points_prev;  // last detected points (visualization)
		std::vector<cv::cuda::GpuMat> maps1;                // rectification maps (per camera)
		std::vector<cv::cuda::GpuMat> maps2;
	};
	State state_;
};
////////////////////////////////////////////////////////////////////////////////
/** Stereo calibration using OpenCV's cv::stereoCalibrate(). */
class StereoCalibration : public CalibrationModule {
public:
	using CalibrationModule::CalibrationModule;
	virtual void init() override;
	virtual ~StereoCalibration();

	/** Start calibration process; replaces active view. */
	void start(ftl::data::FrameID id);

	bool hasChannel(ftl::codecs::Channel c);

	void setChessboard(cv::Size, double);
	cv::Size chessboardSize();
	double squareSize();

	/** Reset calibration instance; discards all state. */
	void reset();

	/** OpenCV stereo calibration flags (modifiable reference). */
	OpenCVCalibrateFlagsStereo& flags() { return state_->flags; };
	void resetFlags();

	/** Returns true if capture/calibration is still processing in background.
	 * calib() instance must not be modified while isBusy() is true.
	 */
	bool isBusy();

	/** Start/stop capture. After stopping, use isBusy() to check when last
	 * frame is finished.
	 */
	void setCapture(bool v);
	bool capturing();

	/** Get/set capture frequency: interval between processed frames in
	 * chessboard detection.
	 */
	void setFrequency(float v);
	float frequency();

	/** Run calibration in another thread. Check status with isBusy(). */
	void run();

	/** Save calibration */
	void saveCalibration();

	/** Check if calibration valid: baseline > 0 */
	bool calibrated();

	/** Get current frames, raw and rectified. */
	cv::cuda::GpuMat getLeft();
	cv::cuda::GpuMat getRight();
	cv::cuda::GpuMat getLeftRectify();
	cv::cuda::GpuMat getRightRectify();
	bool hasFrame();

	ftl::calibration::CalibrationData::Calibration calibrationLeft();
	ftl::calibration::CalibrationData::Calibration calibrationRight();
	double baseline();

	/** Get previous points (visualization) */
	std::vector<std::vector<cv::Point2f>> previousPoints();
	cv::cuda::GpuMat getLeftPrevious();
	cv::cuda::GpuMat getRightPrevious();
	int count() const { return state_->count; }

	/** List sources which can be calibrated. */
	std::vector<std::pair<std::string, ftl::data::FrameID>> listSources(bool all=false);

private:
	bool onFrame_(const ftl::data::FrameSetPtr& fs);
	void calculateRectification();
	ftl::rgbd::Frame& frame_();
	ftl::codecs::Channel channelLeft_();
	ftl::codecs::Channel channelRight_();

	std::future<void> future_;           // pending background task
	std::mutex mtx_;                     // guards point buffers in State
	ftl::data::FrameSetPtr fs_current_;  // frameset currently displayed
	ftl::data::FrameSetPtr fs_update_;   // most recent frameset from callback

	struct State {
		cv::Mat gray_left;               // grayscale work buffers for detection
		cv::Mat gray_right;
		ftl::calibration::CalibrationData calib;
		std::unique_ptr<ftl::calibration::ChessboardObject> object;
		ftl::data::FrameID id;           // selected source
		bool highres = false;            // high resolution channels in use?
		cv::Size imsize;                 // image resolution
		std::atomic_bool capture = false;
		std::atomic_bool running = false;
		float last = 0.0f;               // time of last processed frame
		float frequency = 0.5f;          // capture interval
		int count = 0;                   // number of captured patterns
		float reprojection_error = NAN;
		OpenCVCalibrateFlagsStereo flags;
		// maps for rectification (cv)
		std::pair<cv::Mat, cv::Mat> map_l;
		std::pair<cv::Mat, cv::Mat> map_r;
		cv::Rect validROI_l;
		cv::Rect validROI_r;
		ftl::data::FrameSetPtr fs_previous_points;
		std::vector<std::vector<cv::Point2f>> points_l;      // left image points per capture
		std::vector<std::vector<cv::Point2f>> points_r;      // right image points per capture
		std::vector<std::vector<cv::Point3f>> points_object; // object points per capture
	};
	std::unique_ptr<State> state_;
};
}
}
#include "calibration.hpp"
#include "../../screen.hpp"
#include "../../widgets/popupbutton.hpp"
#include "../../views/calibration/extrinsicview.hpp"
#include <opencv2/calib3d.hpp>
#include <opencv2/aruco.hpp>
#include <opencv2/cudawarping.hpp>
#include <ftl/calibration/optimize.hpp>
#include <ftl/calibration/structures.hpp>
#include <ftl/threads.hpp>
#include <nanogui/entypo.h>
using ftl::gui2::Calibration;
using ftl::calibration::CalibrationData;
using ftl::codecs::Channel;
using ftl::data::FrameID;
using ftl::data::FrameSetPtr;
using ftl::gui2::ExtrinsicCalibration;
using ftl::calibration::CalibrationObject;
using ftl::calibration::ArUCoObject;
using ftl::calibration::transform::inverse;
using ftl::calibration::transform::getRotationAndTranslation;
void ExtrinsicCalibration::init() {
	reset();
}

void ExtrinsicCalibration::reset() {
	// Wait for any pending background task before discarding state.
	if(future_.valid()) { future_.wait(); }

	state_ = ExtrinsicCalibration::State();
	running_ = false;
	fs_current_.reset();
	fs_update_.reset();

	// ArUco dictionary board is the detection target for extrinsic calibration.
	state_.calib_object = std::unique_ptr<CalibrationObject>(new ArUCoObject(cv::aruco::DICT_6X6_100));
	state_.calib.points().setObject(state_.calib_object->object());
	state_.min_cameras = 2;
}

ExtrinsicCalibration::~ExtrinsicCalibration() {
	// Background task captures `this`; block until it completes.
	if(future_.valid()) {
		future_.wait();
	}
}
void ExtrinsicCalibration::start(unsigned int fsid, std::vector<FrameID> sources) {
	setCalibrationMode(false);
	reset();

	// Two cameras (Left + Right) per source.
	state_.cameras.reserve(sources.size()*2);
	state_.maps1.resize(sources.size()*2);
	state_.maps2.resize(sources.size()*2);

	auto* filter = io->feed()->filter
		(std::unordered_set<uint32_t>{fsid}, {Channel::Left, Channel::Right});

	filter->on([this](const FrameSetPtr& fs){ return onFrameSet_(fs);});

	// NOTE(review): busy-waits until the filter delivers a frameset; consider
	// a sleep/timeout here to avoid spinning if no frameset ever arrives.
	while(fs_current_ == nullptr) {
		auto fss = filter->getLatestFrameSets();
		if (fss.size() == 1) { fs_current_ = fss.front(); }
	}

	for (auto id : sources) {
		// stereo calibration: register both channels of each source
		auto cl = CameraID(id.frameset(), id.source(), Channel::Left);
		auto cr = CameraID(id.frameset(), id.source(), Channel::Right);
		state_.cameras.push_back(cl);
		state_.cameras.push_back(cr);

		const auto& frame = (*fs_current_)[id.source()].cast<ftl::rgbd::Frame>();
		// NOTE: assumes left size is the same as right size!
		auto sz = frame.getSize();
		// Scale stored intrinsics to the current frame resolution.
		auto calibl = getCalibration(cl);
		calibl.intrinsic = CalibrationData::Intrinsic(calibl.intrinsic, sz);
		auto calibr = getCalibration(cr);
		calibr.intrinsic = CalibrationData::Intrinsic(calibr.intrinsic, sz);
		state_.calib.addStereoCamera(calibl, calibr);

		// Update rectification for the newly added pair.
		unsigned int idx = state_.cameras.size() - 2;
		stereoRectify(idx, idx + 1, calibl, calibr);
	}

	// initialize last points structure; can't be resized while running (without
	// mutex)
	unsigned int npoints = state_.calib_object->object().size();
	state_.points_prev.resize(state_.cameras.size());
	for (unsigned int i = 0; i < state_.cameras.size(); i++) {
		state_.points_prev[i] = std::vector<cv::Point2d>(npoints);
	}

	auto* view = new ftl::gui2::ExtrinsicCalibrationView(screen, this);
	view->onClose([this, filter]() {
		// On close: stop capture, wait for pending work, restore calibration
		// mode on all involved frames.
		filter->remove();
		state_.capture = false;
		if (future_.valid()) {
			future_.wait();
		}
		if (fs_current_ == nullptr) { return; }

		// change mode only once per frame (cameras contain same frame twice)
		std::unordered_set<uint32_t> fids;
		for (const auto camera : state_.cameras) {
			fids.insert(camera.source());
		}
		for (const auto i : fids) {
			setCalibrationMode((*fs_current_)[i], true);
		}
	});

	state_.capture = true;
	screen->setView(view);
}
/** Human readable name for a camera index: source name plus channel side. */
std::string ExtrinsicCalibration::cameraName(int c) {
	const auto& cam = state_.cameras[c];
	const std::string side = (cam.channel == Channel::Left) ? "Left" : "Right";
	return (*fs_current_)[cam.id].name() + " - " + side;
}

/** Names for all active cameras, in camera-index order. */
std::vector<std::string> ExtrinsicCalibration::cameraNames() {
	const int n = cameraCount();
	std::vector<std::string> names;
	names.reserve(n);
	for (int c = 0; c < n; ++c) {
		names.push_back(cameraName(c));
	}
	return names;
}
CalibrationData::Calibration ExtrinsicCalibration::calibration(int c) {
	// Optimized (bundle-adjusted) calibration for camera index c.
	return state_.calib.calibrationOptimized(c);
}

bool ExtrinsicCalibration::onFrameSet_(const FrameSetPtr& fs) {
	// Always publish the newest frameset for the view and request a redraw.
	std::atomic_store(&fs_update_, fs);
	screen->redraw();

	bool all_good = true;
	for (const auto& c : state_.cameras) {
		all_good &= checkFrame((*fs)[c.source()]);
	}
	//if (!all_good) { return true; }

	if (!state_.capture) { return true; }
	// Only one detection task at a time; drop frameset if still busy.
	if (running_.exchange(true)) { return true; }

	future_ = ftl::pool.push([this, fs = fs](int thread_id) {
		cv::Mat K;
		cv::Mat distCoeffs;
		std::vector<cv::Point2d> points;
		int count = 0;	// number of cameras in which the target was detected

		for (unsigned int i = 0; i < state_.cameras.size(); i++) {
			const auto& id = state_.cameras[i];
			const auto& calib = state_.calib.calibration(i).intrinsic;
			if (!(*fs)[id.source()].hasChannel(id.channel)) { continue; }

			points.clear();
			const cv::cuda::GpuMat& im = (*fs)[id.source()].get<cv::cuda::GpuMat>(id.channel);
			K = calib.matrix();
			distCoeffs = calib.distCoeffs.Mat();

			try {
				int n = state_.calib_object->detect(im, points, K, distCoeffs);
				if (n > 0) {
					state_.calib.points().addPoints(i, points);
					state_.points_prev[i] = points;
					count++;
				}
			}
			catch (std::exception& ex) {
				// Detection failure for one camera must not abort the rest.
				LOG(ERROR) << ex.what();
			}
		}

		// Keep the observation only if enough cameras saw the target.
		if (count < state_.min_cameras) {
			state_.calib.points().clear();
		}
		else {
			state_.calib.points().next();
		}
		running_ = false;
	});
	return true;
}
bool ExtrinsicCalibration::hasFrame(int camera) {
	// True when a frameset is loaded and it carries this camera's channel.
	const auto id = state_.cameras[camera];
	return (std::atomic_load(&fs_current_).get() != nullptr) &&
		((*fs_current_)[id.source()].hasChannel(id.channel));
}

const cv::cuda::GpuMat ExtrinsicCalibration::getFrame(int camera) {
	// Caller must check hasFrame(camera) first.
	const auto id = state_.cameras[camera];
	return (*fs_current_)[id.source()].cast<ftl::rgbd::Frame>().get<cv::cuda::GpuMat>(id.channel);
}

const cv::cuda::GpuMat ExtrinsicCalibration::getFrameRectified(int c) {
	// Fall back to the raw frame while calibration runs or maps are missing.
	if (running_ || state_.maps1.size() <= (unsigned int)(c)) {
		return getFrame(c);
	}
	cv::cuda::GpuMat remapped;
	cv::cuda::remap(getFrame(c), remapped, state_.maps1[c], state_.maps2[c], cv::INTER_LINEAR);
	return remapped;
}
int ExtrinsicCalibration::cameraCount() {
	return state_.cameras.size();
}

bool ExtrinsicCalibration::next() {
	// Swap in the most recent frameset from the callback, if one arrived.
	if (std::atomic_load(&fs_update_).get()) {
		std::atomic_store(&fs_current_, fs_update_);
		std::atomic_store(&fs_update_, {});
		return true;
	}
	return false;
}

bool ExtrinsicCalibration::capturing() {
	return state_.capture;
}

void ExtrinsicCalibration::setCapture(bool v) {
	state_.capture = v;
}
/** All available framesets as (URI, frameset id) pairs. */
std::vector<std::pair<std::string, unsigned int>> ExtrinsicCalibration::listFrameSets() {
	const auto framesets = io->feed()->listFrameSets();
	std::vector<std::pair<std::string, unsigned int>> out;
	out.reserve(framesets.size());
	for (const auto fsid : framesets) {
		out.emplace_back(io->feed()->getURI(fsid), fsid);
	}
	return out;
}

/** Sources in the given frameset as (name, frame id) pairs. When `all` is
 * false, only sources providing the CalibrationData channel are listed. */
std::vector<std::pair<std::string, ftl::data::FrameID>> ExtrinsicCalibration::listSources(unsigned int fsid, bool all) {
	std::vector<std::pair<std::string, FrameID>> cameras;
	auto fs = io->feed()->getFrameSet(fsid);
	for (const auto& id : io->feed()->listFrames()) {
		if (id.frameset() != fsid) { continue; }
		const bool calibratable =
			io->feed()->availableChannels(id).count(Channel::CalibrationData) > 0;
		if (!(all || calibratable)) { continue; }
		cameras.emplace_back((*fs)[id.source()].name(), id);
	}
	return cameras;
}
/** Copy of the currently selected (active) camera list. */
std::vector<ExtrinsicCalibration::CameraID> ExtrinsicCalibration::cameras() {
	// Returning by value copies the vector, same as the element-wise
	// copy loop it replaces.
	return state_.cameras;
}
bool ExtrinsicCalibration::isBusy() {
	return running_;
}

void ExtrinsicCalibration::updateCalibration() {
	// Write optimized calibrations back into each frame's CalibrationData.
	auto fs = std::atomic_load(&fs_current_);
	std::map<ftl::data::FrameID, ftl::calibration::CalibrationData> update;

	for (unsigned int i = 0; i < state_.cameras.size(); i++) {
		auto& c = state_.cameras[i];
		auto frame_id = ftl::data::FrameID(c);

		if (update.count(frame_id) == 0) {
			// Seed from the frame's existing calibration data so channels not
			// touched here are preserved.
			auto& frame = fs->frames[c];
			update[frame_id] = frame.get<CalibrationData>(Channel::CalibrationData);
		}

		update[frame_id].origin = cv::Mat::eye(4, 4, CV_64FC1);
		update[frame_id].get(c.channel) = state_.calib.calibrationOptimized(i);
	}

	// Push the merged updates back to the sources.
	for (auto& [fid, calib] : update) {
		auto& frame = fs->frames[fid];
		setCalibration(frame, calib);
	}
}

void ExtrinsicCalibration::updateCalibration(int c) {
	// Per-camera update not supported; use updateCalibration() instead.
	throw ftl::exception("Not implemented");
}
void ExtrinsicCalibration::stereoRectify(int cl, int cr,
		const CalibrationData::Calibration& l, const CalibrationData::Calibration& r) {

	// Pre-conditions: distinct camera positions, equal resolutions, and
	// map vectors already sized to hold indices cl and cr.
	CHECK_NE(l.extrinsic.tvec, r.extrinsic.tvec);
	CHECK_EQ(l.intrinsic.resolution, r.intrinsic.resolution);
	CHECK_LT(cr, state_.maps1.size());
	CHECK_LT(cr, state_.maps2.size());

	auto size = l.intrinsic.resolution;

	// Relative transform from the left camera to the right camera.
	cv::Mat T = r.extrinsic.matrix() * inverse(l.extrinsic.matrix());

	cv::Mat R, t, R1, R2, P1, P2, Q, map1, map2;
	getRotationAndTranslation(T, R, t);

	cv::stereoRectify(
		l.intrinsic.matrix(), l.intrinsic.distCoeffs.Mat(),
		r.intrinsic.matrix(), r.intrinsic.distCoeffs.Mat(), size,
		R, t, R1, R2, P1, P2, Q, cv::CALIB_ZERO_DISPARITY, 1.0);

	// sanity check: rectification should give same rotation for both cameras
	// (with certain accuracy). R1 and R2 contain 3x3 rotation matrices
	// from unrectified to rectified coordinates.
	cv::Vec3d rvec1;
	cv::Vec3d rvec2;
	cv::Rodrigues(R1 * l.extrinsic.matrix()(cv::Rect(0, 0, 3, 3)), rvec1);
	cv::Rodrigues(R2 * r.extrinsic.matrix()(cv::Rect(0, 0, 3, 3)), rvec2);
	CHECK_LT(cv::norm(rvec1, rvec2), 0.01);

	// Build CPU remap tables, then upload to the per-camera GPU map vectors.
	cv::initUndistortRectifyMap(l.intrinsic.matrix(), l.intrinsic.distCoeffs.Mat(),
		R1, P1, size, CV_32FC1, map1, map2);
	state_.maps1[cl].upload(map1);
	state_.maps2[cl].upload(map2);

	cv::initUndistortRectifyMap(r.intrinsic.matrix(), r.intrinsic.distCoeffs.Mat(),
		R2, P2, size, CV_32FC1, map1, map2);
	state_.maps1[cr].upload(map1);
	state_.maps2[cr].upload(map2);
}
void ExtrinsicCalibration::run() {
	// Skip if a background task is already running.
	if (running_.exchange(true)) { return; }

	future_ = ftl::pool.push([this](int id) {
		try {
			// Translate GUI Flags bits into bundle adjustment options.
			auto opt = state_.calib.options();
			opt.optimize_intrinsic = !(state_.flags & Flags::FIX_INTRINSIC);
			opt.rational_model = state_.flags & Flags::RATIONAL_MODEL;
			opt.fix_focal = state_.flags & Flags::FIX_FOCAL;
			opt.fix_distortion = state_.flags & Flags::FIX_DISTORTION;
			opt.zero_distortion = state_.flags & Flags::ZERO_DISTORTION;
			opt.fix_principal_point = state_.flags & Flags::FIX_PRINCIPAL_POINT;
			opt.loss = (state_.flags & Flags::LOSS_CAUCHY) ?
				ftl::calibration::BundleAdjustment::Options::Loss::CAUCHY :
				ftl::calibration::BundleAdjustment::Options::Loss::SQUARED;
			opt.use_nonmonotonic_steps = state_.flags & Flags::NONMONOTONIC_STEP;

			state_.calib.setOptions(opt);
			state_.calib.run();

			// Rectification maps for visualization; stereo cameras assumed
			// if non-stereo cameras added visualization/grouping (by index)
			// has to be different.
			state_.maps1.resize(cameraCount());
			state_.maps2.resize(cameraCount());

			for (int c = 0; c < cameraCount(); c += 2) {
				auto l = state_.calib.calibrationOptimized(c);
				auto r = state_.calib.calibrationOptimized(c + 1);
				stereoRectify(c, c + 1, l, r);

				/*LOG(INFO) << c << ": rvec " << l.extrinsic.rvec
						<< "; tvec " << l.extrinsic.tvec;
				LOG(INFO) << c + 1 << ": rvec " << r.extrinsic.rvec
						<< "; tvec " << r.extrinsic.tvec;*/
				LOG(INFO) << "baseline (" << c << ", " << c + 1 << "): "
						<< cv::norm(l.extrinsic.tvec - r.extrinsic.tvec);
			}
		}
		catch (ftl::exception &ex) {
			LOG(ERROR) << ex.what() << "\n" << ex.trace();
		}
		catch (std::exception &ex) {
			LOG(ERROR) << ex.what();
		}
		// Always clear the busy flag, including on error.
		running_ = false;
	});
}
/** Reprojection error for one camera, or for all cameras combined when
 * camera < 0 (the default). Returns NAN for an out-of-range camera index.
 */
double ExtrinsicCalibration::reprojectionError(int camera) {
	// BUG FIX: was `camera <= cameraCount()`, which returned NAN for every
	// valid index (including -1) and made the branches below unreachable.
	if (camera >= cameraCount()) {
		return NAN;
	}
	if (camera < 0) {
		return state_.calib.reprojectionError();
	}
	else {
		return state_.calib.reprojectionError(camera);
	}
}
/** Read the stored calibration for one camera from the current frameset.
 * @throws ftl::exception if no frameset is loaded or the channel has no
 *         calibration.
 */
ftl::calibration::CalibrationData::Calibration ExtrinsicCalibration::getCalibration(CameraID id) {
	if (fs_current_ == nullptr) {
		throw ftl::exception("No frame");
	}

	auto calib = (*fs_current_)[id.source()].get<CalibrationData>(Channel::CalibrationData);
	if (!calib.hasCalibration(id.channel)) {
		// FIX: corrected misspelled error message ("requierd").
		throw ftl::exception("Calibration missing for required channel");
	}

	return calib.get(id.channel);
}
const std::vector<cv::Point2d>& ExtrinsicCalibration::previousPoints(int camera) {
	// not really thread safe (but points_prev_ should not resize)
	return state_.points_prev[camera];
}

int ExtrinsicCalibration::getFrameCount(int camera) {
	// Number of observations captured so far for this camera.
	return state_.calib.points().getCount(unsigned(camera));
}

void ExtrinsicCalibration::setFlags(int flags) {
	state_.flags = flags;
}

int ExtrinsicCalibration::flags() const {
	return state_.flags;
}
// debug method: save state to file (msgpack)
void ExtrinsicCalibration::saveInput(const std::string& filename) {
	ftl::pool.push([this, filename](int){
		// Stop capture and spin until the detection task has finished.
		do {
			// calib must not be modified; would be better to have mutex here
			state_.capture = false;
		}
		while(running_);
		running_ = true;

		try { state_.calib.toFile(filename);}
		catch (std::exception& ex) { LOG(ERROR) << "Calib save failed " << ex.what(); }

		running_ = false;
	});
}
// debug method: load state from file (msgpack)
void ExtrinsicCalibration::loadInput(const std::string& filename) { ftl::pool.push([this, filename](int){
	// Stop capture and spin until the detection task has finished.
	do {
		// calib must not be modified; would be better to have mutex here
		state_.capture = false;
	}
	while(running_);
	running_ = true;

	try { state_.calib.fromFile(filename); }
	catch (std::exception& ex) { LOG(ERROR) << "Calib load failed: " << ex.what(); }

	running_ = false;
	});
}
#include <loguru.hpp>
#include "calibration.hpp"
#include "../../screen.hpp"
#include "../../widgets/popupbutton.hpp"
#include "../../views/calibration/intrinsicview.hpp"
#include <opencv2/imgproc.hpp>
#include <opencv2/calib3d.hpp>
#include <ftl/calibration/structures.hpp>
#include <ftl/threads.hpp>
#include <nanogui/entypo.h>
using ftl::gui2::Calibration;
using ftl::gui2::IntrinsicCalibration;
using ftl::calibration::ChessboardObject;
using ftl::calibration::CalibrationData;
using ftl::codecs::Channel;
using ftl::data::FrameID;
using ftl::data::FrameSetPtr;
void IntrinsicCalibration::init() {
	reset();
}

IntrinsicCalibration::~IntrinsicCalibration() {
	// Background task captures `this`; block until it completes.
	if(future_.valid()) {
		future_.wait();
	}
}

cv::Size IntrinsicCalibration::chessboardSize() {
	return state_->object->chessboardSize();
}

double IntrinsicCalibration::squareSize() {
	return state_->object->squareSize();
}

void IntrinsicCalibration::setChessboard(cv::Size size, double square) {
	// NOTE(review): arguments are passed as (height, width); verify this order
	// against the ChessboardObject constructor declaration.
	state_->object = std::make_unique<ChessboardObject>(size.height, size.width, square);
}

void IntrinsicCalibration::reset() {
	// Fresh state: default chessboard, left channel, default OpenCV flags.
	state_ = std::make_unique<State>();
	state_->object = std::make_unique<ChessboardObject>();
	state_->channel = Channel::Left;
	state_->channel_alt = Channel::Left;
	state_->flags.set(defaultFlags());
}
void IntrinsicCalibration::start(ftl::data::FrameID id) {
	reset();
	setCalibrationMode(false);

	state_->id = id;

	auto* filter = io->feed()->filter
		(std::unordered_set<uint32_t>{id.frameset()},
		{Channel::Left, Channel::Right});

	filter->on([this](const FrameSetPtr& fs){ return onFrame_(fs); });

	// NOTE(review): busy-waits until the filter delivers a frameset; consider
	// a sleep/timeout here to avoid spinning if no frameset ever arrives.
	while(fs_current_ == nullptr) {
		auto fss = filter->getLatestFrameSets();
		if (fss.size() == 1) { fs_current_ = fss.front(); }
	}

	auto fs = std::atomic_load(&fs_current_);
	setChannel_(fs);

	auto* view = new ftl::gui2::IntrinsicCalibrationView(screen, this);
	view->onClose([filter, this](){
		// if calib_ caches images, also reset() here!
		filter->remove();
		if (fs_current_) {
			// Restore normal (non-calibration) mode for the source.
			setCalibrationMode(fs_current_->frames[state_->id.source()], true);
		}
		reset();
		fs_current_.reset();
		fs_update_.reset();
	});
	screen->setView(view);
}
void IntrinsicCalibration::setChannel(Channel channel) {
	// Switch channel and reload calibration state for it.
	state_->channel = channel;
	auto fs = std::atomic_load(&fs_current_);
	setChannel_(fs);
}
/** Reset capture state for the active channel and load any existing
 * calibration for it from the frameset. Safe to call with a null frameset.
 */
void IntrinsicCalibration::setChannel_(FrameSetPtr fs) {
	// reset points, find if high res available and find correct resolution
	// TODO/FIXME: channel might be missing from previous frameset; temporary
	// fix uses left channel to set resolution (assumes left resolution always
	// the same as right resolution).

	state_->calib = CalibrationData::Intrinsic();
	state_->points.clear();
	state_->points_object.clear();
	state_->count = 0;

	state_->channel_alt = state_->channel;
	if (fs == nullptr) {
		// BUG FIX: previously only logged and then fell through, dereferencing
		// a null FrameSetPtr below.
		LOG(ERROR) << "No frame, calibration not loaded";
		return;
	}

	auto& frame = (*fs)[state_->id.source()].cast<ftl::rgbd::Frame>();
	cv::Size size;

	if (state_->channel== Channel::Left) {
		size = frame.get<cv::Mat>(state_->channel_alt).size();
	}
	else if (state_->channel== Channel::Right) {
		// See TODO above: use left channel size as a stand-in for right.
		size = frame.get<cv::Mat>(Channel::Left).size();
	}

	try {
		auto calib = frame.get<CalibrationData>(Channel::CalibrationData);
		if (calib.hasCalibration(state_->channel)) {
			// Scale the stored calibration to the current resolution.
			auto intrinsic = calib.get(state_->channel).intrinsic;
			state_->calib = CalibrationData::Intrinsic(intrinsic, size);
			state_->calibrated = true;
		}
		else {
			state_->calib.resolution = size;
		}
	}
	catch (std::exception& ex) {
		LOG(ERROR) << "Could not read calibration: " << ex.what()
				<< "; is this a valid source?";
	}
}
bool IntrinsicCalibration::onFrame_(const ftl::data::FrameSetPtr& fs) {
	// Always publish the newest frameset for the view and request a redraw.
	std::atomic_store(&fs_update_, fs);
	screen->redraw();

	auto& frame = fs->frames[state_->id.source()];

	if (!checkFrame(frame)) { return true; }
	if (!state_->capture) { return true; }
	// Rate limit: skip frame until the capture interval has elapsed.
	if ((float(glfwGetTime()) - state_->last) < state_->frequency) { return true; }
	// Only one detection task at a time.
	if (state_->running.exchange(true)) { return true; }

	future_ = ftl::pool.push( [fs, this]
			(int thread_id) {

		try {
			auto& frame = (*fs)[state_->id.source()].cast<ftl::rgbd::Frame>();
			auto im = getMat(frame, state_->channel_alt);
			cv::cvtColor(im, state_->gray, cv::COLOR_BGRA2GRAY);

			std::vector<cv::Point2d> points;
			int npoints = state_->object->detect(state_->gray, points);

			if (npoints > 0) {
				std::unique_lock<std::mutex> lk(mtx_);

				// Store detected image points and corresponding object points
				// (converted Point2d/Point3d -> Point2f/Point3f element-wise).
				auto& new_points = state_->points.emplace_back();
				for (auto p : points) {
					new_points.push_back(p);
				}

				auto& new_points_obj = state_->points_object.emplace_back();
				for (auto p : state_->object->object()) {
					new_points_obj.push_back(p);
				}
				state_->count++;
			}
			else {
				LOG(INFO) << "Calibration pattern was not detected";
			}
		}
		catch (std::exception &e) {
			LOG(ERROR) << "exception in chesboard detection: " << e.what();
			state_->running = false;
			throw;
		}

		state_->running = false;
		state_->last = float(glfwGetTime());
	});

	return true;
}
void IntrinsicCalibration::saveCalibration() {
	// Copy the frame's calibration data, replace the active channel's
	// intrinsic parameters with the new result and send it back.
	auto& frame = fs_current_->frames[state_->id.source()];
	CalibrationData calib_data = CalibrationData(frame.get<CalibrationData>(Channel::CalibrationData));
	auto& calibration = calib_data.get(state_->channel);
	calibration.intrinsic = state_->calib;
	setCalibration(frame, calib_data);
}

int IntrinsicCalibration::defaultFlags() {
	int flags = state_->flags.defaultFlags();

	// Override defaults with per-flag boolean config values, when present.
	for (int i : state_->flags.list()) {
		auto flag = get<bool>(state_->flags.name(i));
		if (flag) {
			if (*flag) flags |= i;
			else flags &= (~i);
		}
	}

	return flags;
}

bool IntrinsicCalibration::isBusy() {
	return state_->capture || state_->running;
}
void IntrinsicCalibration::run() {
	state_->running = true;
	future_ = ftl::pool.push([this](int id) {
		try {
			// Log the enabled calibration flags.
			for (auto f : state_->flags.list()) {
				if (state_->flags.has(f)) {
					LOG(INFO) << state_->flags.name(f);
				}
			}
			cv::Size2d ssize = sensorSize();
			cv::Mat K;
			cv::Mat distCoeffs;
			cv::Size size = state_->calib.resolution;

			if (state_->flags.has(cv::CALIB_USE_INTRINSIC_GUESS)) {
				// OpenCV seems to use these anyways?
				K = state_->calib.matrix(size);
				state_->calib.distCoeffs.Mat(12).copyTo(distCoeffs);
			}

			std::vector<cv::Mat> rvecs, tvecs;
			auto term = cv::TermCriteria
				(cv::TermCriteria::COUNT|cv::TermCriteria::EPS, state_->max_iter, 1.0e-6);

			state_->reprojection_error = cv::calibrateCamera(
				state_->points_object, state_->points,
				size, K, distCoeffs, rvecs, tvecs,
				state_->flags, term);

			// Preserve configured sensor size across the result swap.
			state_->calib = CalibrationData::Intrinsic(K, distCoeffs, size);
			state_->calib.sensorSize = ssize;
			state_->calibrated = true;
		}
		catch (std::exception &e) {
			LOG(ERROR) << "exception in calibration: " << e.what();
			state_->running = false;
			throw;
		}
		state_->running = false;
	});
}
bool IntrinsicCalibration::hasFrame() {
	// True when an update is pending and it carries the active channel.
	return (std::atomic_load(&fs_update_).get() != nullptr)
		&& fs_update_->frames[state_->id.source()].hasChannel(state_->channel_alt);
};

cv::cuda::GpuMat IntrinsicCalibration::getFrame() {
	// Swap in the latest frameset from the callback, if one arrived.
	if (std::atomic_load(&fs_update_)) {
		fs_current_ = fs_update_;
		std::atomic_store(&fs_update_, {});
	}

	if (!fs_current_) {
		return cv::cuda::GpuMat();
	}

	return getGpuMat((*fs_current_)[state_->id.source()].cast<ftl::rgbd::Frame>(),
		state_->channel_alt);
}

cv::cuda::GpuMat IntrinsicCalibration::getFrameUndistort() {
	// Without a calibration, undistortion is a no-op: return the raw frame.
	if (!calibrated()) {
		return getFrame();
	}
	if (std::atomic_load(&fs_update_)) {
		fs_current_ = fs_update_;
		std::atomic_store(&fs_update_, {});
	}

	if (!fs_current_) {
		return cv::cuda::GpuMat();
	}

	auto im = getMat((*fs_current_)[state_->id.source()].cast<ftl::rgbd::Frame>(),
		state_->channel_alt);

	// NOTE: would be faster to use remap() and computing the maps just once if
	// performance is relevant here
	cv::Mat im_undistort;
	cv::cuda::GpuMat gpu;
	cv::undistort(im, im_undistort, state_->calib.matrix(), state_->calib.distCoeffs.Mat(12));
	gpu.upload(im_undistort);
	return gpu;
}
cv::Size2d IntrinsicCalibration::sensorSize() {
	// Prefer the value stored in the calibration; fall back to config values.
	if (state_->calib.sensorSize == cv::Size2d{0.0, 0.0}) {
		double w = value("sensor_width", 0.0);
		double h = value("sensor_height", 0.0);
		return {w, h};
	}
	else {
		return state_->calib.sensorSize;
	}
};

void IntrinsicCalibration::setSensorSize(cv::Size2d sz) {
	state_->calib.sensorSize = sz;
}

double IntrinsicCalibration::focalLength() {
	// Focal length in mm: fx (pixels) scaled by sensor mm-per-pixel.
	return (state_->calib.fx)*(sensorSize().width/state_->calib.resolution.width);
}

void IntrinsicCalibration::setFocalLength(double value, cv::Size2d sensor_size) {
	setSensorSize(sensor_size);
	// Convert mm to pixels; sets fx == fy (square pixels assumed).
	double f = value*(state_->calib.resolution.width/sensor_size.width);
	state_->calib.fx = f;
	state_->calib.fy = f;
}

void IntrinsicCalibration::resetPrincipalPoint() {
	// Place the principal point at the image center.
	auto sz = state_->calib.resolution;
	state_->calib.cx = double(sz.width)/2.0;
	state_->calib.cy = double(sz.height)/2.0;
}

void IntrinsicCalibration::resetDistortion() {
	state_->calib.distCoeffs = CalibrationData::Intrinsic::DistortionCoefficients();
}
bool IntrinsicCalibration::hasChannel(Channel c) {
if (fs_current_) {
return (*fs_current_)[state_->id.source()].hasChannel(c);
}
return false;
}
std::vector<std::pair<std::string, FrameID>> IntrinsicCalibration::listSources(bool all) {
std::vector<std::pair<std::string, FrameID>> cameras;
for (auto id : io->feed()->listFrames()) {
auto channels = io->feed()->availableChannels(id);
if (all || (channels.count(Channel::CalibrationData) == 1)) {
auto name = (*(io->feed()->getFrameSet(id.frameset())))[id.source()].name();
//auto name = io->feed()->getURI(id.frameset()) + "#" + std::to_string(id.source());
cameras.push_back({name, id});
}
}
return cameras;
}
/** Most recently captured chessboard points, or empty if none exist or the
 * detection thread currently holds the lock (non-blocking). */
std::vector<cv::Point2f> IntrinsicCalibration::previousPoints() {
	std::unique_lock<std::mutex> lk(mtx_, std::try_to_lock);
	if (!lk.owns_lock() || state_->points.empty()) { return {}; }
	return std::vector<cv::Point2f>(state_->points.back());
}
// Snapshot (by value) of the current intrinsic calibration.
ftl::calibration::CalibrationData::Intrinsic IntrinsicCalibration::calibration() {
return state_->calib;
}
#include <loguru.hpp>
#include "calibration.hpp"
#include "../../screen.hpp"
#include "../../widgets/popupbutton.hpp"
#include "../../views/calibration/stereoview.hpp"
#include <opencv2/imgproc.hpp>
#include <opencv2/calib3d.hpp>
#include <ftl/calibration/parameters.hpp>
#include <ftl/calibration/structures.hpp>
#include <ftl/threads.hpp>
#include <nanogui/entypo.h>
using ftl::gui2::Calibration;
using ftl::gui2::StereoCalibration;
using ftl::calibration::ChessboardObject;
using ftl::calibration::CalibrationData;
using ftl::codecs::Channel;
using ftl::data::FrameID;
using ftl::data::FrameSetPtr;
////////////////////////////////////////////////////////////////////////////////
// Enable/disable chessboard capture on incoming frames.
void StereoCalibration::setCapture(bool v) {
state_->capture = v;
}
// True while chessboard capture is enabled.
bool StereoCalibration::capturing() {
return state_->capture;
}
// Set minimum interval between captures (seconds; compared against
// glfwGetTime() in onFrame_).
void StereoCalibration::setFrequency(float v) {
state_->frequency = v;
}
// Current minimum capture interval (seconds).
float StereoCalibration::frequency() {
return state_->frequency;
}
/** Allocate fresh module state with a default chessboard target.
 * Idiom: use std::make_unique instead of unique_ptr(new ...). */
void StereoCalibration::init() {
	state_ = std::make_unique<State>();
	state_->object = std::make_unique<ChessboardObject>();
}
/** Stop any background detection task and wait for it before members are
 * destroyed, then release frameset references. */
StereoCalibration::~StereoCalibration() {
	if (state_) { state_->running = false; }
	if (future_.valid()) { future_.wait(); }
	fs_current_.reset();
	fs_update_.reset();
}
/** Discard all captured points and state.
 * Busy-waits until any in-flight detection task observes capture == false
 * and clears running. Idiom: make_unique for consistency with init(). */
void StereoCalibration::reset() {
	while (state_->running) { state_->capture = false; }
	state_ = std::make_unique<State>();
	state_->object = std::make_unique<ChessboardObject>();
	resetFlags();
}
// Chessboard target dimensions (inner corners).
cv::Size StereoCalibration::chessboardSize() {
return state_->object->chessboardSize();
}
// Physical size of one chessboard square.
double StereoCalibration::squareSize() {
return state_->object->squareSize();
}
// Replace the chessboard target with new dimensions and square size.
// NOTE(review): passes (height, width) — verify ChessboardObject's
// constructor parameter order matches this intent.
void StereoCalibration::setChessboard(cv::Size size, double square) {
state_->object = std::make_unique<ChessboardObject>(size.height, size.width, square);
}
/** Begin stereo calibration for the given source: create the view, subscribe
 * to Left/Right frames, and read the source's existing calibration (if any).
 * @param id frame id (frameset + source index) to calibrate. */
void StereoCalibration::start(ftl::data::FrameID id) {
	reset();
	setCalibrationMode(false);
	state_->id = id;

	auto* view = new ftl::gui2::StereoCalibrationView(screen, this);
	auto* filter = io->feed()->filter(
		std::unordered_set<uint32_t>{id.frameset()},
		{Channel::Left, Channel::Right});

	filter->on([this](const FrameSetPtr& fs){ return onFrame_(fs); });

	view->onClose([filter, this](){
		// if state_->calib caches images, also reset() here!
		filter->remove();
		if (fs_current_) {
			setCalibrationMode(fs_current_->frames[state_->id.source()], true);
		}
		reset();
		fs_current_.reset();
		fs_update_.reset();
	});
	screen->setView(view);

	for (auto fs : filter->getLatestFrameSets()) {
		if (!(fs->frameset() == state_->id.frameset()) ||
			!(fs->hasFrame(state_->id.source()))) { continue; }

		// read calibration channel and set channel_alt_ to high res if available
		try {
			auto& frame = (*fs)[state_->id.source()];
			state_->calib = frame.get<CalibrationData>(Channel::CalibrationData);
			state_->highres = false; // TODO: Remove
			auto sizel = frame.get<cv::cuda::GpuMat>(channelLeft_()).size();
			// BUGFIX: right size was previously read from the LEFT channel,
			// so the resolution mismatch check below could never trigger.
			auto sizer = frame.get<cv::cuda::GpuMat>(channelRight_()).size();
			if (sizel != sizer) {
				LOG(ERROR) << "Frames have different resolutions";
				// TODO: do not proceed
			}
			state_->imsize = sizel;
		}
		catch (std::exception& ex) {
			LOG(ERROR) << "Could not read calibration: " << ex.what()
					   << "; is this a valid source?";
		}
		break;
	}
}
/** Frame callback: stores the newest frameset and, when capturing and rate
 * limits allow, queues an asynchronous chessboard detection on both images.
 * @return true always (keeps the filter subscription alive). */
bool StereoCalibration::onFrame_(const ftl::data::FrameSetPtr& fs) {
	std::atomic_store(&fs_update_, fs);
	screen->redraw();

	auto& frame = fs->frames[state_->id.source()];

	if (!checkFrame(frame)) { return true; }
	if (!frame.hasAll({channelLeft_(), channelRight_()})) { return true; }
	if (!state_->capture) { return true; }
	// Rate limit: 'frequency' is the minimum interval (seconds) between captures.
	if ((float(glfwGetTime()) - state_->last) < state_->frequency) { return true; }
	// Only one detection task at a time; exchange() also claims the slot.
	if (state_->running.exchange(true)) { return true; }

	future_ = ftl::pool.push([this, fs] (int thread_id) {
		try {
			auto& frame = (*fs)[state_->id.source()].cast<ftl::rgbd::Frame>();
			auto l = getMat(frame, channelLeft_());
			auto r = getMat(frame, channelRight_());
			cv::cvtColor(l, state_->gray_left, cv::COLOR_BGRA2GRAY);
			cv::cvtColor(r, state_->gray_right, cv::COLOR_BGRA2GRAY);

			std::vector<cv::Point2d> pointsl;
			std::vector<cv::Point2d> pointsr;
			// Record only when the board is detected in BOTH images.
			if ((state_->object->detect(state_->gray_left, pointsl) == 1) &&
				(state_->object->detect(state_->gray_right, pointsr) == 1)) {

				std::unique_lock<std::mutex> lk(mtx_);
				auto& new_points_l = state_->points_l.emplace_back();
				new_points_l.reserve(pointsl.size());
				auto& new_points_r = state_->points_r.emplace_back();
				new_points_r.reserve(pointsr.size());
				auto& new_points_obj = state_->points_object.emplace_back();
				new_points_obj.reserve(pointsl.size());

				for (auto p : pointsl) { new_points_l.push_back(p); }
				for (auto p : pointsr) { new_points_r.push_back(p); }
				for (auto p : state_->object->object()) { new_points_obj.push_back(p); }
				state_->count++;
			}
		}
		catch (std::exception &e) {
			LOG(ERROR) << "exception in chesboard detection: " << e.what();
			// BUGFIX: previously assigned to a bare 'running' identifier
			// instead of state_->running.
			state_->running = false;
			throw;
		}
		state_->running = false;
		state_->last = float(glfwGetTime());
	});

	return true;
}
/** Write the computed calibration back to the source frame. */
void StereoCalibration::saveCalibration() {
	auto fs = std::atomic_load(&(fs_current_));
	// Guard: previously dereferenced unconditionally, crashing when no
	// frameset had been received yet.
	if (!fs) { return; }
	setCalibration((*fs)[state_->id.source()], state_->calib);
}
/** Reset calibration flags to class defaults, then apply any boolean
 * overrides found in the module configuration (keyed by flag name). */
void StereoCalibration::resetFlags() {
	state_->flags.reset();
	state_->flags.set(state_->flags.defaultFlags());

	for (int i : state_->flags.list()) {
		auto flag = get<bool>(state_->flags.name(i));
		if (!flag) { continue; }
		if (*flag) { state_->flags.set(i); }
		else { state_->flags.unset(i); }
	}
}
// True while capturing frames or while a detection/calibration task runs.
bool StereoCalibration::isBusy() {
return state_->capture || state_->running;
}
/** Run stereo calibration asynchronously (thread pool) over all captured
 * point correspondences. On success both intrinsics are updated, the left
 * camera becomes the reference frame (identity extrinsic) and the right
 * camera stores its pose (R, T) relative to the left. */
void StereoCalibration::run() {
	if (state_->running) { return; }

	state_->running = true;
	future_ = ftl::pool.push([this](int) {
		try {
			auto& calib_l = state_->calib.get(Channel::Left);
			auto& calib_r = state_->calib.get(Channel::Right);
			auto K1 = calib_l.intrinsic.matrix();
			auto distCoeffs1 = calib_l.intrinsic.distCoeffs.Mat();
			// BUGFIX: K2 was previously initialized from calib_l, so the
			// right camera's initial guess used the LEFT camera matrix.
			auto K2 = calib_r.intrinsic.matrix();
			auto distCoeffs2 = calib_r.intrinsic.distCoeffs.Mat();
			cv::Mat R, T, E, F;
			state_->reprojection_error = cv::stereoCalibrate(
				state_->points_object, state_->points_l,
				state_->points_r, K1, distCoeffs1, K2, distCoeffs2,
				state_->imsize, R, T, E, F, state_->flags);

			state_->calib.get(Channel::Left).intrinsic =
				CalibrationData::Intrinsic(K1, distCoeffs1, state_->imsize);
			state_->calib.get(Channel::Right).intrinsic =
				CalibrationData::Intrinsic(K2, distCoeffs2, state_->imsize);
			// Left camera defines the origin of the stereo pair.
			state_->calib.get(Channel::Left).extrinsic = CalibrationData::Extrinsic();
			state_->calib.get(Channel::Right).extrinsic = CalibrationData::Extrinsic(R, T);
		}
		catch (std::exception &e) {
			LOG(ERROR) << "exception in calibration: " << e.what();
			state_->running = false;
			throw;
		}
		state_->running = false;
	});
}
// Return the current frame, first swapping in the newest frameset if one
// has arrived. NOTE(review): assumes at least one frameset has been
// received — fs_current_ is dereferenced without a null check; callers
// should verify hasFrame() first.
ftl::rgbd::Frame& StereoCalibration::frame_() {
if (std::atomic_load(&fs_update_)) {
fs_current_ = fs_update_;
std::atomic_store(&fs_update_, {});
}
return (*fs_current_)[state_->id.source()].cast<ftl::rgbd::Frame>();
}
/** True if an unprocessed frameset containing both Left and Right channels
 * is pending. */
bool StereoCalibration::hasFrame() {
	// Load once and use the local pointer: fs_update_ can be swapped by the
	// frame callback concurrently. Previously the member was dereferenced
	// again after the atomic load (race / potential null dereference); a
	// stray trailing semicolon is also removed.
	auto fs = std::atomic_load(&fs_update_);
	return fs && fs->frames[state_->id.source()].hasAll({Channel::Left, Channel::Right});
}
// Channel used for the left camera image.
Channel StereoCalibration::channelLeft_() {
return Channel::Left;
}
// Channel used for the right camera image.
Channel StereoCalibration::channelRight_() {
return Channel::Right;
}
/** Latest left camera image (GPU). */
cv::cuda::GpuMat StereoCalibration::getLeft() {
	return getGpuMat(frame_(), channelLeft_());
}
/** Latest right camera image (GPU). */
cv::cuda::GpuMat StereoCalibration::getRight() {
	return getGpuMat(frame_(), channelRight_());
}
/** True if the current frame (if any) contains channel c. */
bool StereoCalibration::hasChannel(Channel c) {
	if (!fs_current_) { return false; }
	return (*fs_current_)[state_->id.source()].hasChannel(c);
}
std::vector<std::pair<std::string, FrameID>> StereoCalibration::listSources(bool all) {
std::vector<std::pair<std::string, FrameID>> cameras;
for (auto id : io->feed()->listFrames()) {
auto channels = io->feed()->availableChannels(id);
// TODO: doesn't work
if (all || (channels.count(Channel::CalibrationData) == 1)) {
auto name = io->feed()->getURI(id.frameset()) + "#" + std::to_string(id.source());
cameras.push_back({name, id});
}
}
return cameras;
}
/** Most recently captured left/right chessboard points, or empty if none
 * exist or the detection thread currently holds the lock (non-blocking). */
std::vector<std::vector<cv::Point2f>> StereoCalibration::previousPoints() {
	std::unique_lock<std::mutex> lk(mtx_, std::try_to_lock);
	if (lk.owns_lock() && !state_->points_l.empty()) {
		return { state_->points_l.back(), state_->points_r.back() };
	}
	return {};
}
// Snapshot (by value) of the left camera's calibration.
ftl::calibration::CalibrationData::Calibration StereoCalibration::calibrationLeft() {
return state_->calib.get(Channel::Left);
}
// Snapshot (by value) of the right camera's calibration.
ftl::calibration::CalibrationData::Calibration StereoCalibration::calibrationRight() {
return state_->calib.get(Channel::Right);
}
// Heuristic: the pair counts as calibrated once the translation between the
// two cameras is non-zero (a successful stereo calibration always yields a
// non-zero baseline).
bool StereoCalibration::calibrated() {
return (cv::norm(calibrationLeft().extrinsic.tvec,
calibrationRight().extrinsic.tvec) > 0);
}
// Compute stereo rectification transforms and the undistort/rectify lookup
// maps for both cameras; results are cached in state_->map_l/map_r and the
// valid ROIs in state_->validROI_l/r.
void StereoCalibration::calculateRectification() {
using ftl::calibration::transform::inverse;
auto left = calibrationLeft();
auto right = calibrationRight();
auto size = left.intrinsic.resolution;
// Relative pose of right camera w.r.t. left: T = inv(P_l) * P_r.
cv::Mat T = inverse(left.extrinsic.matrix()) * right.extrinsic.matrix();
cv::Mat Rl, Rr, Pl, Pr, Q;
// R is the top-left 3x3 of T, t the last column; alpha=1.0 keeps all pixels.
cv::stereoRectify(left.intrinsic.matrix(), left.intrinsic.distCoeffs.Mat(),
right.intrinsic.matrix(), right.intrinsic.distCoeffs.Mat(),
size, T(cv::Rect(0, 0, 3, 3)), T(cv::Rect(3, 0, 1, 3)),
Rl, Rr, Pl, Pr, Q, 0, 1.0, {0, 0},
&(state_->validROI_l), &(state_->validROI_r));
// CV_16SC1 maps: compact fixed-point representation for fast remap().
cv::initUndistortRectifyMap(left.intrinsic.matrix(), left.intrinsic.distCoeffs.Mat(),
Rl, Pl, size, CV_16SC1,
state_->map_l.first, state_->map_l.second);
cv::initUndistortRectifyMap(right.intrinsic.matrix(), right.intrinsic.distCoeffs.Mat(),
Rr, Pr, size, CV_16SC1,
state_->map_r.first, state_->map_r.second);
}
/** Rectified left image (GPU). Rectification maps are built lazily. */
cv::cuda::GpuMat StereoCalibration::getLeftRectify() {
	if (state_->map_l.first.empty()) { calculateRectification(); }

	cv::Mat remapped;
	cv::remap(getMat(frame_(), channelLeft_()), remapped,
			  state_->map_l.first, state_->map_l.second, cv::INTER_LINEAR);

	cv::cuda::GpuMat out;
	out.upload(remapped);
	return out;
}
/** Rectified right image (GPU). Rectification maps are built lazily. */
cv::cuda::GpuMat StereoCalibration::getRightRectify() {
	if (state_->map_r.first.empty()) { calculateRectification(); }

	cv::Mat remapped;
	cv::remap(getMat(frame_(), channelRight_()), remapped,
			  state_->map_r.first, state_->map_r.second, cv::INTER_LINEAR);

	cv::cuda::GpuMat out;
	out.upload(remapped);
	return out;
}
#include "camera.hpp"
#include "statistics.hpp"
#include "../views/camera3d.hpp"
#include <ftl/rgbd/capabilities.hpp>
#include <ftl/streams/renderer.hpp>
#include <chrono>
#include <ftl/utility/matrix_conversion.hpp>
#include <ftl/calibration/structures.hpp>
#include <ftl/calibration/parameters.hpp>
#include <ftl/codecs/shapes.hpp>
#include <ftl/operators/poser.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/cudaarithm.hpp>
#include <opencv2/core/eigen.hpp>
#include <loguru.hpp>
using ftl::gui2::Camera;
using ftl::codecs::Channel;
using ftl::rgbd::Capability;
using namespace std::literals::chrono_literals;
using ftl::data::Message;
/** Create the colouriser and overlay helpers owned by this module.
 * ftl::create returns a raw pointer which we immediately take ownership of. */
void Camera::init() {
	colouriser_ = std::unique_ptr<ftl::render::Colouriser>(
		ftl::create<ftl::render::Colouriser>(this, "colouriser"));
	overlay_ = std::unique_ptr<ftl::overlay::Overlay>(
		ftl::create<ftl::overlay::Overlay>(this, "overlay"));
}
// Periodic GUI update: every update_fps_freq_ frames, recompute FPS and
// average latency and push statistics/metadata/log messages to the
// Statistics panels. nframes_ < 0 means the camera view has been closed.
void Camera::update(double delta) {
if (nframes_ < 0) { return; }
if (nframes_ > update_fps_freq_) {
float n = nframes_;
float l = latency_ / n; // mean latency over the window (ms)
nframes_ = 0;
latency_ = 0;
auto t = glfwGetTime();
float diff = t - last_;
last_ = t;
auto *mod = screen->getModule<ftl::gui2::Statistics>();
mod->getJSON(StatisticsPanel::PERFORMANCE_INFO)["FPS"] = n/diff;
mod->getJSON(StatisticsPanel::PERFORMANCE_INFO)["Latency"] = std::to_string(int(l))+std::string("ms");
if (live_) mod->getJSON(StatisticsPanel::MEDIA_STATUS)["LIVE"] = nlohmann::json{{"icon", ENTYPO_ICON_VIDEO_CAMERA},{"value", true},{"colour","#0000FF"},{"size",28}};
auto ptr = std::atomic_load(&latest_);
if (ptr) {
const auto &frame = ptr->frames[frame_idx];
// Selected frame metadata (device/serial) -> MEDIA_META panel.
if (frame.has(Channel::MetaData)) {
const auto &meta = frame.metadata();
if (meta.size() > 0) {
auto &jmeta = mod->getJSON(StatisticsPanel::MEDIA_META);
//if (meta.count("name")) {
//	jmeta["name"] = nlohmann::json{{"nokey", true},{"value",meta.find("name")->second},{"size",20}};
//}
if (meta.count("device")) {
jmeta["Device"] = nlohmann::json{{"nokey", true},{"value",meta.find("device")->second}};
}
if (meta.count("serial")) {
jmeta["Serial"] = nlohmann::json{{"value",meta.find("serial")->second}};
}
/*for (const auto &m : meta) {
jmeta[m.first] = m.second;
}*/
}
}
const auto &rgbdf = frame.cast<ftl::rgbd::Frame>();
// Camera intrinsics summary -> CAMERA_DETAILS panel.
if (frame.has(Channel::Calibration)) {
const auto &cam = rgbdf.getLeft();
cv::Size s = rgbdf.getSize();
auto &jcam = mod->getJSON(StatisticsPanel::CAMERA_DETAILS);
jcam["D-Resolution"] = std::to_string(cam.width) + std::string("x") + std::to_string(cam.height);
jcam["C-Resolution"] = std::to_string(s.width) + std::string("x") + std::to_string(s.height);
jcam["Focal"] = cam.fx;
jcam["Baseline"] = cam.baseline;
jcam["Principle"] = std::to_string(int(cam.cx)) + std::string(",") + std::to_string(int(cam.cy));
}
// Capability icons -> MEDIA_META panel; absent capabilities are erased
// so stale icons disappear when capabilities change.
if (frame.has(Channel::Capabilities)) {
const auto &caps = rgbdf.capabilities();
auto &jmeta = mod->getJSON(StatisticsPanel::MEDIA_META);
if (caps.count(Capability::TOUCH)) jmeta["Touch"] = nlohmann::json{{"icon", ENTYPO_ICON_MOUSE_POINTER},{"value", true}};
else jmeta.erase("Touch");
if (caps.count(Capability::MOVABLE)) jmeta["Movable"] = nlohmann::json{{"icon", ENTYPO_ICON_DIRECTION},{"value", true}};
else jmeta.erase("Movable");
if (caps.count(Capability::VR)) jmeta["VR"] = nlohmann::json{{"value", true}};
else jmeta.erase("VR");
if (caps.count(Capability::EQUI_RECT)) jmeta["360"] = nlohmann::json{{"icon", ENTYPO_ICON_COMPASS},{"value", true}};
else jmeta.erase("360");
}
// Drain accumulated frame messages under lock, then render to LOGGING.
std::map<ftl::data::Message,std::string> messages;
{
UNIQUE_LOCK(mtx_, lk);
std::swap(messages, messages_);
}
auto &jmsgs = mod->getJSON(StatisticsPanel::LOGGING);
jmsgs.clear();
if (messages.size() > 0) {
for (const auto &m : messages) {
auto &data = jmsgs.emplace_back();
data["value"] = m.second;
data["nokey"] = true;
// Message code ranges select severity styling.
// NOTE(review): 2046 looks like it may be intended as 2048 — confirm
// against the ftl::data::Message enum ranges.
if (int(m.first) < 1024) {
data["icon"] = ENTYPO_ICON_WARNING;
data["colour"] = "#0000ff";
} else if (int(m.first) < 2046) {
data["icon"] = ENTYPO_ICON_WARNING;
data["colour"] = "#00a6f0";
} else {
}
}
}
}
}
}
/** Refresh capability flags from the frame; a capability absent from the
 * set means its flag becomes false. No-op without a Capabilities channel. */
void Camera::_updateCapabilities(ftl::data::Frame &frame) {
	if (!frame.has(Channel::Capabilities)) { return; }

	live_ = false;
	touch_ = false;
	movable_ = false;
	vr_ = false;

	const auto &cap = frame.get<std::unordered_set<Capability>>(Channel::Capabilities);
	for (auto c : cap) {
		switch (c) {
		case Capability::LIVE    : live_ = true; break;
		case Capability::TOUCH   : touch_ = true; break;
		case Capability::MOVABLE : movable_ = true; break;
		case Capability::VR      : vr_ = true; break;
		default: break;
		}
	}
}
// Create the camera view from the first received frame: read capability
// flags, choose a 3D view for live virtual sources, log metadata, and wire
// up view close handling before showing the view.
void Camera::initiate_(ftl::data::Frame &frame) {
if (frame.has(Channel::Capabilities)) {
const auto &rgbdf = frame.cast<ftl::rgbd::Frame>();
const auto &cap = rgbdf.capabilities();
for (auto c : cap) {
LOG(INFO) << " -- " << ftl::rgbd::capabilityName(c);
switch (c) {
case Capability::LIVE : live_ = true; break;
case Capability::TOUCH : touch_ = true; break;
case Capability::MOVABLE : movable_ = true; break;
case Capability::VR : vr_ = true; break;
default: break;
}
}
// Live virtual (rendered) sources get the 3D navigable view.
if (live_ && cap.count(Capability::VIRTUAL)) {
view = new ftl::gui2::CameraView3D(screen, this);
} else {
view = new ftl::gui2::CameraView(screen, this);
}
} else {
view = new ftl::gui2::CameraView(screen, this);
}
if (frame.has(Channel::MetaData)) {
const auto &meta = frame.metadata();
LOG(INFO) << "Camera Frame Meta Data:";
for (auto m : meta) {
LOG(INFO) << " -- " << m.first << " = " << m.second;
}
}
if (!view) return;
// On close: unsubscribe, disable update() (nframes_ = -1) and clear the
// statistics panels this camera populated.
view->onClose([this](){
filter_->remove();
filter_ = nullptr;
nframes_ = -1;
auto *mod = this->screen->getModule<ftl::gui2::Statistics>();
mod->getJSON(StatisticsPanel::PERFORMANCE_INFO).clear();
mod->getJSON(StatisticsPanel::MEDIA_STATUS).clear();
mod->getJSON(StatisticsPanel::MEDIA_META).clear();
mod->getJSON(StatisticsPanel::CAMERA_DETAILS).clear();
});
setChannel(channel_);
screen->setView(view);
view->refresh();
}
// Current speaker volume (delegates to the IO speaker device).
float Camera::volume() {
return io->speaker()->volume();
}
/** Set speaker volume (delegates to the IO speaker device). */
void Camera::setVolume(float v) {
	io->speaker()->setVolume(v);
}
// Pause/resume playback, both locally and on the feed muxer.
void Camera::setPaused(bool set) {
paused_ = set;
io->feed()->muxer()->set("paused", set);
}
/** Channels present on the most recent frame, or empty if none received. */
std::unordered_set<Channel> Camera::availableChannels() {
	auto ptr = std::atomic_load(&latest_);
	if (!ptr) { return {}; }
	return ptr->frames[frame_idx].available();
}
/** Union of decoded channels and all known (incl. data) channels on the
 * latest frame; empty if no frame has been received. */
std::unordered_set<Channel> Camera::allAvailableChannels() {
	auto ptr = std::atomic_load(&latest_);
	if (!ptr) { return {}; }

	auto set = ptr->frames[frame_idx].available();
	for (auto c : ptr->frames[frame_idx].allChannels()) {
		set.emplace(c);
	}
	return set;
}
// Activate this camera for the given frame id: reset per-camera state,
// subscribe to the frameset, wire the per-frame callback (audio, messages,
// capability changes, touch responses, stats counters) and build the view
// from the first available frameset. Throws if no data is available.
void Camera::activate(ftl::data::FrameID id) {
frame_idx = id.source();
frame_id_ = id;
last_ = glfwGetTime();
nframes_ = 0;
// Clear the members to defaults
has_seen_frame_ = false;
point_.id = -1;
live_ = false;
touch_ = false;
movable_ = false;
vr_ = false;
cursor_pos_.setZero();
cursor_normal_.setZero();
cursor_normal_[2] = 1.0f;
//std::mutex m;
//std::condition_variable cv;
io->speaker()->reset();
io->feed()->mixer().reset();
filter_ = io->feed()->filter(std::unordered_set<unsigned int>{id.frameset()}, {Channel::Left});
filter_->on(
[this, feed = io->feed(), speaker = io->speaker()](ftl::data::FrameSetPtr fs){
std::atomic_store(&current_fs_, fs);
std::atomic_store(&latest_, fs);
// Deal with audio
//if (fs->frames[frame_idx].hasOwn(Channel::AudioStereo)) {
//	speaker->queue(fs->timestamp(), fs->frames[frame_idx]);
//}
if (feed->mixer().frames() > 0) {
ftl::audio::Audio aframe;
feed->mixer().read(aframe.data(), feed->mixer().frames());
speaker->queue(fs->timestamp(), aframe);
}
// Need to notify GUI thread when first data comes
if (!has_seen_frame_) {
//std::unique_lock<std::mutex> lk(m);
has_seen_frame_ = true;
//cv.notify_one();
}
// Extract and record any frame messages
auto &frame = fs->frames[frame_idx];
if (frame.hasMessages()) {
const auto &msgs = frame.messages();
//auto &jmsgs = mod->getJSON(StatisticsPanel::LOGGING);
UNIQUE_LOCK(mtx_, lk);
messages_.insert(msgs.begin(), msgs.end());
}
// Some capabilities can change over time
if (frame.changed(Channel::Capabilities)) {
_updateCapabilities(frame);
}
if (!view) return true;
// Forward the latest touch point back to a live touchable source.
if (live_ && touch_) {
if (point_.id >= 0) {
auto response = fs->frames[frame_idx].response();
auto &data = response.create<std::vector<ftl::codecs::Touch>>(Channel::Touch);
data.resize(1);
UNIQUE_LOCK(mtx_, lk);
data[0] = point_;
//point_.strength = 0;
}
}
screen->redraw();
nframes_++;
// Accumulated latency; averaged and reported in update().
latency_ += ftl::timer::get_time() - fs->localTimestamp;
return true;
}
);
auto sets = filter_->getLatestFrameSets();
if (sets.size() > 0) {
std::atomic_store(&current_fs_, sets.front());
std::atomic_store(&latest_, sets.front());
initiate_(sets.front()->frames[frame_idx]);
} else {
throw FTL_Error("Cannot activate camera, no data");
}
// For first data, extract useful things and create view
// Must be done in GUI thread, hence use of cv.
//std::unique_lock<std::mutex> lk(m);
//cv.wait_for(lk, 1s, [this](){ return has_seen_frame_; });
//initiate_(std::atomic_load(&current_fs_)->frames[frame_idx]);
}
// Select the displayed channel; Colour is always kept in the subscription
// so the base image remains available.
void Camera::setChannel(Channel c) {
channel_ = c;
filter_->select({Channel::Colour, c});
}
/** URI of the active source, taken from the frame's "id" metadata entry;
 * empty string when unavailable. */
std::string Camera::getActiveSourceURI() {
	auto ptr = std::atomic_load(&latest_);
	if (!ptr) { return ""; }

	auto &frame = ptr->frames[frame_idx];
	if (!frame.has(ftl::codecs::Channel::MetaData)) { return ""; }

	const auto &meta = frame.metadata();
	auto it = meta.find("id");
	return (it != meta.end()) ? it->second : "";
}
// Flip the overlay's "enabled" setting.
void Camera::toggleOverlay() {
overlay_->set("enabled", !overlay_->value<bool>("enabled", false));
}
// Audio mixer of the feed (non-owning pointer).
ftl::audio::StereoMixerF<100> *Camera::mixer() {
return &io->feed()->mixer();
}
// True while the feed records to file.
bool Camera::isRecording() {
return io->feed()->isRecording();
}
// Stop recording and shrink the subscription back to the display channel.
void Camera::stopRecording() {
io->feed()->stopRecording();
filter_->select({channel_});
}
// Record the given channels of this camera's filter to a file.
void Camera::startRecording(const std::string &filename, const std::unordered_set<ftl::codecs::Channel> &channels) {
filter_->select(channels);
io->feed()->startRecording(filter_, filename);
}
// Stream the given channels of this camera's filter.
void Camera::startStreaming(const std::unordered_set<ftl::codecs::Channel> &channels) {
filter_->select(channels);
io->feed()->startStreaming(filter_);
}
void Camera::snapshot(const std::string &filename) {
auto ptr = std::atomic_load(&latest_);
if (ptr) {
auto &frame = ptr->frames[frame_idx];
if (frame.hasChannel(channel_)) {
const auto &snap = frame.get<cv::Mat>(channel_);
cv::Mat output;
cv::cvtColor(snap, output, cv::COLOR_BGRA2BGR);
cv::imwrite(filename, output);
}
}
}
// Current frame for the selected display channel.
ftl::cuda::TextureObject<uchar4>& Camera::getFrame() {
return getFrame(channel_);
}
// Current frame for the given channel, colourised into an internal texture
// buffer. The reference stays valid only until the next call. Consumes the
// pending frameset (current_fs_ is cleared) so colourisation happens once
// per new frame. Throws if the channel is missing from the frame.
ftl::cuda::TextureObject<uchar4>& Camera::getFrame(ftl::codecs::Channel channel) {
if (std::atomic_load(&current_fs_)) {
auto& frame = current_fs_->frames[frame_idx].cast<ftl::rgbd::Frame>();
// Keep a separate cached texture for the raw left/colour image.
if (frame.hasChannel(Channel::Left)) current_frame_colour_ = frame.getTexture<uchar4>(Channel::Left);
if (frame.hasChannel(channel)) {
current_frame_ = colouriser_->colourise(frame, channel, 0);
} else {
throw FTL_Error("Channel missing for frame " << frame.timestamp() << ": '" << ftl::codecs::name(channel) << "'");
}
// Mark the frameset as consumed.
std::atomic_store(&current_fs_, {});
}
if (channel == Channel::Left) { return current_frame_colour_; }
else { return current_frame_; }
}
/** Copy the current frame texture for the requested channel into 'frame'.
 * @return true if a frame was available, false otherwise. */
bool Camera::getFrame(ftl::cuda::TextureObject<uchar4>& frame, ftl::codecs::Channel channel) {
	if (std::atomic_load(&current_fs_).get() != nullptr) {
		// BUGFIX: the 'channel' argument was previously ignored (the
		// no-argument overload was called, always using channel_).
		frame = getFrame(channel);
		return true;
	}
	return false;
}
// Convenience overload using the currently selected channel.
bool Camera::getFrame(ftl::cuda::TextureObject<uchar4>& frame) {
return getFrame(frame, channel_);
}
/** True if a pending frameset exists and its frame for this camera has the
 * currently selected channel. */
bool Camera::hasFrame() {
	auto ptr = std::atomic_load(&current_fs_);
	if (!ptr) { return false; }
	if (ptr->frames.size() <= (unsigned int)(frame_idx)) { return false; }
	return ptr->frames[frame_idx].hasChannel(channel_);
}
// Cached cursor pose (recomputed by _cursor() whenever cursor state changes).
const Eigen::Matrix4d &Camera::cursor() const {
return cursor_;
}
/** Compute the cursor pose from position/target/normal; identity when the
 * normal is not valid (zero length). */
Eigen::Matrix4d Camera::_cursor() const {
	if (!(cursor_normal_.norm() > 0.0f)) {
		Eigen::Matrix4d ident;
		ident.setIdentity();
		return ident;
	}
	return nanogui::lookAt(cursor_pos_, cursor_target_, cursor_normal_).cast<double>();
}
/** Draw the overlay for the latest frame onto the given NanoVG context. */
void Camera::drawOverlay(NVGcontext *ctx, const nanogui::Vector2f &s, const nanogui::Vector2f &is, const Eigen::Vector2f &offset) {
	auto ptr = std::atomic_load(&latest_);
	// BUGFIX: ptr was previously dereferenced without a null check, crashing
	// if the overlay was drawn before the first frame arrived.
	if (!ptr) { return; }
	// TODO: Need all the source framesets here or all data dumped in by renderer
	overlay_->draw(ctx, *ptr, ptr->frames[frame_idx].cast<ftl::rgbd::Frame>(), s, is, offset, cursor()); // , view->size().cast<float>()
}
/** Send a new pose to a live, movable, non-VR source via its response frame. */
void Camera::sendPose(const Eigen::Matrix4d &pose) {
	if (!live_ || !movable_ || vr_) { return; }

	auto ptr = std::atomic_load(&latest_);
	if (!ptr) { return; }

	auto response = ptr->frames[frame_idx].response();
	response.cast<ftl::rgbd::Frame>().setPose() = pose;
}
// Record a touch event; the stored point is forwarded to the source by the
// frame callback (see activate()). Gated on the "enable_touch" setting.
void Camera::touch(int id, ftl::codecs::TouchType t, int x, int y, float d, int strength) {
if (value("enable_touch", false)) {
UNIQUE_LOCK(mtx_, lk);
point_.id = id;
point_.type = t;
point_.x = x;
point_.y = y;
point_.d = d;
point_.strength = strength; //std::max((unsigned char)strength, point_.strength);
}
// TODO: Check for touch capability first
/*if (auto ptr = std::atomic_load(&latest_)) {
auto response = ptr->frames[frame_idx].response();
auto &data = response.create<std::vector<ftl::codecs::Touch>>(Channel::Touch);
data.resize(0);
auto &pt = data.emplace_back();
pt.id = id;
pt.type = t;
pt.x = x;
pt.y = y;
pt.d = d;
pt.strength = strength;
}*/
}
/** Depth value (metres per the depth channel) at pixel (x, y) of the latest
 * frame; 0.0f when unavailable or out of bounds. */
float Camera::depthAt(int x, int y) {
	//if (value("show_depth", true)) {
	auto ptr = std::atomic_load(&latest_);
	if (!ptr) { return 0.0f; }

	const auto &frame = ptr->frames[frame_idx].cast<ftl::rgbd::Frame>();
	if (!frame.hasChannel(Channel::Depth)) { return 0.0f; }

	const auto &depth = frame.get<cv::Mat>(Channel::Depth);
	if (x < 0 || y < 0 || x >= depth.cols || y >= depth.rows) { return 0.0f; }
	return depth.at<float>(y, x);
	//}
}
/** Back-project pixel (x, y) through the depth map into world space.
 * Returns (0,0,0) as a sentinel for out-of-bounds pixels or depths outside
 * the camera's [minDepth, maxDepth] range. */
static float3 getWorldPoint(const cv::Mat &depth, int x, int y, const ftl::rgbd::Camera &intrins, const Eigen::Matrix4f &pose) {
	if (x < 0 || y < 0 || x >= depth.cols || y >= depth.rows) {
		return make_float3(0.0f, 0.0f, 0.0f);
	}
	const float d = depth.at<float>(y, x);
	if (!(d > intrins.minDepth && d < intrins.maxDepth)) {
		return make_float3(0.0f, 0.0f, 0.0f);
	}
	// Camera space, then into world space via the frame pose.
	const float3 cam = intrins.screenToCam(x, y, d);
	return MatrixConversion::toCUDA(pose) * cam;
}
/** World-space position of pixel (x, y) in the latest frame; zero vector
 * when no frame/depth is available or the point is invalid. */
Eigen::Vector3f Camera::worldAt(int x, int y) {
	Eigen::Vector3f res;
	res.setZero();

	auto ptr = std::atomic_load(&latest_);
	if (!ptr) { return res; }

	const auto &frame = ptr->frames[frame_idx].cast<ftl::rgbd::Frame>();
	if (!frame.hasChannel(Channel::Depth)) { return res; }

	const auto &depth = frame.get<cv::Mat>(Channel::Depth);
	Eigen::Matrix4f posef = frame.getPose().cast<float>();
	const float3 p = getWorldPoint(depth, x, y, frame.getLeft(), posef);
	res[0] = p.x;
	res[1] = p.y;
	res[2] = p.z;
	return res;
}
// Fit a plane to a set of 3D points and return its (unit) normal.
// PCA: calculate covariance matrix and its eigenvectors. Eigenvector
// corresponding to smallest eigenvalue is the plane normal.
Eigen::Vector3f fitPlane(const std::vector<float3>& pts) {
// Reinterpret the float3 array as an N x 3 row-major matrix (no copy).
Eigen::Map<Eigen::Matrix<float, Eigen::Dynamic, 3, Eigen::RowMajor>>
mat((float*)(pts.data()), pts.size(), 3);
Eigen::MatrixXf centered = mat.rowwise() - mat.colwise().mean();
Eigen::MatrixXf cov = (centered.adjoint() * centered) / double(mat.rows() - 1);
Eigen::EigenSolver<Eigen::MatrixXf> es(cov);
Eigen::VectorXf::Index argmin;
es.eigenvalues().real().minCoeff(&argmin);
Eigen::Vector3cf n(es.eigenvectors().col(argmin)); // already normalized
return n.real();
}
// Place the 3D cursor at pixel (x, y): back-project the point, fit a plane
// to nearby valid depth points to get the cursor normal, and choose an
// initial target to the side of the cursor. Requires a depth channel.
void Camera::setCursor(int x, int y) {
auto ptr = std::atomic_load(&latest_);
cursor_pos_.setZero();
if (ptr) {
const auto &frame = ptr->frames[frame_idx].cast<ftl::rgbd::Frame>();
if (frame.hasChannel(Channel::Depth)) {
const auto &depth = frame.get<cv::Mat>(Channel::Depth);
const auto &intrins = frame.getLeft();
Eigen::Matrix4f posef = frame.getPose().cast<float>();
float3 CC = getWorldPoint(depth, x, y, intrins, posef);
cursor_pos_[0] = CC.x;
cursor_pos_[1] = CC.y;
cursor_pos_[2] = CC.z;
// get points around the selected point. candidates are selected in
// from square [-range, range] around (x, y) and points which are
// closer than max_distance are used. TODO: check bounds (depth map
// size)
const int range = 24; // 49x49 pixels square
const float max_distance = 0.075; // 15cm radius
const int min_points = 16;
std::vector<float3> pts;
pts.reserve((range*2 + 1)*(range*2 + 1));
for (int xi = -range; xi <= range; xi++) {
for (int yi = -range; yi <= range; yi++) {
auto p = getWorldPoint(depth, x + xi, y + yi, intrins, posef);
// (0,0,0) is getWorldPoint's invalid-point sentinel.
if (p.x == 0 && p.y == 0 && p.z == 0.0) {
continue;
}
const float3 d = p - CC;
if (sqrtf(d.x*d.x + d.y*d.y + d.z*d.z) < max_distance) {
pts.push_back(p);
}
}}
// Too few neighbours: plane fit would be unreliable, keep old normal.
if (pts.size() < min_points) { return; }
cursor_normal_ = fitPlane(pts);
// don't flip y
if (cursor_normal_.y() < 0.0) { cursor_normal_ = -cursor_normal_; }
// some valid value as initial value
const float3 CP = getWorldPoint(depth, x+4, y, intrins, posef);
setCursorTarget({CP.x, CP.y, CP.z});
}
}
cursor_ = _cursor();
}
// Set the cursor target by projecting p onto the cursor plane (removes the
// component along the normal), then refresh the cached cursor pose.
void Camera::setCursorTarget(const Eigen::Vector3f &p) {
cursor_target_ =
p - cursor_normal_.dot(p - cursor_pos_) * cursor_normal_;
cursor_ = _cursor();
}
// Apply the current cursor pose as a correction to the calibration origin
// of every frame in every other frameset of this camera's renderer, then
// clear the cursor. Only applies when the source is movable.
void Camera::setOriginToCursor() {
using ftl::calibration::transform::inverse;
// Check for valid cursor
/*if (cursor_normal_.norm() == 0.0f) return;
float cursor_length = (cursor_target_ - cursor_pos_).norm();
float cursor_dist = cursor_pos_.norm();
if (cursor_length < 0.01f || cursor_length > 5.0f) return;
if (cursor_dist > 10.0f) return;*/
if (movable_) {
auto *rend = io->feed()->getRenderer(frame_id_);
if (rend) {
auto *filter = rend->filter();
if (filter) {
cv::Mat cur;
cv::eigen2cv(cursor(), cur);
auto fss = filter->getLatestFrameSets();
for (auto &fs : fss) {
// Skip this camera's own frameset.
if (fs->frameset() == frame_id_.frameset()) continue;
for (auto &f : fs->frames) {
auto response = f.response();
auto &rgbdf = response.cast<ftl::rgbd::Frame>();
auto &calib = rgbdf.setCalibration();
calib = f.cast<ftl::rgbd::Frame>().getCalibration();
// apply correction to existing one
cv::Mat new_origin = cur*calib.origin;
if (ftl::calibration::validate::pose(new_origin)) {
calib.origin = new_origin;
}
else {
// TODO: add error message to gui as well
LOG(ERROR) << "Bad origin update (invalid pose)";
}
}
};
}
}
}
// Reset cursor state; _cursor() yields identity for a zero normal.
cursor_target_ = Eigen::Vector3f(0.0f,0.0f,0.0f);
cursor_pos_ = Eigen::Vector3f(0.0f,0.0f,0.0f);
cursor_normal_ = Eigen::Vector3f(0.0f,0.0f,0.0f);
cursor_ = _cursor();
}
// Reset the calibration origin of all frames in other framesets to the
// identity (cursor is cleared first, so cursor()/cur is identity). Only
// applies when the source is movable.
void Camera::resetOrigin() {
cursor_target_ = Eigen::Vector3f(0.0f,0.0f,0.0f);
cursor_pos_ = Eigen::Vector3f(0.0f,0.0f,0.0f);
cursor_normal_ = Eigen::Vector3f(0.0f,0.0f,0.0f);
cursor_ = _cursor();
if (movable_) {
auto *rend = io->feed()->getRenderer(frame_id_);
if (rend) {
auto *filter = rend->filter();
if (filter) {
cv::Mat cur;
cv::eigen2cv(cursor(), cur);
auto fss = filter->getLatestFrameSets();
for (auto &fs : fss) {
// Skip this camera's own frameset.
if (fs->frameset() == frame_id_.frameset()) continue;
for (auto &f : fs->frames) {
auto response = f.response();
auto &rgbdf = response.cast<ftl::rgbd::Frame>();
auto &calib = rgbdf.setCalibration();
calib = f.cast<ftl::rgbd::Frame>().getCalibration();
calib.origin = cur;
}
};
}
}
}
}
/** Publish the current cursor as a labelled 3D cursor shape via the Poser
 * operator; each saved cursor gets a unique incrementing id. */
void Camera::saveCursorToPoser() {
	ftl::codecs::Shape3D shape;
	shape.type = ftl::codecs::Shape3DType::CURSOR;
	shape.id = cursor_save_id_++;
	shape.label = std::string("Cursor") + std::to_string(shape.id);
	shape.pose = cursor().inverse().cast<float>();
	shape.size = Eigen::Vector3f(0.1f, 0.1f, 0.1f);

	ftl::operators::Poser::add(shape, frame_id_);
}
// The active pose is the cursor pose.
Eigen::Matrix4d Camera::getActivePose() {
return cursor(); //.inverse();
}
/** Project the active pose's position into the latest frame's image and
 * return the pixel coordinate; (-1,-1) when no frame is available. */
nanogui::Vector2i Camera::getActivePoseScreenCoord() {
	Eigen::Matrix4d pose = getActivePose().inverse();

	auto ptr = std::atomic_load(&latest_);
	if (!ptr) { return nanogui::Vector2i(-1, -1); }

	const auto &frame = ptr->frames[frame_idx].cast<ftl::rgbd::Frame>();
	// Into camera space, then take the translation column as the position.
	auto campose = frame.getPose().inverse() * pose;
	float3 campos;
	campos.x = campose(0, 3);
	campos.y = campose(1, 3);
	campos.z = campose(2, 3);

	int2 spos = frame.getLeft().camToScreen<int2>(campos);
	return nanogui::Vector2i(spos.x, spos.y);
}
// Pre-multiply the cursor pose by the given transform.
void Camera::transformActivePose(const Eigen::Matrix4d &pose) {
cursor_ = pose * cursor_;
}
// Replace the cursor pose directly.
void Camera::setActivePose(const Eigen::Matrix4d &pose) {
cursor_ = pose; //.inverse();
}
#pragma once
#include "../module.hpp"
#include "../screen.hpp"
#include "../views/camera.hpp"
#include <ftl/render/colouriser.hpp>
#include <ftl/render/overlay.hpp>
#include <ftl/codecs/touch.hpp>
#include <ftl/audio/mixer.hpp>
namespace ftl {
namespace gui2 {
/**
 * GUI module managing a single active camera/source view: channel selection,
 * frame retrieval for display, pose/3D-cursor manipulation, touch input,
 * recording/streaming control and audio volume.
 */
class Camera : public Module {
public:
using Module::Module;
virtual void init() override;
virtual void update(double delta) override;
/** Make the frame identified by `id` the active source for this view. */
virtual void activate(ftl::data::FrameID id);
/** Select which channel is rendered by getFrame(). */
void setChannel(ftl::codecs::Channel c);
void setPaused(bool set);
bool isPaused() const { return paused_; }
void toggleOverlay();
float volume();
void setVolume(float v);
/** Gets current active frame to display. Always 4 channel uchar4. Reference
* will stay valid until getFrame() is called again. Always returns a
* reference to internal buffer. */
ftl::cuda::TextureObject<uchar4>& getFrame();
ftl::cuda::TextureObject<uchar4>& getFrame(ftl::codecs::Channel channel);
/** Variants that copy into a caller-provided texture; return false on failure. */
bool getFrame(ftl::cuda::TextureObject<uchar4>&);
bool getFrame(ftl::cuda::TextureObject<uchar4>&, ftl::codecs::Channel channel);
/** Channels that can be displayed for the current frame. */
std::unordered_set<ftl::codecs::Channel> availableChannels();
/** This includes data channels etc */
std::unordered_set<ftl::codecs::Channel> allAvailableChannels();
/** Forward a touch event (2D screen coords plus depth/strength) to the source. */
void touch(int id, ftl::codecs::TouchType t, int x, int y, float d, int strength);
/** Check if new frame is available */
bool hasFrame();
/** Send a new camera pose to the remote source. */
void sendPose(const Eigen::Matrix4d &pose);
inline bool isLive() const { return live_; }
inline bool isTouchable() const { return touch_; }
inline bool isMovable() const { return movable_; }
inline bool isVR() const { return vr_; }
ftl::render::Colouriser* colouriser() { return colouriser_.get(); };
ftl::overlay::Overlay* overlay() { return overlay_.get(); }
/** Audio mixer for this source; may be null if no audio is available. */
ftl::audio::StereoMixerF<100> *mixer();
/** Draw the 2D overlay (annotations etc.) into the given NanoVG context. */
void drawOverlay(NVGcontext *ctx, const nanogui::Vector2f &size, const nanogui::Vector2f &is, const Eigen::Vector2f &offset);
std::string getActiveSourceURI();
/** Depth value (metres, presumably — confirm units) at the given pixel. */
float depthAt(int x, int y);
/** World-space 3D position corresponding to the given pixel. */
Eigen::Vector3f worldAt(int x, int y);
bool isRecording();
void stopRecording();
void startRecording(const std::string &filename, const std::unordered_set<ftl::codecs::Channel> &channels);
void startStreaming(const std::unordered_set<ftl::codecs::Channel> &channels);
void snapshot(const std::string &filename);
// --- 3D cursor: a user-placed pose used as an editing/measurement origin ---
const Eigen::Matrix4d &cursor() const;
void setCursorPosition(const Eigen::Vector3f &pos) { cursor_pos_ = pos; cursor_ = _cursor(); }
void setCursorNormal(const Eigen::Vector3f &norm) { cursor_normal_ = norm; cursor_ = _cursor(); }
void setCursorTarget(const Eigen::Vector3f &targ);
/** Place the cursor at the world position under screen pixel (x, y). */
void setCursor(int x, int y);
const Eigen::Vector3f getCursorPosition() const { return cursor_pos_; }
void setOriginToCursor();
void resetOrigin();
void saveCursorToPoser();
// --- Active pose manipulation (see camera.cpp for semantics) ---
Eigen::Matrix4d getActivePose();
/** Screen pixel of the active pose, or (-1,-1) if unavailable. */
nanogui::Vector2i getActivePoseScreenCoord();
void transformActivePose(const Eigen::Matrix4d &pose);
void setActivePose(const Eigen::Matrix4d &pose);
private:
// NOTE(review): no trailing underscore, unlike other members — kept for
// consistency with existing .cpp usage. -1 means "no frame selected yet".
int frame_idx = -1;
ftl::data::FrameID frame_id_;
ftl::codecs::Channel channel_ = ftl::codecs::Channel::Colour;
ftl::stream::Feed::Filter *filter_ = nullptr;
std::atomic_bool paused_ = false; // TODO: implement in InputOutput
bool has_seen_frame_ = false;
ftl::codecs::Touch point_;
bool live_=false;
bool touch_=false;
bool movable_=false;
bool vr_=false;
float last_=0.0f;
std::atomic_int16_t nframes_=0;
std::atomic_int64_t latency_=0;
int update_fps_freq_=30; // fps counter update frequency (frames)
// Cursor state: position + normal + target define the cursor_ pose matrix.
Eigen::Vector3f cursor_pos_;
Eigen::Vector3f cursor_target_;
Eigen::Vector3f cursor_normal_;
int cursor_save_id_=0;
Eigen::Matrix4d cursor_;
ftl::data::FrameSetPtr current_fs_;
ftl::data::FrameSetPtr latest_;
// Internal display buffers returned by getFrame().
ftl::cuda::TextureObject<uchar4> current_frame_;
ftl::cuda::TextureObject<uchar4> current_frame_colour_;
std::unique_ptr<ftl::render::Colouriser> colouriser_;
std::unique_ptr<ftl::overlay::Overlay> overlay_;
std::map<ftl::data::Message,std::string> messages_;
CameraView* view = nullptr;
ftl::audio::StereoMixerF<100> *mixer_ = nullptr;
MUTEX mtx_;
void initiate_(ftl::data::Frame &frame);
void _updateCapabilities(ftl::data::Frame &frame);
/** Recompute the cursor pose matrix from pos/normal/target members. */
Eigen::Matrix4d _cursor() const;
};
}
}
#ifndef _FTL_GUI_CAMERA_TOOLS_HPP_
#define _FTL_GUI_CAMERA_TOOLS_HPP_
namespace ftl {
namespace gui2 {
/** Interactive tools selectable in the camera view toolbar. */
enum class Tools {
NONE,
SELECT_POINT, // Touch 2D
MOVEMENT, // 3D first person camera controls
PAN, // 2D Panning
CENTRE_VIEW,
ZOOM_FIT,
ZOOM_IN,
ZOOM_OUT,
CLIPPING, // Toggle depth clipping
OVERLAY, // Toggle overlay rendering
LAYOUT,
MOVE_CURSOR, // Move 3D Cursor
ROTATE_CURSOR,
ORIGIN_TO_CURSOR, // Make the cursor the new world origin
RESET_ORIGIN,
SAVE_CURSOR, // Persist cursor pose to the poser
ROTATE_X,
ROTATE_Y,
ROTATE_Z,
TRANSLATE_X,
TRANSLATE_Y,
TRANSLATE_Z,
INSPECT_POINT
};
/** Groupings of tools; presumably used to make tools mutually exclusive
 * within a group — confirm against the toolbar implementation. */
enum class ToolGroup {
MOUSE_MOTION,
VIEW_2D_ACTIONS,
VIEW_3D_LAYERS,
VIEW_3D_ACTIONS
};
}
}
#endif
\ No newline at end of file
#include "config.hpp"
using ftl::gui2::ConfigCtrl;
/**
 * Install the settings (cog) toolbar button. Clicking it opens the
 * configuration window; the button itself acts as a momentary trigger.
 */
void ConfigCtrl::init() {
	button = screen->addButton(ENTYPO_ICON_COG);
	button->setTooltip("Settings");
	button->setVisible(true);
	button->setCallback([this]() {
		// Un-push immediately so the toggle button behaves like a plain button.
		button->setPushed(false);
		show();
	});
}
/**
 * Show the configuration window, creating it lazily on first use.
 *
 * NOTE(review): if nanogui ever disposes/deletes the window, `window`
 * dangles and passing it to childIndex() is undefined behaviour — confirm
 * ConfigWindow's lifetime/ownership before relying on this check.
 */
void ConfigCtrl::show() {
	// childIndex(nullptr) == -1 on first call, so the window is created here.
	if (screen->childIndex(window) == -1) {
		// The screen takes ownership of the new window (nanogui parenting).
		window = new ftl::gui2::ConfigWindow(screen, io->master());
	}
	window->requestFocus();
	window->setVisible(true);
	screen->performLayout();
}
/** Build and show a settings form for the single configurable at `uri`. */
void ConfigCtrl::show(const std::string &uri) {
	ftl::gui2::ConfigWindow::buildForm(screen, uri);
}
// Window is owned and destroyed by the nanogui screen, so nothing to do.
// NOTE(review): forms_ holds raw FormHelper pointers that are never freed
// here — confirm their ownership; this looks like a potential leak.
ConfigCtrl::~ConfigCtrl() {
}
#pragma once
#include "../module.hpp"
#include "../screen.hpp"
#include "../views/config.hpp"
namespace ftl {
namespace gui2 {
/**
* Controller for thumbnail view.
*/
/**
 * Controller module for the settings UI: owns the toolbar button and the
 * lazily-created ConfigWindow.
 *
 * NOTE(review): the class comment in SOURCE says "thumbnail view", which
 * appears to be a copy-paste error — this controls the config window.
 */
class ConfigCtrl : public Module {
public:
using Module::Module;
virtual ~ConfigCtrl();
virtual void init() override;
/** Show the main configuration window (created on first call). */
virtual void show();
/** Show a settings form for a single configurable URI. */
void show(const std::string &uri);
private:
nanogui::ToolButton *button; // owned by the screen (nanogui parenting)
ftl::gui2::ConfigWindow *window = nullptr; // owned by the screen once created
std::list<nanogui::FormHelper *> forms_; // raw pointers; ownership unclear — see destructor note
};
}
}