Reformatted the project to make it more readable (to me)

This commit is contained in:
Arnaud Fauconnet 2023-07-17 11:45:28 +02:00
parent de6743207d
commit 4738ae7444
66 changed files with 6713 additions and 5880 deletions

View File

@@ -1,18 +1,16 @@
#pragma once

#include <deque>
#include <esim/common/types.hpp>
#include <ze/common/time_conversions.hpp>

namespace event_camera_simulator {

    class ImageBuffer {
    public:
        ZE_POINTER_TYPEDEFS(ImageBuffer);

        struct ImageData {
            ImageData(Image img, Time stamp, Duration exposure_time)
                : image(img),
                  stamp(stamp),
@@ -20,29 +18,34 @@ public:
            Image image;
            Time stamp;
            Duration exposure_time; // timestamp since last image (0 if this is
                                    // the first image)
        };

        using ExposureImage = std::pair<Duration, Image>;

        // Rolling image buffer of maximum size 'buffer_size_ns'.
        ImageBuffer(Duration buffer_size_ns): buffer_size_ns_(buffer_size_ns) {}

        void addImage(Time t, const Image& img);

        std::deque<ImageData> getRawBuffer() const {
            return data_;
        }

        size_t size() const {
            return data_.size();
        }

        Duration getExposureTime() const {
            return buffer_size_ns_;
        }

    private:
        Duration buffer_size_ns_;
        std::deque<ImageData> data_;
    };

    /*
     * The CameraSimulator takes as input a sequence of stamped images,
     * assumed to be sampled at a "sufficiently high" framerate and with
@@ -52,17 +55,16 @@ private:
     *
     * @TODO: simulate a non-linear camera response curve, shot noise, etc.
     */
    class CameraSimulator {
    public:
        CameraSimulator(double exposure_time_ms)
            : exposure_time_(ze::secToNanosec(exposure_time_ms / 1000.0)) {
            buffer_.reset(new ImageBuffer(exposure_time_));
        }

        bool imageCallback(
            const Image& img, Time time, const ImagePtr& camera_image
        );

    private:
        ImageBuffer::Ptr buffer_;
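For orientation, a minimal driver sketch for the two classes above. This is a hedged example, not part of the commit: the frame rate, resolution, exposure value, and the assumption that Image is a floating-point cv::Mat_ typedef with a (size, value) constructor are all illustrative.

    #include <esim/esim/camera_simulator.hpp>

    using namespace event_camera_simulator;

    int main() {
        CameraSimulator sim(10.0); // 10 ms exposure -> ~10 ms rolling buffer

        // Hypothetical constant gray input frame.
        Image frame(cv::Size(640, 480), ImageFloatType(0.5));
        ImagePtr blurred = std::make_shared<Image>(cv::Size(640, 480));

        // Assumed 1 kHz input: one frame every 1000000 ns. imageCallback
        // returns true once enough frames are buffered to synthesize a
        // full exposure into *camera_image (here: *blurred).
        for (Time t = 0; t < ze::secToNanosec(0.1); t += 1000000) {
            if (sim.imageCallback(frame, t, blurred)) {
                // *blurred now holds the exposure-averaged (motion-blurred) image
            }
        }
        return 0;
    }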

View File

@@ -14,12 +14,9 @@ namespace event_camera_simulator {
     * The pixel-wise voltages are reset with the values from the first image
     * which is passed to the simulator.
     */
    class EventSimulator {
    public:
        struct Config {
            double Cp;
            double Cm;
            double sigma_Cp;
@@ -34,8 +31,7 @@ public:
        EventSimulator(const Config& config)
            : config_(config),
              is_initialized_(false),
              current_time_(0) {}

        void init(const Image& img, Time time);
        Events imageCallback(const Image& img, Time time);
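A hedged configuration sketch for the simulator above; the numeric values are illustrative, and only Config fields that appear in this commit (Cp, Cm, sigma_Cp, sigma_Cm, use_log_image, log_eps, refractory_period_ns) are used.

    #include <esim/esim/event_simulator.hpp>

    using namespace event_camera_simulator;

    int main() {
        EventSimulator::Config config;
        config.Cp = 0.05;            // positive contrast threshold (illustrative)
        config.Cm = 0.05;            // negative contrast threshold
        config.sigma_Cp = 0.0;       // no per-event threshold noise
        config.sigma_Cm = 0.0;
        config.use_log_image = true; // operate on log(I + log_eps)
        config.log_eps = 0.001;
        config.refractory_period_ns = 100000; // hypothetical 0.1 ms dead time

        EventSimulator sim(config);

        // The first call only initializes the per-pixel reference values;
        // later calls return the events triggered between the two frames.
        Image img0(cv::Size(4, 4), ImageFloatType(0.2));
        Image img1(cv::Size(4, 4), ImageFloatType(0.4));
        sim.imageCallback(img0, 0);
        Events events = sim.imageCallback(img1, 1000000); // t = 1 ms (in ns)
        return 0;
    }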

View File

@@ -1,30 +1,29 @@
#pragma once

#include <esim/esim/camera_simulator.hpp>
#include <esim/esim/event_simulator.hpp>
#include <esim/visualization/publisher_interface.hpp>

namespace event_camera_simulator {

    /* The Simulator forwards the simulated images / depth maps from the
     * data provider to multiple, specialized camera simulators, such as:
     * (i) event camera simulators that simulate events based on sequences
     *     of images
     * (ii) camera simulators that simulate real cameras (including motion
     *      blur, camera response function, noise, etc.)
     *
     * The Simulator then forwards the simulated data to one or more publishers.
     */
    class Simulator {
    public:
        Simulator(
            size_t num_cameras,
            const EventSimulator::Config& event_sim_config,
            double exposure_time_ms
        )
            : num_cameras_(num_cameras),
              exposure_time_(ze::millisecToNanosec(exposure_time_ms)) {
            for (size_t i = 0; i < num_cameras_; ++i) {
                event_simulators_.push_back(EventSimulator(event_sim_config));
                camera_simulators_.push_back(CameraSimulator(exposure_time_ms));
            }
@@ -32,18 +31,19 @@ public:
        ~Simulator();

        void addPublisher(const Publisher::Ptr& publisher) {
            CHECK(publisher);
            publishers_.push_back(std::move(publisher));
        }

        void dataProviderCallback(const SimulatorData& sim_data);

        void publishData(
            const SimulatorData& sim_data,
            const EventsVector& events,
            bool camera_simulator_success,
            const ImagePtrVector& camera_images
        );

    private:
        size_t num_cameras_;
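To see how these pieces fit together, a hedged wiring sketch (the publisher instance and the SimulatorData sample are hypothetical placeholders from the surrounding layers, not defined in this file):

    // Hypothetical usage of the Simulator facade:
    EventSimulator::Config event_sim_config;  // thresholds as in event_simulator.hpp
    Simulator sim(2, event_sim_config, 10.0); // two cameras, 10 ms exposure
    sim.addPublisher(some_publisher);         // some_publisher: a Publisher::Ptr
    sim.dataProviderCallback(sim_data);       // sim_data: one SimulatorData sample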

View File

@@ -88,7 +88,6 @@ namespace event_camera_simulator {
                << buffer_->getExposureTime() << std::endl;
            exposures_file_.close();

            *camera_image /= denom;
            cv::Mat disp;
            camera_image->convertTo(disp, CV_8U, 255);

View File

@@ -1,15 +1,16 @@
#include <esim/esim/event_simulator.hpp>

#include <glog/logging.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <ze/common/random.hpp>
#include <ze/common/time_conversions.hpp>

namespace event_camera_simulator {

    void EventSimulator::init(const Image& img, Time time) {
        VLOG(1) << "Initialized event camera simulator with sensor size: "
                << img.size();
        VLOG(1) << "and contrast thresholds: C+ = " << config_.Cp
                << " , C- = " << config_.Cm;
        is_initialized_ = true;
        last_img_ = img.clone();
        ref_values_ = img.clone();
@@ -18,23 +19,23 @@ void EventSimulator::init(const Image &img, Time time)
        size_ = img.size();
    }

    Events EventSimulator::imageCallback(const Image& img, Time time) {
        CHECK_GE(time, 0);

        Image preprocessed_img = img.clone();
        if (config_.use_log_image) {
            LOG_FIRST_N(INFO, 1)
                << "Converting the image to log image with eps = "
                << config_.log_eps << ".";
            cv::log(config_.log_eps + img, preprocessed_img);
        }

        if (!is_initialized_) {
            init(preprocessed_img, time);
            return {};
        }

        // For each pixel, check if new events need to be generated since the
        // last image sample
        static constexpr ImageFloatType tolerance = 1e-6;
        Events events;
        Duration delta_t_ns = time - current_time_;
@@ -42,58 +43,61 @@ Events EventSimulator::imageCallback(const Image& img, Time time)
        CHECK_GT(delta_t_ns, 0u);
        CHECK_EQ(img.size(), size_);

        for (int y = 0; y < size_.height; ++y) {
            for (int x = 0; x < size_.width; ++x) {
                ImageFloatType itdt = preprocessed_img(y, x);
                ImageFloatType it = last_img_(y, x);
                ImageFloatType prev_cross = ref_values_(y, x);

                if (std::fabs(it - itdt) > tolerance) {
                    ImageFloatType pol = (itdt >= it) ? +1.0 : -1.0;
                    ImageFloatType C = (pol > 0) ? config_.Cp : config_.Cm;
                    ImageFloatType sigma_C =
                        (pol > 0) ? config_.sigma_Cp : config_.sigma_Cm;

                    if (sigma_C > 0) {
                        C += ze::sampleNormalDistribution<ImageFloatType>(
                            false, 0, sigma_C
                        );
                        constexpr ImageFloatType minimum_contrast_threshold =
                            0.01;
                        C = std::max(minimum_contrast_threshold, C);
                    }

                    ImageFloatType curr_cross = prev_cross;
                    bool all_crossings = false;

                    do {
                        curr_cross += pol * C;

                        if ((pol > 0 && curr_cross > it && curr_cross <= itdt)
                            || (pol < 0 && curr_cross < it
                                && curr_cross >= itdt)) {
                            Duration edt =
                                (curr_cross - it) * delta_t_ns / (itdt - it);
                            Time t = current_time_ + edt;

                            // check that pixel (x,y) is not currently in a
                            // "refractory" state, i.e.
                            // |t - last_timestamp(x,y)| >= refractory_period
                            const Time last_stamp_at_xy =
                                ze::secToNanosec(last_event_timestamp_(y, x));
                            CHECK_GE(t, last_stamp_at_xy);
                            const Duration dt = t - last_stamp_at_xy;

                            if (last_event_timestamp_(y, x) == 0
                                || dt >= config_.refractory_period_ns) {
                                events.push_back(Event(x, y, t, pol > 0));
                                last_event_timestamp_(y, x) =
                                    ze::nanosecToSecTrunc(t);
                            } else {
                                VLOG(3)
                                    << "Dropping event because time since last "
                                       "event ("
                                    << dt << " ns) < refractory period ("
                                    << config_.refractory_period_ns << " ns).";
                            }
                            ref_values_(y, x) = curr_cross;
                        } else {
                            all_crossings = true;
                        }
                    } while (!all_crossings);
@@ -107,11 +111,11 @@ Events EventSimulator::imageCallback(const Image& img, Time time)
        // Sort the events by increasing timestamps, since this is what
        // most event processing algorithms expect
        sort(
            events.begin(),
            events.end(),
            [](const Event& a, const Event& b) -> bool { return a.t < b.t; }
        );

        return events;
    }
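The crossing timestamp in the inner loop is plain linear interpolation of the (log-)intensity between the two frames. With L(t) = it and L(t + Δt) = itdt as in the code, each threshold crossing L_cross = L_ref ± C is stamped at

    \[
        t_{\mathrm{ev}} \;=\; t \;+\; \Delta t \,
            \frac{L_{\mathrm{cross}} - L(t)}{L(t + \Delta t) - L(t)},
    \]

and the event is kept only if the pixel has been quiet for at least refractory_period_ns since its previous event.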

View File

@@ -1,70 +1,92 @@
#include <esim/common/utils.hpp>
#include <esim/esim/simulator.hpp>
#include <ze/common/timer_collection.hpp>

namespace event_camera_simulator {

    DECLARE_TIMER(
        TimerEventSimulator,
        timers_event_simulator_,
        simulate_events,
        visualization
    );

    Simulator::~Simulator() {
        timers_event_simulator_.saveToFile("/tmp", "event_simulator.csv");
    }

    void Simulator::dataProviderCallback(const SimulatorData& sim_data) {
        CHECK_EQ(event_simulators_.size(), num_cameras_);

        bool camera_simulator_success;

        if (sim_data.images_updated) {
            EventsVector events(num_cameras_);
            Time time = sim_data.timestamp;

            // simulate the events and camera images for every sensor in the rig
            {
                auto t = timers_event_simulator_
                             [TimerEventSimulator::simulate_events]
                                 .timeScope();
                for (size_t i = 0; i < num_cameras_; ++i) {
                    events[i] = event_simulators_[i].imageCallback(
                        *sim_data.images[i],
                        time
                    );

                    if (corrupted_camera_images_.size() < num_cameras_) {
                        // allocate memory for the corrupted camera images and
                        // set them to 0
                        corrupted_camera_images_.emplace_back(
                            std::make_shared<Image>(sim_data.images[i]->size())
                        );
                        corrupted_camera_images_[i]->setTo(0.);
                    }

                    camera_simulator_success =
                        camera_simulators_[i].imageCallback(
                            *sim_data.images[i],
                            time,
                            corrupted_camera_images_[i]
                        );
                }
            }

            // publish the simulation data + events
            {
                auto t =
                    timers_event_simulator_[TimerEventSimulator::visualization]
                        .timeScope();
                publishData(
                    sim_data,
                    events,
                    camera_simulator_success,
                    corrupted_camera_images_
                );
            }
        } else {
            {
                // just forward the simulation data to the publisher
                auto t =
                    timers_event_simulator_[TimerEventSimulator::visualization]
                        .timeScope();
                publishData(
                    sim_data,
                    {},
                    camera_simulator_success,
                    corrupted_camera_images_
                );
            }
        }
    }

    void Simulator::publishData(
        const SimulatorData& sim_data,
        const EventsVector& events,
        bool camera_simulator_success,
        const ImagePtrVector& camera_images
    ) {
        if (publishers_.empty()) {
            LOG_FIRST_N(WARNING, 1) << "No publisher available";
            return;
        }
@@ -74,66 +96,61 @@ void Simulator::publishData(const SimulatorData& sim_data,
        const TransformationVector& T_W_Cs = sim_data.groundtruth.T_W_Cs;
        const ze::CameraRig::Ptr& camera_rig = sim_data.camera_rig;

        // Publish the new data (events, images, depth maps, poses, point
        // clouds, etc.)
        if (!events.empty())
            for (const Publisher::Ptr& publisher : publishers_)
                publisher->eventsCallback(events);

        if (sim_data.poses_updated)
            for (const Publisher::Ptr& publisher : publishers_)
                publisher->poseCallback(T_W_B, T_W_Cs, time);

        if (sim_data.twists_updated)
            for (const Publisher::Ptr& publisher : publishers_)
                publisher->twistCallback(
                    sim_data.groundtruth.angular_velocities_,
                    sim_data.groundtruth.linear_velocities_,
                    time
                );

        if (sim_data.imu_updated)
            for (const Publisher::Ptr& publisher : publishers_)
                publisher->imuCallback(
                    sim_data.specific_force_corrupted,
                    sim_data.angular_velocity_corrupted,
                    time
                );

        if (camera_rig)
            for (const Publisher::Ptr& publisher : publishers_)
                publisher->cameraInfoCallback(camera_rig, time);

        if (sim_data.images_updated) {
            for (const Publisher::Ptr& publisher : publishers_) {
                publisher->imageCallback(sim_data.images, time);

                if (camera_simulator_success && time >= exposure_time_) {
                    // the images should be timestamped at mid-exposure
                    const Time mid_exposure_time = time - 0.5 * exposure_time_;
                    publisher->imageCorruptedCallback(
                        camera_images,
                        mid_exposure_time
                    );
                }
            }
        }

        if (sim_data.depthmaps_updated)
            for (const Publisher::Ptr& publisher : publishers_)
                publisher->depthmapCallback(sim_data.depthmaps, time);

        if (sim_data.optic_flows_updated)
            for (const Publisher::Ptr& publisher : publishers_)
                publisher->opticFlowCallback(sim_data.optic_flows, time);

        if (sim_data.depthmaps_updated && !events.empty()) {
            PointCloudVector pointclouds(num_cameras_);
            for (size_t i = 0; i < num_cameras_; ++i) {
                CHECK(sim_data.depthmaps[i]);
                pointclouds[i] = eventsToPointCloud(
                    events[i],
                    *sim_data.depthmaps[i],
                    camera_rig->atShared(i)
                );
            }
            for (const Publisher::Ptr& publisher : publishers_)
                publisher->pointcloudCallback(pointclouds, time);
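The mid-exposure stamping above is a half-exposure shift: the synthesized frame integrates irradiance over [t - T_exp, t], so its representative timestamp is

    \[
        t_{\mathrm{img}} \;=\; t \;-\; \tfrac{1}{2}\, T_{\mathrm{exp}}.
    \]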

View File

@@ -1,54 +1,50 @@
#include <esim/esim/event_simulator.hpp>

#include <fstream>
#include <opencv2/highgui/highgui.hpp>
#include <ze/common/file_utils.hpp>
#include <ze/common/path_utils.hpp>
#include <ze/common/string_utils.hpp>
#include <ze/common/test_entrypoint.hpp>
#include <ze/common/time_conversions.hpp>
#include <ze/matplotlib/matplotlibcpp.hpp>

#define USE_OPENCV

namespace event_camera_simulator {

    class CSVImageLoader {
    public:
        CSVImageLoader(const std::string& path_to_data_folder)
            : path_to_data_folder_(path_to_data_folder) {
            images_in_str_.open(ze::joinPath(path_to_data_folder, "images.csv"));
            CHECK(images_in_str_.is_open());
        }

        bool next(int64_t& stamp, Image& img) {
            std::string line;
            if (!getline(images_in_str_, line)) {
                LOG(INFO) << "Finished reading all the images in the folder";
                return false;
            }

            if ('%' != line.at(0) && '#' != line.at(0)) {
                std::vector<std::string> items =
                    ze::splitString(line, delimiter_);
                stamp = std::stoll(items[0]);

                const std::string& path_to_image = ze::joinPath(
                    path_to_data_folder_,
                    "frame",
                    "cam_0",
                    items[1]
                );

                img = cv::imread(path_to_image, 0);
                CHECK(img.data) << "Error loading image: " << path_to_image;

                return true;
            } else {
                return next(stamp, img);
            }
        }
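From the parsing logic above one can infer the expected images.csv layout. The comma delimiter and file names below are assumptions; only the two-column structure, the '%'/'#' comment convention, and the <data_folder>/frame/cam_0/<filename> resolution are visible in the code:

    % timestamp_ns, image_filename
    0,frame_00000.png
    33333333,frame_00001.png
    66666666,frame_00002.png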
@@ -61,8 +57,7 @@ private:
} // namespace event_camera_simulator

std::string getTestDataDir(const std::string& dataset_name) {
    using namespace ze;

    const char* datapath_dir = std::getenv("ESIM_TEST_DATA_PATH");
@@ -77,8 +72,7 @@ std::string getTestDataDir(const std::string& dataset_name)
    return path;
}

TEST(EventSimulator, testImageReconstruction) {
    using namespace event_camera_simulator;

    // Load image sequence from folder
@@ -94,22 +88,20 @@ TEST(EventSimulator, testImageReconstruction)
    event_sim_config.log_eps = 0.001;
    EventSimulator simulator(event_sim_config);

    LOG(INFO) << "Testing event camera simulator with C+ = "
              << event_sim_config.Cp << ", C- = " << event_sim_config.Cm;

    const ImageFloatType max_reconstruction_error =
        std::max(event_sim_config.Cp, event_sim_config.Cm);

    bool is_first_image = true;
    Image I, L, L_reconstructed;
    int64_t stamp;
    while (reader.next(stamp, I)) {
        I.convertTo(I, cv::DataType<ImageFloatType>::type, 1. / 255.);
        cv::log(event_sim_config.log_eps + I, L);

        if (is_first_image) {
            // Initialize reconstructed image from the ground truth image
            L_reconstructed = L.clone();
            is_first_image = false;
@@ -118,19 +110,19 @@ TEST(EventSimulator, testImageReconstruction)
        Events events = simulator.imageCallback(I, stamp);

        // Reconstruct next image from previous one using the events in between
        for (const Event& e : events) {
            ImageFloatType pol = e.pol ? 1. : -1.;
            const ImageFloatType C =
                e.pol ? event_sim_config.Cp : event_sim_config.Cm;
            L_reconstructed(e.y, e.x) += pol * C;
        }

        // Check that the reconstruction error is bounded by the contrast
        // thresholds
        for (int y = 0; y < I.rows; ++y) {
            for (int x = 0; x < I.cols; ++x) {
                const ImageFloatType reconstruction_error =
                    std::fabs(L_reconstructed(y, x) - L(y, x));
                VLOG_EVERY_N(2, I.rows * I.cols) << reconstruction_error;
                EXPECT_LE(reconstruction_error, max_reconstruction_error);
            }
@@ -147,9 +139,7 @@ TEST(EventSimulator, testImageReconstruction)
    }
}
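The test checks the standard event-camera reconstruction identity: integrating the signed events on top of the first log-image reproduces the log-intensity up to one contrast step,

    \[
        \hat{L}(x, y; t) \;=\; L(x, y; 0)
            \;+\!\! \sum_{k \,:\, (x_k, y_k) = (x, y)} \!\! p_k\, C_{p_k},
        \qquad
        |\hat{L} - L| \;\le\; \max(C^{+}, C^{-}),
    \]

which is exactly the bound max_reconstruction_error asserted above.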
TEST(EventSimulator, testEvolutionReconstructionError) {
    using namespace event_camera_simulator;

    // Load image sequence from folder
@@ -166,21 +156,19 @@ TEST(EventSimulator, testEvolutionReconstructionError)
    EventSimulator simulator(event_sim_config);
    const double contrast_bias = 0.0;

    LOG(INFO) << "Testing event camera simulator with C+ = "
              << event_sim_config.Cp << ", C- = " << event_sim_config.Cm;

    std::vector<ze::real_t> times, mean_errors, stddev_errors;
    bool is_first_image = true;
    Image I, L, L_reconstructed;
    int64_t stamp;
    while (reader.next(stamp, I)) {
        LOG_EVERY_N(INFO, 50) << "t = " << ze::nanosecToSecTrunc(stamp) << " s";
        I.convertTo(I, cv::DataType<ImageFloatType>::type, 1. / 255.);
        cv::log(event_sim_config.log_eps + I, L);

        if (is_first_image) {
            // Initialize reconstructed image from the ground truth image
            L_reconstructed = L.clone();
            is_first_image = false;
@@ -189,21 +177,21 @@ TEST(EventSimulator, testEvolutionReconstructionError)
        Events events = simulator.imageCallback(I, stamp);

        // Reconstruct next image from previous one using the events in between
        for (const Event& e : events) {
            ImageFloatType pol = e.pol ? 1. : -1.;
            ImageFloatType C =
                e.pol ? event_sim_config.Cp : event_sim_config.Cm;
            C += contrast_bias;
            L_reconstructed(e.y, e.x) += pol * C;
        }

        // Compute the mean and standard deviation of the reconstruction
        // error over the whole image
        Image error;
        cv::absdiff(L, L_reconstructed, error);
        cv::Scalar mean_error, stddev_error;
        cv::meanStdDev(error, mean_error, stddev_error);
        VLOG(1) << "Mean error: " << mean_error << ", Stddev: " << stddev_error;

        times.push_back(ze::nanosecToSecTrunc(stamp));
        mean_errors.push_back(mean_error[0]);
View File

@@ -1,18 +1,18 @@
#pragma once

#include <memory>
#include <opencv2/core/core.hpp>
#include <ze/cameras/camera_rig.hpp>
#include <ze/common/transformation.hpp>

// FloatType defines the floating point accuracy (single or double precision)
// for the geometric operations (computing rotation matrices, point projection,
// etc.). This should typically be double precision (highest accuracy).
#define FloatType ze::real_t

// ImageFloatType defines the floating point accuracy (single or double
// precision) of the intensity images (and depth images). Single precision
// should be enough there in most cases.
#define ImageFloatType float

namespace event_camera_simulator {
@@ -53,16 +53,12 @@ using OpticFlowPtrVector = std::vector<OpticFlowPtr>;
    using Camera = ze::Camera;

    struct Event {
        Event(uint16_t x, uint16_t y, Time t, bool pol)
            : x(x),
              y(y),
              t(t),
              pol(pol) {}

        uint16_t x;
        uint16_t y;
@@ -74,15 +70,14 @@ using Events = std::vector<Event>;
    using EventsVector = std::vector<Events>;
    using EventsPtr = std::shared_ptr<Events>;
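As a quick illustration of the Event layout (pixel coordinates first, then the nanosecond timestamp and the boolean polarity; the values here are made up):

    Event e(120, 45, ze::secToNanosec(0.5), true); // ON event at (120, 45), t = 0.5 s
    Events evs{e};                                 // Events = std::vector<Event>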
    struct PointXYZRGB {
        PointXYZRGB(
            FloatType x, FloatType y, FloatType z, int red, int green, int blue
        )
            : xyz(x, y, z),
              rgb(red, green, blue) {}

        PointXYZRGB(const Vector3& xyz): xyz(xyz) {}

        PointXYZRGB(const Vector3& xyz, const Vector3i& rgb)
            : xyz(xyz),
@@ -95,8 +90,7 @@ struct PointXYZRGB
    using PointCloud = std::vector<PointXYZRGB>;
    using PointCloudVector = std::vector<PointCloud>;

    struct SimulatorData {
        EIGEN_MAKE_ALIGNED_OPERATOR_NEW

        //! Nanosecond timestamp.
@@ -118,12 +112,12 @@ struct SimulatorData
        //! corrupted by noise and bias.
        Vector3 specific_force_corrupted;

        //! The angular velocity (in the body frame) corrupted by noise and
        //! bias.
        Vector3 angular_velocity_corrupted;

        //! Groundtruth states.
        struct Groundtruth {
            EIGEN_MAKE_ALIGNED_OPERATOR_NEW

            //! Pose of the body (i.e. the IMU) expressed in the world frame.
@@ -136,8 +130,8 @@ struct SimulatorData
            //! Poses of the cameras in the rig expressed in the world frame.
            TransformationVector T_W_Cs;

            //! Linear and angular velocities (i.e. twists) of the cameras in
            //! the rig, expressed in each camera's local coordinate frame.
            LinearVelocityVector linear_velocities_;
            AngularVelocityVector angular_velocities_;
@@ -146,6 +140,7 @@ struct SimulatorData
            std::vector<LinearVelocity> linear_velocity_obj_;
            std::vector<AngularVelocity> angular_velocity_obj_;
        };

        Groundtruth groundtruth;

        // Flags to indicate whether a value has been updated or not

View File

@@ -8,31 +8,35 @@ namespace ze {
namespace event_camera_simulator {

    inline double degToRad(double deg) {
        return deg * CV_PI / 180.0;
    }

    inline double hfovToFocalLength(double hfov_deg, int W) {
        return 0.5 * static_cast<double>(W)
             / std::tan(0.5 * degToRad(hfov_deg));
    }
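As a sanity check of the formula above: a 640-pixel-wide image with a 90° horizontal field of view gives

    \[
        f \;=\; \frac{W}{2\,\tan(\mathrm{hfov}/2)}
          \;=\; \frac{640}{2\,\tan 45^\circ} \;=\; 320 \text{ px}.
    \]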
    CalibrationMatrix calibrationMatrixFromCamera(const Camera::Ptr& camera);

    PointCloud eventsToPointCloud(
        const Events& events,
        const Depthmap& depthmap,
        const ze::Camera::Ptr& camera
    );

    FloatType maxMagnitudeOpticFlow(const OpticFlowPtr& flow);

    FloatType maxPredictedAbsBrightnessChange(
        const ImagePtr& I, const OpticFlowPtr& flow
    );

    void gaussianBlur(ImagePtr& I, FloatType sigma);

    // Helper class to compute optic flow from a twist vector and depth map
    // Precomputes a lookup table for pixel -> bearing vector correspondences
    // to accelerate the computation
    class OpticFlowHelper {
    public:
        EIGEN_MAKE_ALIGNED_OPERATOR_NEW
@@ -40,15 +44,24 @@ public:
        OpticFlowHelper(const ze::Camera::Ptr& camera);

        void computeFromDepthAndTwist(
            const ze::Vector3& w_WC,
            const ze::Vector3& v_WC,
            const DepthmapPtr& depthmap,
            OpticFlowPtr& flow
        );

        void computeFromDepthCamTwistAndObjDepthAndTwist(
            const ze::Vector3& w_WC,
            const ze::Vector3& v_WC,
            const DepthmapPtr& depthmap,
            const ze::Vector3& r_COBJ,
            const ze::Vector3& w_WOBJ,
            const ze::Vector3& v_WOBJ,
            OpticFlowPtr& flow
        );

    private:
        void precomputePixelToBearingLookupTable();

        ze::Camera::Ptr camera_;

View File

@@ -1,67 +1,79 @@
#include <esim/common/utils.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <ze/cameras/camera_models.hpp>

namespace event_camera_simulator {

    OpticFlowHelper::OpticFlowHelper(const ze::Camera::Ptr& camera)
        : camera_(camera) {
        CHECK(camera_);
        precomputePixelToBearingLookupTable();
    }

    void OpticFlowHelper::precomputePixelToBearingLookupTable() {
        // points_C is a matrix containing the coordinates of each pixel
        // coordinate in the image plane
        ze::Keypoints points_C(2, camera_->width() * camera_->height());
        for (int y = 0; y < camera_->height(); ++y)
            for (int x = 0; x < camera_->width(); ++x)
                points_C.col(x + camera_->width() * y) = ze::Keypoint(x, y);

        bearings_C_ = camera_->backProjectVectorized(points_C);
        bearings_C_.array().rowwise() /= bearings_C_.row(2).array();
    }

    void OpticFlowHelper::computeFromDepthAndTwist(
        const ze::Vector3& w_WC,
        const ze::Vector3& v_WC,
        const DepthmapPtr& depthmap,
        OpticFlowPtr& flow
    ) {
        CHECK(depthmap);
        CHECK_EQ(depthmap->rows, camera_->height());
        CHECK_EQ(depthmap->cols, camera_->width());
        CHECK(depthmap->isContinuous());

        const ze::Vector3 w_CW =
            -w_WC; // rotation speed of the world wrt the camera
        const ze::Vector3 v_CW = -v_WC; // speed of the world wrt the camera
        const ze::Matrix33 R_CW = ze::skewSymmetric(w_CW);

        Eigen::Map<const Eigen::
                       Matrix<ImageFloatType, 1, Eigen::Dynamic, Eigen::RowMajor>>
            depths(
                depthmap->ptr<ImageFloatType>(),
                1,
                depthmap->rows * depthmap->cols
            );

        ze::Positions Xs = bearings_C_;
        Xs.array().rowwise() *= depths.cast<FloatType>().array();

        ze::Matrix6X dproject_dX = camera_->dProject_dLandmarkVectorized(Xs);

        for (int y = 0; y < camera_->height(); ++y) {
            for (int x = 0; x < camera_->width(); ++x) {
                const Vector3 X = Xs.col(x + camera_->width() * y);
                ze::Matrix31 dXdt = R_CW * X + v_CW;
                ze::Vector2 flow_vec =
                    Eigen::Map<ze::Matrix23>(
                        dproject_dX.col(x + camera_->width() * y).data()
                    )
                    * dXdt;
                (*flow)(y, x) = cv::Vec<FloatType, 2>(flow_vec(0), flow_vec(1));
            }
        }
    }
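The loop above evaluates the classic rigid-scene motion field. For a static 3D point X expressed in the moving camera frame, with camera twist (ω_WC, v_WC), the point's apparent velocity and the resulting image-plane flow are

    \[
        \dot{X} \;=\; (-\omega_{WC}) \times X \;-\; v_{WC},
        \qquad
        \mathrm{flow}(x, y) \;=\;
            \left.\frac{\partial \pi}{\partial X}\right|_{X} \dot{X},
    \]

where π is the camera projection function; the Jacobians ∂π/∂X are evaluated for all pixels at once by dProject_dLandmarkVectorized, and X = depth · bearing with the bearings normalized to unit depth in the lookup table.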
    void OpticFlowHelper::computeFromDepthCamTwistAndObjDepthAndTwist(
        const ze::Vector3& w_WC,
        const ze::Vector3& v_WC,
        const DepthmapPtr& depthmap,
        const ze::Vector3& r_COBJ,
        const ze::Vector3& w_WOBJ,
        const ze::Vector3& v_WOBJ,
        OpticFlowPtr& flow
    ) {
        CHECK(depthmap);
        CHECK_EQ(depthmap->rows, camera_->height());
        CHECK_EQ(depthmap->cols, camera_->width());
@@ -70,107 +82,117 @@ void OpticFlowHelper::computeFromDepthCamTwistAndObjDepthAndTwist(const ze::Vect
        const ze::Matrix33 w_WC_tilde = ze::skewSymmetric(w_WC);
        const ze::Matrix33 w_WOBJ_tilde = ze::skewSymmetric(w_WOBJ);

        Eigen::Map<const Eigen::
                       Matrix<ImageFloatType, 1, Eigen::Dynamic, Eigen::RowMajor>>
            depths(
                depthmap->ptr<ImageFloatType>(),
                1,
                depthmap->rows * depthmap->cols
            );

        ze::Positions Xs = bearings_C_;
        Xs.array().rowwise() *= depths.cast<FloatType>().array();

        ze::Matrix6X dproject_dX = camera_->dProject_dLandmarkVectorized(Xs);

        for (int y = 0; y < camera_->height(); ++y) {
            for (int x = 0; x < camera_->width(); ++x) {
                const Vector3 r_CX = Xs.col(x + camera_->width() * y);
                const Vector3 r_OBJX = r_CX - r_COBJ;
                ze::Matrix31 dXdt =
                    v_WOBJ - v_WC - w_WC_tilde * r_CX + w_WOBJ_tilde * r_OBJX;
                ze::Vector2 flow_vec =
                    Eigen::Map<ze::Matrix23>(
                        dproject_dX.col(x + camera_->width() * y).data()
                    )
                    * dXdt;
                (*flow)(y, x) = cv::Vec<FloatType, 2>(flow_vec(0), flow_vec(1));
            }
        }
    }

    FloatType maxMagnitudeOpticFlow(const OpticFlowPtr& flow) {
        CHECK(flow);
        FloatType max_squared_magnitude = 0;
        for (int y = 0; y < flow->rows; ++y) {
            for (int x = 0; x < flow->cols; ++x) {
                const FloatType squared_magnitude =
                    cv::norm((*flow)(y, x), cv::NORM_L2SQR);
                if (squared_magnitude > max_squared_magnitude)
                    max_squared_magnitude = squared_magnitude;
            }
        }
        return std::sqrt(max_squared_magnitude);
    }

    FloatType maxPredictedAbsBrightnessChange(
        const ImagePtr& I, const OpticFlowPtr& flow
    ) {
        const size_t height = I->rows;
        const size_t width = I->cols;

        Image Ix, Iy; // horizontal/vertical gradients of I
        // the factor 1/8 accounts for the scaling introduced by the Sobel
        // filter mask
        // cv::Sobel(*I, Ix, cv::DataType<ImageFloatType>::type, 1, 0, 3, 1./8.);
        // cv::Sobel(*I, Iy, cv::DataType<ImageFloatType>::type, 0, 1, 3, 1./8.);

        // the factor 1/32 accounts for the scaling introduced by the Scharr
        // filter mask
        cv::Scharr(*I, Ix, cv::DataType<ImageFloatType>::type, 1, 0, 1. / 32.);
        cv::Scharr(*I, Iy, cv::DataType<ImageFloatType>::type, 0, 1, 1. / 32.);

        Image Lx, Ly; // horizontal/vertical gradients of log(I).
                      // d(logI)/dx = 1/I * dI/dx
        static const ImageFloatType eps =
            1e-3; // small additive term to avoid problems at I=0
        cv::divide(Ix, *I + eps, Lx);
        cv::divide(Iy, *I + eps, Ly);

        Image dLdt(height, width);
        for (int y = 0; y < height; ++y) {
            for (int x = 0; x < width; ++x) {
                // dL/dt ~= - <nablaL, flow>
                const ImageFloatType dLdt_at_xy =
                    Lx(y, x) * (*flow)(y, x)[0]
                    + Ly(y, x) * (*flow)(y, x)[1];
                // "-" sign ignored since we are interested in
                // the absolute value...
                dLdt(y, x) = std::fabs(dLdt_at_xy);
            }
        }

        double min_dLdt, max_dLdt;
        int min_idx, max_idx;
        cv::minMaxIdx(dLdt, &min_dLdt, &max_dLdt, &min_idx, &max_idx);

        return static_cast<FloatType>(max_dLdt);
    }
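The predicted change follows from the brightness-constancy relation used throughout. With L = log(I + ε) and flow u̇,

    \[
        \frac{\partial L}{\partial t} \;\approx\;
            -\,\langle \nabla L, \dot{u} \rangle,
        \qquad
        \nabla L \;=\; \frac{\nabla I}{I + \varepsilon},
    \]

and the function returns the maximum over all pixels of |∂L/∂t| (the sign is dropped because only the magnitude matters).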
    void gaussianBlur(ImagePtr& I, FloatType sigma) {
        cv::GaussianBlur(*I, *I, cv::Size(15, 15), sigma, sigma);
    }

    CalibrationMatrix calibrationMatrixFromCamera(const Camera::Ptr& camera) {
        CHECK(camera);
        const ze::VectorX params = camera->projectionParameters();
        CalibrationMatrix K;
        K << params(0), 0, params(2), 0, params(1), params(3), 0, 0, 1;
        return K;
    }
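With params = (f_x, f_y, c_x, c_y) as consumed by the comma-initializer above, the assembled matrix is the usual pinhole calibration matrix:

    \[
        K \;=\; \begin{pmatrix}
            f_x & 0   & c_x \\
            0   & f_y & c_y \\
            0   & 0   & 1
        \end{pmatrix}.
    \]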
    PointCloud eventsToPointCloud(
        const Events& events,
        const Depthmap& depthmap,
        const ze::Camera::Ptr& camera
    ) {
        PointCloud pcl_camera;
        for (const Event& ev : events) {
            Vector3 X_c = camera->backProject(ze::Keypoint(ev.x, ev.y));
            X_c[0] /= X_c[2];
            X_c[1] /= X_c[2];
View File

@@ -1,13 +1,12 @@
#include <esim/common/utils.hpp>
#include <ze/cameras/camera_rig.hpp>
#include <ze/common/file_utils.hpp>
#include <ze/common/path_utils.hpp>
#include <ze/common/random.hpp>
#include <ze/common/string_utils.hpp>
#include <ze/common/test_entrypoint.hpp>

std::string getTestDataDir(const std::string& dataset_name) {
    using namespace ze;

    const char* datapath_dir = std::getenv("ESIM_TEST_DATA_PATH");
@@ -24,28 +23,29 @@ std::string getTestDataDir(const std::string& dataset_name)
namespace event_camera_simulator {

    void computeOpticFlowFiniteDifference(
        const ze::Camera::Ptr& camera,
        const ze::Vector3& angular_velocity,
        const ze::Vector3& linear_velocity,
        const DepthmapPtr& depth,
        OpticFlowPtr& flow
    ) {
        CHECK(flow);
        CHECK_EQ(flow->rows, camera->height());
        CHECK_EQ(flow->cols, camera->width());

        const FloatType dt = 0.001;

        for (int y = 0; y < flow->rows; ++y) {
            for (int x = 0; x < flow->cols; ++x) {
                ze::Keypoint u_t(x, y);
                ze::Bearing f = camera->backProject(u_t);
                const FloatType z = static_cast<FloatType>((*depth)(y, x));
                ze::Position Xc_t =
                    z * ze::Position(f[0] / f[2], f[1] / f[2], 1.);

                ze::Transformation::Rotation dR =
                    ze::Transformation::Rotation::exp(-angular_velocity * dt);
                ze::Transformation::Position dT = -linear_velocity * dt;

                // Transform Xc(t) to Xc(t+dt)
@@ -67,34 +67,38 @@ void computeOpticFlowFiniteDifference(const ze::Camera::Ptr& camera,
            }
        }

    void computeOpticFlowFiniteDifference(
        const ze::Camera::Ptr& camera,
        const ze::Vector3& angular_velocity,
        const ze::Vector3& linear_velocity,
        const DepthmapPtr& depth,
        const ze::Vector3& r_COBJ,
        const ze::Vector3& angular_velocity_obj,
        const ze::Vector3& linear_velocity_obj,
        OpticFlowPtr& flow
    ) {
        CHECK(flow);
        CHECK_EQ(flow->rows, camera->height());
        CHECK_EQ(flow->cols, camera->width());

        const FloatType dt = 0.001;

        for (int y = 0; y < flow->rows; ++y) {
            for (int x = 0; x < flow->cols; ++x) {
                ze::Keypoint u_t(x, y);
                ze::Bearing f = camera->backProject(u_t);
                const FloatType z = static_cast<FloatType>((*depth)(y, x));
                ze::Position Xc_t =
                    z * ze::Position(f[0] / f[2], f[1] / f[2], 1.);
                ze::Position r_OBJX = Xc_t - r_COBJ;
                ze::Matrix33 w_WOBJ_tilde =
                    ze::skewSymmetric(angular_velocity_obj);

                ze::Transformation::Rotation dR =
                    ze::Transformation::Rotation::exp(-angular_velocity * dt);
                ze::Transformation::Position dT = linear_velocity_obj * dt
                    - linear_velocity * dt
                    + w_WOBJ_tilde * r_OBJX * dt;

                // Transform Xc(t) to Xc(t+dt)
                ze::Transformation T_tdt_t;
@@ -115,34 +119,32 @@ void computeOpticFlowFiniteDifference(const ze::Camera::Ptr& camera,
        }
    }

} // namespace event_camera_simulator
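The finite-difference reference approximates the flow by explicitly warping each back-projected point over a small time step δt and re-projecting:

    \[
        \dot{u} \;\approx\; \frac{\pi\!\left(R\,X_c(t) + T\right) - u}{\delta t},
        \qquad
        R = \exp(-\omega\,\delta t), \quad T = -v\,\delta t,
    \]

which the test below compares against the analytic Jacobian-based flow to within the 0.1 tolerance (in the flow units used by the test).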
TEST(Utils, testOpticFlowComputation) TEST(Utils, testOpticFlowComputation) {
{
using namespace event_camera_simulator; using namespace event_camera_simulator;
// Load camera calib from folder // Load camera calib from folder
const std::string path_to_data_folder = getTestDataDir("camera_calibs"); const std::string path_to_data_folder = getTestDataDir("camera_calibs");
ze::CameraRig::Ptr camera_rig ze::CameraRig::Ptr camera_rig = ze::cameraRigFromYaml(
= ze::cameraRigFromYaml(ze::joinPath(path_to_data_folder, "pinhole_mono.yaml")); ze::joinPath(path_to_data_folder, "pinhole_mono.yaml")
);
CHECK(camera_rig); CHECK(camera_rig);
const ze::Camera::Ptr camera = camera_rig->atShared(0); const ze::Camera::Ptr camera = camera_rig->atShared(0);
cv::Size sensor_size(camera->width(), camera->height()); cv::Size sensor_size(camera->width(), camera->height());
OpticFlowPtr flow_analytic = OpticFlowPtr flow_analytic = std::make_shared<OpticFlow>(sensor_size);
std::make_shared<OpticFlow>(sensor_size);
// Sample random depth map // Sample random depth map
const ImageFloatType z_mean = 5.0; const ImageFloatType z_mean = 5.0;
const ImageFloatType z_stddev = 0.5; const ImageFloatType z_stddev = 0.5;
DepthmapPtr depth = std::make_shared<Depthmap>(sensor_size); DepthmapPtr depth = std::make_shared<Depthmap>(sensor_size);
for(int y=0; y<sensor_size.height; ++y) for (int y = 0; y < sensor_size.height; ++y) {
{ for (int x = 0; x < sensor_size.width; ++x) {
for(int x=0; x<sensor_size.width; ++x) (*depth)(y, x) =
{ ze::sampleNormalDistribution(true, z_mean, z_stddev);
(*depth)(y,x) = ze::sampleNormalDistribution(true, z_mean, z_stddev);
} }
} }
@ -157,22 +159,36 @@ TEST(Utils, testOpticFlowComputation)
// Compute optic flow on the image plane according // Compute optic flow on the image plane according
// to the sampled angular/linear velocity // to the sampled angular/linear velocity
OpticFlowHelper optic_flow_helper(camera); OpticFlowHelper optic_flow_helper(camera);
optic_flow_helper.computeFromDepthAndTwist(angular_velocity, linear_velocity, optic_flow_helper.computeFromDepthAndTwist(
depth, flow_analytic); angular_velocity,
linear_velocity,
depth,
flow_analytic
);
OpticFlowPtr flow_finite_diff = OpticFlowPtr flow_finite_diff = std::make_shared<OpticFlow>(sensor_size);
std::make_shared<OpticFlow>(sensor_size);
computeOpticFlowFiniteDifference(camera, angular_velocity, linear_velocity, computeOpticFlowFiniteDifference(
depth, flow_finite_diff); camera,
angular_velocity,
linear_velocity,
depth,
flow_finite_diff
);
// Check that the analytical flow and finite-difference flow match // Check that the analytical flow and finite-difference flow match
for(int y=0; y<sensor_size.height; ++y) for (int y = 0; y < sensor_size.height; ++y) {
{ for (int x = 0; x < sensor_size.width; ++x) {
for(int x=0; x<sensor_size.width; ++x) EXPECT_NEAR(
{ (*flow_analytic)(y, x)[0],
EXPECT_NEAR((*flow_analytic)(y,x)[0], (*flow_finite_diff)(y,x)[0], 0.1); (*flow_finite_diff)(y, x)[0],
EXPECT_NEAR((*flow_analytic)(y,x)[1], (*flow_finite_diff)(y,x)[1], 0.1); 0.1
);
EXPECT_NEAR(
(*flow_analytic)(y, x)[1],
(*flow_finite_diff)(y, x)[1],
0.1
);
} }
} }
@ -183,7 +199,11 @@ TEST(Utils, testOpticFlowComputation)
// sample random obj position and linear and angular velocity // sample random obj position and linear and angular velocity
ze::Vector3 r_COBJ; ze::Vector3 r_COBJ;
r_COBJ.setRandom(); r_COBJ.setRandom();
r_COBJ(2) = ze::sampleNormalDistribution(true, z_mean, z_stddev); // assume object is in front of camera r_COBJ(2) = ze::sampleNormalDistribution(
true,
z_mean,
z_stddev
); // assume object is in front of camera
ze::Vector3 angular_velocity_obj, linear_velocity_obj; ze::Vector3 angular_velocity_obj, linear_velocity_obj;
angular_velocity_obj.setRandom(); angular_velocity_obj.setRandom();
@ -191,21 +211,40 @@ TEST(Utils, testOpticFlowComputation)
// Compute optic flow on the image plane according // Compute optic flow on the image plane according
// to the sampled angular/linear velocity // to the sampled angular/linear velocity
optic_flow_helper.computeFromDepthCamTwistAndObjDepthAndTwist(angular_velocity, linear_velocity, depth, optic_flow_helper.computeFromDepthCamTwistAndObjDepthAndTwist(
r_COBJ, angular_velocity_obj, linear_velocity_obj, angular_velocity,
flow_analytic); linear_velocity,
depth,
r_COBJ,
angular_velocity_obj,
linear_velocity_obj,
flow_analytic
);
computeOpticFlowFiniteDifference(camera, angular_velocity, linear_velocity, depth, computeOpticFlowFiniteDifference(
r_COBJ, angular_velocity_obj, linear_velocity_obj, camera,
flow_finite_diff); angular_velocity,
linear_velocity,
depth,
r_COBJ,
angular_velocity_obj,
linear_velocity_obj,
flow_finite_diff
);
// Check that the analytical flow and finite-difference flow match // Check that the analytical flow and finite-difference flow match
for(int y=0; y<sensor_size.height; ++y) for (int y = 0; y < sensor_size.height; ++y) {
{ for (int x = 0; x < sensor_size.width; ++x) {
for(int x=0; x<sensor_size.width; ++x) EXPECT_NEAR(
{ (*flow_analytic)(y, x)[0],
EXPECT_NEAR((*flow_analytic)(y,x)[0], (*flow_finite_diff)(y,x)[0], 0.1); (*flow_finite_diff)(y, x)[0],
EXPECT_NEAR((*flow_analytic)(y,x)[1], (*flow_finite_diff)(y,x)[1], 0.1); 0.1
);
EXPECT_NEAR(
(*flow_analytic)(y, x)[1],
(*flow_finite_diff)(y, x)[1],
0.1
);
} }
} }
} }
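
For readers checking the test against the helper it exercises: the finite-difference baseline displaces each back-projected 3D point by the sampled twist over a small dt and re-projects it. A minimal sketch of that idea, assuming unit-depth bearings from backProject() and the dR/dT increments computed at the top of this file (the project() call and exact helper body are illustrative, not the committed implementation):

    // Finite-difference flow at pixel (x, y) -- illustrative sketch only:
    ze::Vector3 f = camera->backProject(ze::Keypoint(x, y)); // ray of pixel
    ze::Vector3 Xc = ((*depth)(y, x) / f[2]) * f;   // 3D point at time t
    ze::Vector3 Xc_dt = dR.rotate(Xc) + dT;         // same point at t + dt
    ze::Keypoint du = camera->project(Xc_dt) - camera->project(Xc);
    (*flow_finite_diff)(y, x) = du / dt;            // optic flow in px/s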

View File

@ -1,10 +1,9 @@
#pragma once

#include <atomic>
#include <esim/common/types.hpp>
#include <functional>
#include <memory>
#include <ze/common/macros.hpp>
#include <ze/common/noncopyable.hpp>
#include <ze/common/signal_handler.hpp>

@ -16,19 +15,13 @@ class Mat;
namespace event_camera_simulator {

    using Callback = std::function<void(const SimulatorData& sim_data)>;

    enum class DataProviderType { RendererOnline, Folder, Rosbag };

    //! A data provider registers to a data source and triggers callbacks when
    //! new data is available.
    class DataProviderBase : ze::Noncopyable {
    public:
        ZE_POINTER_TYPEDEFS(DataProviderBase);

@ -39,10 +32,12 @@ public:
        //! Process all callbacks. Waits until callback is processed.
        void spin();

        //! Read next data field and process callback. Returns false when the
        //! dataset is finished.
        virtual bool spinOnce() = 0;

        //! False if there is no more data to process or there was a shutdown
        //! signal.
        virtual bool ok() const = 0;

        //! Pause data provider.

@ -58,7 +53,9 @@ public:
        virtual size_t numCameras() const = 0;

        //! Returns the camera rig
        ze::CameraRig::Ptr getCameraRig() {
            return camera_rig_;
        }

    protected:
        DataProviderType type_;

@ -68,7 +65,8 @@ protected:
        ze::CameraRig::Ptr camera_rig_;

    private:
        ze::SimpleSigtermHandler
            signal_handler_; //!< Sets running_ to false when Ctrl-C is pressed.
    };

} // namespace event_camera_simulator
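
As a usage note for this interface: a consumer registers a Callback and then spins the provider until the data source is exhausted. A minimal sketch (the lambda body is illustrative):

    // Hypothetical consumer of DataProviderBase:
    DataProviderBase::Ptr provider = /* e.g. loadDataProviderFromGflags() */;
    provider->registerCallback([](const SimulatorData& sim_data) {
        if (sim_data.images_updated)
            VLOG(1) << "New frame at t = " << sim_data.timestamp;
    });
    provider->spin(); // returns once ok() is false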

View File

@ -1,10 +1,10 @@
#pragma once

#include <esim/data_provider/data_provider_base.hpp>
#include <gflags/gflags.h>

namespace event_camera_simulator {

    DataProviderBase::Ptr loadDataProviderFromGflags();

} // namespace event_camera_simulator

View File

@ -1,20 +1,18 @@
#pragma once

#include <esim/data_provider/data_provider_base.hpp>
#include <fstream>
#include <map>
#include <memory>
#include <string>
#include <vector>
#include <ze/cameras/camera_rig.hpp>
#include <ze/common/macros.hpp>
#include <ze/common/types.hpp>

namespace event_camera_simulator {

    class DataProviderFromFolder : public DataProviderBase {
    public:
        EIGEN_MAKE_ALIGNED_OPERATOR_NEW

@ -29,7 +27,6 @@ public:
        size_t numCameras() const override;

    private:
        int64_t getTimeStamp(const std::string& ts_str) const;

        std::string path_to_data_folder_;

View File

@ -1,36 +1,35 @@
#pragma once

#include <esim/common/utils.hpp>
#include <esim/data_provider/data_provider_base.hpp>
#include <esim/rendering/renderer_base.hpp>
#include <map>
#include <memory>
#include <string>
#include <vector>
#include <ze/cameras/camera_rig.hpp>
#include <ze/common/macros.hpp>
#include <ze/common/types.hpp>
#include <ze/vi_simulation/imu_simulator.hpp>
#include <ze/vi_simulation/trajectory_simulator.hpp>

namespace event_camera_simulator {

    /**
     * Online data provider intended to simulate a camera rig composed of
     * multiple cameras rigidly attached together, along with an Inertial
     * Measurement Unit (IMU).
     *
     * The camera rig follows a camera trajectory in 3D.
     */
    class DataProviderOnlineMoving3DCameraRig : public DataProviderBase {
    public:
        DataProviderOnlineMoving3DCameraRig(
            ze::real_t simulation_minimum_framerate,
            ze::real_t simulation_imu_rate,
            int simulation_adaptive_sampling_method,
            ze::real_t simulation_adaptive_sampling_lambda
        );

        virtual ~DataProviderOnlineMoving3DCameraRig();

@ -41,7 +40,6 @@ public:
        size_t numCameras() const override;

    private:
        void updateGroundtruth();
        void sampleImu();
        void sampleFrame();

@ -57,9 +55,13 @@ private:
        SimulatorData sim_data_;

        ze::real_t t_;
        ze::real_t last_t_frame_; // last frame sampling time, used to
                                  // guarantee the minimum frame rate
        ze::real_t last_t_imu_;   // last IMU sampling time, used to
                                  // guarantee the IMU rate
        ze::real_t next_t_flow_;  // next latest sampling time, chosen to
                                  // guarantee that the max pixel displacement
                                  // since the last frame is bounded

        ze::real_t dt_imu_;
        ze::real_t dt_frame_;

View File

@ -1,15 +1,13 @@
#pragma once

#include <esim/data_provider/data_provider_base.hpp>
#include <esim/rendering/simple_renderer_base.hpp>
#include <map>
#include <memory>
#include <string>
#include <vector>
#include <ze/common/macros.hpp>
#include <ze/common/types.hpp>

namespace event_camera_simulator {

@ -19,12 +17,13 @@ namespace event_camera_simulator {
     *
     * This data provider does NOT simulate a camera trajectory or an IMU.
     */
    class DataProviderOnlineSimple : public DataProviderBase {
    public:
        DataProviderOnlineSimple(
            ze::real_t simulation_minimum_framerate,
            int simulation_adaptive_sampling_method,
            ze::real_t simulation_adaptive_sampling_lambda
        );

        virtual ~DataProviderOnlineSimple();

@ -42,8 +41,11 @@ private:
        SimulatorData sim_data_;

        ze::real_t t_;
        ze::real_t last_t_frame_;    // last frame sampling time, used to
                                     // guarantee the minimum frame rate
        ze::real_t next_t_adaptive_; // next latest sampling time, chosen to
                                     // guarantee that the adaptive sampling
                                     // scheme is respected
        ze::real_t dt_frame_;

        ze::real_t simulation_minimum_framerate_;

View File

@ -1,30 +1,27 @@
// This file was adapted from the ze_oss project:
// https://github.com/zurich-eye/ze_oss/blob/master/ze_data_provider/include/ze/data_provider/data_provider_rosbag.hpp
// Copyright (C) ETH Zurich, Wyss Zurich, Zurich Eye - All Rights Reserved

#pragma once

#include <esim/data_provider/data_provider_base.hpp>
#include <gflags/gflags.h>
#include <map>
#include <memory>
#include <rosbag/bag.h>
#include <rosbag/view.h>
#include <sensor_msgs/Image.h>
#include <string>
#include <vector>

namespace event_camera_simulator {

    class DataProviderRosbag : public DataProviderBase {
    public:
        DataProviderRosbag(
            const std::string& bag_filename,
            const std::map<std::string, size_t>& camera_topics
        );

        virtual ~DataProviderRosbag() = default;

@ -40,8 +37,10 @@ private:
        void loadRosbag(const std::string& bag_filename);

        void initBagView(const std::vector<std::string>& topics);

        inline bool cameraSpin(
            const sensor_msgs::ImageConstPtr m_img,
            const rosbag::MessageInstance& m
        );

        std::unique_ptr<rosbag::Bag> bag_;
        std::unique_ptr<rosbag::View> bag_view_;

@ -49,7 +48,8 @@ private:
        int n_processed_images_ = 0;

        // subscribed topics:
        std::map<std::string, size_t>
            img_topic_camidx_map_; // camera_topic --> camera_id

        SimulatorData sim_data_;
    };
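
For context, the read loop such a provider wraps is plain rosbag API; a sketch under the types declared above (topic name illustrative, not the actual implementation):

    rosbag::Bag bag;
    bag.open("dataset.bag", rosbag::bagmode::Read);
    rosbag::View view(bag, rosbag::TopicQuery({"/cam0/image_raw"}));
    for (const rosbag::MessageInstance& m : view) {
        sensor_msgs::ImageConstPtr img = m.instantiate<sensor_msgs::Image>();
        if (img)
            VLOG(3) << "image stamp: " << img->header.stamp;
    }
    bag.close();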

View File

@ -1,8 +1,8 @@
#pragma once

#include <esim/rendering/renderer_base.hpp>
#include <esim/rendering/simple_renderer_base.hpp>
#include <gflags/gflags.h>

namespace event_camera_simulator {

View File

@ -3,30 +3,23 @@
namespace event_camera_simulator {

    DataProviderBase::DataProviderBase(DataProviderType type)
        : type_(type),
          signal_handler_(running_) {}

    void DataProviderBase::spin() {
        while (ok())
            spinOnce();
    }

    void DataProviderBase::pause() {
        running_ = false;
    }

    void DataProviderBase::shutdown() {
        running_ = false;
    }

    void DataProviderBase::registerCallback(const Callback& callback) {
        callback_ = callback;
    }

View File

@ -1,38 +1,50 @@
#include <esim/data_provider/data_provider_base.hpp>
#include <esim/data_provider/data_provider_factory.hpp>
#include <esim/data_provider/data_provider_from_folder.hpp>
#include <esim/data_provider/data_provider_online_render.hpp>
#include <esim/data_provider/data_provider_online_simple.hpp>
#include <esim/data_provider/data_provider_rosbag.hpp>
#include <ze/common/logging.hpp>

DEFINE_int32(data_source, 0, " 0: Online renderer");

DEFINE_double(
    simulation_minimum_framerate,
    30.0,
    "Minimum frame rate, in Hz. "
    "Especially useful when the event rate is low, to guarantee "
    "that frames are still published at a minimum framerate."
);

DEFINE_double(
    simulation_imu_rate, 1000.0, "Fixed IMU sampling frequency, in Hz"
);

DEFINE_int32(
    simulation_adaptive_sampling_method,
    0,
    "Method to use for adaptive sampling. "
    "0: based on predicted absolute brightness change. "
    "1: based on optic flow."
);

DEFINE_double(
    simulation_adaptive_sampling_lambda,
    0.5,
    "Parameter that controls the behavior of the adaptive sampling method. "
    "The meaning of this value depends on the adaptive sampling method used: "
    "...based on predicted absolute brightness change: deltaT = lambda / "
    "max(|dL/dt|) "
    "...based on optic flow: deltaT = lambda / max(||du/dt||), where du/dt "
    "denotes the 2D optic flow field."
);
DEFINE_string(path_to_data_folder, "", "Path to folder containing the data.");

// Parameters for the DataProviderRosbag
DEFINE_string(
    bag_filename, "dataset.bag", "Name of bagfile from which to read."
);
DEFINE_string(topic_cam0, "/cam0/image_raw", "");
DEFINE_string(topic_cam1, "/cam1/image_raw", "");
DEFINE_string(topic_cam2, "/cam2/image_raw", "");
@ -41,53 +53,58 @@ DEFINE_uint64(num_cams, 1, "Number of normal cameras to read from rosbag.");
namespace event_camera_simulator {

    DataProviderBase::Ptr loadDataProviderFromGflags() {
        // Create data provider.
        DataProviderBase::Ptr data_provider;

        switch (FLAGS_data_source) {
        case 0: // Online Renderer for Moving 3D Camera Rig with IMU
        {
            data_provider.reset(new DataProviderOnlineMoving3DCameraRig(
                FLAGS_simulation_minimum_framerate,
                FLAGS_simulation_imu_rate,
                FLAGS_simulation_adaptive_sampling_method,
                FLAGS_simulation_adaptive_sampling_lambda
            ));
            break;
        }
        case 1: // Online Renderer Simple
        {
            data_provider.reset(new DataProviderOnlineSimple(
                FLAGS_simulation_minimum_framerate,
                FLAGS_simulation_adaptive_sampling_method,
                FLAGS_simulation_adaptive_sampling_lambda
            ));
            break;
        }
        case 2: // Read data from a folder
        {
            data_provider.reset(
                new DataProviderFromFolder(FLAGS_path_to_data_folder)
            );
            break;
        }
        case 3: // Read data (images) from a rosbag
        {
            CHECK_LE(FLAGS_num_cams, 4u);
            CHECK_EQ(FLAGS_num_cams, 1u)
                << "Only one camera is supported currently";

            // Fill camera topics.
            std::map<std::string, size_t> cam_topics;
            if (FLAGS_num_cams >= 1)
                cam_topics[FLAGS_topic_cam0] = 0;
            if (FLAGS_num_cams >= 2)
                cam_topics[FLAGS_topic_cam1] = 1;
            if (FLAGS_num_cams >= 3)
                cam_topics[FLAGS_topic_cam2] = 2;
            if (FLAGS_num_cams >= 4)
                cam_topics[FLAGS_topic_cam3] = 3;
            data_provider.reset(
                new DataProviderRosbag(FLAGS_bag_filename, cam_topics)
            );
            break;
        }
        default: {
            LOG(FATAL) << "Data source not known.";
            break;
        }

@ -96,4 +113,4 @@ DataProviderBase::Ptr loadDataProviderFromGflags()
        return data_provider;
    }

} // namespace event_camera_simulator
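
A typical driver wiring these flags together might look like the following sketch (flag parsing as usual for gflags/glog binaries; the main body is hypothetical, not a file from this commit):

    int main(int argc, char** argv) {
        google::InitGoogleLogging(argv[0]);
        gflags::ParseCommandLineFlags(&argc, &argv, true);

        using namespace event_camera_simulator;
        DataProviderBase::Ptr provider = loadDataProviderFromGflags();
        CHECK(provider);
        provider->registerCallback([](const SimulatorData& sim_data) {
            // consume sim_data here (illustrative)
        });
        provider->spin();
        return 0;
    }

Invoked, for example, as: ./node --data_source=3 --bag_filename=dataset.bag --topic_cam0=/cam0/image_raw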

View File

@ -1,31 +1,45 @@
#include <esim/data_provider/data_provider_from_folder.hpp>
#include <opencv2/imgcodecs/imgcodecs.hpp>
#include <ze/cameras/camera_impl.hpp>
#include <ze/common/file_utils.hpp>

namespace event_camera_simulator {

    DataProviderFromFolder::DataProviderFromFolder(
        const std::string& path_to_data_folder
    )
        : DataProviderBase(DataProviderType::Folder),
          path_to_data_folder_(path_to_data_folder),
          finished_parsing_(false) {
        // Load CSV image file
        images_in_str_.open(ze::joinPath(path_to_data_folder, "images.csv"));
        CHECK(images_in_str_.is_open());

        // Create dummy camera rig
        // these intrinsic values are filled with garbage (width = height = 1)
        // since the actual values are not known
        ze::CameraVector cameras;
        cameras.emplace_back(ze::createEquidistantCameraShared(
            1,
            1,
            1.0,
            1.0,
            0.0,
            0.0,
            0.0,
            0.0,
            0.0,
            0.0
        ));

        ze::TransformationVector extrinsics;
        extrinsics.push_back(ze::Transformation());
        camera_rig_ =
            std::make_shared<ze::CameraRig>(extrinsics, cameras, "camera");

        // Allocate memory for image data
        sim_data_.images.emplace_back(ImagePtr(new Image(
            cv::Size(camera_rig_->at(0).width(), camera_rig_->at(0).height())
        )));

        sim_data_.camera_rig = camera_rig_;
        sim_data_.images_updated = true;

@ -36,41 +50,42 @@ DataProviderFromFolder::DataProviderFromFolder(const std::string& path_to_data_f
        sim_data_.imu_updated = false;
    }

    int64_t DataProviderFromFolder::getTimeStamp(const std::string& ts_str
    ) const {
        return std::stoll(ts_str);
    }

    size_t DataProviderFromFolder::numCameras() const {
        return camera_rig_->size();
    }

    bool DataProviderFromFolder::spinOnce() {
        std::string line;
        if (!getline(images_in_str_, line)) {
            VLOG(1) << "Finished parsing images.csv file";
            finished_parsing_ = true;
            return false;
        }

        if ('%' != line.at(0) && '#' != line.at(0)) {
            std::vector<std::string> items = ze::splitString(line, delimiter_);
            CHECK_GE(items.size(), num_tokens_in_line_);
            int64_t stamp = getTimeStamp(items[0]);

            const std::string& path_to_image =
                ze::joinPath(path_to_data_folder_, items[1]);
            cv::Mat image = cv::imread(path_to_image, 0);
            CHECK(image.data)
                << "Could not load image from file: " << path_to_image;

            VLOG(3) << "Read image from file: " << path_to_image;
            image.convertTo(
                *sim_data_.images[0],
                cv::DataType<ImageFloatType>::type,
                1. / 255.
            );

            if (callback_) {
                sim_data_.timestamp = static_cast<Time>(stamp);
                callback_(sim_data_);
            }

@ -79,10 +94,8 @@ bool DataProviderFromFolder::spinOnce()
        return true;
    }

    bool DataProviderFromFolder::ok() const {
        if (!running_ || finished_parsing_) {
            VLOG(1) << "Data Provider was paused/terminated.";
            return false;
        }
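
For reference, spinOnce() above expects every non-comment row of images.csv to carry a timestamp token followed by an image path relative to the data folder. Assuming delimiter_ is a comma (its value is set outside this hunk), a valid file could look like:

    # timestamp[ns], image
    0,frames/frame_00000.png
    33333333,frames/frame_00001.png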

View File

@ -1,42 +1,54 @@
#include <esim/common/utils.hpp>
#include <esim/data_provider/data_provider_online_render.hpp>
#include <esim/data_provider/renderer_factory.hpp>
#include <esim/trajectory/imu_factory.hpp>
#include <esim/trajectory/trajectory_factory.hpp>
#include <ze/cameras/camera_rig.hpp>
#include <ze/common/time_conversions.hpp>
#include <ze/common/timer_collection.hpp>

DECLARE_TIMER(
    TimerDataProvider,
    timers_data_provider_,
    render,
    optic_flow,
    sample_frame,
    sample_imu
);

DEFINE_double(
    simulation_post_gaussian_blur_sigma,
    0,
    "If sigma > 0, Gaussian blur the rendered images "
    "with a Gaussian filter standard deviation sigma."
);

namespace event_camera_simulator {

    DataProviderOnlineMoving3DCameraRig::DataProviderOnlineMoving3DCameraRig(
        ze::real_t simulation_minimum_framerate,
        ze::real_t simulation_imu_rate,
        int simulation_adaptive_sampling_method,
        ze::real_t simulation_adaptive_sampling_lambda
    )
        : DataProviderBase(DataProviderType::RendererOnline),
          simulation_minimum_framerate_(simulation_minimum_framerate),
          simulation_imu_rate_(simulation_imu_rate),
          simulation_adaptive_sampling_method_(
              simulation_adaptive_sampling_method
          ),
          simulation_adaptive_sampling_lambda_(
              simulation_adaptive_sampling_lambda
          ),
          dt_imu_(1. / simulation_imu_rate),
          dt_frame_(1. / simulation_minimum_framerate) {
        CHECK(
            simulation_adaptive_sampling_method == 0
            || simulation_adaptive_sampling_method == 1
        );

        std::tie(trajectory_, trajectory_dyn_obj_) =
            loadTrajectorySimulatorFromGflags();
        imu_ = loadImuSimulatorFromGflags(trajectory_);

        camera_rig_ = ze::cameraRigFromGflags();

@ -46,30 +58,37 @@ DataProviderOnlineMoving3DCameraRig::DataProviderOnlineMoving3DCameraRig(ze::rea
        const int height = camera->height();

        const ze::real_t horizontal_fov =
            180.0 / CV_PI
            * std::acos(camera->backProject(ze::Keypoint(0, height / 2))
                            .dot(camera->backProject(
                                ze::Keypoint(width - 1, height / 2)
                            )));

        const ze::real_t vertical_fov =
            180.0 / CV_PI
            * std::acos(camera->backProject(ze::Keypoint(width / 2, 0))
                            .dot(camera->backProject(
                                ze::Keypoint(width / 2, height - 1)
                            )));

        const ze::real_t diagonal_fov =
            180.0 / CV_PI
            * std::acos(camera->backProject(ze::Keypoint(0, 0))
                            .dot(camera->backProject(
                                ze::Keypoint(width - 1, height - 1)
                            )));

        LOG(INFO) << "Horizontal FOV: " << horizontal_fov << " deg";
        LOG(INFO) << "Vertical FOV: " << vertical_fov << " deg";
        LOG(INFO) << "Diagonal FOV: " << diagonal_fov << " deg";
        for (size_t i = 0; i < camera_rig_->size(); ++i) {
            renderers_.push_back(std::move(loadRendererFromGflags()));
            renderers_[i]->setCamera(camera_rig_->atShared(i));
            optic_flow_helpers_.emplace_back(
                std::make_shared<OpticFlowHelper>(camera_rig_->atShared(i))
            );
        }

        const size_t num_cameras = camera_rig_->size();

@ -77,25 +96,29 @@ DataProviderOnlineMoving3DCameraRig::DataProviderOnlineMoving3DCameraRig(ze::rea
        sim_data_.groundtruth.T_W_Cs.resize(num_cameras);
        sim_data_.groundtruth.angular_velocities_.resize(num_cameras);
        sim_data_.groundtruth.linear_velocities_.resize(num_cameras);
        for (size_t i = 0; i < num_cameras; ++i) {
            const cv::Size size = cv::Size(
                camera_rig_->at(i).width(),
                camera_rig_->at(i).height()
            );
            sim_data_.images.emplace_back(ImagePtr(new Image(size)));
            sim_data_.depthmaps.emplace_back(DepthmapPtr(new Depthmap(size)));
            sim_data_.optic_flows.emplace_back(OpticFlowPtr(new OpticFlow(size))
            );
            sim_data_.images[i]->setTo(0);
            sim_data_.depthmaps[i]->setTo(0);
            sim_data_.optic_flows[i]->setTo(0);
        }

        for (size_t i = 0; i < trajectory_dyn_obj_.size(); i++) {
            sim_data_.groundtruth.T_W_OBJ_.push_back(Transformation());
            sim_data_.groundtruth.linear_velocity_obj_.push_back(
                LinearVelocity()
            );
            sim_data_.groundtruth.angular_velocity_obj_.push_back(
                AngularVelocity()
            );
        }

        sim_data_.camera_rig = camera_rig_;

@ -106,108 +129,112 @@ DataProviderOnlineMoving3DCameraRig::DataProviderOnlineMoving3DCameraRig(ze::rea
        sampleFrame();
        setAllUpdated();
        if (callback_)
            callback_(sim_data_);
    }

    DataProviderOnlineMoving3DCameraRig::~DataProviderOnlineMoving3DCameraRig(
    ) {
        timers_data_provider_.saveToFile(
            "/tmp",
            "data_provider_online_render.csv"
        );
    }

    size_t DataProviderOnlineMoving3DCameraRig::numCameras() const {
        if (camera_rig_)
            return camera_rig_->size();
        else
            return 0;
    }

    void DataProviderOnlineMoving3DCameraRig::updateGroundtruth() {
        const Transformation T_W_B = trajectory_->T_W_B(t_);
        sim_data_.groundtruth.T_W_B = T_W_B;

        const AngularVelocity omega_B = trajectory_->angularVelocity_B(t_);
        const LinearVelocity v_B_W = trajectory_->velocity_W(t_);

        for (size_t i = 0; i < camera_rig_->size(); ++i) {
            sim_data_.groundtruth.T_W_Cs[i] =
                sim_data_.groundtruth.T_W_B * camera_rig_->T_B_C(i);

            const LinearVelocity v_W = v_B_W
                + T_W_B.getRotation().rotate(
                    ze::skewSymmetric(omega_B)
                    * camera_rig_->T_B_C(i).getPosition()
                );

            const AngularVelocity omega_C =
                camera_rig_->T_C_B(i).getRotation().rotate(omega_B);
            const LinearVelocity v_C = (camera_rig_->T_C_B(i) * T_W_B.inverse())
                                           .getRotation()
                                           .rotate(v_W);

            sim_data_.groundtruth.angular_velocities_[i] = omega_C;
            sim_data_.groundtruth.linear_velocities_[i] = v_C;
        }

        // update poses of dynamic objects
        for (size_t i = 0; i < trajectory_dyn_obj_.size(); i++) {
            sim_data_.groundtruth.T_W_OBJ_[i] =
                trajectory_dyn_obj_[i]->T_W_B(t_);
            sim_data_.groundtruth.linear_velocity_obj_[i] =
                trajectory_dyn_obj_[i]->velocity_W(t_);
            sim_data_.groundtruth.angular_velocity_obj_[i] =
                sim_data_.groundtruth.T_W_OBJ_[i].getRotation().rotate(
                    trajectory_dyn_obj_[i]->angularVelocity_B(t_)
                );
        }
    }
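
    // In compact form, the twist transfer implemented above is (with
    // T_B_C the body-to-camera extrinsics and r_BC its translation):
    //     omega_C = R_C_B * omega_B
    //     v_C     = R_C_W * ( v_W_B + R_W_B * [omega_B]x * r_BC )
    // i.e. each camera inherits the body rotation rate plus the lever-arm
    // velocity of its mounting point on the rig.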
    void DataProviderOnlineMoving3DCameraRig::sampleImu() {
        // Sample new IMU (+ poses, twists, etc.) values,
        // Fill in the appropriate data in sim_data
        auto t =
            timers_data_provider_[::TimerDataProvider::sample_imu].timeScope();

        if (t_ > trajectory_->end())
            return;

        updateGroundtruth();
        sim_data_.specific_force_corrupted = imu_->specificForceCorrupted(t_);
        sim_data_.angular_velocity_corrupted =
            imu_->angularVelocityCorrupted(t_);
        sim_data_.groundtruth.acc_bias = imu_->bias()->accelerometer(t_);
        sim_data_.groundtruth.gyr_bias = imu_->bias()->gyroscope(t_);
        last_t_imu_ = t_;
    }

    void DataProviderOnlineMoving3DCameraRig::sampleFrame() {
        // Sample (i.e. render) a new frame (+ depth map),
        // Fill in the appropriate data in sim_data
        // Compute the optic flow and compute the next latest sampling time in
        // order to guarantee that the displacement is bounded by
        // simulation_max_displacement_successive_frames
        auto t =
            timers_data_provider_[::TimerDataProvider::sample_frame].timeScope(
            );

        if (t_ > trajectory_->end())
            return;

        updateGroundtruth();

        {
            auto t =
                timers_data_provider_[::TimerDataProvider::render].timeScope();

            for (size_t i = 0; i < camera_rig_->size(); ++i) {
                CHECK(renderers_[i]);

                // if the renderer provides a function to compute the optic
                // flow (for example, the OpenGL renderer which implements
                // optic flow computation efficiently using a shader), use that.
                // otherwise, compute the optic flow ourselves using the
                // renderer depthmap
                if (renderers_[i]->canComputeOpticFlow()) {
                    renderers_[i]->renderWithFlow(
                        sim_data_.groundtruth.T_W_B * camera_rig_->T_B_C(i),
                        sim_data_.groundtruth.linear_velocities_[i],
                        sim_data_.groundtruth.angular_velocities_[i],
                        sim_data_.groundtruth.T_W_OBJ_,

@ -215,59 +242,63 @@ void DataProviderOnlineMoving3DCameraRig::sampleFrame()
                        sim_data_.groundtruth.angular_velocity_obj_,
                        sim_data_.images[i],
                        sim_data_.depthmaps[i],
                        sim_data_.optic_flows[i]
                    );
                } else {
                    renderers_[i]->render(
                        sim_data_.groundtruth.T_W_B * camera_rig_->T_B_C(i),
                        sim_data_.groundtruth.T_W_OBJ_,
                        sim_data_.images[i],
                        sim_data_.depthmaps[i]
                    );
                }

                // Optionally blur the rendered images slightly
                if (FLAGS_simulation_post_gaussian_blur_sigma > 0) {
                    gaussianBlur(
                        sim_data_.images[i],
                        FLAGS_simulation_post_gaussian_blur_sigma
                    );
                }
            }
        }

        {
            auto t = timers_data_provider_[::TimerDataProvider::optic_flow]
                         .timeScope();
            for (size_t i = 0; i < camera_rig_->size(); ++i) {
                CHECK(optic_flow_helpers_[i]);
                if (!renderers_[i]->canComputeOpticFlow()) {
                    optic_flow_helpers_[i]->computeFromDepthAndTwist(
                        sim_data_.groundtruth.angular_velocities_[i],
                        sim_data_.groundtruth.linear_velocities_[i],
                        sim_data_.depthmaps[i],
                        sim_data_.optic_flows[i]
                    );
                }
            }
        }

        // Adaptive sampling scheme based on predicted brightness change
        if (simulation_adaptive_sampling_method_ == 0) {
            // Predict brightness change based on image gradient and optic flow
            std::vector<FloatType> max_dLdts;
            for (size_t i = 0; i < camera_rig_->size(); ++i) {
                max_dLdts.push_back(maxPredictedAbsBrightnessChange(
                    sim_data_.images[i],
                    sim_data_.optic_flows[i]
                ));
            }
            const FloatType max_dLdt =
                *std::max_element(max_dLdts.begin(), max_dLdts.end());

            VLOG(1) << "max(|dLdt|) = " << max_dLdt << " logDN/s";

            // Compute next sampling time
            // t_{k+1} = t_k + delta_t where
            // delta_t = lambda / max(|dL/dt|)
            const ze::real_t delta_t =
                simulation_adaptive_sampling_lambda_ / max_dLdt;
            VLOG(1) << "deltaT = " << 1000.0 * delta_t << " ms";

            next_t_flow_ = t_ + delta_t;
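
            // For reference: this brightness-change bound presumably follows
            // from linearized brightness constancy, dL/dt = -grad(L) . u,
            // so max(|dL/dt|) is evaluated as the maximum over pixels of
            // |grad(L) . u|, with u the optic flow computed above.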
@ -276,18 +307,24 @@ void DataProviderOnlineMoving3DCameraRig::sampleFrame()
        // Adaptive sampling scheme based on optic flow
        else {
            std::vector<FloatType> max_flow_magnitudes;
            for (size_t i = 0; i < camera_rig_->size(); ++i) {
                max_flow_magnitudes.push_back(
                    maxMagnitudeOpticFlow(sim_data_.optic_flows[i])
                );
            }
            const FloatType max_flow_magnitude = *std::max_element(
                max_flow_magnitudes.begin(),
                max_flow_magnitudes.end()
            );

            VLOG(1) << "max(||optic_flow||) = " << max_flow_magnitude
                    << " px/s";

            // Compute next sampling time
            // t_{k+1} = t_k + delta_t where
            // delta_t = lambda / max(||optic_flow||)
            const ze::real_t delta_t =
                simulation_adaptive_sampling_lambda_ / max_flow_magnitude;
            VLOG(1) << "deltaT = " << 1000.0 * delta_t << " ms";

            next_t_flow_ = t_ + delta_t;

@ -296,9 +333,9 @@ void DataProviderOnlineMoving3DCameraRig::sampleFrame()
        last_t_frame_ = t_;
    }

    void DataProviderOnlineMoving3DCameraRig::setImuUpdated() {
        // Set all the IMU-related flags to true, and all the frame-related
        // flags to false
        sim_data_.imu_updated = true;
        sim_data_.twists_updated = true;
        sim_data_.poses_updated = true;

@ -307,9 +344,9 @@ void DataProviderOnlineMoving3DCameraRig::setImuUpdated()
        sim_data_.optic_flows_updated = false;
    }

    void DataProviderOnlineMoving3DCameraRig::setFrameUpdated() {
        // Set all the frame-related flags to true, and all the IMU-related
        // flags to false
        sim_data_.imu_updated = false;
        sim_data_.twists_updated = true;
        sim_data_.poses_updated = true;

@ -318,8 +355,7 @@ void DataProviderOnlineMoving3DCameraRig::setFrameUpdated()
        sim_data_.optic_flows_updated = true;
    }

    void DataProviderOnlineMoving3DCameraRig::setAllUpdated() {
        // Set all the flags to true to indicate everything has been changed
        sim_data_.imu_updated = true;
        sim_data_.twists_updated = true;

@ -329,25 +365,31 @@ void DataProviderOnlineMoving3DCameraRig::setAllUpdated()
        sim_data_.optic_flows_updated = true;
    }

    bool DataProviderOnlineMoving3DCameraRig::spinOnce() {
        /* At what time do we need to sample "something" (and what
           "something"?) We choose the next sampling time by considering the
           following constraints:

           1. The IMU sampling rate must be constant (typically, from 200 Hz
              to 1 kHz)
           2. The frame sampling rate must be greater than a minimum value
              (typically, from 20 Hz to 100 Hz)
           3. The pixel displacement between two successive frames must be
              lower than a threshold.

           * If the next sample needs to be an IMU sample, we just sample a
             new IMU value, without regenerating a frame, and transmit only
             the new IMU (+ poses, twists, IMU bias) to the publisher by
             setting the appropriate "data_changed" flags in the sim_data
             structure.

           * If the next sample needs to be a frame sample, we render a new
             frame (+ depth map + optic flow), but not a new IMU value. This
             can happen either because (i) a new frame must be rendered in
             order to guarantee that the displacement between frames is
             bounded, or (ii) the frame rate should be higher than a minimum
             (user-defined) value.

           At the beginning of time (t_ = trajectory_->start()), we sample
           everything (IMU + frame).
        */
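
        /* A worked example of this decision rule (illustrative numbers):
           with dt_imu_ = 1 ms, dt_frame_ = 33 ms and next_t_flow_ =
           t_ + 4.5 ms (fast motion), the provider emits IMU samples at
           t_ + 1 .. t_ + 4 ms, renders a frame at t_ + 4.5 ms, and only
           falls back to the minimum-framerate branch once motion is slow
           enough that next_t_flow_ exceeds next_t_frame. */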
        const ze::real_t next_t_imu = last_t_imu_ + dt_imu_;

@ -358,29 +400,22 @@ bool DataProviderOnlineMoving3DCameraRig::spinOnce()
        VLOG(2) << "next_t_frame = " << next_t_frame;
        VLOG(2) << "next_t_flow = " << next_t_flow_;

        if (next_t_imu < next_t_frame && next_t_imu < next_t_flow_) {
            VLOG(2) << "Sample IMU";
            t_ = next_t_imu;
            sampleImu();
            setImuUpdated();
        } else if (next_t_flow_ < next_t_imu && next_t_flow_ < next_t_frame) {
            VLOG(2) << "Sample frame (because of optic flow)";
            t_ = next_t_flow_;
            sampleFrame();
            setFrameUpdated();
        } else if (next_t_frame < next_t_imu && next_t_frame < next_t_flow_) {
            VLOG(2) << "Sample frame (because of minimum framerate)";
            t_ = next_t_frame;
            sampleFrame();
            setFrameUpdated();
        } else {
            VLOG(2) << "Sample IMU and frame";
            t_ = next_t_frame;
            // In that case, we sample everything

@ -389,28 +424,23 @@ bool DataProviderOnlineMoving3DCameraRig::spinOnce()
            setAllUpdated();
        }

        if (t_ > trajectory_->end()) {
            running_ = false;
            return false;
        }

        if (callback_) {
            sim_data_.timestamp = static_cast<Time>(ze::secToNanosec(t_));
            callback_(sim_data_);
        } else {
            LOG_FIRST_N(WARNING, 1)
                << "No camera callback registered but measurements available.";
        }

        return true;
    }

    bool DataProviderOnlineMoving3DCameraRig::ok() const {
        if (!running_) {
            VLOG(1) << "Data Provider was paused/terminated.";
            return false;
        }

View File

@ -1,35 +1,42 @@
#include <esim/data_provider/data_provider_online_simple.hpp>
#include <ze/common/time_conversions.hpp>
#include <esim/data_provider/renderer_factory.hpp>
#include <esim/common/utils.hpp> #include <esim/common/utils.hpp>
#include <esim/data_provider/data_provider_online_simple.hpp>
#include <esim/data_provider/renderer_factory.hpp>
#include <ze/common/time_conversions.hpp>
DECLARE_double(simulation_post_gaussian_blur_sigma); DECLARE_double(simulation_post_gaussian_blur_sigma);
namespace event_camera_simulator { namespace event_camera_simulator {
DataProviderOnlineSimple::DataProviderOnlineSimple(ze::real_t simulation_minimum_framerate, DataProviderOnlineSimple::DataProviderOnlineSimple(
ze::real_t simulation_minimum_framerate,
int simulation_adaptive_sampling_method, int simulation_adaptive_sampling_method,
ze::real_t simulation_adaptive_sampling_lambda) ze::real_t simulation_adaptive_sampling_lambda
)
: DataProviderBase(DataProviderType::RendererOnline), : DataProviderBase(DataProviderType::RendererOnline),
simulation_minimum_framerate_(simulation_minimum_framerate), simulation_minimum_framerate_(simulation_minimum_framerate),
simulation_adaptive_sampling_method_(simulation_adaptive_sampling_method), simulation_adaptive_sampling_method_(
simulation_adaptive_sampling_lambda_(simulation_adaptive_sampling_lambda), simulation_adaptive_sampling_method
dt_frame_(1./simulation_minimum_framerate) ),
{ simulation_adaptive_sampling_lambda_(
CHECK(simulation_adaptive_sampling_method == 0 simulation_adaptive_sampling_lambda
|| simulation_adaptive_sampling_method == 1); ),
dt_frame_(1. / simulation_minimum_framerate) {
CHECK(
simulation_adaptive_sampling_method == 0
|| simulation_adaptive_sampling_method == 1
);
renderer_ = loadSimpleRendererFromGflags(); renderer_ = loadSimpleRendererFromGflags();
const size_t num_cameras = 1u; const size_t num_cameras = 1u;
for(size_t i=0; i<num_cameras; ++i) for (size_t i = 0; i < num_cameras; ++i) {
{ const cv::Size size =
const cv::Size size = cv::Size(renderer_->getWidth(), cv::Size(renderer_->getWidth(), renderer_->getHeight());
renderer_->getHeight());
sim_data_.images.emplace_back(ImagePtr(new Image(size))); sim_data_.images.emplace_back(ImagePtr(new Image(size)));
sim_data_.optic_flows.emplace_back(OpticFlowPtr(new OpticFlow(size))); sim_data_.optic_flows.emplace_back(OpticFlowPtr(new OpticFlow(size))
);
sim_data_.images[i]->setTo(0); sim_data_.images[i]->setTo(0);
sim_data_.optic_flows[i]->setTo(0); sim_data_.optic_flows[i]->setTo(0);
@ -40,55 +47,54 @@ DataProviderOnlineSimple::DataProviderOnlineSimple(ze::real_t simulation_minimum
sampleFrame(); sampleFrame();
setFrameUpdated(); setFrameUpdated();
if (callback_) if (callback_)
{
callback_(sim_data_); callback_(sim_data_);
} }
}
DataProviderOnlineSimple::~DataProviderOnlineSimple() DataProviderOnlineSimple::~DataProviderOnlineSimple() {}
{
}
size_t DataProviderOnlineSimple::numCameras() const size_t DataProviderOnlineSimple::numCameras() const {
{
return 1u; return 1u;
} }
bool DataProviderOnlineSimple::sampleFrame() bool DataProviderOnlineSimple::sampleFrame() {
{
// Sample (i.e. render) a new frame (+ optic flow map), // Sample (i.e. render) a new frame (+ optic flow map),
// Fill in the appropriate data in sim_data // Fill in the appropriate data in sim_data
// Compute the optic flow and compute the next latest sampling time in order // Compute the optic flow and compute the next latest sampling time in
// to guarantee that the displacement is bounded by simulation_max_displacement_successive_frames // order to guarantee that the displacement is bounded by
// simulation_max_displacement_successive_frames
CHECK(renderer_); CHECK(renderer_);
bool is_finished = renderer_->render(ze::secToNanosec(t_), bool is_finished = renderer_->render(
ze::secToNanosec(t_),
sim_data_.images[0], sim_data_.images[0],
sim_data_.optic_flows[0]); sim_data_.optic_flows[0]
);
if (is_finished) if (is_finished)
{
return true; return true;
}
// Optionally blur the rendered images slightly // Optionally blur the rendered images slightly
if(FLAGS_simulation_post_gaussian_blur_sigma > 0) if (FLAGS_simulation_post_gaussian_blur_sigma > 0) {
{ gaussianBlur(
gaussianBlur(sim_data_.images[0], FLAGS_simulation_post_gaussian_blur_sigma); sim_data_.images[0],
FLAGS_simulation_post_gaussian_blur_sigma
);
} }
// Adaptive sampling scheme based on predicted brightness change // Adaptive sampling scheme based on predicted brightness change
if(simulation_adaptive_sampling_method_ == 0) if (simulation_adaptive_sampling_method_ == 0) {
{
// Predict brightness change based on image gradient and optic flow // Predict brightness change based on image gradient and optic flow
const FloatType max_dLdt = maxPredictedAbsBrightnessChange(sim_data_.images[0], const FloatType max_dLdt = maxPredictedAbsBrightnessChange(
sim_data_.optic_flows[0]); sim_data_.images[0],
sim_data_.optic_flows[0]
);
VLOG(1) << "max(|dLdt|) = " << max_dLdt << " logDN/s"; VLOG(1) << "max(|dLdt|) = " << max_dLdt << " logDN/s";
// Compute next sampling time // Compute next sampling time
// t_{k+1} = t_k + delta_t where // t_{k+1} = t_k + delta_t where
// delta_t = lambda / max(|dL/dt|) // delta_t = lambda / max(|dL/dt|)
const ze::real_t delta_t = simulation_adaptive_sampling_lambda_ / max_dLdt; const ze::real_t delta_t =
simulation_adaptive_sampling_lambda_ / max_dLdt;
VLOG(1) << "deltaT = " << 1000.0 * delta_t << " ms"; VLOG(1) << "deltaT = " << 1000.0 * delta_t << " ms";
next_t_adaptive_ = t_ + delta_t; next_t_adaptive_ = t_ + delta_t;
@ -96,14 +102,17 @@ bool DataProviderOnlineSimple::sampleFrame()
// Adaptive sampling scheme based on optic flow // Adaptive sampling scheme based on optic flow
else { else {
const FloatType max_flow_magnitude = maxMagnitudeOpticFlow(sim_data_.optic_flows[0]); const FloatType max_flow_magnitude =
maxMagnitudeOpticFlow(sim_data_.optic_flows[0]);
VLOG(1) << "max(||optic_flow||) = " << max_flow_magnitude << " px/s"; VLOG(1) << "max(||optic_flow||) = " << max_flow_magnitude
<< " px/s";
// Compute next sampling time // Compute next sampling time
// t_{k+1} = t_k + delta_t where // t_{k+1} = t_k + delta_t where
// delta_t = lambda / max(||optic_flow||) // delta_t = lambda / max(||optic_flow||)
const ze::real_t delta_t = simulation_adaptive_sampling_lambda_ / max_flow_magnitude; const ze::real_t delta_t =
simulation_adaptive_sampling_lambda_ / max_flow_magnitude;
VLOG(1) << "deltaT = " << 1000.0 * delta_t << " ms"; VLOG(1) << "deltaT = " << 1000.0 * delta_t << " ms";
next_t_adaptive_ = t_ + delta_t; next_t_adaptive_ = t_ + delta_t;
@ -114,9 +123,9 @@ bool DataProviderOnlineSimple::sampleFrame()
return false; return false;
} }
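// A minimal sketch of the two adaptive-sampling rules implemented in
// sampleFrame() above; names are illustrative, not the ESIM API. `lambda`
// bounds the per-frame brightness change (method 0) or the per-frame
// pixel displacement (method 1):
//
//   t_{k+1} = t_k + lambda / max(|dL/dt|)         (method 0, logDN/s)
//   t_{k+1} = t_k + lambda / max(||optic_flow||)  (method 1, px/s)
//
double nextAdaptiveSampleTime(double t, double lambda, double max_rate) {
    return t + lambda / max_rate;  // max_rate must be > 0
}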
void DataProviderOnlineSimple::setFrameUpdated() void DataProviderOnlineSimple::setFrameUpdated() {
{ // Set all the frame-related flags to true, and all the IMU-related
// Set all the frame-related flags to true, and all the IMU-related flags to false // flags to false
sim_data_.imu_updated = false; sim_data_.imu_updated = false;
sim_data_.twists_updated = false; sim_data_.twists_updated = false;
sim_data_.poses_updated = false; sim_data_.poses_updated = false;
@ -125,49 +134,40 @@ void DataProviderOnlineSimple::setFrameUpdated()
sim_data_.optic_flows_updated = true; sim_data_.optic_flows_updated = true;
} }
bool DataProviderOnlineSimple::spinOnce() bool DataProviderOnlineSimple::spinOnce() {
{
const ze::real_t next_t_frame = last_t_frame_ + dt_frame_; const ze::real_t next_t_frame = last_t_frame_ + dt_frame_;
VLOG(2) << "t = " << t_; VLOG(2) << "t = " << t_;
VLOG(2) << "next_t_frame = " << next_t_frame; VLOG(2) << "next_t_frame = " << next_t_frame;
VLOG(2) << "next_t_flow = " << next_t_adaptive_; VLOG(2) << "next_t_flow = " << next_t_adaptive_;
if(next_t_adaptive_ < next_t_frame) if (next_t_adaptive_ < next_t_frame) {
{
VLOG(2) << "Sample frame (because of optic flow)"; VLOG(2) << "Sample frame (because of optic flow)";
t_ = next_t_adaptive_; t_ = next_t_adaptive_;
} } else {
else
{
VLOG(2) << "Sample frame (because of minimum framerate)"; VLOG(2) << "Sample frame (because of minimum framerate)";
t_ = next_t_frame; t_ = next_t_frame;
} }
bool is_finished = sampleFrame(); bool is_finished = sampleFrame();
setFrameUpdated(); setFrameUpdated();
if(is_finished) if (is_finished) {
{
running_ = false; running_ = false;
return false; return false;
} }
if(callback_) if (callback_) {
{
sim_data_.timestamp = static_cast<Time>(ze::secToNanosec(t_)); sim_data_.timestamp = static_cast<Time>(ze::secToNanosec(t_));
callback_(sim_data_); callback_(sim_data_);
} } else {
else LOG_FIRST_N(WARNING, 1)
{ << "No camera callback registered but measurements available.";
LOG_FIRST_N(WARNING, 1) << "No camera callback registered but measurements available.";
} }
return true; return true;
} }
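// A minimal sketch of the scheduling rule in spinOnce() above: the next
// render time is the earlier of the adaptive deadline and the fixed
// minimum-framerate deadline, so the effective framerate never drops
// below simulation_minimum_framerate. Illustrative only; needs <algorithm>.
double nextRenderTime(double last_t_frame, double dt_frame, double next_t_adaptive) {
    return std::min(next_t_adaptive, last_t_frame + dt_frame);
}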
bool DataProviderOnlineSimple::ok() const bool DataProviderOnlineSimple::ok() const {
{ if (!running_) {
if (!running_)
{
VLOG(1) << "Data Provider was paused/terminated."; VLOG(1) << "Data Provider was paused/terminated.";
return false; return false;
} }


@ -1,34 +1,30 @@
#include <esim/data_provider/data_provider_rosbag.hpp>
#include <ze/common/logging.hpp>
#include <rosbag/query.h>
#include <cv_bridge/cv_bridge.h> #include <cv_bridge/cv_bridge.h>
#include <esim/data_provider/data_provider_rosbag.hpp>
#include <rosbag/query.h>
#include <sensor_msgs/image_encodings.h> #include <sensor_msgs/image_encodings.h>
#include <ze/common/logging.hpp>
#include <ze/common/time_conversions.hpp>
#include <ze/common/string_utils.hpp>
#include <ze/common/path_utils.hpp> #include <ze/common/path_utils.hpp>
#include <ze/common/string_utils.hpp>
#include <ze/common/time_conversions.hpp>
DEFINE_int32(data_source_stop_after_n_frames, -1, DEFINE_int32(
"How many frames should be processed?"); data_source_stop_after_n_frames, -1, "How many frames should be processed?"
DEFINE_double(data_source_start_time_s, 0.0, );
"Start time in seconds"); DEFINE_double(data_source_start_time_s, 0.0, "Start time in seconds");
DEFINE_double(data_source_stop_time_s, 0.0, DEFINE_double(data_source_stop_time_s, 0.0, "Stop time in seconds");
"Stop time in seconds");
namespace event_camera_simulator { namespace event_camera_simulator {
DataProviderRosbag::DataProviderRosbag( DataProviderRosbag::DataProviderRosbag(
const std::string& bag_filename, const std::string& bag_filename,
const std::map<std::string, size_t>& img_topic_camidx_map) const std::map<std::string, size_t>& img_topic_camidx_map
: DataProviderBase(DataProviderType::Rosbag) )
, img_topic_camidx_map_(img_topic_camidx_map) : DataProviderBase(DataProviderType::Rosbag),
{ img_topic_camidx_map_(img_topic_camidx_map) {
loadRosbag(bag_filename); loadRosbag(bag_filename);
std::vector<std::string> topics; std::vector<std::string> topics;
for (auto it : img_topic_camidx_map_) for (auto it : img_topic_camidx_map_) {
{
VLOG(1) << "Subscribing to: " << it.first; VLOG(1) << "Subscribing to: " << it.first;
topics.push_back(it.first); topics.push_back(it.first);
sim_data_.images.emplace_back(ImagePtr(new Image())); sim_data_.images.emplace_back(ImagePtr(new Image()));
@ -37,74 +33,74 @@ DataProviderRosbag::DataProviderRosbag(
initBagView(topics); initBagView(topics);
} }
void DataProviderRosbag::loadRosbag(const std::string& bag_filename) void DataProviderRosbag::loadRosbag(const std::string& bag_filename) {
{ CHECK(ze::fileExists(bag_filename))
CHECK(ze::fileExists(bag_filename)) << "File does not exist: " << bag_filename; << "File does not exist: " << bag_filename;
VLOG(1) << "Opening rosbag: " << bag_filename << " ..."; VLOG(1) << "Opening rosbag: " << bag_filename << " ...";
bag_.reset(new rosbag::Bag); bag_.reset(new rosbag::Bag);
try try {
{
bag_->open(bag_filename, rosbag::bagmode::Read); bag_->open(bag_filename, rosbag::bagmode::Read);
} } catch (const std::exception& e) {
catch (const std::exception& e) LOG(FATAL) << "Could not open rosbag " << bag_filename << ": "
{ << e.what();
LOG(FATAL) << "Could not open rosbag " << bag_filename << ": " << e.what();
} }
} }
void DataProviderRosbag::initBagView(const std::vector<std::string>& topics) void DataProviderRosbag::initBagView(const std::vector<std::string>& topics
{ ) {
bag_view_.reset(new rosbag::View(*bag_, rosbag::TopicQuery(topics))); bag_view_.reset(new rosbag::View(*bag_, rosbag::TopicQuery(topics)));
if (FLAGS_data_source_start_time_s != 0.0 || if (FLAGS_data_source_start_time_s != 0.0
FLAGS_data_source_stop_time_s != 0.0) || FLAGS_data_source_stop_time_s != 0.0) {
{
CHECK_GE(FLAGS_data_source_start_time_s, 0); CHECK_GE(FLAGS_data_source_start_time_s, 0);
CHECK_GE(FLAGS_data_source_stop_time_s, 0); CHECK_GE(FLAGS_data_source_stop_time_s, 0);
// Retrieve begin and end times from the bag file (given the topic query). // Retrieve begin and end times from the bag file (given the topic
// query).
const ros::Time absolute_time_offset = bag_view_->getBeginTime(); const ros::Time absolute_time_offset = bag_view_->getBeginTime();
VLOG(2) << "Bag begin time: " << absolute_time_offset; VLOG(2) << "Bag begin time: " << absolute_time_offset;
const ros::Time absolute_end_time = bag_view_->getEndTime(); const ros::Time absolute_end_time = bag_view_->getEndTime();
VLOG(2) << "Bag end time: " << absolute_end_time; VLOG(2) << "Bag end time: " << absolute_end_time;
if (absolute_end_time < absolute_time_offset) if (absolute_end_time < absolute_time_offset) {
{ LOG(FATAL) << "Invalid bag end time: " << absolute_end_time
LOG(FATAL) << "Invalid bag end time: "
<< absolute_end_time
<< ". Check that the bag file is properly indexed" << ". Check that the bag file is properly indexed"
<< " by running 'rosbag reindex file.bag'."; << " by running 'rosbag reindex file.bag'.";
} }
// Compute start and stop time. // Compute start and stop time.
LOG(INFO) << "Starting rosbag at time: " << FLAGS_data_source_start_time_s; LOG(INFO) << "Starting rosbag at time: "
const ros::Duration data_source_start_time(FLAGS_data_source_start_time_s); << FLAGS_data_source_start_time_s;
const ros::Duration data_source_start_time(
FLAGS_data_source_start_time_s
);
const ros::Time absolute_start_time = const ros::Time absolute_start_time =
data_source_start_time.isZero() ? data_source_start_time.isZero()
absolute_time_offset : absolute_time_offset + data_source_start_time; ? absolute_time_offset
const ros::Duration data_source_stop_time(FLAGS_data_source_stop_time_s); : absolute_time_offset + data_source_start_time;
const ros::Duration data_source_stop_time(
FLAGS_data_source_stop_time_s
);
const ros::Time absolute_stop_time = const ros::Time absolute_stop_time =
data_source_stop_time.isZero() ? data_source_stop_time.isZero()
absolute_end_time : absolute_time_offset + data_source_stop_time; ? absolute_end_time
: absolute_time_offset + data_source_stop_time;
// Ensure that the provided stop time is valid. // Ensure that the provided stop time is valid.
// When a bag file is corrupted / invalid the bag end time // When a bag file is corrupted / invalid the bag end time
// cannot be retrieved. Run rosbag info to check if the bag file // cannot be retrieved. Run rosbag info to check if the bag file
// is properly indexed. // is properly indexed.
if (absolute_stop_time < absolute_start_time) if (absolute_stop_time < absolute_start_time) {
{ LOG(ERROR
LOG(ERROR) << "Provided stop time is less than bag begin time. " ) << "Provided stop time is less than bag begin time. "
<< "Please make sure to provide a valid stop time and " << "Please make sure to provide a valid stop time and "
<< "check that the bag file is properly indexed " << "check that the bag file is properly indexed "
<< "by running 'rosbag reindex file.bag'."; << "by running 'rosbag reindex file.bag'.";
} } else if (absolute_stop_time > absolute_end_time) {
else if (absolute_stop_time > absolute_end_time) LOG(ERROR
{ ) << "Provided stop time is greater than bag end time. "
LOG(ERROR) << "Provided stop time is greater than bag end time. "
<< "Please make sure to provide a valid stop time and " << "Please make sure to provide a valid stop time and "
<< "check that the bag file is properly indexed " << "check that the bag file is properly indexed "
<< "by running 'rosbag reindex file.bag'."; << "by running 'rosbag reindex file.bag'.";
} } else {
else
{
VLOG(1) << "Absolute start time set to " << absolute_start_time; VLOG(1) << "Absolute start time set to " << absolute_start_time;
VLOG(1) << "Absolute stop time set to " << absolute_stop_time; VLOG(1) << "Absolute stop time set to " << absolute_stop_time;
} }
@ -112,57 +108,55 @@ void DataProviderRosbag::initBagView(const std::vector<std::string>& topics)
// Reset the bag View // Reset the bag View
CHECK_GT(absolute_stop_time, absolute_start_time); CHECK_GT(absolute_stop_time, absolute_start_time);
CHECK_LE(absolute_stop_time, absolute_end_time); CHECK_LE(absolute_stop_time, absolute_end_time);
bag_view_.reset(new rosbag::View(*bag_, rosbag::TopicQuery(topics), bag_view_.reset(new rosbag::View(
absolute_start_time, absolute_stop_time)); *bag_,
rosbag::TopicQuery(topics),
absolute_start_time,
absolute_stop_time
));
} }
bag_view_it_ = bag_view_->begin(); bag_view_it_ = bag_view_->begin();
// Ensure that topics exist // Ensure that topics exist
// The connection info only contains topics that are available in the bag // The connection info only contains topics that are available in the
// If a topic is requested that is not available, it does not show up in the info. // bag. If a topic is requested that is not available, it does not show up
// in the info.
std::vector<const rosbag::ConnectionInfo*> connection_infos = std::vector<const rosbag::ConnectionInfo*> connection_infos =
bag_view_->getConnections(); bag_view_->getConnections();
if (topics.size() != connection_infos.size()) if (topics.size() != connection_infos.size()) {
{ LOG(ERROR) << "Successfully connected to "
LOG(ERROR) << "Successfully connected to " << connection_infos.size() << " topics:"; << connection_infos.size() << " topics:";
for (const rosbag::ConnectionInfo* info : connection_infos) for (const rosbag::ConnectionInfo* info : connection_infos)
{
LOG(ERROR) << "*) " << info->topic; LOG(ERROR) << "*) " << info->topic;
}
LOG(ERROR) << "Requested " << topics.size() << " topics:"; LOG(ERROR) << "Requested " << topics.size() << " topics:";
for (const std::string topic : topics) for (const std::string topic : topics)
{
LOG(ERROR) << "*) " << topic; LOG(ERROR) << "*) " << topic;
} LOG(FATAL
LOG(FATAL) << "Not all requested topics found in bagfile. " ) << "Not all requested topics found in bagfile. "
<< "Is topic_cam0, topic_imu0, etc. set correctly? " << "Is topic_cam0, topic_imu0, etc. set correctly? "
<< "Maybe removing/adding a slash as prefix solves the problem."; << "Maybe removing/adding a slash as prefix solves the problem.";
} }
} }
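// A minimal sketch of the start/stop resolution in initBagView() above,
// under the same semantics: the user flags are offsets from the bag begin
// time, and 0.0 means "use the bag's own bound". Names are illustrative.
ros::Time resolveBagBound(const ros::Time& bag_begin, const ros::Time& bag_bound, double offset_s) {
    return (offset_s == 0.0) ? bag_bound : bag_begin + ros::Duration(offset_s);
}
// absolute_start_time == resolveBagBound(begin_time, begin_time, FLAGS_data_source_start_time_s)
// absolute_stop_time  == resolveBagBound(begin_time, end_time,   FLAGS_data_source_stop_time_s)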
size_t DataProviderRosbag::numCameras() const size_t DataProviderRosbag::numCameras() const {
{
return img_topic_camidx_map_.size(); return img_topic_camidx_map_.size();
} }
bool DataProviderRosbag::spinOnce() bool DataProviderRosbag::spinOnce() {
{ if (bag_view_it_ != bag_view_->end()) {
if (bag_view_it_ != bag_view_->end())
{
const rosbag::MessageInstance m = *bag_view_it_; const rosbag::MessageInstance m = *bag_view_it_;
// Camera Messages: // Camera Messages:
const sensor_msgs::ImageConstPtr m_img = m.instantiate<sensor_msgs::Image>(); const sensor_msgs::ImageConstPtr m_img =
if (m_img && callback_) m.instantiate<sensor_msgs::Image>();
{ if (m_img && callback_) {
if (!cameraSpin(m_img, m)) if (!cameraSpin(m_img, m))
{
return false; return false;
} } else {
} LOG_FIRST_N(
else WARNING,
{ 1
LOG_FIRST_N(WARNING, 1) << "No camera callback registered but measurements available"; ) << "No camera callback registered but measurements available";
} }
++bag_view_it_; ++bag_view_it_;
@ -171,35 +165,40 @@ bool DataProviderRosbag::spinOnce()
return false; return false;
} }
bool DataProviderRosbag::cameraSpin(sensor_msgs::ImageConstPtr m_img, bool DataProviderRosbag::cameraSpin(
const rosbag::MessageInstance& m) sensor_msgs::ImageConstPtr m_img, const rosbag::MessageInstance& m
{ ) {
auto it = img_topic_camidx_map_.find(m.getTopic()); auto it = img_topic_camidx_map_.find(m.getTopic());
if (it != img_topic_camidx_map_.end()) if (it != img_topic_camidx_map_.end()) {
{
++n_processed_images_; ++n_processed_images_;
if (FLAGS_data_source_stop_after_n_frames > 0 && if (FLAGS_data_source_stop_after_n_frames > 0
n_processed_images_ > FLAGS_data_source_stop_after_n_frames) && n_processed_images_
{ > FLAGS_data_source_stop_after_n_frames) {
LOG(WARNING) << "Data source has reached max number of desired frames."; LOG(WARNING)
<< "Data source has reached max number of desired frames.";
running_ = false; running_ = false;
return false; return false;
} }
cv_bridge::CvImagePtr cv_ptr; cv_bridge::CvImagePtr cv_ptr;
try try {
{ cv_ptr = cv_bridge::toCvCopy(
cv_ptr = cv_bridge::toCvCopy(m_img, sensor_msgs::image_encodings::MONO8); m_img,
} sensor_msgs::image_encodings::MONO8
catch (cv_bridge::Exception& e) );
{ } catch (cv_bridge::Exception& e) {
LOG(WARNING) << "cv_bridge exception: " << e.what(); LOG(WARNING) << "cv_bridge exception: " << e.what();
return false; return false;
} }
cv_ptr->image.convertTo(*(sim_data_.images[0]), cv::DataType<ImageFloatType>::type, 1./255.); cv_ptr->image.convertTo(
sim_data_.timestamp = static_cast<Time>(m_img->header.stamp.toNSec()); *(sim_data_.images[0]),
cv::DataType<ImageFloatType>::type,
1. / 255.
);
sim_data_.timestamp =
static_cast<Time>(m_img->header.stamp.toNSec());
sim_data_.imu_updated = false; sim_data_.imu_updated = false;
sim_data_.images_updated = true; sim_data_.images_updated = true;
@ -209,32 +208,27 @@ bool DataProviderRosbag::cameraSpin(sensor_msgs::ImageConstPtr m_img,
sim_data_.poses_updated = false; sim_data_.poses_updated = false;
callback_(sim_data_); callback_(sim_data_);
} } else {
else LOG_FIRST_N(WARNING, 1)
{ << "Topic in bag that is not subscribed: " << m.getTopic();
LOG_FIRST_N(WARNING, 1) << "Topic in bag that is not subscribed: " << m.getTopic();
} }
return true; return true;
} }
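// For reference, the convertTo call above maps 8-bit MONO8 intensities to
// floats in [0, 1]; a self-contained OpenCV sketch:
//   cv::Mat img8u(480, 640, CV_8UC1, cv::Scalar(255));
//   cv::Mat imgf;
//   img8u.convertTo(imgf, CV_32F, 1.0 / 255.0);  // 255 -> 1.0, 0 -> 0.0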
bool DataProviderRosbag::ok() const bool DataProviderRosbag::ok() const {
{ if (!running_) {
if (!running_)
{
VLOG(1) << "Data Provider was paused/terminated."; VLOG(1) << "Data Provider was paused/terminated.";
return false; return false;
} }
if (bag_view_it_ == bag_view_->end()) if (bag_view_it_ == bag_view_->end()) {
{
VLOG(1) << "All data processed."; VLOG(1) << "All data processed.";
return false; return false;
} }
return true; return true;
} }
size_t DataProviderRosbag::size() const size_t DataProviderRosbag::size() const {
{
CHECK(bag_view_); CHECK(bag_view_);
return bag_view_->size(); return bag_view_->size();
} }


@ -1,97 +1,160 @@
#include <esim/common/utils.hpp> #include <esim/common/utils.hpp>
#include <esim/data_provider/renderer_factory.hpp> #include <esim/data_provider/renderer_factory.hpp>
#include <esim/imp_planar_renderer/planar_renderer.hpp>
#include <esim/imp_panorama_renderer/panorama_renderer.hpp>
#include <esim/imp_opengl_renderer/opengl_renderer.hpp>
#include <esim/imp_unrealcv_renderer/unrealcv_renderer.hpp>
#include <esim/imp_multi_objects_2d/imp_multi_objects_2d_renderer.hpp> #include <esim/imp_multi_objects_2d/imp_multi_objects_2d_renderer.hpp>
#include <ze/cameras/camera_models.hpp> #include <esim/imp_opengl_renderer/opengl_renderer.hpp>
#include <ze/cameras/camera_impl.hpp> #include <esim/imp_panorama_renderer/panorama_renderer.hpp>
#include <ze/common/logging.hpp> #include <esim/imp_planar_renderer/planar_renderer.hpp>
#include <esim/imp_unrealcv_renderer/unrealcv_renderer.hpp>
#include <opencv2/imgcodecs/imgcodecs.hpp> #include <opencv2/imgcodecs/imgcodecs.hpp>
#include <opencv2/imgproc/imgproc.hpp> #include <opencv2/imgproc/imgproc.hpp>
#include <ze/cameras/camera_impl.hpp>
#include <ze/cameras/camera_models.hpp>
#include <ze/common/logging.hpp>
DEFINE_int32(renderer_type, 0, " 0: Planar renderer, 1: Panorama renderer, 2: OpenGL renderer"); DEFINE_int32(
renderer_type,
0,
" 0: Planar renderer, 1: Panorama renderer, 2: OpenGL renderer"
);
DEFINE_string(renderer_texture, "", DEFINE_string(
"Path to image which will be used to texture the plane"); renderer_texture,
"",
"Path to image which will be used to texture the plane"
);
DEFINE_double(renderer_hfov_cam_source_deg, 130.0, DEFINE_double(
"Horizontal FoV of the source camera (that captured the image on the plane)"); renderer_hfov_cam_source_deg,
130.0,
"Horizontal FoV of the source camera (that captured the image on "
"the plane)"
);
DEFINE_double(renderer_preprocess_median_blur, 0, DEFINE_double(
"Kernel size of the preprocessing median blur."); renderer_preprocess_median_blur,
0,
"Kernel size of the preprocessing median blur."
);
DEFINE_double(renderer_preprocess_gaussian_blur, 0, DEFINE_double(
"Amount of preprocessing Gaussian blur."); renderer_preprocess_gaussian_blur,
0,
"Amount of preprocessing Gaussian blur."
);
DEFINE_double(renderer_plane_x, 0.0, DEFINE_double(
"x position of the center of the plane, in world coordinates"); renderer_plane_x,
0.0,
"x position of the center of the plane, in world coordinates"
);
DEFINE_double(renderer_plane_y, 0.0, DEFINE_double(
"y position of the center of the plane, in world coordinates"); renderer_plane_y,
0.0,
"y position of the center of the plane, in world coordinates"
);
DEFINE_double(renderer_plane_z, -1.0, DEFINE_double(
"z position of the center of the plane, in world coordinates"); renderer_plane_z,
-1.0,
"z position of the center of the plane, in world coordinates"
);
DEFINE_double(renderer_plane_qw, 0.0, DEFINE_double(
"w component of the quaternion q_W_P (orientation of the plane with respect to the world)"); renderer_plane_qw,
0.0,
"w component of the quaternion q_W_P (orientation of the plane "
"with respect to the world)"
);
DEFINE_double(renderer_plane_qx, 1.0, DEFINE_double(
"x component of the quaternion q_W_P (orientation of the plane with respect to the world)"); renderer_plane_qx,
1.0,
"x component of the quaternion q_W_P (orientation of the plane "
"with respect to the world)"
);
DEFINE_double(renderer_plane_qy, 0.0, DEFINE_double(
"y component of the quaternion q_W_P (orientation of the plane with respect to the world)"); renderer_plane_qy,
0.0,
"y component of the quaternion q_W_P (orientation of the plane "
"with respect to the world)"
);
DEFINE_double(renderer_plane_qz, 0.0, DEFINE_double(
"z component of the quaternion q_W_P (orientation of the plane with respect to the world)"); renderer_plane_qz,
0.0,
"z component of the quaternion q_W_P (orientation of the plane "
"with respect to the world)"
);
DEFINE_double(renderer_z_min, 0.01, DEFINE_double(renderer_z_min, 0.01, "Minimum clipping distance.");
"Minimum clipping distance.");
DEFINE_double(renderer_z_max, 10.0, DEFINE_double(renderer_z_max, 10.0, "Maximum clipping distance.");
"Maximum clipping distance.");
DEFINE_bool(renderer_extend_border, false, DEFINE_bool(
"Whether to extend the borders of the plane to infinity or not."); renderer_extend_border,
false,
"Whether to extend the borders of the plane to infinity or not."
);
DEFINE_double(renderer_panorama_qw, 0.0, DEFINE_double(
"w component of the quaternion q_W_P (orientation of the panorama with respect to the world)"); renderer_panorama_qw,
0.0,
"w component of the quaternion q_W_P (orientation of the "
"panorama with respect to the world)"
);
DEFINE_double(renderer_panorama_qx, 1.0, DEFINE_double(
"x component of the quaternion q_W_P (orientation of the panorama with respect to the world)"); renderer_panorama_qx,
1.0,
"x component of the quaternion q_W_P (orientation of the "
"panorama with respect to the world)"
);
DEFINE_double(renderer_panorama_qy, 0.0, DEFINE_double(
"y component of the quaternion q_W_P (orientation of the panorama with respect to the world)"); renderer_panorama_qy,
0.0,
"y component of the quaternion q_W_P (orientation of the "
"panorama with respect to the world)"
);
DEFINE_double(renderer_panorama_qz, 0.0, DEFINE_double(
"z component of the quaternion q_W_P (orientation of the panorama with respect to the world)"); renderer_panorama_qz,
0.0,
"z component of the quaternion q_W_P (orientation of the "
"panorama with respect to the world)"
);
namespace event_camera_simulator { namespace event_camera_simulator {
bool loadPreprocessedImage(const std::string& path_to_img, cv::Mat* img) bool loadPreprocessedImage(const std::string& path_to_img, cv::Mat* img) {
{
CHECK(img); CHECK(img);
VLOG(1) << "Loading texture from file: " << FLAGS_renderer_texture << "."; VLOG(1) << "Loading texture from file: " << FLAGS_renderer_texture
<< ".";
*img = cv::imread(path_to_img, 0); *img = cv::imread(path_to_img, 0);
if(!img->data) if (!img->data) {
{ LOG(FATAL) << "Could not open image at: " << FLAGS_renderer_texture
LOG(FATAL) << "Could not open image at: " << FLAGS_renderer_texture << "."; << ".";
return false; return false;
} }
if(FLAGS_renderer_preprocess_median_blur > 1) if (FLAGS_renderer_preprocess_median_blur > 1) {
{
VLOG(1) << "Pre-filtering the texture with median filter of size: " VLOG(1) << "Pre-filtering the texture with median filter of size: "
<< FLAGS_renderer_preprocess_median_blur << "."; << FLAGS_renderer_preprocess_median_blur << ".";
cv::medianBlur(*img, *img, FLAGS_renderer_preprocess_median_blur); cv::medianBlur(*img, *img, FLAGS_renderer_preprocess_median_blur);
} }
if(FLAGS_renderer_preprocess_gaussian_blur > 0) if (FLAGS_renderer_preprocess_gaussian_blur > 0) {
{ VLOG(1
VLOG(1) << "Pre-filtering the texture with Gaussian filter of size: " ) << "Pre-filtering the texture with Gaussian filter of size: "
<< FLAGS_renderer_preprocess_gaussian_blur << "."; << FLAGS_renderer_preprocess_gaussian_blur << ".";
cv::GaussianBlur(*img, *img, cv::Size(21,21), FLAGS_renderer_preprocess_gaussian_blur); cv::GaussianBlur(
*img,
*img,
cv::Size(21, 21),
FLAGS_renderer_preprocess_gaussian_blur
);
} }
img->convertTo(*img, cv::DataType<ImageFloatType>::type, 1.0 / 255.0); img->convertTo(*img, cv::DataType<ImageFloatType>::type, 1.0 / 255.0);
@ -99,41 +162,54 @@ bool loadPreprocessedImage(const std::string& path_to_img, cv::Mat* img)
return true; return true;
} }
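// Note on the median blur above: cv::medianBlur expects an odd integer
// kernel size greater than 1, while the gflag is a double that gets
// truncated at the call. A defensive sketch:
//   int ksize = static_cast<int>(FLAGS_renderer_preprocess_median_blur);
//   if (ksize > 1 && ksize % 2 == 0)
//       ksize += 1;  // round up to the next odd kernel size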
Renderer::Ptr loadRendererFromGflags() Renderer::Ptr loadRendererFromGflags() {
{
Renderer::Ptr renderer; Renderer::Ptr renderer;
switch (FLAGS_renderer_type) switch (FLAGS_renderer_type) {
{
case 0: // Planar renderer case 0: // Planar renderer
{ {
cv::Mat img_src; cv::Mat img_src;
if (!loadPreprocessedImage(FLAGS_renderer_texture, &img_src)) if (!loadPreprocessedImage(FLAGS_renderer_texture, &img_src))
{
return nullptr; return nullptr;
}
double f_src = hfovToFocalLength(FLAGS_renderer_hfov_cam_source_deg, img_src.cols); double f_src = hfovToFocalLength(
FLAGS_renderer_hfov_cam_source_deg,
img_src.cols
);
Camera::Ptr cam_src = std::make_shared<ze::PinholeCamera>( Camera::Ptr cam_src = std::make_shared<ze::PinholeCamera>(
img_src.cols, img_src.rows, ze::CameraType::Pinhole, img_src.cols,
(Vector4() << f_src, f_src, 0.5 * img_src.cols, 0.5 * img_src.rows).finished(), img_src.rows,
ze::VectorX()); ze::CameraType::Pinhole,
(Vector4() << f_src,
f_src,
0.5 * img_src.cols,
0.5 * img_src.rows)
.finished(),
ze::VectorX()
);
Transformation T_W_P; Transformation T_W_P;
T_W_P.getPosition() = ze::Position(FLAGS_renderer_plane_x, T_W_P.getPosition() = ze::Position(
FLAGS_renderer_plane_x,
FLAGS_renderer_plane_y, FLAGS_renderer_plane_y,
FLAGS_renderer_plane_z); FLAGS_renderer_plane_z
);
T_W_P.getRotation() = ze::Quaternion(FLAGS_renderer_plane_qw, T_W_P.getRotation() = ze::Quaternion(
FLAGS_renderer_plane_qw,
FLAGS_renderer_plane_qx, FLAGS_renderer_plane_qx,
FLAGS_renderer_plane_qy, FLAGS_renderer_plane_qy,
FLAGS_renderer_plane_qz); FLAGS_renderer_plane_qz
);
renderer.reset(new PlanarRenderer(img_src, cam_src, renderer.reset(new PlanarRenderer(
img_src,
cam_src,
T_W_P, T_W_P,
FLAGS_renderer_z_min, FLAGS_renderer_z_min,
FLAGS_renderer_z_max, FLAGS_renderer_z_max,
FLAGS_renderer_extend_border)); FLAGS_renderer_extend_border
));
break; break;
} }
@ -141,15 +217,15 @@ Renderer::Ptr loadRendererFromGflags()
{ {
cv::Mat img_src; cv::Mat img_src;
if (!loadPreprocessedImage(FLAGS_renderer_texture, &img_src)) if (!loadPreprocessedImage(FLAGS_renderer_texture, &img_src))
{
return nullptr; return nullptr;
}
Transformation::Rotation R_W_P; Transformation::Rotation R_W_P;
R_W_P = ze::Quaternion(FLAGS_renderer_panorama_qw, R_W_P = ze::Quaternion(
FLAGS_renderer_panorama_qw,
FLAGS_renderer_panorama_qx, FLAGS_renderer_panorama_qx,
FLAGS_renderer_panorama_qy, FLAGS_renderer_panorama_qy,
FLAGS_renderer_panorama_qz); FLAGS_renderer_panorama_qz
);
renderer.reset(new PanoramaRenderer(img_src, R_W_P)); renderer.reset(new PanoramaRenderer(img_src, R_W_P));
break; break;
@ -164,8 +240,7 @@ Renderer::Ptr loadRendererFromGflags()
renderer.reset(new UnrealCvRenderer()); renderer.reset(new UnrealCvRenderer());
break; break;
} }
default: default: {
{
LOG(FATAL) << "Renderer type is not known."; LOG(FATAL) << "Renderer type is not known.";
break; break;
} }
@ -174,20 +249,17 @@ Renderer::Ptr loadRendererFromGflags()
return renderer; return renderer;
} }
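// For reference, hfovToFocalLength above follows the pinhole relation
// f = (w / 2) / tan(hfov / 2); a minimal sketch (assuming it matches the
// helper declared in esim/common/utils.hpp), needs <cmath>:
double hfovToFocalLengthSketch(double hfov_deg, int width_px) {
    return 0.5 * width_px / std::tan(0.5 * hfov_deg * M_PI / 180.0);
}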
SimpleRenderer::Ptr loadSimpleRendererFromGflags() SimpleRenderer::Ptr loadSimpleRendererFromGflags() {
{
SimpleRenderer::Ptr renderer; SimpleRenderer::Ptr renderer;
switch (FLAGS_renderer_type) switch (FLAGS_renderer_type) {
{
case 0: // Multi-objects 2D renderer case 0: // Multi-objects 2D renderer
{ {
renderer.reset(new MultiObject2DRenderer()); renderer.reset(new MultiObject2DRenderer());
break; break;
} }
default: default: {
{
LOG(FATAL) << "Renderer type is not known."; LOG(FATAL) << "Renderer type is not known.";
break; break;
} }


@ -1,32 +1,35 @@
#pragma once #pragma once
#include <ze/common/macros.hpp>
#include <esim/common/types.hpp> #include <esim/common/types.hpp>
#include <ze/common/macros.hpp>
namespace event_camera_simulator { namespace event_camera_simulator {
//! Represents a rendering engine that generates images (and other outputs, //! Represents a rendering engine that generates images (and other outputs,
//! such as depth maps, or optical flow maps) given a scene and a camera position. //! such as depth maps, or optical flow maps) given a scene and a camera
class Renderer //! position.
{ class Renderer {
public: public:
ZE_POINTER_TYPEDEFS(Renderer); ZE_POINTER_TYPEDEFS(Renderer);
Renderer() {} Renderer() {}
//! Render an image at a given pose. //! Render an image at a given pose.
virtual void render(const Transformation& T_W_C, virtual void render(
const Transformation& T_W_C,
const std::vector<Transformation>& T_W_OBJ, const std::vector<Transformation>& T_W_OBJ,
const ImagePtr& out_image, const ImagePtr& out_image,
const DepthmapPtr& out_depthmap) const = 0; const DepthmapPtr& out_depthmap
) const = 0;
//! Returns true if the rendering engine can compute optic flow, false
//! Returns true if the rendering engine can compute optic flow, false otherwise //! otherwise
virtual bool canComputeOpticFlow() const = 0; virtual bool canComputeOpticFlow() const = 0;
//! Render an image + depth map + optic flow map at a given pose, //! Render an image + depth map + optic flow map at a given pose,
//! given the camera linear and angular velocity //! given the camera linear and angular velocity
virtual void renderWithFlow(const Transformation& T_W_C, virtual void renderWithFlow(
const Transformation& T_W_C,
const LinearVelocity& v_WC, const LinearVelocity& v_WC,
const AngularVelocity& w_WC, const AngularVelocity& w_WC,
const std::vector<Transformation>& T_W_OBJ, const std::vector<Transformation>& T_W_OBJ,
@ -34,13 +37,16 @@ public:
const std::vector<AngularVelocity>& angular_velocity_obj, const std::vector<AngularVelocity>& angular_velocity_obj,
const ImagePtr& out_image, const ImagePtr& out_image,
const DepthmapPtr& out_depthmap, const DepthmapPtr& out_depthmap,
const OpticFlowPtr& optic_flow_map) const {} const OpticFlowPtr& optic_flow_map
) const {}
//! Sets the camera //! Sets the camera
virtual void setCamera(const ze::Camera::Ptr& camera) = 0; virtual void setCamera(const ze::Camera::Ptr& camera) = 0;
//! Get the camera rig //! Get the camera rig
const ze::Camera::Ptr& getCamera() const { return camera_; } const ze::Camera::Ptr& getCamera() const {
return camera_;
}
protected: protected:
ze::Camera::Ptr camera_; ze::Camera::Ptr camera_;


@ -1,24 +1,27 @@
#pragma once #pragma once
#include <ze/common/macros.hpp>
#include <esim/common/types.hpp> #include <esim/common/types.hpp>
#include <ze/common/macros.hpp>
namespace event_camera_simulator { namespace event_camera_simulator {
//! Represents a rendering engine that generates images + optic flow maps //! Represents a rendering engine that generates images + optic flow maps
//! The rendering engine takes care of managing the environment and the camera trajectory within it //! The rendering engine takes care of managing the environment and the
//! camera trajectory within it
{ class SimpleRenderer {
public: public:
ZE_POINTER_TYPEDEFS(SimpleRenderer); ZE_POINTER_TYPEDEFS(SimpleRenderer);
SimpleRenderer() {} SimpleRenderer() {}
//! Render an image + optic flow map at a given time t. //! Render an image + optic flow map at a given time t.
//! The rendering engine takes care of generating the camera trajectory, etc. //! The rendering engine takes care of generating the camera trajectory,
virtual bool render(const Time t, //! etc.
virtual bool render(
const Time t,
const ImagePtr& out_image, const ImagePtr& out_image,
const OpticFlowPtr& optic_flow_map) const = 0; const OpticFlowPtr& optic_flow_map
) const = 0;
//! Get the width of the image plane //! Get the width of the image plane
virtual int getWidth() const = 0; virtual int getWidth() const = 0;

View File

@ -1,5 +1,3 @@
#include <esim/rendering/renderer_base.hpp> #include <esim/rendering/renderer_base.hpp>
namespace event_camera_simulator { namespace event_camera_simulator {} // namespace event_camera_simulator
} // namespace event_camera_simulator

View File

@ -1,5 +1,3 @@
#include <esim/rendering/simple_renderer_base.hpp> #include <esim/rendering/simple_renderer_base.hpp>
namespace event_camera_simulator { namespace event_camera_simulator {} // namespace event_camera_simulator
} // namespace event_camera_simulator


@ -1,11 +1,12 @@
#pragma once #pragma once
#include <gflags/gflags.h> #include <gflags/gflags.h>
#include <ze/vi_simulation/trajectory_simulator.hpp>
#include <ze/vi_simulation/imu_simulator.hpp> #include <ze/vi_simulation/imu_simulator.hpp>
#include <ze/vi_simulation/trajectory_simulator.hpp>
namespace event_camera_simulator { namespace event_camera_simulator {
ze::ImuSimulator::Ptr loadImuSimulatorFromGflags(const ze::TrajectorySimulator::Ptr &trajectory); ze::ImuSimulator::Ptr
loadImuSimulatorFromGflags(const ze::TrajectorySimulator::Ptr& trajectory);
} // namespace event_camera_simulator } // namespace event_camera_simulator


@ -5,6 +5,9 @@
namespace event_camera_simulator { namespace event_camera_simulator {
std::tuple<ze::TrajectorySimulator::Ptr, std::vector<ze::TrajectorySimulator::Ptr>> loadTrajectorySimulatorFromGflags(); std::tuple<
ze::TrajectorySimulator::Ptr,
std::vector<ze::TrajectorySimulator::Ptr>>
loadTrajectorySimulatorFromGflags();
} // namespace event_camera_simulator } // namespace event_camera_simulator


@ -1,10 +1,10 @@
#include <ze/common/logging.hpp>
#include <esim/trajectory/imu_factory.hpp> #include <esim/trajectory/imu_factory.hpp>
#include <ze/common/logging.hpp>
namespace event_camera_simulator { namespace event_camera_simulator {
ze::ImuSimulator::Ptr loadImuSimulatorFromGflags(const ze::TrajectorySimulator::Ptr& trajectory) ze::ImuSimulator::Ptr
{ loadImuSimulatorFromGflags(const ze::TrajectorySimulator::Ptr& trajectory) {
ze::ImuSimulator::Ptr imu; ze::ImuSimulator::Ptr imu;
const ze::real_t gyr_bias_noise_sigma = 0.0000266; const ze::real_t gyr_bias_noise_sigma = 0.0000266;
@ -15,20 +15,20 @@ ze::ImuSimulator::Ptr loadImuSimulatorFromGflags(const ze::TrajectorySimulator::
const ze::real_t gravity_magnitude = 9.81; const ze::real_t gravity_magnitude = 9.81;
ze::ImuBiasSimulator::Ptr bias; ze::ImuBiasSimulator::Ptr bias;
try try {
{
VLOG(1) << "Initialize bias ..."; VLOG(1) << "Initialize bias ...";
bias = std::make_shared<ze::ContinuousBiasSimulator>( bias = std::make_shared<ze::ContinuousBiasSimulator>(
ze::Vector3::Constant(gyr_bias_noise_sigma), ze::Vector3::Constant(gyr_bias_noise_sigma),
ze::Vector3::Constant(acc_bias_noise_sigma), ze::Vector3::Constant(acc_bias_noise_sigma),
trajectory->start(), trajectory->start(),
trajectory->end(), trajectory->end(),
100); // Results in malloc: (trajectory->end() - trajectory->start()) * imu_bandwidth_hz); 100
); // Results in malloc: (trajectory->end() - trajectory->start()) *
// imu_bandwidth_hz);
VLOG(1) << "done."; VLOG(1) << "done.";
} } catch (const std::bad_alloc& e) {
catch (const std::bad_alloc& e) LOG(FATAL
{ ) << "Could not create bias because number of samples is too high."
LOG(FATAL) << "Could not create bias because number of samples is too high."
<< " Allocation failed: " << e.what(); << " Allocation failed: " << e.what();
} }
@ -36,15 +36,19 @@ ze::ImuSimulator::Ptr loadImuSimulatorFromGflags(const ze::TrajectorySimulator::
imu = std::make_shared<ze::ImuSimulator>( imu = std::make_shared<ze::ImuSimulator>(
trajectory, trajectory,
bias, bias,
ze::RandomVectorSampler<3>::sigmas(ze::Vector3::Constant(acc_noise_sigma)), ze::RandomVectorSampler<3>::sigmas(
ze::RandomVectorSampler<3>::sigmas(ze::Vector3::Constant(gyr_noise_sigma)), ze::Vector3::Constant(acc_noise_sigma)
),
ze::RandomVectorSampler<3>::sigmas(
ze::Vector3::Constant(gyr_noise_sigma)
),
imu_bandwidth_hz, imu_bandwidth_hz,
imu_bandwidth_hz, imu_bandwidth_hz,
gravity_magnitude); gravity_magnitude
);
VLOG(1) << "done."; VLOG(1) << "done.";
return imu; return imu;
} }
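// Sample-count arithmetic behind the hard-coded 100 Hz above: the
// continuous bias simulator pre-allocates on the order of
// (trajectory->end() - trajectory->start()) * rate samples per axis, so
// e.g. a 100 s trajectory at a 1 kHz IMU bandwidth would need ~100000
// samples per axis; capping the bias rate keeps that allocation small.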
} // namespace event_camera_simulator } // namespace event_camera_simulator


@ -2,87 +2,120 @@
#include <ze/common/csv_trajectory.hpp> #include <ze/common/csv_trajectory.hpp>
#include <ze/common/logging.hpp> #include <ze/common/logging.hpp>
DEFINE_int32(trajectory_type, 0, " 0: Random spline trajectory, 1: Load trajectory from CSV file"); DEFINE_int32(
trajectory_type,
0,
" 0: Random spline trajectory, 1: Load trajectory from CSV file"
);
DEFINE_double(trajectory_length_s, 100.0, DEFINE_double(
trajectory_length_s,
100.0, 100.0,
"Length of the trajectory, in seconds " "Length of the trajectory, in seconds "
"(used when the trajectory type is random spline)"); "(used when the trajectory type is random spline)"
);
DEFINE_int32(trajectory_spline_order, 5, DEFINE_int32(trajectory_spline_order, 5, "Spline order of the trajectory");
"Spline order of the trajectory");
DEFINE_double(trajectory_sampling_frequency_hz, 5, DEFINE_double(
trajectory_sampling_frequency_hz,
5,
"Sampling frequency of the spline trajectory, i.e. " "Sampling frequency of the spline trajectory, i.e. "
"number of random poses generated per second along the trajectory"); "number of random poses generated per second along the trajectory"
);
DEFINE_int32(trajectory_num_spline_segments, 100, DEFINE_int32(
"Number of spline segments used for the trajectory"); trajectory_num_spline_segments,
100,
"Number of spline segments used for the trajectory"
);
DEFINE_double(trajectory_lambda, 0.1, DEFINE_double(
trajectory_lambda,
0.1,
"Smoothing factor for the spline trajectories. " "Smoothing factor for the spline trajectories. "
"Low value = less smooth, high value = more smooth"); "Low value = less smooth, high value = more smooth"
);
DEFINE_double(trajectory_multiplier_x, 1.0, DEFINE_double(
"Scaling factor for the X camera axis"); trajectory_multiplier_x, 1.0, "Scaling factor for the X camera axis"
);
DEFINE_double(trajectory_multiplier_y, 1.0, DEFINE_double(
"Scaling factor for the y camera axis"); trajectory_multiplier_y, 1.0, "Scaling factor for the y camera axis"
);
DEFINE_double(trajectory_multiplier_z, 1.0, DEFINE_double(
"Scaling factor for the z camera axis"); trajectory_multiplier_z, 1.0, "Scaling factor for the z camera axis"
);
DEFINE_double(trajectory_multiplier_wx, 1.0, DEFINE_double(
"Scaling factor for the x orientation axis"); trajectory_multiplier_wx, 1.0, "Scaling factor for the x orientation axis"
);
DEFINE_double(trajectory_multiplier_wy, 1.0, DEFINE_double(
"Scaling factor for the y orientation axis"); trajectory_multiplier_wy, 1.0, "Scaling factor for the y orientation axis"
);
DEFINE_double(trajectory_multiplier_wz, 1.0, DEFINE_double(
"Scaling factor for the z orientation axis"); trajectory_multiplier_wz, 1.0, "Scaling factor for the z orientation axis"
);
DEFINE_double(trajectory_offset_x, 0.0, DEFINE_double(trajectory_offset_x, 0.0, "Offset for the X camera axis");
"Offset for the X camera axis");
DEFINE_double(trajectory_offset_y, 0.0, DEFINE_double(trajectory_offset_y, 0.0, "Offset for the y camera axis");
"Offset for the y camera axis");
DEFINE_double(trajectory_offset_z, 0.0, DEFINE_double(trajectory_offset_z, 0.0, "Offset for the z camera axis");
"Offset for the z camera axis");
DEFINE_double(trajectory_offset_wx, 0.0, DEFINE_double(trajectory_offset_wx, 0.0, "Offset for the x orientation axis");
"Offset for the x orientation axis");
DEFINE_double(trajectory_offset_wy, 0.0, DEFINE_double(trajectory_offset_wy, 0.0, "Offset for the y orientation axis");
"Offset for the y orientation axis");
DEFINE_double(trajectory_offset_wz, 0.0, DEFINE_double(trajectory_offset_wz, 0.0, "Offset for the z orientation axis");
"Offset for the z orientation axis");
DEFINE_string(trajectory_csv_file, "", DEFINE_string(trajectory_csv_file, "", "CSV file containing the trajectory");
"CSV file containing the trajectory");
DEFINE_int32(trajectory_dynamic_objects_type, 1, " 0: Random spline trajectory, 1: Load trajectory from CSV file"); DEFINE_int32(
trajectory_dynamic_objects_type,
1,
" 0: Random spline trajectory, 1: Load trajectory from CSV file"
);
DEFINE_int32(trajectory_dynamic_objects_spline_order, 5, DEFINE_int32(
"Spline order of the trajectory"); trajectory_dynamic_objects_spline_order, 5, "Spline order of the trajectory"
);
DEFINE_int32(trajectory_dynamic_objects_num_spline_segments, 100, DEFINE_int32(
"Number of spline segments used for the trajectory"); trajectory_dynamic_objects_num_spline_segments,
100,
"Number of spline segments used for the trajectory"
);
DEFINE_double(trajectory_dynamic_objects_lambda, 0.1, DEFINE_double(
trajectory_dynamic_objects_lambda,
0.1,
"Smoothing factor for the spline trajectories. " "Smoothing factor for the spline trajectories. "
"Low value = less smooth, high value = more smooth"); "Low value = less smooth, high value = more smooth"
);
DEFINE_string(trajectory_dynamic_objects_csv_dir, "", DEFINE_string(
"Directory containing CSV file of trajectory for dynamic objects"); trajectory_dynamic_objects_csv_dir,
"",
"Directory containing CSV file of trajectory for dynamic objects"
);
DEFINE_string(trajectory_dynamic_objects_csv_file, "", DEFINE_string(
"CSV file containing the trajectory"); trajectory_dynamic_objects_csv_file,
"",
"CSV file containing the trajectory"
);
namespace event_camera_simulator { namespace event_camera_simulator {
std::tuple<ze::TrajectorySimulator::Ptr, std::vector<ze::TrajectorySimulator::Ptr>> loadTrajectorySimulatorFromGflags() std::tuple<
{ ze::TrajectorySimulator::Ptr,
std::vector<ze::TrajectorySimulator::Ptr>>
loadTrajectorySimulatorFromGflags() {
ze::TrajectorySimulator::Ptr trajectory; ze::TrajectorySimulator::Ptr trajectory;
std::vector<ze::TrajectorySimulator::Ptr> trajectories_dynamic_objects; std::vector<ze::TrajectorySimulator::Ptr> trajectories_dynamic_objects;
@ -90,19 +123,15 @@ std::tuple<ze::TrajectorySimulator::Ptr, std::vector<ze::TrajectorySimulator::Pt
size_t p_start, p_end; size_t p_start, p_end;
p_start = 0; p_start = 0;
while(1) while (1) {
{
int trajectory_type; int trajectory_type;
if (dynamic_object) if (dynamic_object) {
{
trajectory_type = FLAGS_trajectory_dynamic_objects_type; trajectory_type = FLAGS_trajectory_dynamic_objects_type;
if (trajectory_type != 1) if (trajectory_type != 1) {
{ LOG(FATAL) << "Only supporting trajectories of type 1 for "
LOG(FATAL) << "Only supporting trajectories of type 1 for dynamic objects!"; "dynamic objects!";
} }
} } else {
else
{
trajectory_type = FLAGS_trajectory_type; trajectory_type = FLAGS_trajectory_type;
} }
@ -110,35 +139,43 @@ std::tuple<ze::TrajectorySimulator::Ptr, std::vector<ze::TrajectorySimulator::Pt
std::string csv_path; std::string csv_path;
bool should_break = false; bool should_break = false;
if (dynamic_object) if (dynamic_object) {
{ if ((p_end = FLAGS_trajectory_dynamic_objects_csv_file
if ((p_end = FLAGS_trajectory_dynamic_objects_csv_file.find(";",p_start)) != std::string::npos) .find(";", p_start))
{ != std::string::npos) {
csv_path = FLAGS_trajectory_dynamic_objects_csv_dir + "/" + FLAGS_trajectory_dynamic_objects_csv_file.substr(p_start, p_end - p_start); csv_path = FLAGS_trajectory_dynamic_objects_csv_dir + "/"
+ FLAGS_trajectory_dynamic_objects_csv_file.substr(
p_start,
p_end - p_start
);
p_start = p_end + 1; p_start = p_end + 1;
} } else {
else csv_path = FLAGS_trajectory_dynamic_objects_csv_dir + "/"
{ + FLAGS_trajectory_dynamic_objects_csv_file.substr(
csv_path = FLAGS_trajectory_dynamic_objects_csv_dir + "/" + FLAGS_trajectory_dynamic_objects_csv_file.substr(p_start, p_end - p_start); p_start,
p_end - p_start
);
should_break = true; should_break = true;
} }
} } else {
else
{
csv_path = FLAGS_trajectory_csv_file; csv_path = FLAGS_trajectory_csv_file;
} }
switch (trajectory_type) switch (trajectory_type) {
{
case 0: // Random spline case 0: // Random spline
{ {
std::shared_ptr<ze::BSplinePoseMinimalRotationVector> pbs = std::shared_ptr<ze::BSplinePoseMinimalRotationVector> pbs =
std::make_shared<ze::BSplinePoseMinimalRotationVector>(FLAGS_trajectory_spline_order); std::make_shared<ze::BSplinePoseMinimalRotationVector>(
FLAGS_trajectory_spline_order
);
ze::MatrixX poses; ze::MatrixX poses;
ze::VectorX times = ze::VectorX::LinSpaced(FLAGS_trajectory_sampling_frequency_hz * FLAGS_trajectory_length_s, ze::VectorX times = ze::VectorX::LinSpaced(
FLAGS_trajectory_sampling_frequency_hz
* FLAGS_trajectory_length_s,
0, 0,
FLAGS_trajectory_length_s); FLAGS_trajectory_length_s
);
poses.resize(6, times.size()); poses.resize(6, times.size());
poses.setRandom(); poses.setRandom();
poses.row(0) *= FLAGS_trajectory_multiplier_x; poses.row(0) *= FLAGS_trajectory_multiplier_x;
@ -153,7 +190,12 @@ std::tuple<ze::TrajectorySimulator::Ptr, std::vector<ze::TrajectorySimulator::Pt
poses.row(3).array() += FLAGS_trajectory_offset_wx; poses.row(3).array() += FLAGS_trajectory_offset_wx;
poses.row(4).array() += FLAGS_trajectory_offset_wy; poses.row(4).array() += FLAGS_trajectory_offset_wy;
poses.row(5).array() += FLAGS_trajectory_offset_wz; poses.row(5).array() += FLAGS_trajectory_offset_wz;
pbs->initPoseSpline3(times, poses, FLAGS_trajectory_num_spline_segments, FLAGS_trajectory_lambda); pbs->initPoseSpline3(
times,
poses,
FLAGS_trajectory_num_spline_segments,
FLAGS_trajectory_lambda
);
trajectory.reset(new ze::SplineTrajectorySimulator(pbs)); trajectory.reset(new ze::SplineTrajectorySimulator(pbs));
break; break;
} }
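// The random-spline branch above draws f_s * T uniformly spaced sample
// times, fills a 6 x N pose matrix (rows: x, y, z, wx, wy, wz) with
// uniform noise in [-1, 1], then scales and offsets each row before
// fitting the pose spline. A minimal Eigen sketch of the row shaping,
// with illustrative names:
//   ze::MatrixX poses(6, n);
//   poses.setRandom();                 // entries in [-1, 1]
//   poses.row(0) *= multiplier_x;      // scale the x axis
//   poses.row(0).array() += offset_x;  // then shift it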
@ -165,70 +207,65 @@ std::tuple<ze::TrajectorySimulator::Ptr, std::vector<ze::TrajectorySimulator::Pt
LOG(INFO) << "Reading trajectory from CSV file: " << csv_path; LOG(INFO) << "Reading trajectory from CSV file: " << csv_path;
pose_series.load(csv_path); pose_series.load(csv_path);
ze::StampedTransformationVector poses = pose_series.getStampedTransformationVector(); ze::StampedTransformationVector poses =
pose_series.getStampedTransformationVector();
// Set start time of trajectory to zero. // Set start time of trajectory to zero.
const int64_t offset = poses.at(0).first; const int64_t offset = poses.at(0).first;
for (ze::StampedTransformation& it : poses) for (ze::StampedTransformation& it : poses)
{
it.first -= offset; it.first -= offset;
}
LOG(INFO) << "Initializing spline from trajectory (this may take some time)..."; LOG(INFO) << "Initializing spline from trajectory (this may "
"take some time)...";
int spline_order, num_spline_segments; int spline_order, num_spline_segments;
double lambda; double lambda;
if (dynamic_object) if (dynamic_object) {
{ spline_order =
spline_order = FLAGS_trajectory_dynamic_objects_spline_order; FLAGS_trajectory_dynamic_objects_spline_order;
num_spline_segments = FLAGS_trajectory_dynamic_objects_num_spline_segments; num_spline_segments =
FLAGS_trajectory_dynamic_objects_num_spline_segments;
lambda = FLAGS_trajectory_dynamic_objects_lambda; lambda = FLAGS_trajectory_dynamic_objects_lambda;
} } else {
else
{
spline_order = FLAGS_trajectory_spline_order; spline_order = FLAGS_trajectory_spline_order;
num_spline_segments = FLAGS_trajectory_num_spline_segments; num_spline_segments = FLAGS_trajectory_num_spline_segments;
lambda = FLAGS_trajectory_lambda; lambda = FLAGS_trajectory_lambda;
} }
std::shared_ptr<ze::BSplinePoseMinimalRotationVector> bs = std::shared_ptr<ze::BSplinePoseMinimalRotationVector> bs =
std::make_shared<ze::BSplinePoseMinimalRotationVector>(spline_order); std::make_shared<ze::BSplinePoseMinimalRotationVector>(
spline_order
);
bs->initPoseSplinePoses(poses, num_spline_segments, lambda); bs->initPoseSplinePoses(poses, num_spline_segments, lambda);
if (dynamic_object) if (dynamic_object) {
{ trajectories_dynamic_objects.push_back(
trajectories_dynamic_objects.push_back(std::make_shared<ze::SplineTrajectorySimulator>(bs)); std::make_shared<ze::SplineTrajectorySimulator>(bs)
} );
else } else {
{ trajectory =
trajectory = std::make_shared<ze::SplineTrajectorySimulator>(bs); std::make_shared<ze::SplineTrajectorySimulator>(bs);
} }
LOG(INFO) << "Done!"; LOG(INFO) << "Done!";
break; break;
} }
default: default: {
{
LOG(FATAL) << "Trajectory type is not known."; LOG(FATAL) << "Trajectory type is not known.";
break; break;
} }
} }
if (!dynamic_object) if (!dynamic_object) {
{ if (!FLAGS_trajectory_dynamic_objects_csv_dir.empty()
if (!FLAGS_trajectory_dynamic_objects_csv_dir.empty() && !FLAGS_trajectory_dynamic_objects_csv_file.empty()) && !FLAGS_trajectory_dynamic_objects_csv_file.empty()) {
{
dynamic_object = true; dynamic_object = true;
} } else {
else
{
break; break;
} }
} }
if (should_break) if (should_break)
{
break; break;
} }
}
return std::make_tuple(trajectory, trajectories_dynamic_objects); return std::make_tuple(trajectory, trajectories_dynamic_objects);
} }
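// The while (1) loop above consumes FLAGS_trajectory_dynamic_objects_csv_file
// as a ';'-separated list (e.g. "obj1.csv;obj2.csv"), loading one trajectory
// per dynamic object. A minimal self-contained sketch of that split
// (illustrative helper, needs <string> and <vector>):
std::vector<std::string> splitObjectCsvPaths(const std::string& dir, const std::string& list) {
    std::vector<std::string> paths;
    size_t start = 0, end;
    while ((end = list.find(';', start)) != std::string::npos) {
        paths.push_back(dir + "/" + list.substr(start, end - start));
        start = end + 1;
    }
    paths.push_back(dir + "/" + list.substr(start));  // last (or only) entry
    return paths;
}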


@ -1,20 +1,16 @@
#pragma once #pragma once
#include <boost/algorithm/string/predicate.hpp>
#include <boost/asio.hpp> #include <boost/asio.hpp>
#include <boost/format.hpp> #include <boost/format.hpp>
#include <boost/algorithm/string/predicate.hpp> #include <chrono>
#include <string>
#include <functional> #include <functional>
#include <iostream> #include <iostream>
#include <istream> #include <istream>
#include <ostream>
#include <chrono>
#include <thread>
#include <opencv2/core/core.hpp> #include <opencv2/core/core.hpp>
#include <ostream>
#include <string>
#include <thread>
namespace event_camera_simulator { namespace event_camera_simulator {
@ -30,7 +26,6 @@ struct CameraData {
double_t z; double_t z;
}; };
class UnrealCvClient { class UnrealCvClient {
public: public:
UnrealCvClient(std::string host, std::string port); UnrealCvClient(std::string host, std::string port);
@ -44,11 +39,13 @@ public:
void setCameraSize(int width, int height); void setCameraSize(int width, int height);
protected: protected:
void sendCommand(std::string command); void sendCommand(std::string command);
template <typename Result> template <typename Result>
Result sendCommand(std::string command, std::function<Result(std::istream&, uint32_t)> mapf); Result sendCommand(
std::string command,
std::function<Result(std::istream&, uint32_t)> mapf
);
void send(std::string msg, uint32_t counter); void send(std::string msg, uint32_t counter);
@ -67,11 +64,12 @@ private:
void handleError(boost::system::error_code ec); void handleError(boost::system::error_code ec);
std::string istreamToString(std::istream& stream, uint32_t size); std::string istreamToString(std::istream& stream, uint32_t size);
void parse_npy_header(unsigned char* buffer, void parse_npy_header(
unsigned char* buffer,
size_t& word_size, size_t& word_size,
std::vector<size_t>& shape, std::vector<size_t>& shape,
bool& fortran_order); bool& fortran_order
);
}; };
} // namespace event_camera_simulator } // namespace event_camera_simulator


@ -1,26 +1,21 @@
#include <esim/unrealcv_bridge/unrealcv_bridge.hpp>
#include <opencv2/imgcodecs/imgcodecs.hpp>
#include <glog/logging.h>
#include <cmath> #include <cmath>
#include <esim/unrealcv_bridge/unrealcv_bridge.hpp>
#include <glog/logging.h>
#include <opencv2/imgcodecs/imgcodecs.hpp>
#include <regex> #include <regex>
namespace event_camera_simulator { namespace event_camera_simulator {
using boost::asio::ip::tcp; using boost::asio::ip::tcp;
// from: https://www.boost.org/doc/libs/1_47_0/doc/html/boost_asio/reference/connect/overload6.html // from:
struct unrealcv_server_connect_condition // https://www.boost.org/doc/libs/1_47_0/doc/html/boost_asio/reference/connect/overload6.html
{ struct unrealcv_server_connect_condition {
template <typename Iterator> template <typename Iterator>
Iterator operator()( Iterator
const boost::system::error_code& ec, operator()(const boost::system::error_code& ec, Iterator next) {
Iterator next)
{
if (ec) if (ec)
{
LOG(ERROR) << ec.message(); LOG(ERROR) << ec.message();
}
LOG(INFO) << "Trying: " << next->endpoint(); LOG(INFO) << "Trying: " << next->endpoint();
return next; return next;
} }
@ -31,24 +26,29 @@ UnrealCvClient::UnrealCvClient(std::string host, std::string port)
socket_(io_service_), socket_(io_service_),
counter_(0), counter_(0),
delay_(30) { delay_(30) {
tcp::resolver resolver(io_service_); tcp::resolver resolver(io_service_);
tcp::resolver::query query(host, port); tcp::resolver::query query(host, port);
tcp::resolver::iterator endpoint_iterator = resolver.resolve(query); tcp::resolver::iterator endpoint_iterator = resolver.resolve(query);
boost::system::error_code ec; boost::system::error_code ec;
boost::asio::connect(socket_, endpoint_iterator, unrealcv_server_connect_condition(), ec); boost::asio::connect(
if(ec) socket_,
{ endpoint_iterator,
unrealcv_server_connect_condition(),
ec
);
if (ec) {
LOG(FATAL) << "Could not connect to UnrealCV server"; LOG(FATAL) << "Could not connect to UnrealCV server";
return; return;
} }
sleep(500); // long sleep to let the connection initialize sleep(500); // long sleep to let the connection initialize
// receive the first "we are connected" string // receive the first "we are connected" string
receive<std::string>([this] (std::istream& stream, uint32_t size) -> std::string { receive<std::string>(
[this](std::istream& stream, uint32_t size) -> std::string {
return this->istreamToString(stream, size); return this->istreamToString(stream, size);
}); }
);
sleep(delay_); sleep(delay_);
sendCommand("vrun r.AmbientOcclusionLevels 0"); sendCommand("vrun r.AmbientOcclusionLevels 0");
@ -62,30 +62,34 @@ UnrealCvClient::~UnrealCvClient() {
socket_.close(); socket_.close();
} }
void UnrealCvClient::saveImage(uint32_t camid, std::string path) void UnrealCvClient::saveImage(uint32_t camid, std::string path) {
{ std::string req =
std::string req = (boost::format("vget /camera/%i/lit %s") % camid % path).str(); (boost::format("vget /camera/%i/lit %s") % camid % path).str();
sendCommand(req); sendCommand(req);
} }
cv::Mat UnrealCvClient::getImage(uint32_t camid) cv::Mat UnrealCvClient::getImage(uint32_t camid) {
{ std::string req =
std::string req = (boost::format("vget /camera/%i/lit png") % camid).str(); (boost::format("vget /camera/%i/lit png") % camid).str();
return sendCommand<cv::Mat>(req, [](std::istream& stream, uint32_t size){ return sendCommand<cv::Mat>(
req,
[](std::istream& stream, uint32_t size) {
std::vector<char> data(size); std::vector<char> data(size);
stream.read(data.data(), size); stream.read(data.data(), size);
cv::Mat matrixPng = cv::imdecode(cv::Mat(data), 1); cv::Mat matrixPng = cv::imdecode(cv::Mat(data), 1);
return matrixPng.clone(); return matrixPng.clone();
}); }
);
} }
cv::Mat UnrealCvClient::getDepth(uint32_t camid) cv::Mat UnrealCvClient::getDepth(uint32_t camid) {
{ std::string req =
std::string req = (boost::format("vget /camera/%i/depth npy") % camid).str(); (boost::format("vget /camera/%i/depth npy") % camid).str();
return sendCommand<cv::Mat>(req, [this](std::istream& stream, uint32_t size){
return sendCommand<cv::Mat>(
req,
[this](std::istream& stream, uint32_t size) {
std::vector<char> data(size); std::vector<char> data(size);
stream.read(data.data(), size); stream.read(data.data(), size);
unsigned char* buffer = (unsigned char*) data.data(); unsigned char* buffer = (unsigned char*) data.data();
@ -93,7 +97,8 @@ cv::Mat UnrealCvClient::getDepth(uint32_t camid)
/* /*
* Gather data from the header * Gather data from the header
*/ */
std::vector<size_t> img_dims; // image shape parsed from the npy header std::vector<size_t>
img_dims; // image shape parsed from the npy header
size_t word_size; size_t word_size;
bool fortran_order; bool fortran_order;
parse_npy_header(buffer, word_size, img_dims, fortran_order); parse_npy_header(buffer, word_size, img_dims, fortran_order);
@ -102,8 +107,12 @@ cv::Mat UnrealCvClient::getDepth(uint32_t camid)
            std::string npy_magic(reinterpret_cast<char*>(buffer), 6);
            uint8_t major_version = *reinterpret_cast<uint8_t*>(buffer + 6);
            uint8_t minor_version = *reinterpret_cast<uint8_t*>(buffer + 7);
            uint16_t header_str_len =
                *reinterpret_cast<uint16_t*>(buffer + 8);
            std::string header(
                reinterpret_cast<char*>(buffer + 9),
                header_str_len
            );

            uint16_t header_total_len = 10 + header_str_len;
            uint32_t data_length = data.size() - header_total_len;
@ -112,66 +121,67 @@ cv::Mat UnrealCvClient::getDepth(uint32_t camid)
            /*
             * Ensure that the data is okay
             */
            if (!(major_version == 1 && minor_version == 0
                  && npy_magic.find("NUMPY") != std::string::npos)) {
                throw std::runtime_error("Npy header data not supported");
            }

            if (!(data_length == (num_pixel * sizeof(float_t)))) {
                throw std::runtime_error(
                    "Npy array data shape does not match the image size"
                );
            }

            /*
             * Read and convert the data
             */
            cv::Mat matrix_f32 =
                cv::Mat(
                    img_dims.at(0),
                    img_dims.at(1),
                    CV_32F,
                    buffer + header_total_len
                )
                    .clone();

            return matrix_f32;
        }
    );
}
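For orientation, the two getters above pair naturally: getImage() decodes the PNG payload into an 8-bit BGR cv::Mat, while getDepth() reinterprets the .npy float32 payload as a CV_32F matrix. A minimal usage sketch (the host/port constructor arguments and camera id 0 are assumptions, not taken from this hunk):

    // Hedged usage sketch; constructor arguments assumed.
    UnrealCvClient client("localhost", "9000");
    cv::Mat rgb   = client.getImage(0); // PNG-decoded, 8-bit BGR
    cv::Mat depth = client.getDepth(0); // CV_32F, one float per pixel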
void UnrealCvClient::setCamera(const CameraData& camera) {
    std::string cam_pose_s =
        (boost::format("vset /camera/%i/pose %.5f %.5f %.5f %.5f %.5f %.5f")
         % camera.id % camera.x % camera.y % camera.z % camera.pitch
         % camera.yaw % camera.roll)
            .str();
    sendCommand(cam_pose_s);
}

void UnrealCvClient::setCameraSize(int width, int height) {
    VLOG(1) << "Setting the camera size to: " << width << "x" << height;
    std::string req_size =
        (boost::format("vrun r.setres %dx%d") % width % height).str();
    sendCommand(req_size);
}

void UnrealCvClient::setCameraFOV(float hfov_deg) {
    VLOG(1) << "Setting the camera horizontal field of view to: "
            << hfov_deg << " deg";
    const int cam_id = 0;
    std::string req_fov =
        (boost::format("vset /camera/%i/horizontal_fieldofview %.5f")
         % cam_id % hfov_deg)
            .str();
    sendCommand(req_fov);
}

void UnrealCvClient::sendCommand(std::string command) {
    if (!(boost::starts_with(command, "vset")
          || boost::starts_with(command, "vrun"))) {
        throw std::runtime_error(
            "invalid command: command must start with vset or vrun"
        );
    }

    uint32_t tmpc = counter_++;
@ -189,25 +199,27 @@ void UnrealCvClient::sendCommand(std::string command)
    std::string result = receive<std::string>(
        [this](std::istream& stream, uint32_t size) -> std::string {
            return this->istreamToString(stream, size);
        }
    );

    if (!boost::starts_with(result, result_prefix + "ok")) {
        throw std::runtime_error("response identifier is incorrect");
    } else {
        VLOG(1) << "GET:" << tmpc << " " << "ok";
    }

    sleep(delay_);
}

template <typename Result>
Result UnrealCvClient::sendCommand(
    std::string command, std::function<Result(std::istream&, uint32_t)> mapf
) {
    if (!(boost::starts_with(command, "vget"))) {
        throw std::runtime_error(
            "invalid command: command must start with vget"
        );
    }

    uint32_t tmpc = counter_++;
@ -223,24 +235,26 @@ Result UnrealCvClient::sendCommand(std::string command, std::function<Result(std
     */
    Result result = receive<Result>(
        [this,
         result_prefix,
         mapf](std::istream& stream, uint32_t size) -> Result {
            std::string prefix =
                istreamToString(stream, result_prefix.size());
            size -= result_prefix.size();

            if (!boost::starts_with(prefix, result_prefix))
                throw std::runtime_error(
                    "response identifier is incorrect"
                );

            return mapf(stream, size);
        }
    );

    sleep(delay_);
    return result;
}

void UnrealCvClient::send(std::string msg, uint32_t counter) {
    std::string out = std::to_string(counter) + ":" + msg;

    uint32_t magic = 0x9E2B83C1;
@ -255,21 +269,30 @@ void UnrealCvClient::send(std::string msg, uint32_t counter)
    request_stream << out;

    // Send the request.
    boost::asio::write(
        socket_,
        request,
        boost::asio::transfer_exactly(
            request.size() + sizeof(magic) + sizeof(size)
        ),
        ec
    );
}
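As send() and receive() show, every message on the wire is framed as a fixed 8-byte header followed by the payload: a uint32 magic value 0x9E2B83C1, a uint32 payload size, and then the ASCII payload "<counter>:<command>". A self-contained sketch of the same framing, buffer-based rather than streambuf-based and purely for illustration (function name and native-endian layout are assumptions):

    // Hedged sketch of the wire format used by send()/receive().
    #include <cstdint>
    #include <cstring>
    #include <string>
    #include <vector>

    std::vector<char> frameMessage(uint32_t counter, const std::string& msg) {
        const std::string payload = std::to_string(counter) + ":" + msg;
        const uint32_t magic = 0x9E2B83C1;
        const uint32_t size  = static_cast<uint32_t>(payload.size());
        std::vector<char> out(sizeof(magic) + sizeof(size) + payload.size());
        std::memcpy(out.data(), &magic, sizeof(magic));              // bytes 0-3
        std::memcpy(out.data() + 4, &size, sizeof(size));            // bytes 4-7
        std::memcpy(out.data() + 8, payload.data(), payload.size()); // payload
        return out;
    }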
template <typename Result>
Result UnrealCvClient::receive(
    std::function<Result(std::istream&, uint32_t)> parser
) {
    boost::system::error_code ec;
    boost::asio::streambuf response;

    // first read the 8 byte header
    boost::asio::read(
        socket_,
        response,
        boost::asio::transfer_exactly(8),
        ec
    );
    handleError(ec);

    uint32_t magic;
@ -280,32 +303,31 @@ Result UnrealCvClient::receive(std::function<Result(std::istream&, uint32_t)> p
    response_stream.read((char*) &magic, sizeof(magic));
    response_stream.read((char*) &size, sizeof(size));

    boost::asio::read(
        socket_,
        response,
        boost::asio::transfer_exactly(size),
        ec
    );
    handleError(ec);

    Result res = parser(response_stream, size);
    return res;
}

void UnrealCvClient::handleError(boost::system::error_code ec) {
    if (ec == boost::asio::error::eof)
        throw boost::system::system_error(ec); // Connection closed by peer.
    else if (ec)
        throw boost::system::system_error(ec); // Some other error.
}

void UnrealCvClient::sleep(uint32_t delay) {
    std::this_thread::sleep_for(std::chrono::milliseconds(delay));
}

std::string
UnrealCvClient::istreamToString(std::istream& stream, uint32_t size) {
    char buffer[size];
    stream.read(buffer, size);
@ -316,12 +338,12 @@ std::string UnrealCvClient::istreamToString(
}

// from cnpy: https://github.com/rogersce/cnpy/blob/master/cnpy.cpp
void UnrealCvClient::parse_npy_header(
    unsigned char* buffer,
    size_t& word_size,
    std::vector<size_t>& shape,
    bool& fortran_order
) {
    // std::string magic_string(buffer,6);
    uint8_t major_version = *reinterpret_cast<uint8_t*>(buffer + 6);
    uint8_t minor_version = *reinterpret_cast<uint8_t*>(buffer + 7);
@ -343,8 +365,7 @@ void UnrealCvClient::parse_npy_header(unsigned char* buffer,
    shape.clear();

    std::string str_shape = header.substr(loc1 + 1, loc2 - loc1 - 1);
    while (std::regex_search(str_shape, sm, num_regex)) {
        shape.push_back(std::stoi(sm[0].str()));
        str_shape = sm.suffix().str();
    }
@ -353,7 +374,8 @@ void UnrealCvClient::parse_npy_header(unsigned char* buffer,
    // byte order code | stands for not applicable.
    // not sure when this applies except for byte array
    loc1 = header.find("descr") + 9;
    bool littleEndian =
        (header[loc1] == '<' || header[loc1] == '|' ? true : false);
    assert(littleEndian);

    // char type = header[loc1+1];
@ -364,5 +386,4 @@ void UnrealCvClient::parse_npy_header(unsigned char* buffer,
    word_size = atoi(str_ws.substr(0, loc2).c_str());
}
} // namespace event_camera_simulator
@ -10,15 +10,9 @@ int main() {
cv::namedWindow("Image", cv::WINDOW_AUTOSIZE); cv::namedWindow("Image", cv::WINDOW_AUTOSIZE);
cv::namedWindow("Depthmap", cv::WINDOW_AUTOSIZE); cv::namedWindow("Depthmap", cv::WINDOW_AUTOSIZE);
for(double y = 0.0; y<100.0; y+=10.0) for (double y = 0.0; y < 100.0; y += 10.0) {
{ event_camera_simulator::CameraData test =
event_camera_simulator::CameraData test = {0, {0, 0.0, 0.0, 0.0, 0.0, y, 100.0};
0.0,
0.0,
0.0,
0.0,
y,
100.0};
client.setCamera(test); client.setCamera(test);
cv::Mat img = client.getImage(0); cv::Mat img = client.getImage(0);
@ -2,34 +2,58 @@
#include <esim/common/types.hpp>
#include <esim/visualization/publisher_interface.hpp>
#include <fstream>

namespace event_camera_simulator {

class AdaptiveSamplingBenchmarkPublisher : public Publisher {
public:
    using PixelLocation = std::pair<int, int>;
    using PixelLocations = std::vector<PixelLocation>;

    AdaptiveSamplingBenchmarkPublisher(
        const std::string& benchmark_folder,
        const std::string& pixels_to_record_filename
    );

    ~AdaptiveSamplingBenchmarkPublisher();

    virtual void
    imageCallback(const ImagePtrVector& images, Time t) override;

    virtual void eventsCallback(const EventsVector& events) override;

    virtual void opticFlowCallback(
        const OpticFlowPtrVector& optic_flows, Time t
    ) override;

    virtual void imageCorruptedCallback(
        const ImagePtrVector& corrupted_images, Time t
    ) override {}

    virtual void
    depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) override {}

    virtual void poseCallback(
        const Transformation& T_W_B,
        const TransformationVector& T_W_Cs,
        Time t
    ) override {}

    virtual void twistCallback(
        const AngularVelocityVector& ws,
        const LinearVelocityVector& vs,
        Time t
    ) override {}

    virtual void
    imuCallback(const Vector3& acc, const Vector3& gyr, Time t) override {}

    virtual void cameraInfoCallback(
        const ze::CameraRig::Ptr& camera_rig, Time t
    ) override {}

    virtual void pointcloudCallback(
        const PointCloudVector& pointclouds, Time t
    ) override {}

    static Publisher::Ptr createFromGflags();
@ -5,8 +5,7 @@
namespace event_camera_simulator {

class Publisher {
public:
    ZE_POINTER_TYPEDEFS(Publisher);
@ -14,16 +13,39 @@ public:
    virtual ~Publisher() = default;

    virtual void imageCallback(const ImagePtrVector& images, Time t) {}

    virtual void
    imageCorruptedCallback(const ImagePtrVector& corrupted_images, Time t) {}

    virtual void
    depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) {}

    virtual void
    opticFlowCallback(const OpticFlowPtrVector& optic_flows, Time t) {}

    virtual void eventsCallback(const EventsVector& events) {}

    virtual void poseCallback(
        const Transformation& T_W_B,
        const TransformationVector& T_W_Cs,
        Time t
    ) {}

    virtual void twistCallback(
        const AngularVelocityVector& ws,
        const LinearVelocityVector& vs,
        Time t
    ) {}

    virtual void
    imuCallback(const Vector3& acc, const Vector3& gyr, Time t) {}

    virtual void
    cameraInfoCallback(const ze::CameraRig::Ptr& camera_rig, Time t) {}

    virtual void
    pointcloudCallback(const PointCloudVector& pointclouds, Time t) {}
};

} // namespace event_camera_simulator
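Because every callback on Publisher has an empty default body, a concrete publisher only needs to override the hooks it actually cares about. A minimal sketch of such a subclass (the class name and logging body are hypothetical, not part of this commit):

    // Hedged sketch: only the image hook is overridden; everything else
    // inherits the no-op defaults from Publisher.
    class ImageCountingPublisher : public Publisher {
    public:
        virtual void imageCallback(const ImagePtrVector& images, Time t) override {
            LOG(INFO) << "Got " << images.size() << " image(s) at t = " << t;
        }
    };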
@ -2,31 +2,48 @@
#include <esim/common/types.hpp>
#include <esim/visualization/publisher_interface.hpp>
#include <geometry_msgs/TransformStamped.h>
#include <image_transport/image_transport.h>
#include <ros/ros.h>
#include <tf/tf.h>
#include <tf/transform_broadcaster.h>

namespace event_camera_simulator {

class RosPublisher : public Publisher {
public:
    RosPublisher(size_t num_cameras);
    ~RosPublisher();

    virtual void
    imageCallback(const ImagePtrVector& images, Time t) override;

    virtual void imageCorruptedCallback(
        const ImagePtrVector& corrupted_images, Time t
    ) override;

    virtual void
    depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) override;

    virtual void opticFlowCallback(
        const OpticFlowPtrVector& optic_flows, Time t
    ) override;

    virtual void eventsCallback(const EventsVector& events) override;

    virtual void poseCallback(
        const Transformation& T_W_B,
        const TransformationVector& T_W_Cs,
        Time t
    ) override;

    virtual void twistCallback(
        const AngularVelocityVector& ws,
        const LinearVelocityVector& vs,
        Time t
    ) override;

    virtual void
    imuCallback(const Vector3& acc, const Vector3& gyr, Time t) override;

    virtual void cameraInfoCallback(
        const ze::CameraRig::Ptr& camera_rig, Time t
    ) override;

    virtual void pointcloudCallback(
        const PointCloudVector& pointclouds, Time t
    ) override;

private:
    size_t num_cameras_;
@ -41,7 +58,8 @@ private:
    std::vector<std::shared_ptr<ros::Publisher>> pointcloud_pub_;
    std::vector<std::shared_ptr<ros::Publisher>> camera_info_pub_;
    std::vector<std::shared_ptr<image_transport::Publisher>> image_pub_;
    std::vector<std::shared_ptr<image_transport::Publisher>>
        image_corrupted_pub_;
    std::vector<std::shared_ptr<image_transport::Publisher>> depthmap_pub_;
    std::vector<std::shared_ptr<ros::Publisher>> optic_flow_pub_;
    std::vector<std::shared_ptr<ros::Publisher>> twist_pub_;
@ -53,7 +71,6 @@ private:
    Time last_published_depthmap_time_;
    Time last_published_optic_flow_time_;
    Time last_published_pointcloud_time_;
};

} // namespace event_camera_simulator
@ -1,54 +1,66 @@
#pragma once

#include <dvs_msgs/EventArray.h>
#include <esim/common/types.hpp>
#include <esim_msgs/OpticFlow.h>
#include <geometry_msgs/PoseStamped.h>
#include <geometry_msgs/TwistStamped.h>
#include <pcl/point_types.h>
#include <pcl_ros/point_cloud.h>
#include <sensor_msgs/CameraInfo.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/Imu.h>

namespace event_camera_simulator {

inline std::string getTopicName(int i, const std::string& suffix) {
    std::stringstream ss;
    ss << "cam" << i << "/" << suffix;
    return ss.str();
}

inline std::string
getTopicName(const std::string& prefix, int i, const std::string& suffix) {
    std::stringstream ss;
    ss << prefix << "/" << getTopicName(i, suffix);
    return ss.str();
}

inline ros::Time toRosTime(Time t) {
    ros::Time ros_time;
    ros_time.fromNSec(t);
    return ros_time;
}
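For concreteness, these helpers compose topic names and timestamps as follows (values shown are worked examples, not output of this commit):

    // getTopicName(1, "image_raw")          -> "cam1/image_raw"
    // getTopicName("esim", 1, "image_raw")  -> "esim/cam1/image_raw"
    // toRosTime(t) wraps the nanosecond Time t into a ros::Time via fromNSec().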
void pointCloudToMsg(
    const PointCloud& pointcloud,
    const std::string& frame_id,
    Time t,
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr& msg
);

void imageToMsg(const Image& image, Time t, sensor_msgs::ImagePtr& msg);

void
depthmapToMsg(const Depthmap& depthmap, Time t, sensor_msgs::ImagePtr& msg);

void
opticFlowToMsg(const OpticFlow& flow, Time t, esim_msgs::OpticFlowPtr& msg);

void eventsToMsg(
    const Events& events,
    int width,
    int height,
    dvs_msgs::EventArrayPtr& msg
);

sensor_msgs::Imu imuToMsg(const Vector3& acc, const Vector3& gyr, Time t);

geometry_msgs::TwistStamped
twistToMsg(const AngularVelocity& w, const LinearVelocity& v, Time t);

void cameraToMsg(
    const ze::Camera::Ptr& camera, Time t, sensor_msgs::CameraInfoPtr& msg
);

} // namespace event_camera_simulator
@ -6,23 +6,40 @@
namespace event_camera_simulator {

class RosbagWriter : public Publisher {
public:
    RosbagWriter(const std::string& path_to_output_bag, size_t num_cameras);
    ~RosbagWriter();

    virtual void
    imageCallback(const ImagePtrVector& images, Time t) override;

    virtual void imageCorruptedCallback(
        const ImagePtrVector& corrupted_images, Time t
    ) override;

    virtual void
    depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) override;

    virtual void opticFlowCallback(
        const OpticFlowPtrVector& optic_flows, Time t
    ) override;

    virtual void eventsCallback(const EventsVector& events) override;

    virtual void poseCallback(
        const Transformation& T_W_B,
        const TransformationVector& T_W_Cs,
        Time t
    ) override;

    virtual void twistCallback(
        const AngularVelocityVector& ws,
        const LinearVelocityVector& vs,
        Time t
    ) override;

    virtual void
    imuCallback(const Vector3& acc, const Vector3& gyr, Time t) override;

    virtual void cameraInfoCallback(
        const ze::CameraRig::Ptr& camera_rig, Time t
    ) override;

    virtual void pointcloudCallback(
        const PointCloudVector& pointclouds, Time t
    ) override;

    static Publisher::Ptr createBagWriterFromGflags(size_t num_cameras);
@ -39,7 +56,6 @@ private:
    Time last_published_depthmap_time_;
    Time last_published_optic_flow_time_;
    Time last_published_pointcloud_time_;
};

} // namespace event_camera_simulator
@ -2,36 +2,58 @@
#include <esim/common/types.hpp>
#include <esim/visualization/publisher_interface.hpp>
#include <fstream>

namespace event_camera_simulator {

class SyntheticOpticFlowPublisher : public Publisher {
public:
    SyntheticOpticFlowPublisher(const std::string& output_folder);

    ~SyntheticOpticFlowPublisher();

    virtual void
    imageCallback(const ImagePtrVector& images, Time t) override {
        CHECK_EQ(images.size(), 1);
        if (sensor_size_.width == 0 || sensor_size_.height == 0)
            sensor_size_ = images[0]->size();
    }

    virtual void eventsCallback(const EventsVector& events) override;

    virtual void opticFlowCallback(
        const OpticFlowPtrVector& optic_flows, Time t
    ) override {}

    virtual void imageCorruptedCallback(
        const ImagePtrVector& corrupted_images, Time t
    ) override {}

    virtual void
    depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) override {}

    virtual void poseCallback(
        const Transformation& T_W_B,
        const TransformationVector& T_W_Cs,
        Time t
    ) override {}

    virtual void twistCallback(
        const AngularVelocityVector& ws,
        const LinearVelocityVector& vs,
        Time t
    ) override {}

    virtual void
    imuCallback(const Vector3& acc, const Vector3& gyr, Time t) override {}

    virtual void cameraInfoCallback(
        const ze::CameraRig::Ptr& camera_rig, Time t
    ) override {}

    virtual void pointcloudCallback(
        const PointCloudVector& pointclouds, Time t
    ) override {}

    static Publisher::Ptr createFromGflags();
@ -1,69 +1,83 @@
#include <esim/common/utils.hpp>
#include <esim/visualization/adaptive_sampling_benchmark_publisher.hpp>
#include <gflags/gflags.h>
#include <glog/logging.h>
#include <opencv2/highgui/highgui.hpp>
#include <ze/common/file_utils.hpp>
#include <ze/common/path_utils.hpp>
#include <ze/common/time_conversions.hpp>

DEFINE_string(
    adaptive_sampling_benchmark_folder,
    "",
    "Folder in which to output the results."
);

DEFINE_string(
    adaptive_sampling_benchmark_pixels_to_record_file,
    "",
    "File containing the pixel locations to record."
);

namespace event_camera_simulator {

AdaptiveSamplingBenchmarkPublisher::AdaptiveSamplingBenchmarkPublisher(
    const std::string& benchmark_folder,
    const std::string& pixels_to_record_filename
)
    : image_index_(0) {
    ze::openOutputFileStream(
        ze::joinPath(benchmark_folder, "events.txt"),
        &events_file_
    );

    ze::openOutputFileStream(
        ze::joinPath(benchmark_folder, "images.txt"),
        &images_file_
    );

    ze::openOutputFileStream(
        ze::joinPath(benchmark_folder, "pixel_intensities.txt"),
        &pixel_intensities_file_
    );

    ze::openOutputFileStream(
        ze::joinPath(benchmark_folder, "optic_flows.txt"),
        &optic_flows_file_
    );

    // Load and parse the file containing the list of pixel locations
    // whose intensity values to record
    std::ifstream pixels_to_record_file;
    if (pixels_to_record_filename != "") {
        ze::openFileStream(
            pixels_to_record_filename,
            &pixels_to_record_file
        );

        int x, y;
        LOG(INFO) << "Pixels that will be recorded: ";
        while (pixels_to_record_file >> x >> y) {
            LOG(INFO) << x << " , " << y;
            pixels_to_record_.push_back(PixelLocation(x, y));
        }
    }
}

Publisher::Ptr AdaptiveSamplingBenchmarkPublisher::createFromGflags() {
    if (FLAGS_adaptive_sampling_benchmark_folder == "") {
        LOG(WARNING) << "Empty benchmark folder string: will not write "
                        "benchmark files";
        return nullptr;
    }

    return std::make_shared<AdaptiveSamplingBenchmarkPublisher>(
        FLAGS_adaptive_sampling_benchmark_folder,
        FLAGS_adaptive_sampling_benchmark_pixels_to_record_file
    );
}
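These two gflags are the only configuration this publisher needs; a hypothetical invocation (the esim_node binary name is assumed, not taken from this commit):

    ./esim_node --adaptive_sampling_benchmark_folder=/tmp/benchmark \
                --adaptive_sampling_benchmark_pixels_to_record_file=/tmp/pixels.txt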
AdaptiveSamplingBenchmarkPublisher::~AdaptiveSamplingBenchmarkPublisher() {
    // finish writing files
    events_file_.close();
    images_file_.close();
@ -71,8 +85,9 @@ AdaptiveSamplingBenchmarkPublisher::~AdaptiveSamplingBenchmarkPublisher()
    optic_flows_file_.close();
}

void AdaptiveSamplingBenchmarkPublisher::imageCallback(
    const ImagePtrVector& images, Time t
) {
    CHECK_EQ(images.size(), 1);
    images_file_ << t << std::endl;
@ -80,55 +95,56 @@ void AdaptiveSamplingBenchmarkPublisher::imageCallback(const ImagePtrVector& ima
    cv::Mat img_8bit;
    img->convertTo(img_8bit, CV_8U, 255);

    if (image_index_ == 0) {
        static const std::vector<int> compression_params =
            {cv::IMWRITE_PNG_COMPRESSION, 0};

        std::stringstream ss;
        ss << ze::joinPath(
            FLAGS_adaptive_sampling_benchmark_folder,
            "image_"
        );
        ss << image_index_ << ".png";

        LOG(INFO) << ss.str();
        cv::imwrite(ss.str(), img_8bit, compression_params);
    }

    for (const PixelLocation& pixel_loc : pixels_to_record_) {
        // write line in the form "x y I(x,y)"
        const int x = pixel_loc.first;
        const int y = pixel_loc.second;
        pixel_intensities_file_ << x << " " << y << " "
                                << (*images[0])(y, x) << std::endl;
    }
    image_index_++;
}

void AdaptiveSamplingBenchmarkPublisher::opticFlowCallback(
    const OpticFlowPtrVector& optic_flows, Time t
) {
    CHECK_EQ(optic_flows.size(), 1);

    for (const PixelLocation& pixel_loc : pixels_to_record_) {
        // write line in the form "x y optic_flow(x,y)[0] optic_flow(x,y)[1]"
        const int x = pixel_loc.first;
        const int y = pixel_loc.second;
        optic_flows_file_ << x << " " << y << " "
                          << (*optic_flows[0])(y, x)[0] << " "
                          << (*optic_flows[0])(y, x)[1] << std::endl;
    }
}

void AdaptiveSamplingBenchmarkPublisher::eventsCallback(
    const EventsVector& events
) {
    CHECK_EQ(events.size(), 1);

    for (const Event& e : events[0]) {
        events_file_ << e.t << " " << e.x << " " << e.y << " "
                     << (e.pol ? 1 : 0) << std::endl;
    }
}
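Putting the three writers above together, the benchmark output is plain space-separated text, one record per line:

    // events.txt:            <t> <x> <y> <pol as 0|1>, one event per line
    // pixel_intensities.txt: <x> <y> <I(x,y)>, one recorded pixel per frame
    // optic_flows.txt:       <x> <y> <flow_x> <flow_y>, same pixel order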
@ -1,86 +1,116 @@
#include <esim/common/utils.hpp>
#include <esim/visualization/ros_publisher.hpp>
#include <esim/visualization/ros_utils.hpp>
#include <gflags/gflags.h>
#include <glog/logging.h>
#include <minkindr_conversions/kindr_msg.h>
#include <minkindr_conversions/kindr_tf.h>
#include <ze/common/time_conversions.hpp>

DEFINE_double(
    ros_publisher_camera_info_rate,
    0,
    "Camera info (maximum) publish rate, in Hz"
);

DEFINE_double(ros_publisher_frame_rate, 30, "(Maximum) frame rate, in Hz");

DEFINE_double(
    ros_publisher_depth_rate, 0, "(Maximum) depthmap publish rate, in Hz"
);

DEFINE_double(
    ros_publisher_pointcloud_rate,
    0,
    "(Maximum) point cloud publish rate, in Hz"
);

DEFINE_double(
    ros_publisher_optic_flow_rate,
    0,
    "(Maximum) optic flow map publish rate, in Hz"
);

namespace event_camera_simulator {

RosPublisher::RosPublisher(size_t num_cameras) {
    CHECK_GE(num_cameras, 1);
    num_cameras_ = num_cameras;
    sensor_sizes_ = std::vector<cv::Size>(num_cameras_);

    // Initialize ROS if it was not initialized before.
    if (!ros::isInitialized()) {
        VLOG(1) << "Initializing ROS";
        int argc = 0;
        ros::init(
            argc,
            nullptr,
            std::string("ros_publisher"),
            ros::init_options::NoSigintHandler
        );
    }

    // Create node and subscribe.
    nh_.reset(new ros::NodeHandle(""));
    it_.reset(new image_transport::ImageTransport(*nh_));

    // Setup ROS publishers for images, events, poses, depth maps, camera
    // info, etc.
    for (size_t i = 0; i < num_cameras_; ++i) {
        event_pub_.emplace_back(
            new ros::Publisher(nh_->advertise<dvs_msgs::EventArray>(
                getTopicName(i, "events"),
                0
            ))
        );

        image_pub_.emplace_back(new image_transport::Publisher(
            it_->advertise(getTopicName(i, "image_raw"), 0)
        ));

        image_corrupted_pub_.emplace_back(new image_transport::Publisher(
            it_->advertise(getTopicName(i, "image_corrupted"), 0)
        ));

        depthmap_pub_.emplace_back(new image_transport::Publisher(
            it_->advertise(getTopicName(i, "depthmap"), 0)
        ));

        optic_flow_pub_.emplace_back(
            new ros::Publisher(nh_->advertise<esim_msgs::OpticFlow>(
                getTopicName(i, "optic_flow"),
                0
            ))
        );

        camera_info_pub_.emplace_back(
            new ros::Publisher(nh_->advertise<sensor_msgs::CameraInfo>(
                getTopicName(i, "camera_info"),
                0
            ))
        );

        twist_pub_.emplace_back(
            new ros::Publisher(nh_->advertise<geometry_msgs::TwistStamped>(
                getTopicName(i, "twist"),
                0
            ))
        );

        pointcloud_pub_.emplace_back(new ros::Publisher(
            nh_->advertise<pcl::PointCloud<pcl::PointXYZ>>(
                getTopicName(i, "pointcloud"),
                0
            )
        ));
    }

    pose_pub_.reset(new ros::Publisher(
        nh_->advertise<geometry_msgs::PoseStamped>("pose", 0)
    ));
    imu_pub_.reset(
        new ros::Publisher(nh_->advertise<sensor_msgs::Imu>("imu", 0))
    );
    tf_broadcaster_.reset(new tf::TransformBroadcaster());

    last_published_camera_info_time_ = 0;
@ -91,10 +121,8 @@ RosPublisher::RosPublisher(size_t num_cameras)
    last_published_pointcloud_time_ = 0;
}

RosPublisher::~RosPublisher() {
    for (size_t i = 0; i < num_cameras_; ++i) {
        event_pub_[i]->shutdown();
        image_pub_[i]->shutdown();
        image_corrupted_pub_[i]->shutdown();
@ -109,30 +137,31 @@ RosPublisher::~RosPublisher()
    ros::shutdown();
}

void RosPublisher::pointcloudCallback(
    const PointCloudVector& pointclouds, Time t
) {
    CHECK_EQ(pointcloud_pub_.size(), num_cameras_);
    CHECK_EQ(pointclouds.size(), num_cameras_);

    for (size_t i = 0; i < num_cameras_; ++i) {
        const PointCloud& pcl_camera = pointclouds[i];

        CHECK(pointcloud_pub_[i]);
        if (pointcloud_pub_[i]->getNumSubscribers() == 0)
            continue;

        Duration min_time_interval_between_published_pointclouds_ =
            ze::secToNanosec(1.0 / FLAGS_ros_publisher_pointcloud_rate);
        if (last_published_pointcloud_time_ > 0
            && t - last_published_pointcloud_time_
                   < min_time_interval_between_published_pointclouds_) {
            return;
        }

        pcl::PointCloud<pcl::PointXYZRGB>::Ptr
            msg(new pcl::PointCloud<pcl::PointXYZRGB>);
        std::stringstream ss;
        ss << "cam" << i;
        pointCloudToMsg(pointclouds[i], ss.str(), t, msg);
        pointcloud_pub_[i]->publish(msg);
    }
@ -140,29 +169,25 @@ void RosPublisher::pointcloudCallback(const PointCloudVector& pointclouds, Time
    last_published_pointcloud_time_ = t;
}

void RosPublisher::imageCallback(const ImagePtrVector& images, Time t) {
    CHECK_EQ(image_pub_.size(), num_cameras_);

    for (size_t i = 0; i < num_cameras_; ++i) {
        sensor_sizes_[i] = images[i]->size();

        CHECK(image_pub_[i]);
        if (image_pub_[i]->getNumSubscribers() == 0)
            continue;

        static const Duration min_time_interval_between_published_images_ =
            ze::secToNanosec(1.0 / FLAGS_ros_publisher_frame_rate);
        if (last_published_image_time_ > 0
            && t - last_published_image_time_
                   < min_time_interval_between_published_images_) {
            return;
        }

        if (images[i]) {
            sensor_msgs::ImagePtr msg;
            imageToMsg(*images[i], t, msg);
            image_pub_[i]->publish(msg);
@ -172,27 +197,25 @@ void RosPublisher::imageCallback(const ImagePtrVector& images, Time t)
    last_published_image_time_ = t;
}
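All of these callbacks share the same throttling idiom: derive the minimum publishing interval from a gflag rate and drop the publication if the previous one was too recent. A distilled sketch of the pattern (the names rate_hz, last_published_time, and shouldPublish are generic stand-ins, not from this file):

    // Hedged sketch of the rate-limiting pattern used by the callbacks above.
    bool shouldPublish(Time t, Time& last_published_time, double rate_hz) {
        const Duration min_interval = ze::secToNanosec(1.0 / rate_hz);
        if (last_published_time > 0 && t - last_published_time < min_interval)
            return false; // too soon since the last message
        last_published_time = t;
        return true;
    }

One caveat worth noting in the originals: the interval check sits inside the per-camera loop but uses return rather than continue, so a throttled camera also suppresses the remaining cameras in the same call.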
void RosPublisher::imageCorruptedCallback(
    const ImagePtrVector& corrupted_images, Time t
) {
    CHECK_EQ(image_corrupted_pub_.size(), num_cameras_);

    for (size_t i = 0; i < num_cameras_; ++i) {
        CHECK(image_corrupted_pub_[i]);
        if (image_corrupted_pub_[i]->getNumSubscribers() == 0)
            continue;

        static const Duration min_time_interval_between_published_images_ =
            ze::secToNanosec(1.0 / FLAGS_ros_publisher_frame_rate);
        if (last_published_corrupted_image_time_ > 0
            && t - last_published_corrupted_image_time_
                   < min_time_interval_between_published_images_) {
            return;
        }

        if (corrupted_images[i]) {
            sensor_msgs::ImagePtr msg;
            imageToMsg(*corrupted_images[i], t, msg);
            image_corrupted_pub_[i]->publish(msg);
@ -202,27 +225,25 @@ void RosPublisher::imageCorruptedCallback(const ImagePtrVector& corrupted_images
    last_published_corrupted_image_time_ = t;
}

void
RosPublisher::depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) {
    CHECK_EQ(depthmap_pub_.size(), num_cameras_);

    for (size_t i = 0; i < num_cameras_; ++i) {
        CHECK(depthmap_pub_[i]);
        if (depthmap_pub_[i]->getNumSubscribers() == 0)
            continue;

        static const Duration
            min_time_interval_between_published_depthmaps_ =
                ze::secToNanosec(1.0 / FLAGS_ros_publisher_depth_rate);
        if (last_published_depthmap_time_ > 0
            && t - last_published_depthmap_time_
                   < min_time_interval_between_published_depthmaps_) {
            return;
        }

        if (depthmaps[i]) {
            sensor_msgs::ImagePtr msg;
            depthmapToMsg(*depthmaps[i], t, msg);
            depthmap_pub_[i]->publish(msg);
@ -232,28 +253,31 @@ void RosPublisher::depthmapCallback(const DepthmapPtrVector& depthmaps, Time t)
    last_published_depthmap_time_ = t;
}

void RosPublisher::opticFlowCallback(
    const OpticFlowPtrVector& optic_flows, Time t
) {
    CHECK_EQ(optic_flow_pub_.size(), num_cameras_);

    for (size_t i = 0; i < num_cameras_; ++i) {
        CHECK(optic_flow_pub_[i]);
        if (optic_flow_pub_[i]->getNumSubscribers() == 0)
            continue;

        static const Duration
            min_time_interval_between_published_optic_flows_ =
                (FLAGS_ros_publisher_optic_flow_rate > 0)
                    ? ze::secToNanosec(
                          1.0 / FLAGS_ros_publisher_optic_flow_rate
                      )
                    : 0;
        if (min_time_interval_between_published_optic_flows_ > 0
            && last_published_optic_flow_time_ > 0
            && t - last_published_optic_flow_time_
                   < min_time_interval_between_published_optic_flows_) {
            return;
        }

        if (optic_flows[i]) {
            esim_msgs::OpticFlow::Ptr msg;
            msg.reset(new esim_msgs::OpticFlow);
            opticFlowToMsg(*optic_flows[i], t, msg);
@ -264,42 +288,38 @@ void RosPublisher::opticFlowCallback(const OpticFlowPtrVector& optic_flows, Time
    last_published_optic_flow_time_ = t;
}

void RosPublisher::eventsCallback(const EventsVector& events) {
    CHECK_EQ(event_pub_.size(), num_cameras_);

    for (size_t i = 0; i < num_cameras_; ++i) {
        if (sensor_sizes_[i].width == 0 || sensor_sizes_[i].height == 0)
            continue;

        if (events[i].empty())
            continue;

        CHECK(event_pub_[i]);
        if (event_pub_[i]->getNumSubscribers() == 0)
            continue;

        dvs_msgs::EventArrayPtr msg;
        msg.reset(new dvs_msgs::EventArray);
        eventsToMsg(
            events[i],
            sensor_sizes_[i].width,
            sensor_sizes_[i].height,
            msg
        );
        event_pub_[i]->publish(msg);
    }
}

void RosPublisher::poseCallback(
    const Transformation& T_W_B, const TransformationVector& T_W_Cs, Time t
) {
    if (T_W_Cs.size() != num_cameras_) {
        LOG(WARNING) << "Number of poses is different than number of cameras."
                     << " Will not output poses.";
        return;
    }
@ -312,8 +332,7 @@ void RosPublisher::poseCallback(const Transformation& T_W_B,
    tf::transformKindrToTF(T_W_B, &bt);
    tf_broadcaster_->sendTransform(bt);

    for (size_t i = 0; i < num_cameras_; ++i) {
        std::stringstream ss;
        ss << "cam" << i;
        tf::StampedTransform bt;
@ -326,16 +345,21 @@ void RosPublisher::poseCallback(const Transformation& T_W_B,
    // Publish pose message
    geometry_msgs::PoseStamped pose_stamped_msg;
    tf::poseStampedKindrToMsg(
        T_W_B,
        toRosTime(t),
        "map",
        &pose_stamped_msg
    );
    pose_pub_->publish(pose_stamped_msg);
}

void RosPublisher::twistCallback(
    const AngularVelocityVector& ws, const LinearVelocityVector& vs, Time t
) {
    if (ws.size() != num_cameras_ || vs.size() != num_cameras_) {
        LOG(WARNING) << "Number of twists is different than number of cameras."
                     << " Will not output twists.";
        return;
    }
@ -343,49 +367,43 @@ void RosPublisher::twistCallback(const AngularVelocityVector &ws, const LinearVe
    CHECK_EQ(vs.size(), num_cameras_);
    CHECK_EQ(twist_pub_.size(), num_cameras_);

    for (size_t i = 0; i < num_cameras_; ++i) {
        CHECK(twist_pub_[i]);
        if (twist_pub_[i]->getNumSubscribers() == 0)
            continue;

        const geometry_msgs::TwistStamped msg = twistToMsg(ws[i], vs[i], t);
        twist_pub_[i]->publish(msg);
    }
}

void
RosPublisher::imuCallback(const Vector3& acc, const Vector3& gyr, Time t) {
    if (imu_pub_->getNumSubscribers() == 0)
        return;

    const sensor_msgs::Imu msg = imuToMsg(acc, gyr, t);
    imu_pub_->publish(msg);
}

void RosPublisher::cameraInfoCallback(
    const ze::CameraRig::Ptr& camera_rig, Time t
) {
    CHECK(camera_rig);
    CHECK_EQ(camera_rig->size(), num_cameras_);

    static const Duration min_time_interval_between_published_camera_info_ =
        ze::secToNanosec(1.0 / FLAGS_ros_publisher_camera_info_rate);
    if (last_published_camera_info_time_ > 0
        && t - last_published_camera_info_time_
               < min_time_interval_between_published_camera_info_) {
        return;
    }

    for (size_t i = 0; i < num_cameras_; ++i) {
        CHECK(camera_info_pub_[i]);
        if (camera_info_pub_[i]->getNumSubscribers() == 0)
            continue;

        sensor_msgs::CameraInfoPtr msg;
        msg.reset(new sensor_msgs::CameraInfo);
@ -394,7 +412,6 @@ void RosPublisher::cameraInfoCallback(const ze::CameraRig::Ptr& camera_rig, Time
    }
    last_published_camera_info_time_ = t;
}

} // namespace event_camera_simulator
@ -1,18 +1,21 @@
#include <cv_bridge/cv_bridge.h>
#include <esim/common/utils.hpp>
#include <esim/visualization/ros_utils.hpp>
#include <pcl_conversions/pcl_conversions.h>

namespace event_camera_simulator {

void pointCloudToMsg(
    const PointCloud& pointcloud,
    const std::string& frame_id,
    Time t,
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr& msg
) {
    CHECK(msg);
    msg->header.frame_id = frame_id;

    msg->height = pointcloud.size();
    msg->width = 1;

    for (auto& p_c : pointcloud) {
        pcl::PointXYZRGB p;
        p.x = p_c.xyz(0);
        p.y = p_c.xyz(1);
@ -26,8 +29,7 @@ void pointCloudToMsg(const PointCloud& pointcloud, const std::string& frame_id,
pcl_conversions::toPCL(toRosTime(t), msg->header.stamp); pcl_conversions::toPCL(toRosTime(t), msg->header.stamp);
} }
void imageToMsg(const Image& image, Time t, sensor_msgs::ImagePtr& msg) void imageToMsg(const Image& image, Time t, sensor_msgs::ImagePtr& msg) {
{
cv_bridge::CvImage cv_image; cv_bridge::CvImage cv_image;
image.convertTo(cv_image.image, CV_8U, 255.0); image.convertTo(cv_image.image, CV_8U, 255.0);
cv_image.encoding = "mono8"; cv_image.encoding = "mono8";
@ -35,8 +37,9 @@ void imageToMsg(const Image& image, Time t, sensor_msgs::ImagePtr& msg)
msg = cv_image.toImageMsg(); msg = cv_image.toImageMsg();
} }
void depthmapToMsg(const Depthmap& depthmap, Time t, sensor_msgs::ImagePtr& msg) void depthmapToMsg(
{ const Depthmap& depthmap, Time t, sensor_msgs::ImagePtr& msg
) {
cv_bridge::CvImage cv_depthmap; cv_bridge::CvImage cv_depthmap;
depthmap.convertTo(cv_depthmap.image, CV_32FC1); depthmap.convertTo(cv_depthmap.image, CV_32FC1);
cv_depthmap.encoding = "32FC1"; cv_depthmap.encoding = "32FC1";
@ -44,8 +47,9 @@ void depthmapToMsg(const Depthmap& depthmap, Time t, sensor_msgs::ImagePtr& msg)
msg = cv_depthmap.toImageMsg(); msg = cv_depthmap.toImageMsg();
} }
void opticFlowToMsg(const OpticFlow& flow, Time t, esim_msgs::OpticFlowPtr& msg) void opticFlowToMsg(
{ const OpticFlow& flow, Time t, esim_msgs::OpticFlowPtr& msg
) {
CHECK(msg); CHECK(msg);
msg->header.stamp = toRosTime(t); msg->header.stamp = toRosTime(t);
@ -56,22 +60,23 @@ void opticFlowToMsg(const OpticFlow& flow, Time t, esim_msgs::OpticFlowPtr& msg)
msg->flow_x.resize(height * width); msg->flow_x.resize(height * width);
msg->flow_y.resize(height * width); msg->flow_y.resize(height * width);
for(int y=0; y<height; ++y) for (int y = 0; y < height; ++y) {
{ for (int x = 0; x < width; ++x) {
for(int x=0; x<width; ++x)
{
msg->flow_x[x + y * width] = static_cast<float>(flow(y, x)[0]); msg->flow_x[x + y * width] = static_cast<float>(flow(y, x)[0]);
msg->flow_y[x + y * width] = static_cast<float>(flow(y, x)[1]); msg->flow_y[x + y * width] = static_cast<float>(flow(y, x)[1]);
} }
} }
} }
void eventsToMsg(const Events& events, int width, int height, dvs_msgs::EventArrayPtr& msg) void eventsToMsg(
{ const Events& events,
int width,
int height,
dvs_msgs::EventArrayPtr& msg
) {
CHECK(msg); CHECK(msg);
std::vector<dvs_msgs::Event> events_list; std::vector<dvs_msgs::Event> events_list;
for(const Event& e : events) for (const Event& e : events) {
{
dvs_msgs::Event ev; dvs_msgs::Event ev;
ev.x = e.x; ev.x = e.x;
ev.y = e.y; ev.y = e.y;
@ -87,8 +92,7 @@ void eventsToMsg(const Events& events, int width, int height, dvs_msgs::EventArr
msg->header.stamp = events_list.back().ts; msg->header.stamp = events_list.back().ts;
} }
sensor_msgs::Imu imuToMsg(const Vector3& acc, const Vector3& gyr, Time t) sensor_msgs::Imu imuToMsg(const Vector3& acc, const Vector3& gyr, Time t) {
{
sensor_msgs::Imu imu; sensor_msgs::Imu imu;
imu.header.stamp = toRosTime(t); imu.header.stamp = toRosTime(t);
@ -103,8 +107,8 @@ sensor_msgs::Imu imuToMsg(const Vector3& acc, const Vector3& gyr, Time t)
return imu; return imu;
} }
geometry_msgs::TwistStamped twistToMsg(const AngularVelocity& w, const LinearVelocity& v, Time t) geometry_msgs::TwistStamped
{ twistToMsg(const AngularVelocity& w, const LinearVelocity& v, Time t) {
geometry_msgs::TwistStamped twist; geometry_msgs::TwistStamped twist;
twist.header.stamp = toRosTime(t); twist.header.stamp = toRosTime(t);
@ -119,8 +123,9 @@ geometry_msgs::TwistStamped twistToMsg(const AngularVelocity& w, const LinearVel
return twist; return twist;
} }
void cameraToMsg(const ze::Camera::Ptr& camera, Time t, sensor_msgs::CameraInfoPtr& msg) void cameraToMsg(
{ const ze::Camera::Ptr& camera, Time t, sensor_msgs::CameraInfoPtr& msg
) {
CHECK(msg); CHECK(msg);
msg->width = camera->width(); msg->width = camera->width();
msg->height = camera->height(); msg->height = camera->height();
@ -130,15 +135,10 @@ void cameraToMsg(const ze::Camera::Ptr& camera, Time t, sensor_msgs::CameraInfoP
boost::array<double, 9> K_vec; boost::array<double, 9> K_vec;
std::vector<double> D_vec; std::vector<double> D_vec;
for (int i = 0; i < 3; ++i) for (int i = 0; i < 3; ++i)
{
for (int j = 0; j < 3; ++j) for (int j = 0; j < 3; ++j)
{
K_vec[j + i * 3] = static_cast<double>(K(i, j)); K_vec[j + i * 3] = static_cast<double>(K(i, j));
}
}
switch(camera->type()) switch (camera->type()) {
{
case ze::CameraType::PinholeRadialTangential: case ze::CameraType::PinholeRadialTangential:
case ze::CameraType::Pinhole: case ze::CameraType::Pinhole:
msg->distortion_model = "plumb_bob"; msg->distortion_model = "plumb_bob";
@ -155,20 +155,15 @@ void cameraToMsg(const ze::Camera::Ptr& camera, Time t, sensor_msgs::CameraInfoP
break; break;
} }
for(int j=0; j<camera->distortionParameters().rows(); ++j) for (int j = 0; j < camera->distortionParameters().rows(); ++j) {
{ D_vec.push_back(static_cast<double>(camera->distortionParameters()(j
D_vec.push_back(static_cast<double>(camera->distortionParameters()(j))); // @TODO: use the distortion params from the camera ))); // @TODO: use the distortion params from the camera
} }
msg->K = K_vec; msg->K = K_vec;
msg->D = D_vec; msg->D = D_vec;
msg->P = {K(0,0), 0, K(0,2), 0, msg->P = {K(0, 0), 0, K(0, 2), 0, 0, K(1, 1), K(1, 2), 0, 0, 0, 1, 0};
0, K(1,1), K(1,2), 0, msg->R = {1, 0, 0, 0, 1, 0, 0, 0, 1};
0, 0, 1, 0};
msg->R = {1, 0, 0,
0, 1, 0,
0, 0, 1};
} }
} // namespace event_camera_simulator } // namespace event_camera_simulator
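The P and R arrays filled at the end of cameraToMsg follow the sensor_msgs/CameraInfo convention: P is the 3x4 projection matrix stored row-major, and R is the 3x3 rectification matrix, here the identity. Written out with f_x = K(0,0), f_y = K(1,1), c_x = K(0,2), c_y = K(1,2):

P = \begin{pmatrix} f_x & 0 & c_x & 0 \\ 0 & f_y & c_y & 0 \\ 0 & 0 & 1 & 0 \end{pmatrix},
\qquad
R = I_{3 \times 3},

so a point (X, Y, Z, 1)^\top in the camera frame projects to pixel coordinates after multiplying by P and dividing by the third component.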

View File

@ -1,13 +1,12 @@
#include <esim/visualization/rosbag_writer.hpp>
#include <esim/common/utils.hpp> #include <esim/common/utils.hpp>
#include <ze/common/time_conversions.hpp>
#include <esim/visualization/ros_utils.hpp> #include <esim/visualization/ros_utils.hpp>
#include <esim/visualization/rosbag_writer.hpp>
#include <gflags/gflags.h>
#include <glog/logging.h>
#include <minkindr_conversions/kindr_msg.h> #include <minkindr_conversions/kindr_msg.h>
#include <minkindr_conversions/kindr_tf.h> #include <minkindr_conversions/kindr_tf.h>
#include <tf/tfMessage.h> #include <tf/tfMessage.h>
#include <ze/common/time_conversions.hpp>
#include <gflags/gflags.h>
#include <glog/logging.h>
DECLARE_double(ros_publisher_camera_info_rate); DECLARE_double(ros_publisher_camera_info_rate);
DECLARE_double(ros_publisher_frame_rate); DECLARE_double(ros_publisher_frame_rate);
@ -15,24 +14,24 @@ DECLARE_double(ros_publisher_depth_rate);
DECLARE_double(ros_publisher_pointcloud_rate); DECLARE_double(ros_publisher_pointcloud_rate);
DECLARE_double(ros_publisher_optic_flow_rate); DECLARE_double(ros_publisher_optic_flow_rate);
DEFINE_string(path_to_output_bag, "", DEFINE_string(
"Path to which save the output bag file."); path_to_output_bag, "", "Path to which save the output bag file."
);
namespace event_camera_simulator { namespace event_camera_simulator {
RosbagWriter::RosbagWriter(const std::string& path_to_output_bag, size_t num_cameras) RosbagWriter::RosbagWriter(
{ const std::string& path_to_output_bag, size_t num_cameras
) {
CHECK_GE(num_cameras, 1); CHECK_GE(num_cameras, 1);
num_cameras_ = num_cameras; num_cameras_ = num_cameras;
sensor_sizes_ = std::vector<cv::Size>(num_cameras_); sensor_sizes_ = std::vector<cv::Size>(num_cameras_);
try try {
{
bag_.open(path_to_output_bag, rosbag::bagmode::Write); bag_.open(path_to_output_bag, rosbag::bagmode::Write);
} } catch (rosbag::BagIOException e) {
catch(rosbag::BagIOException e) LOG(FATAL) << "Error: could not open rosbag: "
{ << FLAGS_path_to_output_bag << std::endl;
LOG(FATAL) << "Error: could not open rosbag: " << FLAGS_path_to_output_bag << std::endl;
return; return;
} }
@ -46,180 +45,192 @@ RosbagWriter::RosbagWriter(const std::string& path_to_output_bag, size_t num_cam
last_published_pointcloud_time_ = 0; last_published_pointcloud_time_ = 0;
} }
Publisher::Ptr RosbagWriter::createBagWriterFromGflags(size_t num_cameras) Publisher::Ptr RosbagWriter::createBagWriterFromGflags(size_t num_cameras) {
{ if (FLAGS_path_to_output_bag == "") {
if(FLAGS_path_to_output_bag == "")
{
LOG(INFO) << "Empty output bag string: will not write to rosbag"; LOG(INFO) << "Empty output bag string: will not write to rosbag";
return nullptr; return nullptr;
} }
return std::make_shared<RosbagWriter>(FLAGS_path_to_output_bag, num_cameras); return std::make_shared<RosbagWriter>(
FLAGS_path_to_output_bag,
num_cameras
);
} }
RosbagWriter::~RosbagWriter() RosbagWriter::~RosbagWriter() {
{
LOG(INFO) << "Finalizing the bag..."; LOG(INFO) << "Finalizing the bag...";
bag_.close(); bag_.close();
LOG(INFO) << "Finished writing to bag: " << FLAGS_path_to_output_bag; LOG(INFO) << "Finished writing to bag: " << FLAGS_path_to_output_bag;
} }
void RosbagWriter::pointcloudCallback(const PointCloudVector& pointclouds, Time t) void RosbagWriter::pointcloudCallback(
{ const PointCloudVector& pointclouds, Time t
) {
CHECK_EQ(pointclouds.size(), num_cameras_); CHECK_EQ(pointclouds.size(), num_cameras_);
for(size_t i=0; i<num_cameras_; ++i) for (size_t i = 0; i < num_cameras_; ++i) {
{ Duration min_time_interval_between_published_pointclouds_ =
Duration min_time_interval_between_published_pointclouds_ ze::secToNanosec(1.0 / FLAGS_ros_publisher_pointcloud_rate);
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_pointcloud_rate); if (last_published_pointcloud_time_ > 0
if(last_published_pointcloud_time_ > 0 && t - last_published_pointcloud_time_ < min_time_interval_between_published_pointclouds_) && t - last_published_pointcloud_time_
{ < min_time_interval_between_published_pointclouds_) {
return; return;
} }
pcl::PointCloud<pcl::PointXYZRGB>::Ptr msg (new pcl::PointCloud<pcl::PointXYZRGB>); pcl::PointCloud<pcl::PointXYZRGB>::Ptr
std::stringstream ss; ss << "cam" << i; msg(new pcl::PointCloud<pcl::PointXYZRGB>);
std::stringstream ss;
ss << "cam" << i;
pointCloudToMsg(pointclouds[i], ss.str(), t, msg); pointCloudToMsg(pointclouds[i], ss.str(), t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "pointcloud"), bag_.write(
toRosTime(t), msg); getTopicName(topic_name_prefix_, i, "pointcloud"),
toRosTime(t),
msg
);
} }
last_published_pointcloud_time_ = t; last_published_pointcloud_time_ = t;
} }
void RosbagWriter::imageCallback(const ImagePtrVector& images, Time t) void RosbagWriter::imageCallback(const ImagePtrVector& images, Time t) {
{ for (size_t i = 0; i < num_cameras_; ++i) {
for(size_t i=0; i<num_cameras_; ++i)
{
sensor_sizes_[i] = images[i]->size(); sensor_sizes_[i] = images[i]->size();
static const Duration min_time_interval_between_published_images_ static const Duration min_time_interval_between_published_images_ =
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_frame_rate); ze::secToNanosec(1.0 / FLAGS_ros_publisher_frame_rate);
if(last_published_image_time_ > 0 && t - last_published_image_time_ < min_time_interval_between_published_images_) if (last_published_image_time_ > 0
{ && t - last_published_image_time_
< min_time_interval_between_published_images_) {
return; return;
} }
if(images[i]) if (images[i]) {
{
sensor_msgs::ImagePtr msg; sensor_msgs::ImagePtr msg;
imageToMsg(*images[i], t, msg); imageToMsg(*images[i], t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "image_raw"), bag_.write(
msg->header.stamp, msg); getTopicName(topic_name_prefix_, i, "image_raw"),
msg->header.stamp,
msg
);
} }
} }
last_published_image_time_ = t; last_published_image_time_ = t;
} }
void RosbagWriter::imageCorruptedCallback(const ImagePtrVector& images_corrupted, Time t) void RosbagWriter::imageCorruptedCallback(
{ const ImagePtrVector& images_corrupted, Time t
for(size_t i=0; i<num_cameras_; ++i) ) {
{ for (size_t i = 0; i < num_cameras_; ++i) {
static const Duration min_time_interval_between_published_images_ static const Duration min_time_interval_between_published_images_ =
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_frame_rate); ze::secToNanosec(1.0 / FLAGS_ros_publisher_frame_rate);
if(last_published_corrupted_image_time_ > 0 && t - last_published_corrupted_image_time_ < min_time_interval_between_published_images_) if (last_published_corrupted_image_time_ > 0
{ && t - last_published_corrupted_image_time_
< min_time_interval_between_published_images_) {
return; return;
} }
if(images_corrupted[i]) if (images_corrupted[i]) {
{
sensor_msgs::ImagePtr msg; sensor_msgs::ImagePtr msg;
imageToMsg(*images_corrupted[i], t, msg); imageToMsg(*images_corrupted[i], t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "image_corrupted"), bag_.write(
msg->header.stamp, msg); getTopicName(topic_name_prefix_, i, "image_corrupted"),
msg->header.stamp,
msg
);
} }
} }
last_published_corrupted_image_time_ = t; last_published_corrupted_image_time_ = t;
} }
void
void RosbagWriter::depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) RosbagWriter::depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) {
{
if (depthmaps.size() != num_cameras_) if (depthmaps.size() != num_cameras_)
{ return;
for (size_t i = 0; i < num_cameras_; ++i) {
static const Duration
min_time_interval_between_published_depthmaps_ =
ze::secToNanosec(1.0 / FLAGS_ros_publisher_depth_rate);
if (last_published_depthmap_time_ > 0
&& t - last_published_depthmap_time_
< min_time_interval_between_published_depthmaps_) {
return; return;
} }
for(size_t i=0; i<num_cameras_; ++i) if (depthmaps[i]) {
{
static const Duration min_time_interval_between_published_depthmaps_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_depth_rate);
if(last_published_depthmap_time_ > 0 && t - last_published_depthmap_time_ < min_time_interval_between_published_depthmaps_)
{
return;
}
if(depthmaps[i])
{
sensor_msgs::ImagePtr msg; sensor_msgs::ImagePtr msg;
depthmapToMsg(*depthmaps[i], t, msg); depthmapToMsg(*depthmaps[i], t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "depthmap"), bag_.write(
msg->header.stamp, msg); getTopicName(topic_name_prefix_, i, "depthmap"),
msg->header.stamp,
msg
);
} }
} }
last_published_depthmap_time_ = t; last_published_depthmap_time_ = t;
} }
void RosbagWriter::opticFlowCallback(
void RosbagWriter::opticFlowCallback(const OpticFlowPtrVector& optic_flows, Time t) const OpticFlowPtrVector& optic_flows, Time t
{ ) {
if (optic_flows.size() != num_cameras_) if (optic_flows.size() != num_cameras_)
{ return;
for (size_t i = 0; i < num_cameras_; ++i) {
static const Duration
min_time_interval_between_published_optic_flows_ =
ze::secToNanosec(1.0 / FLAGS_ros_publisher_optic_flow_rate);
if (last_published_optic_flow_time_ > 0
&& t - last_published_optic_flow_time_
< min_time_interval_between_published_optic_flows_) {
return; return;
} }
for(size_t i=0; i<num_cameras_; ++i) if (optic_flows[i]) {
{
static const Duration min_time_interval_between_published_optic_flows_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_optic_flow_rate);
if(last_published_optic_flow_time_ > 0 && t - last_published_optic_flow_time_ < min_time_interval_between_published_optic_flows_)
{
return;
}
if(optic_flows[i])
{
esim_msgs::OpticFlow::Ptr msg; esim_msgs::OpticFlow::Ptr msg;
msg.reset(new esim_msgs::OpticFlow); msg.reset(new esim_msgs::OpticFlow);
opticFlowToMsg(*optic_flows[i], t, msg); opticFlowToMsg(*optic_flows[i], t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "optic_flow"), bag_.write(
msg->header.stamp, msg); getTopicName(topic_name_prefix_, i, "optic_flow"),
msg->header.stamp,
msg
);
} }
} }
last_published_optic_flow_time_ = t; last_published_optic_flow_time_ = t;
} }
void RosbagWriter::eventsCallback(const EventsVector& events) void RosbagWriter::eventsCallback(const EventsVector& events) {
{ for (size_t i = 0; i < num_cameras_; ++i) {
for(size_t i=0; i<num_cameras_; ++i)
{
if (sensor_sizes_[i].width == 0 || sensor_sizes_[i].height == 0) if (sensor_sizes_[i].width == 0 || sensor_sizes_[i].height == 0)
{
continue; continue;
}
if (events[i].empty()) if (events[i].empty())
{
continue; continue;
}
dvs_msgs::EventArrayPtr msg; dvs_msgs::EventArrayPtr msg;
msg.reset(new dvs_msgs::EventArray); msg.reset(new dvs_msgs::EventArray);
eventsToMsg(events[i], sensor_sizes_[i].width, sensor_sizes_[i].height, msg); eventsToMsg(
events[i],
sensor_sizes_[i].width,
sensor_sizes_[i].height,
msg
);
bag_.write(getTopicName(topic_name_prefix_, i, "events"), bag_.write(
msg->header.stamp, msg); getTopicName(topic_name_prefix_, i, "events"),
msg->header.stamp,
msg
);
} }
} }
void RosbagWriter::poseCallback(const Transformation& T_W_B, void RosbagWriter::poseCallback(
const TransformationVector& T_W_Cs, const Transformation& T_W_B, const TransformationVector& T_W_Cs, Time t
Time t) ) {
{ if (T_W_Cs.size() != num_cameras_) {
if(T_W_Cs.size() != num_cameras_) LOG(WARNING
{ ) << "Number of poses is different than number of cameras."
LOG(WARNING) << "Number of poses is different than number of cameras."
<< "Will not output poses."; << "Will not output poses.";
return; return;
} }
@ -229,17 +240,28 @@ void RosbagWriter::poseCallback(const Transformation& T_W_B,
transform_stamped_msg.header.stamp = toRosTime(t); transform_stamped_msg.header.stamp = toRosTime(t);
tf::tfMessage tf_msg; tf::tfMessage tf_msg;
for(size_t i=0; i<num_cameras_; ++i) for (size_t i = 0; i < num_cameras_; ++i) {
{
// Write pose to bag // Write pose to bag
tf::poseStampedKindrToMsg(T_W_Cs[i], toRosTime(t), "map", &pose_stamped_msg); tf::poseStampedKindrToMsg(
bag_.write(getTopicName(topic_name_prefix_, i, "pose"), T_W_Cs[i],
toRosTime(t), pose_stamped_msg); toRosTime(t),
"map",
&pose_stamped_msg
);
bag_.write(
getTopicName(topic_name_prefix_, i, "pose"),
toRosTime(t),
pose_stamped_msg
);
// Write tf transform to bag // Write tf transform to bag
std::stringstream ss; ss << "cam" << i; std::stringstream ss;
ss << "cam" << i;
transform_stamped_msg.child_frame_id = ss.str(); transform_stamped_msg.child_frame_id = ss.str();
tf::transformKindrToMsg(T_W_Cs[i], &transform_stamped_msg.transform); tf::transformKindrToMsg(
T_W_Cs[i],
&transform_stamped_msg.transform
);
tf_msg.transforms.push_back(transform_stamped_msg); tf_msg.transforms.push_back(transform_stamped_msg);
} }
@ -250,59 +272,63 @@ void RosbagWriter::poseCallback(const Transformation& T_W_B,
bag_.write("/tf", toRosTime(t), tf_msg); bag_.write("/tf", toRosTime(t), tf_msg);
} }
void RosbagWriter::twistCallback(const AngularVelocityVector &ws, const LinearVelocityVector &vs, Time t) void RosbagWriter::twistCallback(
{ const AngularVelocityVector& ws, const LinearVelocityVector& vs, Time t
if(ws.size() != num_cameras_ ) {
|| vs.size() != num_cameras_) if (ws.size() != num_cameras_ || vs.size() != num_cameras_) {
{ LOG(WARNING
LOG(WARNING) << "Number of twists is different than number of cameras." ) << "Number of twists is different than number of cameras."
<< "Will not output twists."; << "Will not output twists.";
return; return;
} }
CHECK_EQ(ws.size(), num_cameras_); CHECK_EQ(ws.size(), num_cameras_);
CHECK_EQ(vs.size(), num_cameras_); CHECK_EQ(vs.size(), num_cameras_);
for(size_t i=0; i<num_cameras_; ++i) for (size_t i = 0; i < num_cameras_; ++i) {
{
const geometry_msgs::TwistStamped msg = twistToMsg(ws[i], vs[i], t); const geometry_msgs::TwistStamped msg = twistToMsg(ws[i], vs[i], t);
bag_.write(getTopicName(topic_name_prefix_, i, "twist"), bag_.write(
msg.header.stamp, msg); getTopicName(topic_name_prefix_, i, "twist"),
msg.header.stamp,
msg
);
} }
} }
void RosbagWriter::imuCallback(const Vector3& acc, const Vector3& gyr, Time t) void
{ RosbagWriter::imuCallback(const Vector3& acc, const Vector3& gyr, Time t) {
VLOG_EVERY_N(1, 500) << "t = " << ze::nanosecToSecTrunc(t) << " s"; VLOG_EVERY_N(1, 500) << "t = " << ze::nanosecToSecTrunc(t) << " s";
const sensor_msgs::Imu msg = imuToMsg(acc, gyr, t); const sensor_msgs::Imu msg = imuToMsg(acc, gyr, t);
const std::string imu_topic = "/imu"; const std::string imu_topic = "/imu";
bag_.write(imu_topic, bag_.write(imu_topic, msg.header.stamp, msg);
msg.header.stamp, msg);
} }
void RosbagWriter::cameraInfoCallback(const ze::CameraRig::Ptr& camera_rig, Time t) void RosbagWriter::cameraInfoCallback(
{ const ze::CameraRig::Ptr& camera_rig, Time t
) {
CHECK(camera_rig); CHECK(camera_rig);
CHECK_EQ(camera_rig->size(), num_cameras_); CHECK_EQ(camera_rig->size(), num_cameras_);
static const Duration min_time_interval_between_published_camera_info_ static const Duration min_time_interval_between_published_camera_info_ =
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_camera_info_rate); ze::secToNanosec(1.0 / FLAGS_ros_publisher_camera_info_rate);
if(last_published_camera_info_time_ > 0 && t - last_published_camera_info_time_ < min_time_interval_between_published_camera_info_) if (last_published_camera_info_time_ > 0
{ && t - last_published_camera_info_time_
< min_time_interval_between_published_camera_info_) {
return; return;
} }
for(size_t i=0; i<num_cameras_; ++i) for (size_t i = 0; i < num_cameras_; ++i) {
{
sensor_msgs::CameraInfoPtr msg; sensor_msgs::CameraInfoPtr msg;
msg.reset(new sensor_msgs::CameraInfo); msg.reset(new sensor_msgs::CameraInfo);
cameraToMsg(camera_rig->atShared(i), t, msg); cameraToMsg(camera_rig->atShared(i), t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "camera_info"), bag_.write(
msg->header.stamp, msg); getTopicName(topic_name_prefix_, i, "camera_info"),
msg->header.stamp,
msg
);
} }
last_published_camera_info_time_ = t; last_published_camera_info_time_ = t;
} }
} // namespace event_camera_simulator } // namespace event_camera_simulator

View File

@ -1,52 +1,60 @@
#include <esim/visualization/synthetic_optic_flow_publisher.hpp>
#include <esim/common/utils.hpp> #include <esim/common/utils.hpp>
#include <ze/common/path_utils.hpp> #include <esim/visualization/synthetic_optic_flow_publisher.hpp>
#include <ze/common/file_utils.hpp>
#include <ze/common/time_conversions.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <gflags/gflags.h> #include <gflags/gflags.h>
#include <glog/logging.h> #include <glog/logging.h>
#include <opencv2/highgui/highgui.hpp>
#include <ze/common/file_utils.hpp>
#include <ze/common/path_utils.hpp>
#include <ze/common/time_conversions.hpp>
DEFINE_string(synthetic_optic_flow_output_folder, "", DEFINE_string(
"Folder in which to output the events."); synthetic_optic_flow_output_folder,
"",
"Folder in which to output the events."
);
namespace event_camera_simulator { namespace event_camera_simulator {
/** /**
* This publisher was designed with the purpose of generating simulation data * This publisher was designed with the purpose of generating simulation
* with ground truth labels, for the task of optic flow estimation. * data with ground truth labels, for the task of optic flow estimation.
* *
* It assumes that it will receive a relatively small sequence of events (corresponding, for example, * It assumes that it will receive a relatively small sequence of events
* to all the events in between two frames), and will write all the events to disk in its destructor, * (corresponding, for example, to all the events in between two frames),
* in three forms: * and will write all the events to disk in its destructor, in three forms:
* - an "events.txt" file that contains all the events in "t x y pol" format (one event per line) * - an "events.txt" file that contains all the events in "t x y pol"
* - an "event_count.png" image that whose first two channels contain the counts of the positive (resp. negative) event counts at each pixel * format (one event per line)
* - two "timestamps images" in which each pixel contains the timestamp at the last event that fell on the pixel. * - an "event_count.png" image that whose first two channels contain the
* (since the timestamp is a floating point value, it is split in 3 8-bit values so that the timestamp images * counts of the positive (resp. negative) event counts at each pixel
* - two "timestamps images" in which each pixel contains the timestamp at
* the last event that fell on the pixel. (since the timestamp is a floating
* point value, it is split in 3 8-bit values so that the timestamp images
* can be saved in a single 3-channel image). * can be saved in a single 3-channel image).
*/ */
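The 24-bit timestamp packing described above is easy to get wrong when reading the images back, so here is a self-contained sketch of the round trip, under the same convention as the destructor below (least-significant byte in channel 0; the encode/decode names are illustrative, not part of the codebase):

#include <cstdint>

// Pack a fraction in [0, 1] into three 8-bit channels.
// The fraction is first remapped to an integer in 0 .. 2^24
// (a fraction of exactly 1.0 wraps to 0, as in the arithmetic below).
void encodeFraction(double fraction, uint8_t channel[3]) {
    uint32_t v = static_cast<uint32_t>(fraction * (1 << 24));
    for (int i = 0; i < 3; i++) {
        channel[i] = v & 0xFF; // keep the lowest 8 bits
        v >>= 8;               // shift the next 8 bits into place
    }
}

// Reassemble the fraction from the three channels.
double decodeFraction(const uint8_t channel[3]) {
    uint32_t v = static_cast<uint32_t>(channel[0])
               | (static_cast<uint32_t>(channel[1]) << 8)
               | (static_cast<uint32_t>(channel[2]) << 16);
    return static_cast<double>(v) / (1 << 24);
}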
SyntheticOpticFlowPublisher::SyntheticOpticFlowPublisher(const std::string& output_folder) SyntheticOpticFlowPublisher::SyntheticOpticFlowPublisher(
: output_folder_(output_folder) const std::string& output_folder
{ )
ze::openOutputFileStream(ze::joinPath(output_folder, "events.txt"), : output_folder_(output_folder) {
&events_file_); ze::openOutputFileStream(
ze::joinPath(output_folder, "events.txt"),
&events_file_
);
} }
Publisher::Ptr SyntheticOpticFlowPublisher::createFromGflags() Publisher::Ptr SyntheticOpticFlowPublisher::createFromGflags() {
{ if (FLAGS_synthetic_optic_flow_output_folder == "") {
if(FLAGS_synthetic_optic_flow_output_folder == "") LOG(WARNING
{ ) << "Empty output folder string: will not write synthetic "
LOG(WARNING) << "Empty output folder string: will not write synthetic optic flow files"; "optic flow files";
return nullptr; return nullptr;
} }
return std::make_shared<SyntheticOpticFlowPublisher>(FLAGS_synthetic_optic_flow_output_folder); return std::make_shared<SyntheticOpticFlowPublisher>(
FLAGS_synthetic_optic_flow_output_folder
);
} }
SyntheticOpticFlowPublisher::~SyntheticOpticFlowPublisher() SyntheticOpticFlowPublisher::~SyntheticOpticFlowPublisher() {
{
// Create an event count image using all the events collected // Create an event count image using all the events collected
cv::Mat event_count_image = cv::Mat::zeros(sensor_size_, CV_8UC3); cv::Mat event_count_image = cv::Mat::zeros(sensor_size_, CV_8UC3);
@ -56,34 +64,46 @@ SyntheticOpticFlowPublisher::~SyntheticOpticFlowPublisher()
int remapped_timestamp_fraction; int remapped_timestamp_fraction;
double timestamp_fraction; double timestamp_fraction;
for(Event e : events_) for (Event e : events_) {
{
event_count_image.at<cv::Vec3b>(e.y, e.x)[int(e.pol)]++; event_count_image.at<cv::Vec3b>(e.y, e.x)[int(e.pol)]++;
cv::Mat& curr_timestamp_image = e.pol ? timestamps_pos : timestamps_neg; cv::Mat& curr_timestamp_image =
e.pol ? timestamps_pos : timestamps_neg;
// remap value // remap value
timestamp_fraction = double(e.t - events_[0].t) / (events_[events_.size()-1].t - events_[0].t); timestamp_fraction = double(e.t - events_[0].t)
remapped_timestamp_fraction = timestamp_fraction * std::pow(2,24); // remap 0-1 to 0 - 2^24 / (events_[events_.size() - 1].t - events_[0].t);
remapped_timestamp_fraction =
timestamp_fraction * std::pow(2, 24); // remap 0-1 to 0 - 2^24
// distribute the 24 bit number (remapped_timestamp_fraction) to 3 channel 8 bit image // distribute the 24 bit number (remapped_timestamp_fraction) to 3
for (int i=0; i<3; i++) // channel 8 bit image
{ for (int i = 0; i < 3; i++) {
curr_timestamp_image.at<cv::Vec3b>(e.y,e.x)[i] = (int) remapped_timestamp_fraction & 0xFF; // bit mask of 0000 0000 0000 0000 1111 1111 curr_timestamp_image.at<cv::Vec3b>(e.y, e.x)[i] =
remapped_timestamp_fraction = remapped_timestamp_fraction >> 8; // shifts bits to right by 8 (int) remapped_timestamp_fraction
& 0xFF; // bit mask of 0000 0000 0000 0000 1111 1111
remapped_timestamp_fraction = remapped_timestamp_fraction
>> 8; // shifts bits to right by 8
} }
} }
// Write event count image + the two timestamps images to disk // Write event count image + the two timestamps images to disk
std::string path_event_count_image = ze::joinPath(output_folder_, "event_count.png"); std::string path_event_count_image =
std::string path_timestamps_pos = ze::joinPath(output_folder_, "event_time_stamps_pos.png"); ze::joinPath(output_folder_, "event_count.png");
std::string path_timestamps_neg = ze::joinPath(output_folder_, "event_time_stamps_neg.png"); std::string path_timestamps_pos =
ze::joinPath(output_folder_, "event_time_stamps_pos.png");
std::string path_timestamps_neg =
ze::joinPath(output_folder_, "event_time_stamps_neg.png");
std::vector<int> compression_params; std::vector<int> compression_params;
compression_params.push_back(cv::IMWRITE_PNG_COMPRESSION); compression_params.push_back(cv::IMWRITE_PNG_COMPRESSION);
compression_params.push_back(0); compression_params.push_back(0);
cv::imwrite(path_event_count_image, event_count_image, compression_params); cv::imwrite(
path_event_count_image,
event_count_image,
compression_params
);
cv::imwrite(path_timestamps_pos, timestamps_pos, compression_params); cv::imwrite(path_timestamps_pos, timestamps_pos, compression_params);
cv::imwrite(path_timestamps_neg, timestamps_neg, compression_params); cv::imwrite(path_timestamps_neg, timestamps_neg, compression_params);
@ -91,15 +111,15 @@ SyntheticOpticFlowPublisher::~SyntheticOpticFlowPublisher()
events_file_.close(); events_file_.close();
} }
void SyntheticOpticFlowPublisher::eventsCallback(const EventsVector& events) void SyntheticOpticFlowPublisher::eventsCallback(const EventsVector& events
{ ) {
CHECK_EQ(events.size(), 1); CHECK_EQ(events.size(), 1);
// Simply aggregate the events into the events_ buffer. // Simply aggregate the events into the events_ buffer.
// At the destruction of this object, everything will be saved to disk. // At the destruction of this object, everything will be saved to disk.
for(const Event& e : events[0]) for (const Event& e : events[0]) {
{ events_file_ << e.t << " " << e.x << " " << e.y << " "
events_file_ << e.t << " " << e.x << " " << e.y << " " << (e.pol? 1 : 0) << std::endl; << (e.pol ? 1 : 0) << std::endl;
events_.push_back(e); events_.push_back(e);
} }
} }

View File

@ -1,13 +1,12 @@
#pragma once #pragma once
#include <esim/rendering/simple_renderer_base.hpp>
#include <esim/imp_multi_objects_2d/object.hpp> #include <esim/imp_multi_objects_2d/object.hpp>
#include <esim/rendering/simple_renderer_base.hpp>
namespace event_camera_simulator { namespace event_camera_simulator {
//! A rendering engine for planar scenes //! A rendering engine for planar scenes
class MultiObject2DRenderer : public SimpleRenderer class MultiObject2DRenderer : public SimpleRenderer {
{
public: public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW EIGEN_MAKE_ALIGNED_OPERATOR_NEW
@ -15,16 +14,21 @@ public:
~MultiObject2DRenderer(); ~MultiObject2DRenderer();
virtual bool render(const Time t, virtual bool render(
const Time t,
const ImagePtr& out_image, const ImagePtr& out_image,
const OpticFlowPtr& optic_flow_map) const; const OpticFlowPtr& optic_flow_map
) const;
virtual int getWidth() const { return width_; } virtual int getWidth() const {
return width_;
}
virtual int getHeight() const { return height_; } virtual int getHeight() const {
return height_;
}
protected: protected:
void outputGroundTruthData(); void outputGroundTruthData();
std::vector<std::shared_ptr<Object>> objects_; std::vector<std::shared_ptr<Object>> objects_;

View File

@ -4,21 +4,31 @@
namespace event_camera_simulator { namespace event_camera_simulator {
bool loadTexture(const std::string& path_to_img, cv::Mat *img, bool loadTexture(
double median_blur, double gaussian_blur); const std::string& path_to_img,
cv::Mat* img,
double median_blur,
double gaussian_blur
);
using Affine = cv::Matx<FloatType, 3, 3>; using Affine = cv::Matx<FloatType, 3, 3>;
using AffineWithJacobian = std::pair<Affine, Affine>; using AffineWithJacobian = std::pair<Affine, Affine>;
class MotionParameters class MotionParameters {
{
public: public:
MotionParameters(FloatType tmax, MotionParameters(
FloatType theta0_deg, FloatType theta1_deg, FloatType tmax,
FloatType x0, FloatType x1, FloatType theta0_deg,
FloatType y0, FloatType y1, FloatType theta1_deg,
FloatType sx0, FloatType sx1, FloatType x0,
FloatType sy0, FloatType sy1) FloatType x1,
FloatType y0,
FloatType y1,
FloatType sx0,
FloatType sx1,
FloatType sy0,
FloatType sy1
)
: tmax_(tmax), : tmax_(tmax),
x0_(x0), x0_(x0),
x1_(x1), x1_(x1),
@ -29,12 +39,9 @@ public:
sx0_(sx0), sx0_(sx0),
sx1_(sx1), sx1_(sx1),
sy0_(sy0), sy0_(sy0),
sy1_(sy1) sy1_(sy1) {}
{
}
AffineWithJacobian getAffineTransformationWithJacobian(ze::real_t t) AffineWithJacobian getAffineTransformationWithJacobian(ze::real_t t) {
{
// constants // constants
const ze::real_t dtheta = theta1_ - theta0_; const ze::real_t dtheta = theta1_ - theta0_;
const ze::real_t dx = x1_ - x0_; const ze::real_t dx = x1_ - x0_;
@ -52,9 +59,8 @@ public:
const ze::real_t ctheta = std::cos(theta); const ze::real_t ctheta = std::cos(theta);
Affine A; Affine A;
A << sx * ctheta, -sy * stheta, x, A << sx * ctheta, -sy * stheta, x, sx * stheta, sy * ctheta, y, 0,
sx * stheta, sy * ctheta, y, 0, 1;
0, 0, 1;
// computation of dparameter_dt(t) // computation of dparameter_dt(t)
const ze::real_t dtheta_dt = 1. / tmax_ * dtheta; const ze::real_t dtheta_dt = 1. / tmax_ * dtheta;
@ -64,9 +70,10 @@ public:
const ze::real_t dsy_dt = 1. / tmax_ * dsy; const ze::real_t dsy_dt = 1. / tmax_ * dsy;
cv::Matx<FloatType, 3, 3> dAdt; cv::Matx<FloatType, 3, 3> dAdt;
dAdt << dsx_dt * ctheta - dtheta_dt * stheta * sx, -dsy_dt * stheta - dtheta_dt * ctheta * sy, dx_dt, dAdt << dsx_dt * ctheta - dtheta_dt * stheta * sx,
dsx_dt * stheta + dtheta_dt * ctheta * sx, dsy_dt * ctheta - dtheta_dt * stheta * sy, dy_dt, -dsy_dt * stheta - dtheta_dt * ctheta * sy, dx_dt,
0.0, 0.0, 0.0; dsx_dt * stheta + dtheta_dt * ctheta * sx,
dsy_dt * ctheta - dtheta_dt * stheta * sy, dy_dt, 0.0, 0.0, 0.0;
return AffineWithJacobian(A, dAdt); return AffineWithJacobian(A, dAdt);
} }
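In equation form, with every parameter interpolated linearly as p(t) = p_0 + \frac{t}{t_{max}} (p_1 - p_0), the transformation assembled above and its time derivative are

A(t) = \begin{pmatrix}
s_x \cos\theta & -s_y \sin\theta & x \\
s_x \sin\theta & s_y \cos\theta & y \\
0 & 0 & 1
\end{pmatrix},
\qquad
\dot A(t) = \begin{pmatrix}
\dot s_x \cos\theta - s_x \dot\theta \sin\theta & -\dot s_y \sin\theta - s_y \dot\theta \cos\theta & \dot x \\
\dot s_x \sin\theta + s_x \dot\theta \cos\theta & \dot s_y \cos\theta - s_y \dot\theta \sin\theta & \dot y \\
0 & 0 & 0
\end{pmatrix},

where each dotted quantity is the constant rate (p_1 - p_0)/t_{max}. The dAdt entries in the code are exactly these product-rule derivatives.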
@ -79,21 +86,32 @@ public:
FloatType sy0_, sy1_; FloatType sy0_, sy1_;
}; };
class Object class Object {
{
public: public:
Object(const std::string path_to_texture, const cv::Size& dst_size, const MotionParameters& motion_params, Object(
double median_blur, double gaussian_blur); const std::string path_to_texture,
const cv::Size& dst_size,
const MotionParameters& motion_params,
double median_blur,
double gaussian_blur
);
void draw(Time t, bool is_first_layer = false); void draw(Time t, bool is_first_layer = false);
cv::Mat canvas_; cv::Mat canvas_;
OpticFlow flow_; OpticFlow flow_;
MotionParameters getMotionParameters() const { return p_; } MotionParameters getMotionParameters() const {
return p_;
}
Affine getK0() const { return K0_; } Affine getK0() const {
Affine getK1() const { return K1_; } return K0_;
}
Affine getK1() const {
return K1_;
}
private: private:
cv::Mat texture_; cv::Mat texture_;
@ -103,15 +121,15 @@ private:
Affine K0_, K1_; Affine K0_, K1_;
}; };
void getIntensityAndAlpha(const cv::Mat& image, void getIntensityAndAlpha(
int x, int y, const cv::Mat& image,
int x,
int y,
ImageFloatType* intensity, ImageFloatType* intensity,
ImageFloatType* alpha); ImageFloatType* alpha
);
inline ImageFloatType bgrToGrayscale(uchar b, inline ImageFloatType bgrToGrayscale(uchar b, uchar g, uchar r) {
uchar g,
uchar r)
{
// https://www.johndcook.com/blog/2009/08/24/algorithms-convert-color-grayscale/ // https://www.johndcook.com/blog/2009/08/24/algorithms-convert-color-grayscale/
return 0.07 * static_cast<ImageFloatType>(b) return 0.07 * static_cast<ImageFloatType>(b)
+ 0.72 * static_cast<ImageFloatType>(g) + 0.72 * static_cast<ImageFloatType>(g)

View File

@ -1,11 +1,10 @@
#include <esim/common/utils.hpp> #include <esim/common/utils.hpp>
#include <esim/imp_multi_objects_2d/imp_multi_objects_2d_renderer.hpp> #include <esim/imp_multi_objects_2d/imp_multi_objects_2d_renderer.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/imgcodecs/imgcodecs.hpp>
#include <glog/logging.h> #include <glog/logging.h>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/core/eigen.hpp> #include <opencv2/core/eigen.hpp>
#include <opencv2/imgcodecs/imgcodecs.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <ze/common/file_utils.hpp> #include <ze/common/file_utils.hpp>
#include <ze/common/time_conversions.hpp> #include <ze/common/time_conversions.hpp>
@ -13,19 +12,29 @@ DECLARE_double(renderer_preprocess_median_blur);
DECLARE_double(renderer_preprocess_gaussian_blur); DECLARE_double(renderer_preprocess_gaussian_blur);
DEFINE_string(path_to_sequence_file, "", DEFINE_string(
"Path to the sequence file that describes the 2D, multi-object scene."); path_to_sequence_file,
"",
"Path to the sequence file that describes the 2D, multi-object scene."
);
DEFINE_string(path_to_output_folder, "", DEFINE_string(
"Path to the output folder where the flow ground truth files will be written"); path_to_output_folder,
"",
"Path to the output folder where the flow ground truth files "
"will be written"
);
DEFINE_bool(output_reverse_displacement_map, false, DEFINE_bool(
"Whether to output the reverse displacement map (i.e., the displacement field that maps pixels at t=tmax to pixels at t=t0"); output_reverse_displacement_map,
false,
"Whether to output the reverse displacement map (i.e., the "
"displacement field that maps pixels at t=tmax to pixels at t=t0"
);
namespace event_camera_simulator { namespace event_camera_simulator {
MultiObject2DRenderer::MultiObject2DRenderer() MultiObject2DRenderer::MultiObject2DRenderer() {
{
// Load sequence from file // Load sequence from file
const std::string path_to_sequence_file = FLAGS_path_to_sequence_file; const std::string path_to_sequence_file = FLAGS_path_to_sequence_file;
@ -33,7 +42,8 @@ MultiObject2DRenderer::MultiObject2DRenderer()
ze::openFileStream(path_to_sequence_file, &sequence_file); ze::openFileStream(path_to_sequence_file, &sequence_file);
std::string path_to_texture; std::string path_to_texture;
double median_blur, gaussian_blur, theta0, theta1, x0, x1, y0, y1, sx0, sx1, sy0, sy1; double median_blur, gaussian_blur, theta0, theta1, x0, x1, y0, y1, sx0,
sx1, sy0, sy1;
sequence_file >> width_ >> height_ >> tmax_; sequence_file >> width_ >> height_ >> tmax_;
@ -44,47 +54,45 @@ MultiObject2DRenderer::MultiObject2DRenderer()
VLOG(1) << "width = " << width_ << ", height = " << height_; VLOG(1) << "width = " << width_ << ", height = " << height_;
VLOG(1) << "tmax = " << tmax_; VLOG(1) << "tmax = " << tmax_;
while(sequence_file >> path_to_texture while (sequence_file >> path_to_texture >> median_blur >> gaussian_blur
>> median_blur >> gaussian_blur >> theta0 >> theta1 >> x0 >> x1 >> y0 >> y1 >> sx0 >> sx1 >> sy0
>> theta0 >> theta1 >> sy1) {
>> x0 >> x1 MotionParameters params(
>> y0 >> y1 tmax_,
>> sx0 >> sx1 theta0,
>> sy0 >> sy1) theta1,
{ x0,
MotionParameters params(tmax_, x1,
theta0, theta1, y0,
x0, x1, y1,
y0, y1, sx0,
sx0, sx1, sx1,
sy0, sy1); sy0,
sy1
);
objects_.emplace_back(std::make_shared<Object>(path_to_texture, objects_.emplace_back(std::make_shared<Object>(
path_to_texture,
cv::Size(width_, height_), cv::Size(width_, height_),
params, params,
median_blur, median_blur,
gaussian_blur)); gaussian_blur
));
} }
if (FLAGS_path_to_output_folder != "") if (FLAGS_path_to_output_folder != "")
{
outputGroundTruthData(); outputGroundTruthData();
} }
}
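Judging only from the reads visible in the constructor above, a sequence file starts with the canvas size and duration, followed by one line per object giving its texture path, blur parameters, and start/end motion parameters. A hypothetical example (paths and values purely illustrative):

640 480 5.0
/tmp/textures/background.jpg 0 1.5 0 0 0 0 0 0 1 1 1 1
/tmp/textures/object.png 3 0 0 90 -0.2 0.2 0 0.1 1 0.5 1 0.5

Each object line supplies, in order: path, median_blur, gaussian_blur, theta0, theta1, x0, x1, y0, y1, sx0, sx1, sy0, sy1.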
MultiObject2DRenderer::~MultiObject2DRenderer() MultiObject2DRenderer::~MultiObject2DRenderer() {}
{
}
bool MultiObject2DRenderer::render(const Time t, bool MultiObject2DRenderer::render(
const Time t,
const ImagePtr& out_image, const ImagePtr& out_image,
const OpticFlowPtr& optic_flow_map) const { const OpticFlowPtr& optic_flow_map
) const {
if (ze::nanosecToSecTrunc(t) > tmax_) if (ze::nanosecToSecTrunc(t) > tmax_)
{
return true; return true;
}
CHECK_EQ(out_image->rows, height_); CHECK_EQ(out_image->rows, height_);
CHECK_EQ(out_image->cols, width_); CHECK_EQ(out_image->cols, width_);
@ -92,8 +100,7 @@ bool MultiObject2DRenderer::render(const Time t,
out_image->setTo(0); out_image->setTo(0);
optic_flow_map->setTo(0.); optic_flow_map->setTo(0.);
for(int i=0; i<objects_.size(); ++i) for (int i = 0; i < objects_.size(); ++i) {
{
const bool is_first_layer = (i == 0); const bool is_first_layer = (i == 0);
objects_[i]->draw(t, is_first_layer); objects_[i]->draw(t, is_first_layer);
} }
@ -105,81 +112,93 @@ bool MultiObject2DRenderer::render(const Time t,
ImageFloatType intensity; ImageFloatType intensity;
ImageFloatType alpha; ImageFloatType alpha;
for(int i=0; i<objects_.size(); ++i) for (int i = 0; i < objects_.size(); ++i) {
{
const std::shared_ptr<Object>& object = objects_[i]; const std::shared_ptr<Object>& object = objects_[i];
const cv::Mat& image = object->canvas_; const cv::Mat& image = object->canvas_;
const OpticFlow& flow = object->flow_; const OpticFlow& flow = object->flow_;
for(int y=0; y<out_image->rows; ++y) for (int y = 0; y < out_image->rows; ++y) {
{ for (int x = 0; x < out_image->cols; ++x) {
for(int x=0; x<out_image->cols; ++x)
{
getIntensityAndAlpha(image, x, y, &intensity, &alpha); getIntensityAndAlpha(image, x, y, &intensity, &alpha);
(*out_image)(y,x) = alpha * intensity + (1.-alpha) * (*out_image)(y,x); (*out_image)(y, x) =
alpha * intensity + (1. - alpha) * (*out_image)(y, x);
if (alpha > 0) if (alpha > 0)
{
(*optic_flow_map)(y, x) = flow(y, x); (*optic_flow_map)(y, x) = flow(y, x);
} }
} }
} }
}
return false; return false;
} }
void MultiObject2DRenderer::outputGroundTruthData() void MultiObject2DRenderer::outputGroundTruthData() {
{ // This function generates some ground truth information and outputs it
// This function generates some ground truth information and outputs it to the FLAGS_path_to_output_folder // to the FLAGS_path_to_output_folder. In principle, this information
// In principle, this information could be transmitted in a SimulationData structure and forwarded // could be transmitted in a SimulationData structure and forwarded to a
// to a Publisher object that writes it to disk. // Publisher object that writes it to disk. However, for technical
// However, for technical reasons, it is more convenient to write the displacement maps here, rather than integrate instantaneous // reasons, it is more convenient to write the displacement maps here,
// optic flow maps in a Publisher. // rather than integrate instantaneous optic flow maps in a Publisher.
// The following ground truth information is computed below and output to the ground truth folder: // The following ground truth information is computed below and output
// to the ground truth folder:
// - Displacement maps from t=0 to t=tmax (and vice-versa) // - Displacement maps from t=0 to t=tmax (and vice-versa)
// - Images 0 and images 1 // - Images 0 and images 1
VLOG(1) << "Will output ground truth data to folder: " << FLAGS_path_to_output_folder; VLOG(1) << "Will output ground truth data to folder: "
<< FLAGS_path_to_output_folder;
// Notation: the frame at time t=0 is denoted as frame 0, and the frame at time t=tmax is denoted as frame 1 // Notation: the frame at time t=0 is denoted as frame 0, and the frame
// Compute the entire displacement field from 0 to 1, for every layer // at time t=tmax is denoted as frame 1. Compute the entire displacement
// field from 0 to 1, for every layer
const ze::real_t t0 = 0.0; const ze::real_t t0 = 0.0;
const ze::real_t t1 = tmax_; const ze::real_t t1 = tmax_;
// Every entry in this vector is a displacement map that maps pixel locations in image 0 // Every entry in this vector is a displacement map that maps pixel
// to the corresponding pixel location in image 1, for the layer i // locations in image 0 to the corresponding pixel location in image 1,
// for the layer i
// //
// these individual displacement layers are then merged together to form the final // these individual displacement layers are then merged together to form
// displacement map later // the final displacement map later
// //
// Note: the reverse layer-wise displacement map can be computed as follows: // Note: the reverse layer-wise displacement map can be computed as
// follows:
// layer_displacement_01[i] = -layer_displacement_10[i] // layer_displacement_01[i] = -layer_displacement_10[i]
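In symbols, for layer i the loop below evaluates

M_{1 \leftarrow 0} = K_1 \, A(t_1) \, A(t_0)^{-1} \, K_1^{-1},
\qquad
d_{1 \leftarrow 0}(x, y) = M_{1 \leftarrow 0} \begin{pmatrix} x \\ y \\ 1 \end{pmatrix} - \begin{pmatrix} x \\ y \\ 1 \end{pmatrix},

where A(t) is the object's affine motion and K_1 the canvas intrinsics. Since M is affine, the third component stays 1 and only the first two components are stored (set to 0 by convention when the warped pixel leaves the field of view).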
std::vector<OpticFlow> layer_displacements_10(objects_.size()); std::vector<OpticFlow> layer_displacements_10(objects_.size());
for(int i=0; i<objects_.size(); ++i) for (int i = 0; i < objects_.size(); ++i) {
{
layer_displacements_10[i] = OpticFlow(height_, width_); layer_displacements_10[i] = OpticFlow(height_, width_);
const Object object = *(objects_[i]); const Object object = *(objects_[i]);
Affine A_t0_src = object.getMotionParameters().getAffineTransformationWithJacobian(t0).first; Affine A_t0_src = object.getMotionParameters()
Affine A_t1_src = object.getMotionParameters().getAffineTransformationWithJacobian(t1).first; .getAffineTransformationWithJacobian(t0)
.first;
Affine A_t1_src = object.getMotionParameters()
.getAffineTransformationWithJacobian(t1)
.first;
Affine A_t1_t0 = A_t1_src * A_t0_src.inv(); Affine A_t1_t0 = A_t1_src * A_t0_src.inv();
// M_t1_t0 maps any point on the first image to its position in the last image // M_t1_t0 maps any point on the first image to its position in the
// last image
Affine M_t1_t0 = object.getK1() * A_t1_t0 * object.getK1().inv(); Affine M_t1_t0 = object.getK1() * A_t1_t0 * object.getK1().inv();
for(int y=0; y<height_; ++y) for (int y = 0; y < height_; ++y) {
{ for (int x = 0; x < width_; ++x) {
for(int x=0; x<width_; ++x) FloatType x_t1 =
{ M_t1_t0(0, 0) * x + M_t1_t0(0, 1) * y + M_t1_t0(0, 2);
FloatType x_t1 = M_t1_t0(0,0) * x + M_t1_t0(0,1) * y + M_t1_t0(0,2); FloatType y_t1 =
FloatType y_t1 = M_t1_t0(1,0) * x + M_t1_t0(1,1) * y + M_t1_t0(1,2); M_t1_t0(1, 0) * x + M_t1_t0(1, 1) * y + M_t1_t0(1, 2);
FloatType displacement_x, displacement_y; FloatType displacement_x, displacement_y;
// if the pixel went out of the field of view, we assign a displacement of 0 (convention) // if the pixel went out of the field of view, we assign a
displacement_x = (x_t1 >= 0 && x_t1 < width_) ? (ImageFloatType) x_t1 - (ImageFloatType) x : 0.; // displacement of 0 (convention)
displacement_y = (y_t1 >= 0 && y_t1 < height_) ? (ImageFloatType) y_t1 - (ImageFloatType) y : 0.; displacement_x =
(x_t1 >= 0 && x_t1 < width_)
? (ImageFloatType) x_t1 - (ImageFloatType) x
: 0.;
displacement_y =
(y_t1 >= 0 && y_t1 < height_)
? (ImageFloatType) y_t1 - (ImageFloatType) y
: 0.;
layer_displacements_10[i](y, x)[0] = displacement_x; layer_displacements_10[i](y, x)[0] = displacement_x;
layer_displacements_10[i](y, x)[1] = displacement_y; layer_displacements_10[i](y, x)[1] = displacement_y;
@ -187,94 +206,118 @@ void MultiObject2DRenderer::outputGroundTruthData()
} }
} }
ImageFloatType intensity, alpha; ImageFloatType intensity, alpha;
// First, merge the displacement map from 0 to 1 // First, merge the displacement map from 0 to 1
OpticFlow displacement_map_10(height_, width_); // displacement map from 0 to 1 OpticFlow displacement_map_10(
height_,
width_
); // displacement map from 0 to 1
Image image0(height_, width_); Image image0(height_, width_);
image0.setTo(0); image0.setTo(0);
for(int i=0; i<objects_.size(); ++i) for (int i = 0; i < objects_.size(); ++i) {
{
const bool is_first_layer = (i == 0); const bool is_first_layer = (i == 0);
objects_[i]->draw(t0, is_first_layer); objects_[i]->draw(t0, is_first_layer);
} }
for(int i=0; i<objects_.size(); ++i) for (int i = 0; i < objects_.size(); ++i) {
{
const std::shared_ptr<Object>& object = objects_[i]; const std::shared_ptr<Object>& object = objects_[i];
for(int y=0; y<height_; ++y) for (int y = 0; y < height_; ++y) {
{ for (int x = 0; x < width_; ++x) {
for(int x=0; x<width_; ++x) getIntensityAndAlpha(
{ object->canvas_,
getIntensityAndAlpha(object->canvas_, x, y, &intensity, &alpha); x,
image0(y,x) = alpha * intensity + (1.-alpha) * image0(y,x); y,
if(alpha > 0) &intensity,
{ &alpha
displacement_map_10(y,x) = layer_displacements_10[i](y,x); );
image0(y, x) =
alpha * intensity + (1. - alpha) * image0(y, x);
if (alpha > 0) {
displacement_map_10(y, x) =
layer_displacements_10[i](y, x);
} }
} }
} }
} }
// Second, create displacement map from 1 to 0 // Second, create displacement map from 1 to 0
OpticFlow displacement_map_01(height_, width_); // displacement map from 1 to 0 OpticFlow displacement_map_01(
height_,
width_
); // displacement map from 1 to 0
Image image1(height_, width_); Image image1(height_, width_);
image1.setTo(0); image1.setTo(0);
for(int i=0; i<objects_.size(); ++i) for (int i = 0; i < objects_.size(); ++i) {
{
const bool is_first_layer = (i == 0); const bool is_first_layer = (i == 0);
objects_[i]->draw(ze::secToNanosec(t1), is_first_layer); objects_[i]->draw(ze::secToNanosec(t1), is_first_layer);
} }
for(int i=0; i<objects_.size(); ++i) for (int i = 0; i < objects_.size(); ++i) {
{ for (int y = 0; y < height_; ++y) {
for(int y=0; y<height_; ++y) for (int x = 0; x < width_; ++x) {
{ getIntensityAndAlpha(
for(int x=0; x<width_; ++x) objects_[i]->canvas_,
{ x,
getIntensityAndAlpha(objects_[i]->canvas_, x, y, &intensity, &alpha); y,
image1(y,x) = alpha * intensity + (1.-alpha) * image1(y,x); &intensity,
if(alpha > 0) &alpha
{ );
displacement_map_01(y,x) = -layer_displacements_10[i](y,x); image1(y, x) =
alpha * intensity + (1. - alpha) * image1(y, x);
if (alpha > 0) {
displacement_map_01(y, x) =
-layer_displacements_10[i](y, x);
} }
} }
} }
} }
std::ofstream displacement_file_10, displacement_file_01; std::ofstream displacement_file_10, displacement_file_01;
ze::openOutputFileStream(ze::joinPath(FLAGS_path_to_output_folder, "displacement_10.txt"), &displacement_file_10); ze::openOutputFileStream(
ze::joinPath(FLAGS_path_to_output_folder, "displacement_10.txt"),
&displacement_file_10
);
if(FLAGS_output_reverse_displacement_map) if (FLAGS_output_reverse_displacement_map) {
{ ze::openOutputFileStream(
ze::openOutputFileStream(ze::joinPath(FLAGS_path_to_output_folder, "displacement_01.txt"), &displacement_file_01); ze::joinPath(
FLAGS_path_to_output_folder,
"displacement_01.txt"
),
&displacement_file_01
);
} }
for(int y=0; y<height_; ++y) for (int y = 0; y < height_; ++y) {
{ for (int x = 0; x < width_; ++x) {
for(int x=0; x<width_; ++x) displacement_file_10 << displacement_map_10(y, x)[0] << " "
{ << displacement_map_10(y, x)[1]
displacement_file_10 << displacement_map_10(y,x)[0] << " " << displacement_map_10(y,x)[1] << std::endl; << std::endl;
if(FLAGS_output_reverse_displacement_map) if (FLAGS_output_reverse_displacement_map) {
{ displacement_file_01 << displacement_map_01(y, x)[0] << " "
displacement_file_01 << displacement_map_01(y,x)[0] << " " << displacement_map_01(y,x)[1] << std::endl; << displacement_map_01(y, x)[1]
<< std::endl;
} }
} }
} }
displacement_file_10.close(); displacement_file_10.close();
if (FLAGS_output_reverse_displacement_map) if (FLAGS_output_reverse_displacement_map)
{
displacement_file_01.close(); displacement_file_01.close();
}
cv::Mat disp_image0, disp_image1; cv::Mat disp_image0, disp_image1;
image0.convertTo(disp_image0, CV_8U, 255); image0.convertTo(disp_image0, CV_8U, 255);
image1.convertTo(disp_image1, CV_8U, 255); image1.convertTo(disp_image1, CV_8U, 255);
cv::imwrite(ze::joinPath(FLAGS_path_to_output_folder, "image0.png"), disp_image0); cv::imwrite(
cv::imwrite(ze::joinPath(FLAGS_path_to_output_folder, "image1.png"), disp_image1); ze::joinPath(FLAGS_path_to_output_folder, "image0.png"),
disp_image0
);
cv::imwrite(
ze::joinPath(FLAGS_path_to_output_folder, "image1.png"),
disp_image1
);
} }
} // namespace event_camera_simulator } // namespace event_camera_simulator

View File

@ -1,56 +1,57 @@
#include <esim/imp_multi_objects_2d/object.hpp> #include <esim/imp_multi_objects_2d/object.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/imgcodecs/imgcodecs.hpp> #include <opencv2/imgcodecs/imgcodecs.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <ze/common/time_conversions.hpp> #include <ze/common/time_conversions.hpp>
namespace event_camera_simulator { namespace event_camera_simulator {
bool loadTexture(const std::string& path_to_img, cv::Mat* img, bool loadTexture(
double median_blur, double gaussian_blur) const std::string& path_to_img,
{ cv::Mat* img,
double median_blur,
double gaussian_blur
) {
CHECK(img); CHECK(img);
VLOG(1) << "Loading texture file from file: " << path_to_img << "."; VLOG(1) << "Loading texture file from file: " << path_to_img << ".";
*img = cv::imread(path_to_img, cv::IMREAD_UNCHANGED); *img = cv::imread(path_to_img, cv::IMREAD_UNCHANGED);
if(!img->data) if (!img->data) {
{
LOG(FATAL) << "Could not open image at: " << path_to_img << "."; LOG(FATAL) << "Could not open image at: " << path_to_img << ".";
return false; return false;
} }
if(median_blur > 1) if (median_blur > 1) {
{
VLOG(1) << "Pre-filtering the texture with median filter of size: " VLOG(1) << "Pre-filtering the texture with median filter of size: "
<< median_blur << "."; << median_blur << ".";
cv::medianBlur(*img, *img, median_blur); cv::medianBlur(*img, *img, median_blur);
} }
if(gaussian_blur > 0) if (gaussian_blur > 0) {
{ VLOG(1
VLOG(1) << "Pre-filtering the texture with gaussian filter of size: " ) << "Pre-filtering the texture with gaussian filter of size: "
<< gaussian_blur << "."; << gaussian_blur << ".";
cv::GaussianBlur(*img, *img, cv::Size(21, 21), gaussian_blur); cv::GaussianBlur(*img, *img, cv::Size(21, 21), gaussian_blur);
} }
return true; return true;
} }
Object::Object(const std::string path_to_texture, const cv::Size& dst_size, const MotionParameters& motion_params, Object::Object(
double median_blur, double gaussian_blur) const std::string path_to_texture,
const cv::Size& dst_size,
const MotionParameters& motion_params,
double median_blur,
double gaussian_blur
)
: dst_size_(dst_size), : dst_size_(dst_size),
p_(motion_params) p_(motion_params) {
{ loadTexture(path_to_texture, &texture_, median_blur, gaussian_blur);
loadTexture(path_to_texture, &texture_,
median_blur,
gaussian_blur);
K0_ << texture_.cols, 0, 0.5 * texture_.cols, K0_ << texture_.cols, 0, 0.5 * texture_.cols, 0, texture_.rows,
0, texture_.rows, 0.5 * texture_.rows, 0.5 * texture_.rows, 0, 0, 1;
0, 0, 1;
K1_ << dst_size_.width, 0, 0.5 * dst_size_.width, K1_ << dst_size_.width, 0, 0.5 * dst_size_.width, 0, dst_size_.height,
0, dst_size_.height, 0.5 * dst_size_.height, 0.5 * dst_size_.height, 0, 0, 1;
0, 0, 1;
canvas_ = cv::Mat(dst_size, CV_8UC4); canvas_ = cv::Mat(dst_size, CV_8UC4);
canvas_.setTo(0); canvas_.setTo(0);
@ -58,8 +59,7 @@ Object::Object(const std::string path_to_texture, const cv::Size& dst_size, cons
flow_.setTo(0); flow_.setTo(0);
} }
void Object::draw(Time t, bool is_first_layer) void Object::draw(Time t, bool is_first_layer) {
{
canvas_.setTo(0); canvas_.setTo(0);
ze::real_t ts = ze::nanosecToSecTrunc(t); ze::real_t ts = ze::nanosecToSecTrunc(t);
@ -73,8 +73,9 @@ void Object::draw(Time t, bool is_first_layer)
// test jacobian // test jacobian
// const ze::real_t h = 1e-5; // const ze::real_t h = 1e-5;
// AffineWithJacobian A = p_.getAffineTransformationWithJacobian(ts); // AffineWithJacobian A = p_.getAffineTransformationWithJacobian(ts);
// AffineWithJacobian Ah = p_.getAffineTransformationWithJacobian(ts+h); // AffineWithJacobian Ah =
// Affine dAdt_numeric = 1./h * (Ah.first-A.first); // p_.getAffineTransformationWithJacobian(ts+h); Affine dAdt_numeric
// = 1./h * (Ah.first-A.first);
// LOG(INFO) << dAdt_numeric; // LOG(INFO) << dAdt_numeric;
// LOG(INFO) << A.second; // LOG(INFO) << A.second;
@ -82,44 +83,45 @@ void Object::draw(Time t, bool is_first_layer)
Affine M_10 = K1_ * A10 * K0_.inv(); Affine M_10 = K1_ * A10 * K0_.inv();
Affine dM10_dt = K1_ * dA10dt * K0_.inv(); Affine dM10_dt = K1_ * dA10dt * K0_.inv();
// warpAffine requires M_dst_src unless the WARP_INVERSE_MAP flag is passed // warpAffine requires M_dst_src unless the WARP_INVERSE_MAP flag is
// in which case it will require M_src_dst // passed in which case it will require M_src_dst
// TODO: can we do something more efficient than fully warping the 4 channels (BGR+alpha)? // TODO: can we do something more efficient than fully warping the 4
int border_mode = is_first_layer ? cv::BORDER_REFLECT101 : cv::BORDER_CONSTANT; // channels (BGR+alpha)?
cv::warpPerspective(texture_, int border_mode =
is_first_layer ? cv::BORDER_REFLECT101 : cv::BORDER_CONSTANT;
cv::warpPerspective(
texture_,
canvas_, canvas_,
M_10, M_10,
canvas_.size(), canvas_.size(),
cv::INTER_LINEAR, cv::INTER_LINEAR,
border_mode); border_mode
);
cv::Matx<FloatType, 3, 3> SS = dM10_dt * M_10.inv(); cv::Matx<FloatType, 3, 3> SS = dM10_dt * M_10.inv();
for(int y=0; y<flow_.rows; ++y) for (int y = 0; y < flow_.rows; ++y) {
{ for (int x = 0; x < flow_.cols; ++x) {
for(int x=0; x<flow_.cols; ++x)
{
flow_(y, x)[0] = SS(0, 0) * x + SS(0, 1) * y + SS(0, 2); flow_(y, x)[0] = SS(0, 0) * x + SS(0, 1) * y + SS(0, 2);
flow_(y, x)[1] = SS(1, 0) * x + SS(1, 1) * y + SS(1, 2); flow_(y, x)[1] = SS(1, 0) * x + SS(1, 1) * y + SS(1, 2);
} }
} }
} }
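The flow loop above is the differential of the warp: if a pixel follows u(t) = M(t) u_0 in homogeneous coordinates, then

\dot u = \dot M \, M^{-1} \, u,

which is exactly the matrix SS = dM10_dt * M_10.inv() applied to (x, y, 1)^\top; its first two components give the per-pixel velocity stored in flow_.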
    void getIntensityAndAlpha(
        const cv::Mat& image,
        int x,
        int y,
        ImageFloatType* intensity,
        ImageFloatType* alpha
    ) {
        CHECK(image.type() == CV_8UC3 || image.type() == CV_8UC4);

        if (image.type() == CV_8UC3) {
            cv::Vec3b val = image.at<cv::Vec3b>(y, x);
            *intensity = bgrToGrayscale(val[0], val[1], val[2]) / 255.;
            *alpha = 1.;
        } else {
            cv::Vec4b val = image.at<cv::Vec4b>(y, x);
            *intensity = bgrToGrayscale(val[0], val[1], val[2]) / 255.;
            *alpha = static_cast<ImageFloatType>(val[3]) / 255.;


@ -9,8 +9,7 @@ class GLFWwindow; // fwd
namespace event_camera_simulator {

    //! Rendering engine based on OpenGL
    class OpenGLRenderer : public Renderer {
      public:
        EIGEN_MAKE_ALIGNED_OPERATOR_NEW
@ -21,17 +20,23 @@ public:
        ~OpenGLRenderer();

        //! Render an image at a given pose.
        virtual void render(
            const Transformation& T_W_C,
            const std::vector<Transformation>& T_W_OBJ,
            const ImagePtr& out_image,
            const DepthmapPtr& out_depthmap
        ) const;

        //! Returns true if the rendering engine can compute optic flow,
        //! false otherwise
        virtual bool canComputeOpticFlow() const override {
            return true;
        }
        //! Render an image + depth map + optic flow map at a given pose,
        //! given the camera linear and angular velocity
        virtual void renderWithFlow(
            const Transformation& T_W_C,
            const LinearVelocity& v_WC,
            const AngularVelocity& w_WC,
            const std::vector<Transformation>& T_W_OBJ,
@ -39,20 +44,23 @@ public:
            const std::vector<AngularVelocity>& angular_velocity_obj,
            const ImagePtr& out_image,
            const DepthmapPtr& out_depthmap,
            const OpticFlowPtr& optic_flow_map
        ) const override;

        //! Sets the camera
        virtual void setCamera(const ze::Camera::Ptr& camera);

      protected:
        void init();
        /**
        @brief basic function to produce an OpenGL projection matrix and
        associated viewport parameters which match a given set of camera
        intrinsics. This is currently written for the Eigen linear algebra
        library, however it should be straightforward to port to any 4x4
        matrix class.
        @param[out] frustum Eigen::Matrix4d projection matrix. Eigen stores
        these matrices in column-major (i.e. OpenGL) order.
        @param[in] alpha x-axis focal length, from camera intrinsic matrix
        @param[in] beta y-axis focal length, from camera intrinsic matrix
        @param[in] skew x and y axis skew, from camera intrinsic matrix
@ -60,24 +68,37 @@ protected:
        @param[in] v0 image origin y-coordinate, from camera intrinsic matrix
        @param[in] img_width image width, in pixels
        @param[in] img_height image height, in pixels
        @param[in] near_clip near clipping plane z-location, can be set
        arbitrarily > 0, controls the mapping of z-coordinates for OpenGL
        @param[in] far_clip far clipping plane z-location, can be set
        arbitrarily > near_clip, controls the mapping of z-coordinates for
        OpenGL

        Code adapted from:
        - http://ksimek.github.io/2013/06/03/calibrated_cameras_in_opengl/
        - https://pastebin.com/h8nYNWJY
        */
        void build_opengl_projection_for_intrinsics(
            Eigen::Matrix4d& frustum,
            double alpha,
            double beta,
            double u0,
            double v0,
            int img_width,
            int img_height,
            double near_clip,
            double far_clip
        ) const {
            // These parameters define the final viewport that is rendered
            // into by the camera.
            double L = 0;
            double R = img_width;
            double B = 0;
            double T = img_height;

            // near and far clipping planes, these only matter for the
            // mapping from world-space z-coordinate into the depth
            // coordinate for OpenGL
            double N = near_clip;
            double F = far_clip;
@ -88,9 +109,12 @@ protected:
            // [-1, 1]. OpenGL then maps coordinates in NDC to the current
            // viewport
            Eigen::Matrix4d ortho = Eigen::Matrix4d::Zero();
            ortho(0, 0) = 2.0 / (R - L);
            ortho(0, 3) = -(R + L) / (R - L);
            ortho(1, 1) = 2.0 / (T - B);
            ortho(1, 3) = -(T + B) / (T - B);
            ortho(2, 2) = -2.0 / (F - N);
            ortho(2, 3) = -(F + N) / (F - N);
            ortho(3, 3) = 1.0;

            // construct a projection matrix, this is identical to the
@ -98,9 +122,13 @@ protected:
            // additional row is inserted to map the z-coordinate to
            // OpenGL.
            Eigen::Matrix4d tproj = Eigen::Matrix4d::Zero();
            tproj(0, 0) = alpha;
            tproj(0, 1) = skew;
            tproj(0, 2) = -u0;
            tproj(1, 1) = beta;
            tproj(1, 2) = -v0;
            tproj(2, 2) = N + F;
            tproj(2, 3) = N * F;
            tproj(3, 2) = -1.0;
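            // Together with 'ortho' above, this z-row maps camera-space
            // depth to NDC as
            //   z_ndc = (F + N) / (F - N) + 2 * N * F / ((F - N) * z),
            // so z = -N lands on -1 and z = -F on +1, which is what OpenGL
            // expects before the viewport transform.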

            // resulting OpenGL frustum is the product of the orthographic
@ -124,8 +152,10 @@ protected:
        unsigned int texture2;
        unsigned int VBO, VAO;
        unsigned int multisampled_fbo, multisampled_color_buf,
            multisampled_depth_buf;
        unsigned int fbo, color_buf, depth_buf,
            depth_buf_of; // framebuffer for color and depth images
        unsigned int fbo_of, of_buf; // framebuffer for optic flow

        float zmin = 0.1f;


@ -1,40 +1,46 @@
#include <GLFW/glfw3.h>
#include <esim/imp_opengl_renderer/opengl_renderer.hpp>
#include <glad/glad.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <learnopengl/filesystem.h>
#include <learnopengl/model.h>
#include <learnopengl/shader.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
namespace event_camera_simulator {

    DEFINE_string(
        renderer_scene,
        "",
        "Path to scene file (.obj) which will be used as simulation environment"
    );

    DEFINE_double(renderer_zmin, 0.1, "Near clipping distance.");
    DEFINE_double(renderer_zmax, 10, "Far clipping distance.");

    DEFINE_string(
        renderer_dynamic_objects_dir,
        "",
        "Path to directory that contains files of objects (.obj) that "
        "will be simulated"
    );

    DEFINE_string(
        renderer_dynamic_objects,
        "",
        "Files to be included as dynamic objects (.obj), separated by ;"
    );
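    // Usage note: together these flags describe one static scene plus an
    // optional list of moving models; the dynamic-object string is split
    // on ';' in init() below, and each entry is loaded relative to the
    // directory flag. A hypothetical invocation (binary name and paths are
    // placeholders, not taken from this commit):
    //
    //   ./esim_node --renderer_scene=/data/scenes/room.obj \
    //       --renderer_dynamic_objects_dir=/data/objects \
    //       --renderer_dynamic_objects="car.obj;person.obj"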
    OpenGLRenderer::OpenGLRenderer()
        : is_initialized_(false),
          zmin(FLAGS_renderer_zmin),
          zmax(FLAGS_renderer_zmax) {}

    void OpenGLRenderer::init() {
        CHECK_GT(width_, 0);
        CHECK_GT(height_, 0);
@ -47,14 +53,17 @@ void OpenGLRenderer::init()
        glfwWindowHint(GLFW_VISIBLE, GL_FALSE);

#ifdef __APPLE__
        // needed on OS X to obtain a forward-compatible core-profile context
        glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
#endif

        // glfw window creation
        // --------------------
        window =
            glfwCreateWindow(width_, height_, "OpenGLRenderer", NULL, NULL);
        if (window == NULL) {
            LOG(FATAL) << "Failed to create GLFW window";
            glfwTerminate();
            return;
@ -64,8 +73,7 @@ void OpenGLRenderer::init()
        // glad: load all OpenGL function pointers
        // ---------------------------------------
        if (!gladLoadGLLoader((GLADloadproc) glfwGetProcAddress)) {
            LOG(FATAL) << "Failed to initialize GLAD";
            return;
        }
@ -74,25 +82,31 @@ void OpenGLRenderer::init()
        // -----------
        our_model.reset(new Model(FLAGS_renderer_scene));

        // load dynamic objects
        // --------------------
        if (!FLAGS_renderer_dynamic_objects_dir.empty()
            && !FLAGS_renderer_dynamic_objects.empty()) {
            size_t p_start, p_end;
            p_start = 0;
            while ((p_end = FLAGS_renderer_dynamic_objects.find(";", p_start))
                   != std::string::npos) {
                dynamic_objects_model.push_back(
                    std::unique_ptr<Model>(new Model(
                        FLAGS_renderer_dynamic_objects_dir + "/"
                        + FLAGS_renderer_dynamic_objects
                              .substr(p_start, p_end - p_start)
                    ))
                );
                p_start = p_end + 1;
            }
            // p_end == npos here, so substr() clamps to the end of the
            // string and the last listed object file is still loaded
            dynamic_objects_model.push_back(std::unique_ptr<Model>(new Model(
                FLAGS_renderer_dynamic_objects_dir + "/"
                + FLAGS_renderer_dynamic_objects.substr(p_start, p_end - p_start)
            )));
        }

        // create multisampled framebuffer object
        static const int num_samples = 4;
        glGenFramebuffers(1, &multisampled_fbo);
@ -101,21 +115,42 @@ void OpenGLRenderer::init()
        // create and attach a multisampled color buffer
        glGenRenderbuffers(1, &multisampled_color_buf);
        glBindRenderbuffer(GL_RENDERBUFFER, multisampled_color_buf);
        glRenderbufferStorageMultisample(
            GL_RENDERBUFFER,
            num_samples,
            GL_RGBA8,
            width_,
            height_
        ); // set format
        glFramebufferRenderbuffer(
            GL_FRAMEBUFFER,
            GL_COLOR_ATTACHMENT0,
            GL_RENDERBUFFER,
            multisampled_color_buf
        ); // attach color buffer to FBO

        // create and attach a multisampled depth buffer
        glGenRenderbuffers(1, &multisampled_depth_buf);
        glBindRenderbuffer(GL_RENDERBUFFER, multisampled_depth_buf);
        glRenderbufferStorageMultisample(
            GL_RENDERBUFFER,
            num_samples,
            GL_DEPTH_COMPONENT24,
            width_,
            height_
        ); // set format
        glFramebufferRenderbuffer(
            GL_FRAMEBUFFER,
            GL_DEPTH_ATTACHMENT,
            GL_RENDERBUFFER,
            multisampled_depth_buf
        ); // attach depth buffer to FBO

        GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
        if (status != GL_FRAMEBUFFER_COMPLETE)
            LOG(FATAL) << "ERROR: failed to set up multisampled framebuffer";
        LOG(INFO)
            << "Successfully set up multisampled color and depth framebuffers";

        // create framebuffer object
        glGenFramebuffers(1, &fbo);
@ -124,20 +159,38 @@ void OpenGLRenderer::init()
        // create and attach a color buffer
        glGenRenderbuffers(1, &color_buf);
        glBindRenderbuffer(GL_RENDERBUFFER, color_buf);
        glRenderbufferStorage(
            GL_RENDERBUFFER,
            GL_RGBA8,
            width_,
            height_
        ); // set format
        glFramebufferRenderbuffer(
            GL_FRAMEBUFFER,
            GL_COLOR_ATTACHMENT0,
            GL_RENDERBUFFER,
            color_buf
        ); // attach color buffer to FBO

        // create and attach a depth buffer
        glGenRenderbuffers(1, &depth_buf);
        glBindRenderbuffer(GL_RENDERBUFFER, depth_buf);
        glRenderbufferStorage(
            GL_RENDERBUFFER,
            GL_DEPTH_COMPONENT24,
            width_,
            height_
        ); // set format
        glFramebufferRenderbuffer(
            GL_FRAMEBUFFER,
            GL_DEPTH_ATTACHMENT,
            GL_RENDERBUFFER,
            depth_buf
        ); // attach depth buffer to FBO

        status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
        if (status != GL_FRAMEBUFFER_COMPLETE)
            LOG(FATAL) << "ERROR: failed to set up framebuffer";
        LOG(INFO) << "Successfully set up color and depth framebuffers";

        // create framebuffer object for optic flow
@ -147,28 +200,50 @@ void OpenGLRenderer::init()
        // create and attach a color buffer for optic flow
        glGenRenderbuffers(1, &of_buf);
        glBindRenderbuffer(GL_RENDERBUFFER, of_buf);
        glRenderbufferStorage(
            GL_RENDERBUFFER,
            GL_RGBA32F,
            width_,
            height_
        ); // set format
        glFramebufferRenderbuffer(
            GL_FRAMEBUFFER,
            GL_COLOR_ATTACHMENT0,
            GL_RENDERBUFFER,
            of_buf
        ); // attach optic flow buffer to frame buffer object

        // create and attach a depth buffer for optic flow
        glGenRenderbuffers(1, &depth_buf_of);
        glBindRenderbuffer(GL_RENDERBUFFER, depth_buf_of);
        glRenderbufferStorage(
            GL_RENDERBUFFER,
            GL_DEPTH_COMPONENT24,
            width_,
            height_
        ); // set format
        glFramebufferRenderbuffer(
            GL_FRAMEBUFFER,
            GL_DEPTH_ATTACHMENT,
            GL_RENDERBUFFER,
            depth_buf_of
        ); // attach depth buffer to FBO

        GLenum status_of = glCheckFramebufferStatus(GL_FRAMEBUFFER);
        if (status_of != GL_FRAMEBUFFER_COMPLETE)
            LOG(FATAL) << "ERROR: failed to set up framebuffer for optic flow";
        LOG(INFO) << "Successfully set up optic flow framebuffer";

        // create shader program
        shader.reset(new Shader(
            FileSystem::getPath("src/shader.vert").c_str(),
            FileSystem::getPath("src/shader.frag").c_str()
        ));

        optic_flow_shader.reset(new Shader(
            FileSystem::getPath("src/shader.vert").c_str(),
            FileSystem::getPath("src/optic_flow_shader.frag").c_str()
        ));

        glEnable(GL_DEPTH_TEST);
        glEnable(GL_MULTISAMPLE);
@ -176,8 +251,7 @@ void OpenGLRenderer::init()
        is_initialized_ = true;
    }

    OpenGLRenderer::~OpenGLRenderer() {
        // cleanup
        glDeleteFramebuffers(1, &multisampled_fbo);
        glDeleteFramebuffers(1, &fbo);
@ -195,17 +269,19 @@ OpenGLRenderer::~OpenGLRenderer()
        glfwTerminate();
    }

    void OpenGLRenderer::setCamera(const ze::Camera::Ptr& camera) {
        CHECK(!is_initialized_)
            << "setCamera() was called but the OpenGL renderer "
               "was already set up before";
        CHECK(camera);
        camera_ = camera;

        width_ = camera_->width();
        height_ = camera_->height();

        if (camera->type() != ze::CameraType::Pinhole) {
            LOG(WARNING)
                << "The OpenGL rendering engine does not support camera "
                   "distortion. The distortion parameters will be ignored.";
            // TODO: actually remove distortion from the camera so
            // that they do not get published on the camera_info topic
        }
@ -213,12 +289,15 @@ void OpenGLRenderer::setCamera(const ze::Camera::Ptr& camera)
        init();
    }
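    // Usage sketch (hypothetical variable names; the output images are
    // assumed preallocated to the camera resolution): the CHECKs above and
    // in render() enforce a fixed call order, with setCamera() coming
    // first and exactly once, since it is what triggers init().
    //
    //   OpenGLRenderer renderer;
    //   renderer.setCamera(camera); // initializes the GL context
    //   renderer.render(T_W_C, T_W_OBJ, out_image, out_depthmap);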
    void OpenGLRenderer::render(
        const Transformation& T_W_C,
        const std::vector<Transformation>& T_W_OBJ,
        const ImagePtr& out_image,
        const DepthmapPtr& out_depthmap
    ) const {
        CHECK(is_initialized_) << "Called render() but the renderer was not "
                                  "initialized yet. Have you first called "
                                  "setCamera()?";
        CHECK(out_image);
        CHECK(out_depthmap);
        CHECK_EQ(out_image->cols, width_);
@ -245,8 +324,7 @@ void OpenGLRenderer::render(const Transformation& T_W_C,
        T_C_W.block<1, 4>(2, 0) *= -1.0;

        // view = transformation from point in world to point in camera
        glm::mat4 view = glm::make_mat4(T_C_W.data());

        ze::Matrix4 frustum;
        ze::VectorX intrinsics = camera_->projectionParameters();
@ -254,7 +332,17 @@ void OpenGLRenderer::render(const Transformation& T_W_C,
        const double fy = intrinsics(1);
        const double cx = intrinsics(2);
        const double cy = intrinsics(3);

        build_opengl_projection_for_intrinsics(
            frustum,
            fx,
            fy,
            cx,
            cy,
            width_,
            height_,
            zmin,
            zmax
        );

        glm::mat4 projection = glm::make_mat4(frustum.data());
@ -263,14 +351,19 @@ void OpenGLRenderer::render(const Transformation& T_W_C,
        unsigned int viewLoc = glGetUniformLocation(shader->ID, "view");
        glUniformMatrix4fv(viewLoc, 1, GL_FALSE, glm::value_ptr(view));

        unsigned int projectionLoc =
            glGetUniformLocation(shader->ID, "projection");
        glUniformMatrix4fv(
            projectionLoc,
            1,
            GL_FALSE,
            glm::value_ptr(projection)
        ); // TODO outside of main loop

        our_model->Draw(*shader);

        // draw dynamic objects
        for (size_t i = 0; i < dynamic_objects_model.size(); i++) {
            ze::Matrix4 T_W_OBJ_OPENGL = T_W_OBJ[i].getTransformationMatrix();
            model = glm::make_mat4(T_W_OBJ_OPENGL.data());
@ -281,7 +374,18 @@ void OpenGLRenderer::render(const Transformation& T_W_C,
        // now resolve multisampled buffer into the normal fbo
        glBindFramebuffer(GL_READ_FRAMEBUFFER, multisampled_fbo);
        glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
        glBlitFramebuffer(
            0,
            0,
            width_,
            height_,
            0,
            0,
            width_,
            height_,
            GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT,
            GL_NEAREST
        );

        // bind fbo back so that we read from it
        glBindFramebuffer(GL_FRAMEBUFFER, fbo);
@ -289,9 +393,20 @@ void OpenGLRenderer::render(const Transformation& T_W_C,
        // read out what we just rendered
        cv::Mat img_color(height_, width_, CV_8UC3);
        glPixelStorei(GL_PACK_ALIGNMENT, (img_color.step & 3) ? 1 : 4);
        glPixelStorei(
            GL_PACK_ROW_LENGTH,
            img_color.step / img_color.elemSize()
        );
        glReadPixels(
            0,
            0,
            img_color.cols,
            img_color.rows,
            GL_BGR,
            GL_UNSIGNED_BYTE,
            img_color.data
        );

        GLenum err = glGetError();
        if (err) {
@ -302,9 +417,20 @@ void OpenGLRenderer::render(const Transformation& T_W_C,
        // read out depth data
        cv::Mat img_depth(height_, width_, CV_32FC1);
        glPixelStorei(GL_PACK_ALIGNMENT, (img_depth.step & 3) ? 1 : 4);
        glPixelStorei(
            GL_PACK_ROW_LENGTH,
            img_depth.step / img_depth.elemSize()
        );
        glReadPixels(
            0,
            0,
            img_depth.cols,
            img_depth.rows,
            GL_DEPTH_COMPONENT,
            GL_FLOAT,
            img_depth.data
        );

        err = glGetError();
        if (err) {
@ -314,7 +440,9 @@ void OpenGLRenderer::render(const Transformation& T_W_C,
        // convert inverse depth buffer to linear depth between zmin and zmax
        // see the "Learn OpenGL" book, page 177
        cv::Mat linear_depth =
            (2.0 * zmin * zmax)
            / (zmax + zmin - (2 * img_depth - 1.f) * (zmax - zmin));

        cv::Mat img_grayscale;
        cv::cvtColor(img_color, img_grayscale, cv::COLOR_BGR2GRAY);
@ -323,8 +451,8 @@ void OpenGLRenderer::render(const Transformation& T_W_C,
        linear_depth.copyTo(*out_depthmap);
    }
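    // For clarity, the same conversion written per pixel (helper name
    // hypothetical, added as a sketch; not used elsewhere in this file):
    // glReadPixels returns window depth d in [0, 1], and inverting the
    // frustum's projective z-mapping recovers metric depth. At d = 0 this
    // yields zmin, at d = 1 it yields zmax, matching the cv::Mat
    // expression above.
    static float linearizeDepth(float d, float zmin, float zmax) {
        const float z_ndc = 2.f * d - 1.f; // window depth -> NDC in [-1, 1]
        return (2.f * zmin * zmax) / (zmax + zmin - z_ndc * (zmax - zmin));
    }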
    void OpenGLRenderer::renderWithFlow(
        const Transformation& T_W_C,
        const LinearVelocity& v_WC,
        const AngularVelocity& w_WC,
        const std::vector<Transformation>& T_W_OBJ,
@ -332,8 +460,8 @@ void OpenGLRenderer::renderWithFlow(const Transformation& T_W_C,
        const std::vector<AngularVelocity>& angular_velocity_obj,
        const ImagePtr& out_image,
        const DepthmapPtr& out_depthmap,
        const OpticFlowPtr& optic_flow_map
    ) const {
        render(T_W_C, T_W_OBJ, out_image, out_depthmap);

        // draw to our optic flow framebuffer instead of screen
@ -355,8 +483,7 @@ void OpenGLRenderer::renderWithFlow(const Transformation& T_W_C,
        T_C_W_tilde.block<1, 4>(2, 0) *= -1.0;

        // view = transformation from point in world to point in camera
        glm::mat4 view = glm::make_mat4(T_C_W_tilde.data());

        ze::Matrix4 frustum;
        ze::VectorX intrinsics = camera_->projectionParameters();
@ -364,17 +491,35 @@ void OpenGLRenderer::renderWithFlow(const Transformation& T_W_C,
        const double fy = intrinsics(1);
        const double cx = intrinsics(2);
        const double cy = intrinsics(3);

        build_opengl_projection_for_intrinsics(
            frustum,
            fx,
            fy,
            cx,
            cy,
            width_,
            height_,
            zmin,
            zmax
        );

        glm::mat4 projection = glm::make_mat4(frustum.data());

        unsigned int modelLoc =
            glGetUniformLocation(optic_flow_shader->ID, "model");
        glUniformMatrix4fv(modelLoc, 1, GL_FALSE, glm::value_ptr(model));

        unsigned int viewLoc =
            glGetUniformLocation(optic_flow_shader->ID, "view");
        glUniformMatrix4fv(viewLoc, 1, GL_FALSE, glm::value_ptr(view));

        unsigned int projectionLoc =
            glGetUniformLocation(optic_flow_shader->ID, "projection");
        glUniformMatrix4fv(
            projectionLoc,
            1,
            GL_FALSE,
            glm::value_ptr(projection)
        ); // TODO, remove from main loop

        // compute optic flow and draw it to the screen buffer
        glm::vec3 w = glm::make_vec3(w_WC.data());
@ -398,8 +543,7 @@ void OpenGLRenderer::renderWithFlow(const Transformation& T_W_C,
        our_model->Draw(*optic_flow_shader);

        // draw optical flow for dynamic objects
        for (size_t i = 0; i < dynamic_objects_model.size(); i++) {
            optic_flow_shader->setBool("dynamic_object", true);

            // relative position (in camera frame)
@ -408,13 +552,14 @@ void OpenGLRenderer::renderWithFlow(const Transformation& T_W_C,
optic_flow_shader->setVec3("r_CB", r_C_OBJ); optic_flow_shader->setVec3("r_CB", r_C_OBJ);
// linear velocity (in camera frame) // linear velocity (in camera frame)
ze::Vector3 C_v_OBJ = T_C_W.getRotation().rotate(linear_velocity_obj[i]); ze::Vector3 C_v_OBJ =
T_C_W.getRotation().rotate(linear_velocity_obj[i]);
glm::vec3 v_obj = glm::make_vec3(C_v_OBJ.data()); glm::vec3 v_obj = glm::make_vec3(C_v_OBJ.data());
optic_flow_shader->setVec3("v_WB", v_obj); optic_flow_shader->setVec3("v_WB", v_obj);
// angular velocity (in camera frame) // angular velocity (in camera frame)
ze::Vector3 C_w_OBJ = T_C_W.getRotation().rotate(angular_velocity_obj[i]); ze::Vector3 C_w_OBJ =
T_C_W.getRotation().rotate(angular_velocity_obj[i]);
glm::vec3 w_obj = glm::make_vec3(C_w_OBJ.data()); glm::vec3 w_obj = glm::make_vec3(C_w_OBJ.data());
optic_flow_shader->setVec3("w_WB", w_obj); optic_flow_shader->setVec3("w_WB", w_obj);
@ -422,7 +567,6 @@ void OpenGLRenderer::renderWithFlow(const Transformation& T_W_C,
            model = glm::make_mat4(T_W_OBJ_OPENGL.data());
            optic_flow_shader->setMat4("model", model);

            dynamic_objects_model[i]->Draw(*optic_flow_shader);
        }
@ -439,16 +583,14 @@ void OpenGLRenderer::renderWithFlow(const Transformation& T_W_C,
            return;
        }

        for (int y = 0; y < height_; ++y) {
            for (int x = 0; x < width_; ++x) {
                (*optic_flow_map)(y, x) = cv::Vec<ImageFloatType, 2>(
                    flow.at<cv::Vec<ImageFloatType, 3>>(y, x)[2],
                    flow.at<cv::Vec<ImageFloatType, 3>>(y, x)[1]
                );
            }
        }
    }

} // namespace event_camera_simulator


@ -5,33 +5,45 @@
namespace event_camera_simulator {

    //! A rendering engine for panoramic scenes
    class PanoramaRenderer : public Renderer {
      public:
        EIGEN_MAKE_ALIGNED_OPERATOR_NEW

        PanoramaRenderer(
            const Image& texture, const Transformation::Rotation& R_W_P
        );

        ~PanoramaRenderer();

        //! Render image and depth map for a given camera pose
        virtual void render(
            const Transformation& T_W_C,
            const ImagePtr& out_image,
            const DepthmapPtr& out_depthmap
        ) const;

        void render(
            const Transformation& T_W_C,
            const std::vector<Transformation>& T_W_OBJ,
            const ImagePtr& out_image,
            const DepthmapPtr& out_depthmap
        ) const override {
            render(T_W_C, out_image, out_depthmap);
        }

        //! Returns true if the rendering engine can compute optic flow,
        //! false otherwise
        virtual bool canComputeOpticFlow() const override {
            return false;
        }

        virtual void setCamera(const ze::Camera::Ptr& camera) override;

      protected:
        void precomputePixelToBearingLookupTable();
        void projectToPanorama(
            const Eigen::Ref<const ze::Bearing>& f, ze::Keypoint* keypoint
        ) const;

        // Texture mapped on the plane
        Image texture_;


@ -1,34 +1,30 @@
#include <esim/common/utils.hpp>
#include <esim/imp_panorama_renderer/panorama_renderer.hpp>
#include <glog/logging.h>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/core/eigen.hpp>
#include <opencv2/imgproc/imgproc.hpp>

namespace event_camera_simulator {

    PanoramaRenderer::PanoramaRenderer(
        const Image& texture, const Transformation::Rotation& R_W_P
    )
        : texture_(texture),
          R_W_P_(R_W_P) {
        principal_point_ =
            ze::Keypoint(0.5 * texture_.cols, 0.5 * texture_.rows);
        fx_ = texture_.cols / (2.0 * CV_PI);
        fy_ = texture_.rows / CV_PI;
        LOG(INFO) << "Initialized panoramic camera with size: " << texture_.cols
                  << "x" << texture_.rows
                  << ", principal point: " << principal_point_.transpose()
                  << ", fx = " << fx_ << " px, fy = " << fy_ << " px";
    }

    PanoramaRenderer::~PanoramaRenderer() {}

    void PanoramaRenderer::setCamera(const ze::Camera::Ptr& camera) {
        CHECK(camera);
        camera_ = camera;
@ -40,25 +36,20 @@ void PanoramaRenderer::setCamera(const ze::Camera::Ptr& camera)
        map_y_ = cv::Mat_<float>::zeros(sensor_size);
    }

    void PanoramaRenderer::precomputePixelToBearingLookupTable() {
        // points_C is a matrix containing the coordinates of each pixel
        // coordinate in the image plane
        ze::Keypoints points_C(2, camera_->width() * camera_->height());
        for (int y = 0; y < camera_->height(); ++y)
            for (int x = 0; x < camera_->width(); ++x)
                points_C.col(x + camera_->width() * y) = ze::Keypoint(x, y);

        bearings_C_ = camera_->backProjectVectorized(points_C);
        bearings_C_.array().rowwise() /= bearings_C_.row(2).array();
    }

    void PanoramaRenderer::projectToPanorama(
        const Eigen::Ref<const ze::Bearing>& f, ze::Keypoint* keypoint
    ) const {
        CHECK(keypoint);

        static constexpr FloatType kEpsilon = 1e-10;
@ -77,20 +68,21 @@ void PanoramaRenderer::projectToPanorama(const Eigen::Ref<const ze::Bearing>& f,
        *keypoint = principal_point_ + ze::Keypoint(phi * fx_, -theta * fy_);
    }
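    // For reference: with the focal lengths chosen in the constructor
    // (fx_ = width / (2 * pi), fy_ = height / pi) and the principal point
    // at the image center, this is a plain equirectangular mapping.
    // Assuming phi and theta are the bearing's azimuth and elevation,
    // which is what those constants imply, u = cx + fx_ * phi covers the
    // full texture width for phi in (-pi, pi], and v = cy - fy_ * theta
    // covers the full height for theta in [-pi/2, pi/2]; the minus sign
    // accounts for image rows growing downwards.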
    void PanoramaRenderer::render(
        const Transformation& T_W_C,
        const ImagePtr& out_image,
        const DepthmapPtr& out_depthmap
    ) const {
        CHECK_EQ(out_image->rows, camera_->height());
        CHECK_EQ(out_image->cols, camera_->width());

        const Transformation::RotationMatrix R_P_C =
            R_W_P_.inverse().getRotationMatrix() * T_W_C.getRotationMatrix();
        ze::Bearings bearings_P = R_P_C * bearings_C_;
        ze::Keypoint keypoint;
        for (int y = 0; y < camera_->height(); ++y) {
            for (int x = 0; x < camera_->width(); ++x) {
                const ze::Bearing& f = bearings_P.col(x + camera_->width() * y);

                // project bearing vector to panorama image
@ -101,10 +93,16 @@ void PanoramaRenderer::render(const Transformation& T_W_C, const ImagePtr& out_i
            }
        }

        cv::remap(
            texture_,
            *out_image,
            map_x_,
            map_y_,
            cv::INTER_LINEAR,
            cv::BORDER_REFLECT_101
        );

        if (out_depthmap) {
            static constexpr ImageFloatType infinity = 1e10;
            out_depthmap->setTo(infinity);
        }


@ -5,25 +5,30 @@
namespace event_camera_simulator {

    //! A rendering engine for panoramic scenes
    class PanoramaRenderer : public Renderer {
      public:
        EIGEN_MAKE_ALIGNED_OPERATOR_NEW

        PanoramaRenderer(
            const Image& texture, const Transformation::Rotation& R_W_P
        );

        ~PanoramaRenderer();

        //! Render image and depth map for a given camera pose
        virtual void render(
            const Transformation& T_W_C,
            const ImagePtr& out_image,
            const DepthmapPtr& out_depthmap
        ) const override;

        virtual void setCamera(const ze::Camera::Ptr& camera) override;

      protected:
        void precomputePixelToBearingLookupTable();
        void projectToPanorama(
            const Eigen::Ref<const ze::Bearing>& f, ze::Keypoint* keypoint
        ) const;

        // Texture mapped on the plane
        Image texture_;


@ -5,34 +5,46 @@
namespace event_camera_simulator {

    //! A rendering engine for planar scenes
    class PlanarRenderer : public Renderer {
      public:
        EIGEN_MAKE_ALIGNED_OPERATOR_NEW

        PlanarRenderer(
            const Image& texture,
            const Camera::Ptr& cam_src,
            const Transformation& T_W_P,
            FloatType z_min,
            FloatType z_max,
            bool extend_border
        );

        ~PlanarRenderer();

        //! Render image and depth map for a given camera pose
        virtual void render(
            const Transformation& T_W_C,
            const ImagePtr& out_image,
            const DepthmapPtr& out_depthmap
        ) const;

        void render(
            const Transformation& T_W_C,
            const std::vector<Transformation>& T_W_OBJ,
            const ImagePtr& out_image,
            const DepthmapPtr& out_depthmap
        ) const override {
            render(T_W_C, out_image, out_depthmap);
        }

        //! Returns true if the rendering engine can compute optic flow,
        //! false otherwise
        virtual bool canComputeOpticFlow() const override {
            return false;
        }

        virtual void setCamera(const ze::Camera::Ptr& camera) override;

      protected:
        void precomputePixelToBearingLookupTable();

        // Texture mapped on the plane


@ -1,39 +1,44 @@
#pragma once

#include <esim/common/types.hpp>
#include <ze/common/macros.hpp>

namespace event_camera_simulator {

    //! Represents a rendering engine that generates images (and other
    //! outputs, such as depth maps, or optical flow maps) given a scene
    //! and a camera position.
    class Renderer {
      public:
        ZE_POINTER_TYPEDEFS(Renderer);

        Renderer() {}

        //! Render an image at a given pose.
        virtual void render(
            const Transformation& T_W_C,
            const ImagePtr& out_image,
            const DepthmapPtr& out_depthmap
        ) const = 0;

        //! Render an image + depth map + optic flow map at a given pose,
        //! given the camera linear and angular velocity
        virtual void render(
            const Transformation& T_W_C,
            const LinearVelocity& v_WC,
            const AngularVelocity& w_WC,
            const ImagePtr& out_image,
            const DepthmapPtr& out_depthmap,
            const OpticFlowPtr& optic_flow_map
        ) const {}

        //! Sets the camera
        virtual void setCamera(const ze::Camera::Ptr& camera) = 0;

        //! Get the camera rig
        const ze::Camera::Ptr& getCamera() const {
            return camera_;
        }

      protected:
        ze::Camera::Ptr camera_;


@ -1,7 +1,7 @@
#pragma once

#include <esim/rendering/renderer_base.hpp>
#include <gflags/gflags.h>

namespace event_camera_simulator {


@ -1,34 +1,30 @@
#include <esim/common/utils.hpp>
#include <esim/rendering/panorama_renderer.hpp>
#include <glog/logging.h>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/core/eigen.hpp>
#include <opencv2/imgproc/imgproc.hpp>

namespace event_camera_simulator {

    PanoramaRenderer::PanoramaRenderer(
        const Image& texture, const Transformation::Rotation& R_W_P
    )
        : texture_(texture),
          R_W_P_(R_W_P) {
        principal_point_ =
            ze::Keypoint(0.5 * texture_.cols, 0.5 * texture_.rows);
        fx_ = texture_.cols / (2.0 * CV_PI);
        fy_ = texture_.rows / CV_PI;
        LOG(INFO) << "Initialized panoramic camera with size: " << texture_.cols
                  << "x" << texture_.rows
                  << ", principal point: " << principal_point_.transpose()
                  << ", fx = " << fx_ << " px, fy = " << fy_ << " px";
    }

    PanoramaRenderer::~PanoramaRenderer() {}

    void PanoramaRenderer::setCamera(const ze::Camera::Ptr& camera) {
        CHECK(camera);
        camera_ = camera;
@ -40,25 +36,20 @@ void PanoramaRenderer::setCamera(const ze::Camera::Ptr& camera)
        map_y_ = cv::Mat_<float>::zeros(sensor_size);
    }

    void PanoramaRenderer::precomputePixelToBearingLookupTable() {
        // points_C is a matrix containing the coordinates of each pixel
        // coordinate in the image plane
        ze::Keypoints points_C(2, camera_->width() * camera_->height());
        for (int y = 0; y < camera_->height(); ++y)
            for (int x = 0; x < camera_->width(); ++x)
                points_C.col(x + camera_->width() * y) = ze::Keypoint(x, y);

        bearings_C_ = camera_->backProjectVectorized(points_C);
        bearings_C_.array().rowwise() /= bearings_C_.row(2).array();
    }

    void PanoramaRenderer::projectToPanorama(
        const Eigen::Ref<const ze::Bearing>& f, ze::Keypoint* keypoint
    ) const {
        CHECK(keypoint);

        static constexpr FloatType kEpsilon = 1e-10;
@ -77,20 +68,21 @@ void PanoramaRenderer::projectToPanorama(const Eigen::Ref<const ze::Bearing>& f,
        *keypoint = principal_point_ + ze::Keypoint(phi * fx_, -theta * fy_);
    }
    void PanoramaRenderer::render(
        const Transformation& T_W_C,
        const ImagePtr& out_image,
        const DepthmapPtr& out_depthmap
    ) const {
        CHECK_EQ(out_image->rows, camera_->height());
        CHECK_EQ(out_image->cols, camera_->width());

        const Transformation::RotationMatrix R_P_C =
            R_W_P_.inverse().getRotationMatrix() * T_W_C.getRotationMatrix();
        ze::Bearings bearings_P = R_P_C * bearings_C_;
        ze::Keypoint keypoint;
        for (int y = 0; y < camera_->height(); ++y) {
            for (int x = 0; x < camera_->width(); ++x) {
                const ze::Bearing& f = bearings_P.col(x + camera_->width() * y);

                // project bearing vector to panorama image
@ -101,10 +93,16 @@ void PanoramaRenderer::render(const Transformation& T_W_C, const ImagePtr& out_i
            }
        }

        cv::remap(
            texture_,
            *out_image,
            map_x_,
            map_y_,
            CV_INTER_LINEAR,
            cv::BORDER_REFLECT_101
        );

        if (out_depthmap) {
            static constexpr ImageFloatType infinity = 1e10;
            out_depthmap->setTo(infinity);
        }


@ -1,13 +1,14 @@
#include <esim/common/utils.hpp>
#include <esim/imp_planar_renderer/planar_renderer.hpp>
#include <glog/logging.h>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/core/eigen.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <ze/common/timer_collection.hpp>

DECLARE_TIMER(
    TimerPlanarRenderer,
    timers_planar_renderer,
    compute_remap_maps,
    inverse_homography,
    remap,
@ -16,19 +17,20 @@ DECLARE_TIMER(TimerPlanarRenderer, timers_planar_renderer,
namespace event_camera_simulator {

    PlanarRenderer::PlanarRenderer(
        const Image& texture,
        const Camera::Ptr& cam_src,
        const Transformation& T_W_P,
        FloatType z_min,
        FloatType z_max,
        bool extend_border
    )
        : texture_(texture),
          cam_src_(cam_src),
          T_W_P_(T_W_P),
          z_min_(z_min),
          z_max_(z_max),
          extend_border_(extend_border) {
        K_src_ = calibrationMatrixFromCamera(cam_src_);
        K_src_inv_ = K_src_.inverse();
        VLOG(1) << "K_src = " << K_src_;
@ -36,13 +38,11 @@ PlanarRenderer::PlanarRenderer(const Image& texture,
LOG(INFO) << "T_W_P = " << T_W_P_; LOG(INFO) << "T_W_P = " << T_W_P_;
} }
PlanarRenderer::~PlanarRenderer() PlanarRenderer::~PlanarRenderer() {
{
timers_planar_renderer.saveToFile("/tmp", "planar_renderer.csv"); timers_planar_renderer.saveToFile("/tmp", "planar_renderer.csv");
} }
void PlanarRenderer::setCamera(const ze::Camera::Ptr& camera) void PlanarRenderer::setCamera(const ze::Camera::Ptr& camera) {
{
CHECK(camera); CHECK(camera);
camera_ = camera; camera_ = camera;
@ -54,24 +54,22 @@ void PlanarRenderer::setCamera(const ze::Camera::Ptr& camera)
        map_y_ = cv::Mat_<float>::zeros(sensor_size);
    }

    void PlanarRenderer::precomputePixelToBearingLookupTable() {
        // points_C is a matrix containing the coordinates of each pixel
        // coordinate in the image plane
        ze::Keypoints points_C(2, camera_->width() * camera_->height());
        for (int y = 0; y < camera_->height(); ++y)
            for (int x = 0; x < camera_->width(); ++x)
                points_C.col(x + camera_->width() * y) = ze::Keypoint(x, y);

        bearings_C_ = camera_->backProjectVectorized(points_C);
        bearings_C_.array().rowwise() /= bearings_C_.row(2).array();
    }

    void PlanarRenderer::render(
        const Transformation& T_W_C,
        const ImagePtr& out_image,
        const DepthmapPtr& out_depthmap
    ) const {
        CHECK_EQ(out_image->rows, camera_->height());
        CHECK_EQ(out_image->cols, camera_->width());
@ -90,57 +88,71 @@ void PlanarRenderer::render(const Transformation& T_W_C, const ImagePtr& out_ima
HomographyMatrix H_P_C; HomographyMatrix H_P_C;
{ {
auto t = timers_planar_renderer[TimerPlanarRenderer::inverse_homography].timeScope(); auto t =
timers_planar_renderer[TimerPlanarRenderer::inverse_homography]
.timeScope();
H_P_C = H_C_P.inverse(); H_P_C = H_C_P.inverse();
} }
{ {
auto t = timers_planar_renderer[TimerPlanarRenderer::compute_remap_maps].timeScope(); auto t =
timers_planar_renderer[TimerPlanarRenderer::compute_remap_maps]
.timeScope();
ze::Bearings bearings_P = H_P_C * bearings_C_; ze::Bearings bearings_P = H_P_C * bearings_C_;
for(int y=0; y<camera_->height(); ++y) for (int y = 0; y < camera_->height(); ++y) {
{ for (int x = 0; x < camera_->width(); ++x) {
for(int x=0; x<camera_->width(); ++x) const ze::Bearing& f =
{ bearings_P.col(x + camera_->width() * y);
const ze::Bearing& f = bearings_P.col(x + camera_->width() * y); map_x_(y, x) = static_cast<float>(
map_x_(y,x) = static_cast<float>(K_src_(0,0) * f[0]/f[2] + K_src_(0,2)); K_src_(0, 0) * f[0] / f[2] + K_src_(0, 2)
map_y_(y,x) = static_cast<float>(K_src_(1,1) * f[1]/f[2] + K_src_(1,2)); );
map_y_(y, x) = static_cast<float>(
K_src_(1, 1) * f[1] / f[2] + K_src_(1, 2)
);
} }
} }
} }
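Restating the loop above in math form (no new behavior, just notation): each camera bearing f_C is warped into the source camera of the plane texture by the homography, then projected with the pinhole intrinsics K_src:

\[
f_P = H_{P\,C}\, f_C, \qquad
\mathrm{map}_x = K_{00}\,\frac{f_{P,x}}{f_{P,z}} + K_{02}, \qquad
\mathrm{map}_y = K_{11}\,\frac{f_{P,y}}{f_{P,z}} + K_{12}.
\]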
int border = (extend_border_) ? cv::BORDER_REFLECT : cv::BORDER_CONSTANT; int border =
(extend_border_) ? cv::BORDER_REFLECT : cv::BORDER_CONSTANT;
{ {
auto t = timers_planar_renderer[TimerPlanarRenderer::remap].timeScope(); auto t =
cv::remap(texture_, *out_image, map_x_, map_y_, cv::INTER_LINEAR, border, 0.8); timers_planar_renderer[TimerPlanarRenderer::remap].timeScope();
cv::remap(
texture_,
*out_image,
map_x_,
map_y_,
cv::INTER_LINEAR,
border,
0.8
);
} }
// Compute depth map in dst // Compute depth map in dst
{ {
auto t = timers_planar_renderer[TimerPlanarRenderer::compute_depthmap].timeScope(); auto t =
timers_planar_renderer[TimerPlanarRenderer::compute_depthmap]
.timeScope();
Vector3 X; Vector3 X;
for(int y=0; y<camera_->height(); ++y) for (int y = 0; y < camera_->height(); ++y) {
{ for (int x = 0; x < camera_->width(); ++x) {
for(int x=0; x<camera_->width(); ++x)
{
X = bearings_C_.col(x + camera_->width() * y); X = bearings_C_.col(x + camera_->width() * y);
// @TODO: derive this formula for the depth to explain it // @TODO: derive this formula for the depth to explain it
const FloatType numer = -T_P_C.getPosition()[2]; const FloatType numer = -T_P_C.getPosition()[2];
const FloatType denom = T_P_C.getRotationMatrix().row(2) * X; const FloatType denom =
T_P_C.getRotationMatrix().row(2) * X;
const FloatType z = numer / denom; const FloatType z = numer / denom;
if (out_depthmap) if (out_depthmap)
{
(*out_depthmap)(y, x) = (ImageFloatType) z; (*out_depthmap)(y, x) = (ImageFloatType) z;
}
// zero out the image where the depth is outside the [z_min, z_max] range // zero out the image where the depth is outside the [z_min, z_max] range
if (z < z_min_ || z > z_max_) if (z < z_min_ || z > z_max_)
{
(*out_image)(y, x) = 0; (*out_image)(y, x) = 0;
} }
} }
} }
} }
}
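A sketch of the derivation that the @TODO above asks for, assuming (as the rest of the class does) that the plane P is the z = 0 plane of its own frame: a point at depth z along the z-normalized bearing f sits at X_C = z f in the camera frame, i.e. at X_P = z R_{PC} f + t_{PC} in plane coordinates. Requiring (X_P)_z = 0 yields

\[
z\, r_3^\top f + t_{PC,z} = 0
\quad\Longrightarrow\quad
z = \frac{-t_{PC,z}}{r_3^\top f},
\]

which is exactly numer / denom in the loop above, with r_3 the third row of the rotation matrix of T_P_C.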
} // namespace event_camera_simulator } // namespace event_camera_simulator
View File
@ -1,5 +1,3 @@
#include <esim/rendering/renderer_base.hpp> #include <esim/rendering/renderer_base.hpp>
namespace event_camera_simulator { namespace event_camera_simulator {} // namespace event_camera_simulator
} // namespace event_camera_simulator
View File
@ -1,94 +1,153 @@
#include <esim/common/utils.hpp> #include <esim/common/utils.hpp>
#include <esim/rendering/renderer_factory.hpp>
#include <esim/rendering/planar_renderer.hpp>
#include <esim/rendering/panorama_renderer.hpp> #include <esim/rendering/panorama_renderer.hpp>
#include <ze/cameras/camera_models.hpp> #include <esim/rendering/planar_renderer.hpp>
#include <ze/cameras/camera_impl.hpp> #include <esim/rendering/renderer_factory.hpp>
#include <ze/common/logging.hpp>
#include <opencv2/imgcodecs/imgcodecs.hpp> #include <opencv2/imgcodecs/imgcodecs.hpp>
#include <opencv2/imgproc/imgproc.hpp> #include <opencv2/imgproc/imgproc.hpp>
#include <ze/cameras/camera_impl.hpp>
#include <ze/cameras/camera_models.hpp>
#include <ze/common/logging.hpp>
DEFINE_int32(renderer_type, 0, "0: Planar renderer, 1: Panorama renderer"); DEFINE_int32(renderer_type, 0, "0: Planar renderer, 1: Panorama renderer");
DEFINE_string(renderer_texture, "", DEFINE_string(
"Path to image which will be used to texture the plane"); renderer_texture,
"",
"Path to image which will be used to texture the plane"
);
DEFINE_double(renderer_hfov_cam_source_deg, 130.0, DEFINE_double(
"Horizontal FoV of the source camera (that captured the image on the plane)"); renderer_hfov_cam_source_deg,
130.0,
"Horizontal FoV of the source camera (that captured the image on "
"the plane)"
);
DEFINE_double(renderer_preprocess_median_blur, 0, DEFINE_double(
"Kernel size of the preprocessing median blur."); renderer_preprocess_median_blur,
0,
"Kernel size of the preprocessing median blur."
);
DEFINE_double(renderer_preprocess_gaussian_blur, 0, DEFINE_double(
"Amount of preprocessing Gaussian blur."); renderer_preprocess_gaussian_blur,
0,
"Amount of preprocessing Gaussian blur."
);
DEFINE_double(renderer_plane_x, 0.0, DEFINE_double(
"x position of the center of the plane, in world coordinates"); renderer_plane_x,
0.0,
"x position of the center of the plane, in world coordinates"
);
DEFINE_double(renderer_plane_y, 0.0, DEFINE_double(
"y position of the center of the plane, in world coordinates"); renderer_plane_y,
0.0,
"y position of the center of the plane, in world coordinates"
);
DEFINE_double(renderer_plane_z, -1.0, DEFINE_double(
"z position of the center of the plane, in world coordinates"); renderer_plane_z,
-1.0,
"z position of the center of the plane, in world coordinates"
);
DEFINE_double(renderer_plane_qw, 0.0, DEFINE_double(
"w component of the quaternion q_W_P (orientation of the plane with respect to the world)"); renderer_plane_qw,
0.0,
"w component of the quaternion q_W_P (orientation of the plane "
"with respect to the world)"
);
DEFINE_double(renderer_plane_qx, 1.0, DEFINE_double(
"x component of the quaternion q_W_P (orientation of the plane with respect to the world)"); renderer_plane_qx,
1.0,
"x component of the quaternion q_W_P (orientation of the plane "
"with respect to the world)"
);
DEFINE_double(renderer_plane_qy, 0.0, DEFINE_double(
"y component of the quaternion q_W_P (orientation of the plane with respect to the world)"); renderer_plane_qy,
0.0,
"y component of the quaternion q_W_P (orientation of the plane "
"with respect to the world)"
);
DEFINE_double(renderer_plane_qz, 0.0, DEFINE_double(
"z component of the quaternion q_W_P (orientation of the plane with respect to the world)"); renderer_plane_qz,
0.0,
"z component of the quaternion q_W_P (orientation of the plane "
"with respect to the world)"
);
DEFINE_double(renderer_z_min, 0.01, DEFINE_double(renderer_z_min, 0.01, "Minimum clipping distance.");
"Minimum clipping distance.");
DEFINE_double(renderer_z_max, 10.0, DEFINE_double(renderer_z_max, 10.0, "Maximum clipping distance.");
"Maximum clipping distance.");
DEFINE_bool(renderer_extend_border, false, DEFINE_bool(
"Whether to extend the borders of the plane to infinity or not."); renderer_extend_border,
false,
"Whether to extend the borders of the plane to infinity or not."
);
DEFINE_double(renderer_panorama_qw, 0.0, DEFINE_double(
"w component of the quaternion q_W_P (orientation of the panorama with respect to the world)"); renderer_panorama_qw,
0.0,
"w component of the quaternion q_W_P (orientation of the "
"panorama with respect to the world)"
);
DEFINE_double(renderer_panorama_qx, 1.0, DEFINE_double(
"x component of the quaternion q_W_P (orientation of the panorama with respect to the world)"); renderer_panorama_qx,
1.0,
"x component of the quaternion q_W_P (orientation of the "
"panorama with respect to the world)"
);
DEFINE_double(renderer_panorama_qy, 0.0, DEFINE_double(
"y component of the quaternion q_W_P (orientation of the panorama with respect to the world)"); renderer_panorama_qy,
0.0,
"y component of the quaternion q_W_P (orientation of the "
"panorama with respect to the world)"
);
DEFINE_double(renderer_panorama_qz, 0.0, DEFINE_double(
"z component of the quaternion q_W_P (orientation of the panorama with respect to the world)"); renderer_panorama_qz,
0.0,
"z component of the quaternion q_W_P (orientation of the "
"panorama with respect to the world)"
);
namespace event_camera_simulator { namespace event_camera_simulator {
bool loadPreprocessedImage(const std::string& path_to_img, cv::Mat* img) bool loadPreprocessedImage(const std::string& path_to_img, cv::Mat* img) {
{
CHECK(img); CHECK(img);
VLOG(1) << "Loading texture file from file: " << FLAGS_renderer_texture << "."; VLOG(1) << "Loading texture file from file: " << FLAGS_renderer_texture
<< ".";
*img = cv::imread(path_to_img, 0); *img = cv::imread(path_to_img, 0);
if(!img->data) if (!img->data) {
{ LOG(FATAL) << "Could not open image at: " << FLAGS_renderer_texture
LOG(FATAL) << "Could not open image at: " << FLAGS_renderer_texture << "."; << ".";
return false; return false;
} }
if(FLAGS_renderer_preprocess_median_blur > 1) if (FLAGS_renderer_preprocess_median_blur > 1) {
{
VLOG(1) << "Pre-filtering the texture with median filter of size: " VLOG(1) << "Pre-filtering the texture with median filter of size: "
<< FLAGS_renderer_preprocess_median_blur << "."; << FLAGS_renderer_preprocess_median_blur << ".";
cv::medianBlur(*img, *img, FLAGS_renderer_preprocess_median_blur); cv::medianBlur(*img, *img, FLAGS_renderer_preprocess_median_blur);
} }
if(FLAGS_renderer_preprocess_gaussian_blur > 0) if (FLAGS_renderer_preprocess_gaussian_blur > 0) {
{ VLOG(1
VLOG(1) << "Pre-filtering the texture with gaussian filter of size: " ) << "Pre-filtering the texture with gaussian filter of size: "
<< FLAGS_renderer_preprocess_gaussian_blur << "."; << FLAGS_renderer_preprocess_gaussian_blur << ".";
cv::GaussianBlur(*img, *img, cv::Size(15,15), FLAGS_renderer_preprocess_gaussian_blur); cv::GaussianBlur(
*img,
*img,
cv::Size(15, 15),
FLAGS_renderer_preprocess_gaussian_blur
);
} }
img->convertTo(*img, cv::DataType<ImageFloatType>::type, 1.0 / 255.0); img->convertTo(*img, cv::DataType<ImageFloatType>::type, 1.0 / 255.0);
@ -96,41 +155,54 @@ bool loadPreprocessedImage(const std::string& path_to_img, cv::Mat* img)
return true; return true;
} }
Renderer::Ptr loadRendererFromGflags() Renderer::Ptr loadRendererFromGflags() {
{
Renderer::Ptr renderer; Renderer::Ptr renderer;
switch (FLAGS_renderer_type) switch (FLAGS_renderer_type) {
{
case 0: // Planar renderer case 0: // Planar renderer
{ {
cv::Mat img_src; cv::Mat img_src;
if (!loadPreprocessedImage(FLAGS_renderer_texture, &img_src)) if (!loadPreprocessedImage(FLAGS_renderer_texture, &img_src))
{
return nullptr; return nullptr;
}
double f_src = hfovToFocalLength(FLAGS_renderer_hfov_cam_source_deg, img_src.cols); double f_src = hfovToFocalLength(
FLAGS_renderer_hfov_cam_source_deg,
img_src.cols
);
Camera::Ptr cam_src = std::make_shared<ze::PinholeCamera>( Camera::Ptr cam_src = std::make_shared<ze::PinholeCamera>(
img_src.cols, img_src.rows, ze::CameraType::Pinhole, img_src.cols,
(Vector4() << f_src, f_src, 0.5 * img_src.cols, 0.5 * img_src.rows).finished(), img_src.rows,
ze::VectorX()); ze::CameraType::Pinhole,
(Vector4() << f_src,
f_src,
0.5 * img_src.cols,
0.5 * img_src.rows)
.finished(),
ze::VectorX()
);
Transformation T_W_P; Transformation T_W_P;
T_W_P.getPosition() = ze::Position(FLAGS_renderer_plane_x, T_W_P.getPosition() = ze::Position(
FLAGS_renderer_plane_x,
FLAGS_renderer_plane_y, FLAGS_renderer_plane_y,
FLAGS_renderer_plane_z); FLAGS_renderer_plane_z
);
T_W_P.getRotation() = ze::Quaternion(FLAGS_renderer_plane_qw, T_W_P.getRotation() = ze::Quaternion(
FLAGS_renderer_plane_qw,
FLAGS_renderer_plane_qx, FLAGS_renderer_plane_qx,
FLAGS_renderer_plane_qy, FLAGS_renderer_plane_qy,
FLAGS_renderer_plane_qz); FLAGS_renderer_plane_qz
);
renderer.reset(new PlanarRenderer(img_src, cam_src, renderer.reset(new PlanarRenderer(
img_src,
cam_src,
T_W_P, T_W_P,
FLAGS_renderer_z_min, FLAGS_renderer_z_min,
FLAGS_renderer_z_max, FLAGS_renderer_z_max,
FLAGS_renderer_extend_border)); FLAGS_renderer_extend_border
));
break; break;
} }
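Judging by its name, hfovToFocalLength presumably evaluates the standard pinhole relation between horizontal field of view and focal length (an assumption; the helper itself is defined elsewhere, in esim/common/utils):

\[
f_{\mathrm{src}} = \frac{w/2}{\tan(\mathrm{hfov}/2)},
\]

with w the image width in pixels and hfov in radians.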
@ -138,21 +210,20 @@ Renderer::Ptr loadRendererFromGflags()
{ {
cv::Mat img_src; cv::Mat img_src;
if (!loadPreprocessedImage(FLAGS_renderer_texture, &img_src)) if (!loadPreprocessedImage(FLAGS_renderer_texture, &img_src))
{
return nullptr; return nullptr;
}
Transformation::Rotation R_W_P; Transformation::Rotation R_W_P;
R_W_P = ze::Quaternion(FLAGS_renderer_panorama_qw, R_W_P = ze::Quaternion(
FLAGS_renderer_panorama_qw,
FLAGS_renderer_panorama_qx, FLAGS_renderer_panorama_qx,
FLAGS_renderer_panorama_qy, FLAGS_renderer_panorama_qy,
FLAGS_renderer_panorama_qz); FLAGS_renderer_panorama_qz
);
renderer.reset(new PanoramaRenderer(img_src, R_W_P)); renderer.reset(new PanoramaRenderer(img_src, R_W_P));
break; break;
} }
default: default: {
{
LOG(FATAL) << "Renderer type is not known."; LOG(FATAL) << "Renderer type is not known.";
break; break;
} }
View File
@ -6,22 +6,31 @@ namespace event_camera_simulator {
class UnrealCvClient; // fwd class UnrealCvClient; // fwd
class UnrealCvRenderer : public Renderer class UnrealCvRenderer : public Renderer {
{
public: public:
UnrealCvRenderer(); UnrealCvRenderer();
//! Render image and depth map for a given camera pose //! Render image and depth map for a given camera pose
virtual void render(const Transformation& T_W_C, const ImagePtr &out_image, const DepthmapPtr &out_depthmap) const; virtual void render(
const Transformation& T_W_C,
const ImagePtr& out_image,
const DepthmapPtr& out_depthmap
) const;
void render(const Transformation& T_W_C, const std::vector<Transformation>& T_W_OBJ, const ImagePtr &out_image, const DepthmapPtr &out_depthmap) const void render(
{ const Transformation& T_W_C,
const std::vector<Transformation>& T_W_OBJ,
const ImagePtr& out_image,
const DepthmapPtr& out_depthmap
) const {
render(T_W_C, out_image, out_depthmap); render(T_W_C, out_image, out_depthmap);
} }
//! Returns true if the rendering engine can compute optic flow, false otherwise //! Returns true if the rendering engine can compute optic flow, false
virtual bool canComputeOpticFlow() const override { return false; } //! otherwise
virtual bool canComputeOpticFlow() const override {
return false;
}
virtual void setCamera(const ze::Camera::Ptr& camera) override; virtual void setCamera(const ze::Camera::Ptr& camera) override;
@ -30,5 +39,4 @@ private:
mutable size_t frame_idx_; mutable size_t frame_idx_;
}; };
} // namespace event_camera_simulator
}
View File
@ -6,15 +6,14 @@ namespace event_camera_simulator {
/** /**
* https://github.com/EpicGames/UnrealEngine/blob/dbced2dd59f9f5dfef1d7786fd67ad2970adf95f/Engine/Source/Runtime/Core/Public/Math/Rotator.h#L580 * https://github.com/EpicGames/UnrealEngine/blob/dbced2dd59f9f5dfef1d7786fd67ad2970adf95f/Engine/Source/Runtime/Core/Public/Math/Rotator.h#L580
* Helper function for eulerFromQuatSingularityTest, angles are expected to be given in degrees * Helper function for eulerFromQuatSingularityTest, angles are expected to
*be given in degrees
**/ **/
inline FloatType clampAxis(FloatType angle) inline FloatType clampAxis(FloatType angle) {
{
// returns angle in the range (-360,360) // returns angle in the range (-360,360)
angle = std::fmod(angle, 360.f); angle = std::fmod(angle, 360.f);
if (angle < 0.f) if (angle < 0.f) {
{
// shift to [0,360) range // shift to [0,360) range
angle += 360.f; angle += 360.f;
} }
@ -24,13 +23,12 @@ inline FloatType clampAxis(FloatType angle)
/** /**
* https://github.com/EpicGames/UnrealEngine/blob/dbced2dd59f9f5dfef1d7786fd67ad2970adf95f/Engine/Source/Runtime/Core/Public/Math/Rotator.h#L595$ * https://github.com/EpicGames/UnrealEngine/blob/dbced2dd59f9f5dfef1d7786fd67ad2970adf95f/Engine/Source/Runtime/Core/Public/Math/Rotator.h#L595$
* Helper function for eulerFromQuatSingularityTest, angles are expected to be given in degrees * Helper function for eulerFromQuatSingularityTest, angles are expected to
*be given in degrees
**/ **/
inline FloatType normalizeAxis(FloatType angle) inline FloatType normalizeAxis(FloatType angle) {
{
angle = clampAxis(angle); angle = clampAxis(angle);
if(angle > 180.f) if (angle > 180.f) {
{
// shift to (-180,180] // shift to (-180,180]
angle -= 360.f; angle -= 360.f;
} }
@ -38,14 +36,17 @@ inline FloatType normalizeAxis(FloatType angle)
return angle; return angle;
} }
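A quick worked example of the two helpers: clampAxis(-450) computes fmod(-450, 360) = -90 and shifts it into [0, 360), giving 270; normalizeAxis(-450) takes that 270 and, since 270 > 180, returns 270 - 360 = -90, the same rotation expressed in (-180, 180].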
/** /**
* *
* https://github.com/EpicGames/UnrealEngine/blob/f794321ffcad597c6232bc706304c0c9b4e154b2/Engine/Source/Runtime/Core/Private/Math/UnrealMath.cpp#L540 * https://github.com/EpicGames/UnrealEngine/blob/f794321ffcad597c6232bc706304c0c9b4e154b2/Engine/Source/Runtime/Core/Private/Math/UnrealMath.cpp#L540
* Quaternion given in (x,y,z,w) representation * Quaternion given in (x,y,z,w) representation
**/ **/
void quaternionToEulerUnrealEngine(const Transformation::Rotation& q, FloatType& yaw, FloatType& pitch, FloatType& roll) void quaternionToEulerUnrealEngine(
{ const Transformation::Rotation& q,
FloatType& yaw,
FloatType& pitch,
FloatType& roll
) {
const FloatType X = q.x(); const FloatType X = q.x();
const FloatType Y = q.y(); const FloatType Y = q.y();
const FloatType Z = q.z(); const FloatType Z = q.z();
@ -59,29 +60,29 @@ void quaternionToEulerUnrealEngine(const Transformation::Rotation& q, FloatType&
// http://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles // http://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/quaternionToEuler/ // http://www.euclideanspace.com/maths/geometry/rotations/conversions/quaternionToEuler/
// this value was found from experience, the above websites recommend different values // this value was found from experience, the above websites recommend
// but that isn't the case for us, so I went through different testing, and finally found the case // different values but that isn't the case for us, so I went through
// where both worlds live happily. // different testing, and finally found the case where both worlds
// live happily.
const FloatType SINGULARITY_THRESHOLD = 0.4999995; const FloatType SINGULARITY_THRESHOLD = 0.4999995;
const FloatType RAD_TO_DEG = (180.0) / CV_PI; const FloatType RAD_TO_DEG = (180.0) / CV_PI;
if (SingularityTest < -SINGULARITY_THRESHOLD) if (SingularityTest < -SINGULARITY_THRESHOLD) {
{
pitch = -90.; pitch = -90.;
yaw = std::atan2(YawY, YawX) * RAD_TO_DEG; yaw = std::atan2(YawY, YawX) * RAD_TO_DEG;
roll = normalizeAxis(-yaw - (2.f * std::atan2(X, W) * RAD_TO_DEG)); roll = normalizeAxis(-yaw - (2.f * std::atan2(X, W) * RAD_TO_DEG));
} } else if (SingularityTest > SINGULARITY_THRESHOLD) {
else if (SingularityTest > SINGULARITY_THRESHOLD)
{
pitch = 90.; pitch = 90.;
yaw = std::atan2(YawY, YawX) * RAD_TO_DEG; yaw = std::atan2(YawY, YawX) * RAD_TO_DEG;
roll = normalizeAxis(yaw - (2.f * std::atan2(X, W) * RAD_TO_DEG)); roll = normalizeAxis(yaw - (2.f * std::atan2(X, W) * RAD_TO_DEG));
} } else {
else
{
pitch = std::asin(2.f * (SingularityTest)) * RAD_TO_DEG; pitch = std::asin(2.f * (SingularityTest)) * RAD_TO_DEG;
yaw = std::atan2(YawY, YawX) * RAD_TO_DEG; yaw = std::atan2(YawY, YawX) * RAD_TO_DEG;
roll = std::atan2(-2.f*(W*X+Y*Z), (1.f-2.f*(X*X + Y*Y))) * RAD_TO_DEG; roll = std::atan2(
-2.f * (W * X + Y * Z),
(1.f - 2.f * (X * X + Y * Y))
)
* RAD_TO_DEG;
} }
} }
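To put the magic constant in context: since pitch = asin(2 * SingularityTest) in the regular branch, the threshold |SingularityTest| > 0.4999995 corresponds to |pitch| > asin(0.999999), roughly 89.92 degrees, so the gimbal-lock branches only engage within about 0.08 degrees of a straight-up or straight-down pitch.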
View File
@ -1,10 +1,10 @@
#include <esim/imp_unrealcv_renderer/unrealcv_renderer.hpp> #include <esim/imp_unrealcv_renderer/unrealcv_renderer.hpp>
#include <esim/unrealcv_bridge/unrealcv_bridge.hpp>
#include <esim/imp_unrealcv_renderer/utils.hpp> #include <esim/imp_unrealcv_renderer/utils.hpp>
#include <opencv2/imgproc/imgproc.hpp> #include <esim/unrealcv_bridge/unrealcv_bridge.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <ze/common/path_utils.hpp>
#include <iomanip> #include <iomanip>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <ze/common/path_utils.hpp>
DEFINE_double(x_offset, 0, "X offset of the trajectory, in meters"); DEFINE_double(x_offset, 0, "X offset of the trajectory, in meters");
DEFINE_double(y_offset, 0, "Y offset of the trajectory, in meters"); DEFINE_double(y_offset, 0, "Y offset of the trajectory, in meters");
@ -13,60 +13,67 @@ DECLARE_double(trajectory_multiplier_x);
DECLARE_double(trajectory_multiplier_y); DECLARE_double(trajectory_multiplier_y);
DECLARE_double(trajectory_multiplier_z); DECLARE_double(trajectory_multiplier_z);
DEFINE_int32(unrealcv_post_median_blur, 0, DEFINE_int32(
unrealcv_post_median_blur,
0,
"If > 0, median blur the raw UnrealCV images" "If > 0, median blur the raw UnrealCV images"
"with a median filter of this size"); "with a median filter of this size"
);
DEFINE_double(unrealcv_post_gaussian_blur_sigma, 0, DEFINE_double(
unrealcv_post_gaussian_blur_sigma,
0,
"If sigma > 0, Gaussian blur the raw UnrealCV images" "If sigma > 0, Gaussian blur the raw UnrealCV images"
"with a Gaussian filter standard deviation sigma."); "with a Gaussian filter standard deviation sigma."
);
DEFINE_string(unrealcv_output_directory, "", DEFINE_string(
"Output directory in which to output the raw RGB images."); unrealcv_output_directory,
"",
"Output directory in which to output the raw RGB images."
);
namespace event_camera_simulator { namespace event_camera_simulator {
UnrealCvRenderer::UnrealCvRenderer() UnrealCvRenderer::UnrealCvRenderer() {
{
client_ = std::make_shared<UnrealCvClient>("localhost", "9000"); client_ = std::make_shared<UnrealCvClient>("localhost", "9000");
frame_idx_ = 0; frame_idx_ = 0;
} }
void UnrealCvRenderer::setCamera(const ze::Camera::Ptr& camera) void UnrealCvRenderer::setCamera(const ze::Camera::Ptr& camera) {
{
camera_ = camera; camera_ = camera;
// compute the horizontal field of view of the camera // compute the horizontal field of view of the camera
ze::VectorX intrinsics = camera_->projectionParameters(); ze::VectorX intrinsics = camera_->projectionParameters();
const FloatType fx = intrinsics(0); const FloatType fx = intrinsics(0);
const FloatType hfov_deg = 2 * std::atan(0.5 * (FloatType) camera_->width() / fx) * 180. / CV_PI; const FloatType hfov_deg =
2 * std::atan(0.5 * (FloatType) camera_->width() / fx) * 180.
/ CV_PI;
client_->setCameraFOV(static_cast<float>(hfov_deg)); client_->setCameraFOV(static_cast<float>(hfov_deg));
client_->setCameraSize(camera->width(), camera->height()); client_->setCameraSize(camera->width(), camera->height());
} }
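This is the usual pinhole relation hfov = 2 * atan(w / (2 f_x)) (computed in radians and converted to degrees above), i.e. the inverse of the hfov-to-focal-length mapping used when constructing the planar renderer's source camera.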
void UnrealCvRenderer::render(
void UnrealCvRenderer::render(const Transformation& T_W_C, const ImagePtr& out_image, const DepthmapPtr& out_depthmap) const const Transformation& T_W_C,
{ const ImagePtr& out_image,
const DepthmapPtr& out_depthmap
) const {
CHECK_EQ(out_image->rows, camera_->height()); CHECK_EQ(out_image->rows, camera_->height());
CHECK_EQ(out_image->cols, camera_->width()); CHECK_EQ(out_image->cols, camera_->width());
VLOG(1) << "T_W_C (ZE) = " << T_W_C; VLOG(1) << "T_W_C (ZE) = " << T_W_C;
const Transformation::TransformationMatrix mT_ZE_C = T_W_C.getTransformationMatrix(); const Transformation::TransformationMatrix mT_ZE_C =
T_W_C.getTransformationMatrix();
Transformation::TransformationMatrix mT_UE_ZE; Transformation::TransformationMatrix mT_UE_ZE;
mT_UE_ZE << 1, 0, 0, 0, mT_UE_ZE << 1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1;
0, -1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1;
Transformation::TransformationMatrix mT_C_UEC; Transformation::TransformationMatrix mT_C_UEC;
mT_C_UEC << 0, 1, 0, 0, mT_C_UEC << 0, 1, 0, 0, 0, 0, -1, 0, 1, 0, 0, 0, 0, 0, 0, 1;
0, 0, -1, 0,
1, 0, 0, 0,
0, 0, 0, 1;
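Written out as matrices (the same sixteen values as the one-line comma initializers above), the two fixed convention changes are

\[
T_{UE\,ZE} =
\begin{pmatrix}
1 & 0 & 0 & 0 \\
0 & -1 & 0 & 0 \\
0 & 0 & 1 & 0 \\
0 & 0 & 0 & 1
\end{pmatrix},
\qquad
T_{C\,UEC} =
\begin{pmatrix}
0 & 1 & 0 & 0 \\
0 & 0 & -1 & 0 \\
1 & 0 & 0 & 0 \\
0 & 0 & 0 & 1
\end{pmatrix},
\]

i.e. a y-axis flip between the ZE and UnrealEngine world frames, and an axis permutation between the camera frames.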
// rotate 90 deg to the right so that R_W_C = Identity <=> euler angle (0,0,0) in UnrealCV // rotate 90 deg to the right so that R_W_C = Identity <=> euler angle
// (0,0,0) in UnrealCV
// Transformation::TransformationMatrix mT_rotate_90_right; // Transformation::TransformationMatrix mT_rotate_90_right;
// mT_rotate_90_right << 0, -1, 0, 0, // mT_rotate_90_right << 0, -1, 0, 0,
// 1, 0, 0, 0, // 1, 0, 0, 0,
@ -76,8 +83,11 @@ void UnrealCvRenderer::render(const Transformation& T_W_C, const ImagePtr& out_i
// 0, 1, 0, 0, // 0, 1, 0, 0,
// 0, 0, 1, 0, // 0, 0, 1, 0,
// 0, 0, 0, 1; // 0, 0, 0, 1;
// const Transformation::TransformationMatrix mT_UE_UEC = mT_rotate_90_right * mT_UE_ZE * mT_ZE_C * mT_C_UEC; // const Transformation::TransformationMatrix mT_UE_UEC =
const Transformation::TransformationMatrix mT_UE_UEC = mT_UE_ZE * mT_ZE_C * mT_C_UEC; // mT_rotate_90_right
// * mT_UE_ZE * mT_ZE_C * mT_C_UEC;
const Transformation::TransformationMatrix mT_UE_UEC =
mT_UE_ZE * mT_ZE_C * mT_C_UEC;
const Transformation T_UE_UEC(mT_UE_UEC); const Transformation T_UE_UEC(mT_UE_UEC);
VLOG(1) << "T_ZE_C = " << mT_ZE_C; VLOG(1) << "T_ZE_C = " << mT_ZE_C;
@ -90,30 +100,35 @@ void UnrealCvRenderer::render(const Transformation& T_W_C, const ImagePtr& out_i
const FloatType y_offset = static_cast<FloatType>(FLAGS_y_offset); const FloatType y_offset = static_cast<FloatType>(FLAGS_y_offset);
const FloatType z_offset = static_cast<FloatType>(FLAGS_z_offset); const FloatType z_offset = static_cast<FloatType>(FLAGS_z_offset);
const FloatType x = 100. * (FLAGS_trajectory_multiplier_x * T_UE_UEC.getPosition()[0] + x_offset); const FloatType x =
const FloatType y = 100. * (FLAGS_trajectory_multiplier_y * T_UE_UEC.getPosition()[1] + y_offset); 100.
const FloatType z = 100. * (FLAGS_trajectory_multiplier_z * T_UE_UEC.getPosition()[2] + z_offset); * (FLAGS_trajectory_multiplier_x * T_UE_UEC.getPosition()[0]
+ x_offset);
const FloatType y =
100.
* (FLAGS_trajectory_multiplier_y * T_UE_UEC.getPosition()[1]
+ y_offset);
const FloatType z =
100.
* (FLAGS_trajectory_multiplier_z * T_UE_UEC.getPosition()[2]
+ z_offset);
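The factor of 100 presumably converts the ZE positions from meters to centimeters, UnrealEngine's native length unit; this is an assumption based on UE conventions, not something the code states.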
VLOG(1) << yaw << " " << pitch << " " << roll; VLOG(1) << yaw << " " << pitch << " " << roll;
CameraData cam_data = {0, CameraData cam_data = {0, pitch, yaw, roll, x, y, z};
pitch,
yaw,
roll,
x,
y,
z};
client_->setCamera(cam_data); client_->setCamera(cam_data);
cv::Mat img = client_->getImage(0); cv::Mat img = client_->getImage(0);
VLOG(5) << "Got image from the UnrealCV client"; VLOG(5) << "Got image from the UnrealCV client";
// (optionally) save raw RGB image to the output directory // (optionally) save raw RGB image to the output directory
if(FLAGS_unrealcv_output_directory != "") if (FLAGS_unrealcv_output_directory != "") {
{
std::stringstream ss_nr; std::stringstream ss_nr;
ss_nr << std::setw(10) << std::setfill('0') << frame_idx_; ss_nr << std::setw(10) << std::setfill('0') << frame_idx_;
std::string path_frame = ze::joinPath(FLAGS_unrealcv_output_directory, "frame_" + ss_nr.str() + ".png"); std::string path_frame = ze::joinPath(
FLAGS_unrealcv_output_directory,
"frame_" + ss_nr.str() + ".png"
);
VLOG(1) << "Saving raw RGB image to: " << path_frame; VLOG(1) << "Saving raw RGB image to: " << path_frame;
cv::imwrite(path_frame, img, {cv::IMWRITE_PNG_COMPRESSION, 9}); cv::imwrite(path_frame, img, {cv::IMWRITE_PNG_COMPRESSION, 9});
} }
@ -122,17 +137,27 @@ void UnrealCvRenderer::render(const Transformation& T_W_C, const ImagePtr& out_i
cv::cvtColor(img, img_gray, cv::COLOR_BGR2GRAY); cv::cvtColor(img, img_gray, cv::COLOR_BGR2GRAY);
if (FLAGS_unrealcv_post_median_blur > 0) if (FLAGS_unrealcv_post_median_blur > 0)
{
cv::medianBlur(img_gray, img_gray, FLAGS_unrealcv_post_median_blur); cv::medianBlur(img_gray, img_gray, FLAGS_unrealcv_post_median_blur);
if (FLAGS_unrealcv_post_gaussian_blur_sigma > 0) {
cv::GaussianBlur(
img_gray,
img_gray,
cv::Size(-1, -1),
FLAGS_unrealcv_post_gaussian_blur_sigma
);
} }
if(FLAGS_unrealcv_post_gaussian_blur_sigma > 0) cv::resize(
{ img_gray,
cv::GaussianBlur(img_gray, img_gray, cv::Size(-1,-1), FLAGS_unrealcv_post_gaussian_blur_sigma); img_gray,
} cv::Size(camera_->width(), camera_->height())
);
cv::resize(img_gray, img_gray, cv::Size(camera_->width(), camera_->height())); img_gray.convertTo(
img_gray.convertTo(*out_image, cv::DataType<ImageFloatType>::type, 1./255.); *out_image,
cv::DataType<ImageFloatType>::type,
1. / 255.
);
cv::Mat depth = client_->getDepth(0); cv::Mat depth = client_->getDepth(0);
VLOG(5) << "Got depth map from the UnrealCV client"; VLOG(5) << "Got depth map from the UnrealCV client";
@ -141,20 +166,26 @@ void UnrealCvRenderer::render(const Transformation& T_W_C, const ImagePtr& out_i
cv::resize(depth, depth, cv::Size(camera_->width(), camera_->height())); cv::resize(depth, depth, cv::Size(camera_->width(), camera_->height()));
depth.convertTo(*out_depthmap, cv::DataType<ImageFloatType>::type); depth.convertTo(*out_depthmap, cv::DataType<ImageFloatType>::type);
// the depth provided by the unrealcv client is the distance from the scene to the camera center; // the depth provided by the unrealcv client is the distance from the
// we need to convert it to the distance to the image plane (see Github issue: https://github.com/unrealcv/unrealcv/issues/14) // scene to the camera center; we need to convert it to the distance
const ImageFloatType yc = 0.5 * static_cast<ImageFloatType>(camera_->height()) - 1.0; // to the image plane (see Github issue:
const ImageFloatType xc = 0.5 * static_cast<ImageFloatType>(camera_->width()) - 1.0; // https://github.com/unrealcv/unrealcv/issues/14)
const ImageFloatType f = static_cast<ImageFloatType>(camera_->projectionParameters()(0)); const ImageFloatType yc =
for(int y=0; y<camera_->height(); ++y) 0.5 * static_cast<ImageFloatType>(camera_->height()) - 1.0;
{ const ImageFloatType xc =
for(int x=0; x<camera_->width(); ++x) 0.5 * static_cast<ImageFloatType>(camera_->width()) - 1.0;
{ const ImageFloatType f =
static_cast<ImageFloatType>(camera_->projectionParameters()(0));
for (int y = 0; y < camera_->height(); ++y) {
for (int x = 0; x < camera_->width(); ++x) {
const ImageFloatType point_depth = (*out_depthmap)(y, x); const ImageFloatType point_depth = (*out_depthmap)(y, x);
const ImageFloatType dx = static_cast<ImageFloatType>(x) - xc; const ImageFloatType dx = static_cast<ImageFloatType>(x) - xc;
const ImageFloatType dy = static_cast<ImageFloatType>(y) - yc; const ImageFloatType dy = static_cast<ImageFloatType>(y) - yc;
const ImageFloatType distance_from_center = std::sqrt(dx*dx + dy*dy); const ImageFloatType distance_from_center =
const ImageFloatType plane_depth = point_depth / std::sqrt(1.0 + std::pow(distance_from_center / f, 2)); std::sqrt(dx * dx + dy * dy);
const ImageFloatType plane_depth =
point_depth
/ std::sqrt(1.0 + std::pow(distance_from_center / f, 2));
(*out_depthmap)(y, x) = plane_depth; (*out_depthmap)(y, x) = plane_depth;
} }
} }
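For reference, the conversion in this loop follows from similar triangles under the usual pinhole assumptions: a pixel at radius r = sqrt(dx^2 + dy^2) from the principal point views the scene along a ray whose length per unit of z-depth is sqrt(f^2 + r^2) / f, hence

\[
\mathrm{plane\_depth} = \frac{\mathrm{point\_depth}}{\sqrt{1 + (r/f)^2}}.
\]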