commit a8c2f0ca4373818774519c7271190e0628e0d018 Author: Henri Rebecq Date: Mon Oct 29 17:53:15 2018 +0100 initial commit diff --git a/README.md b/README.md new file mode 100644 index 0000000..98a5ff1 --- /dev/null +++ b/README.md @@ -0,0 +1,19 @@ +## Features + +- Accurate event simulation, guaranteed by the tight integration between the rendering engine and the event simulator +- IMU simulation +- Support for multi-camera systems +- Ground truth camera poses, IMU biases, angular/linear velocities, depth maps, and optic flow maps +- Support for camera distortion +- Different C+/C- contrast thresholds +- Basic noise simulation for event cameras (based on additive Gaussian noise on the contrast threshold) +- Motion blur simulation +- Publish to ROS and/or save data to rosbag + +## Install + +Installation instructions can be found in [our wiki](https://github.com/uzh-rpg/rpg_event_camera_simulator/wiki/Installation). + +## Run + +Specific instructions to run the simulator depending on the chosen rendering engine can be found in [our wiki](https://github.com/uzh-rpg/rpg_event_camera_simulator/wiki). diff --git a/dependencies.yaml b/dependencies.yaml new file mode 100644 index 0000000..1a1bf76 --- /dev/null +++ b/dependencies.yaml @@ -0,0 +1,45 @@ +repositories: + catkin_simple: + type: git + url: git@github.com:catkin/catkin_simple.git + version: master + ze_oss: + type: git + url: git@github.com:uzh-rpg/ze_oss.git + version: master + gflags_catkin: + type: git + url: git@github.com:ethz-asl/gflags_catkin.git + version: master + glog_catkin: + type: git + url: git@github.com:ethz-asl/glog_catkin.git + version: master + eigen_catkin: + type: git + url: git@github.com:ethz-asl/eigen_catkin.git + version: master + eigen_checks: + type: git + url: git@github.com:ethz-asl/eigen_checks.git + version: master + minkindr: + type: git + url: git@github.com:ethz-asl/minkindr.git + version: master + minkindr_ros: + type: git + url: git@github.com:ethz-asl/minkindr_ros.git + version: master + yaml_cpp_catkin: + type: git + url: git@github.com:ethz-asl/yaml_cpp_catkin.git + version: master + rpg_dvs_ros: + type: git + url: git@github.com:uzh-rpg/rpg_dvs_ros.git + version: master + assimp_catkin: + type: git + url: git@github.com:uzh-rpg/assimp_catkin.git + version: master diff --git a/event_camera_simulator/.gitignore b/event_camera_simulator/.gitignore new file mode 100644 index 0000000..ae5035a --- /dev/null +++ b/event_camera_simulator/.gitignore @@ -0,0 +1,4 @@ +*.txt.user +esim_ros/scripts/exp_* +*.swp +*.autosave diff --git a/event_camera_simulator/esim/CMakeLists.txt b/event_camera_simulator/esim/CMakeLists.txt new file mode 100644 index 0000000..e546f82 --- /dev/null +++ b/event_camera_simulator/esim/CMakeLists.txt @@ -0,0 +1,30 @@ +cmake_minimum_required(VERSION 2.8.3) +project(esim) + +find_package(catkin_simple REQUIRED) +find_package(OpenCV REQUIRED) +catkin_simple() + +set(HEADERS + include/esim/esim/simulator.hpp + include/esim/esim/event_simulator.hpp + include/esim/esim/camera_simulator.hpp +) + +set(SOURCES + src/simulator.cpp + src/event_simulator.cpp + src/camera_simulator.cpp +) + +cs_add_library(${PROJECT_NAME} ${SOURCES} ${HEADERS}) + +########## +# GTESTS # +########## + +catkin_add_gtest(test_event_simulator test/test_event_simulator.cpp) +target_link_libraries(test_event_simulator ${PROJECT_NAME} ${OpenCV_LIBRARIES}) + +cs_install() +cs_export() diff --git a/event_camera_simulator/esim/include/esim/esim/camera_simulator.hpp 
b/event_camera_simulator/esim/include/esim/esim/camera_simulator.hpp new file mode 100644 index 0000000..f601b98 --- /dev/null +++ b/event_camera_simulator/esim/include/esim/esim/camera_simulator.hpp @@ -0,0 +1,72 @@ +#pragma once + +#include +#include +#include + +namespace event_camera_simulator { + +class ImageBuffer +{ +public: + ZE_POINTER_TYPEDEFS(ImageBuffer); + + struct ImageData + { + ImageData(Image img, Time stamp, Duration exposure_time) + : image(img), + stamp(stamp), + exposure_time(exposure_time) {} + + Image image; + Time stamp; + Duration exposure_time; // timestamp since last image (0 if this is the first image) + }; + + using ExposureImage = std::pair; + + // Rolling image buffer of mazimum size 'buffer_size_ns'. + ImageBuffer(Duration buffer_size_ns) + : buffer_size_ns_(buffer_size_ns) {} + + void addImage(Time t, const Image& img); + + std::deque getRawBuffer() const { return data_; } + + size_t size() const { return data_.size(); } + + Duration getExposureTime() const { return buffer_size_ns_; } + +private: + Duration buffer_size_ns_; + std::deque data_; +}; + + +/* + * The CameraSimulator takes as input a sequence of stamped images, + * assumed to be sampled at a "sufficiently high" framerate and with + * floating-point precision, and treats each image as a measure of + * irradiance. + * From this, it simulates a real camera, including motion blur. + * + * @TODO: simulate a non-linear camera response curve, shot noise, etc. + */ +class CameraSimulator +{ +public: + CameraSimulator(double exposure_time_ms) + : exposure_time_(ze::secToNanosec(exposure_time_ms / 1000.0)) + { + buffer_.reset(new ImageBuffer(exposure_time_)); + } + + bool imageCallback(const Image& img, Time time, + const ImagePtr &camera_image); + +private: + ImageBuffer::Ptr buffer_; + const Duration exposure_time_; +}; + +} // namespace event_camera_simulator diff --git a/event_camera_simulator/esim/include/esim/esim/event_simulator.hpp b/event_camera_simulator/esim/include/esim/esim/event_simulator.hpp new file mode 100644 index 0000000..fc8ad03 --- /dev/null +++ b/event_camera_simulator/esim/include/esim/esim/event_simulator.hpp @@ -0,0 +1,54 @@ +#pragma once + +#include + +namespace event_camera_simulator { + +/* + * The EventSimulator takes as input a sequence of stamped images, + * assumed to be sampled at a "sufficiently high" framerate, + * and simulates the principle of operation of an idea event camera + * with a constant contrast threshold C. + * Pixel-wise intensity values are linearly interpolated in time. + * + * The pixel-wise voltages are reset with the values from the first image + * which is passed to the simulator. 
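+ *
+ * Concretely (see event_simulator.cpp): for every pixel, the (log) intensity is
+ * linearly interpolated in time between the previous and the current image, and
+ * one event is emitted each time the interpolated signal moves away from the
+ * pixel's reference level by more than the contrast threshold (C+ for positive
+ * changes, C- for negative ones). The threshold can be perturbed per event by
+ * Gaussian noise (sigma_Cp / sigma_Cm), and events that fall within
+ * refractory_period_ns of the previous event at the same pixel are dropped.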
+ */ +class EventSimulator +{ +public: + + struct Config + { + double Cp; + double Cm; + double sigma_Cp; + double sigma_Cm; + Duration refractory_period_ns; + bool use_log_image; + double log_eps; + }; + + using TimestampImage = cv::Mat_; + + EventSimulator(const Config& config) + : config_(config), + is_initialized_(false), + current_time_(0) + {} + + void init(const Image& img, Time time); + Events imageCallback(const Image& img, Time time); + +private: + bool is_initialized_; + Time current_time_; + Image ref_values_; + Image last_img_; + TimestampImage last_event_timestamp_; + cv::Size size_; + + Config config_; +}; + +} // namespace event_camera_simulator diff --git a/event_camera_simulator/esim/include/esim/esim/simulator.hpp b/event_camera_simulator/esim/include/esim/esim/simulator.hpp new file mode 100644 index 0000000..d18a63e --- /dev/null +++ b/event_camera_simulator/esim/include/esim/esim/simulator.hpp @@ -0,0 +1,59 @@ +#pragma once + +#include +#include +#include + +namespace event_camera_simulator { + +/* The Simulator forwards the simulated images / depth maps + * from the data provider to multiple, specialized camera simulators, such as: + * (i) event camera simulators that simulate events based on sequences of images + * (ii) camera simulators that simulate real cameras + * (including motion blur, camera response function, noise, etc.) + * + * The Simulator then forwards the simulated data to one or more publishers. +*/ +class Simulator +{ +public: + Simulator(size_t num_cameras, + const EventSimulator::Config& event_sim_config, + double exposure_time_ms) + : num_cameras_(num_cameras), + exposure_time_(ze::millisecToNanosec(exposure_time_ms)) + { + for(size_t i=0; i event_simulators_; + + std::vector camera_simulators_; + Duration exposure_time_; + + std::vector publishers_; + + ImagePtrVector corrupted_camera_images_; +}; + +} // namespace event_camera_simulator diff --git a/event_camera_simulator/esim/package.xml b/event_camera_simulator/esim/package.xml new file mode 100644 index 0000000..e80d852 --- /dev/null +++ b/event_camera_simulator/esim/package.xml @@ -0,0 +1,20 @@ + + + esim + 0.0.0 + Event camera simulator, which can simulate events from a stream of images samplet at high frequency. + + Henri Rebecq + + GPLv3 + + catkin + catkin_simple + + esim_common + esim_data_provider + esim_visualization + gflags_catkin + glog_catkin + + diff --git a/event_camera_simulator/esim/src/camera_simulator.cpp b/event_camera_simulator/esim/src/camera_simulator.cpp new file mode 100644 index 0000000..26c0363 --- /dev/null +++ b/event_camera_simulator/esim/src/camera_simulator.cpp @@ -0,0 +1,62 @@ +#include + +namespace event_camera_simulator { + +void ImageBuffer::addImage(Time t, const Image& img) +{ + if(!data_.empty()) + { + // Check that the image timestamps are monotonically increasing + CHECK_GT(t, data_.back().stamp); + } + + Duration exposure_time = data_.empty() ? 
0 : t - data_.back().stamp; + VLOG(2) << "Adding image to buffer with stamp: " << t + << " and exposure time " << exposure_time; + data_.push_back(ImageData(img.clone(), t, exposure_time)); + + // Remove all the images with timestamp older than t - buffer_size_ns_ + auto first_valid_element = std::lower_bound(data_.begin(), data_.end(), t - buffer_size_ns_, + [](ImageData lhs, Time rhs) -> bool { return lhs.stamp < rhs; }); + + data_.erase(data_.begin(), first_valid_element); + VLOG(3) << "first/last element in buffer: " + << data_.front().stamp + << " " << data_.back().stamp; + VLOG(3) << "number of images in the buffer: " << data_.size(); + + CHECK_LE(data_.back().stamp - data_.front().stamp, buffer_size_ns_); +} + + +bool CameraSimulator::imageCallback(const Image &img, Time time, + const ImagePtr& camera_image) +{ + CHECK(camera_image); + CHECK_EQ(camera_image->size(), img.size()); + + buffer_->addImage(time, img); + + static const Time initial_time = time; + if(time - initial_time < exposure_time_) + { + LOG_FIRST_N(WARNING, 1) << "The images do not cover a time span long enough to simulate the exposure time accurately."; + return false; + } + + // average all the images in the buffer to simulate motion blur + camera_image->setTo(0); + ze::real_t denom = 0.; + for(const ImageBuffer::ImageData& img : buffer_->getRawBuffer()) + { + *camera_image += ze::nanosecToMillisecTrunc(img.exposure_time) * img.image; + denom += ze::nanosecToMillisecTrunc(img.exposure_time); + } + *camera_image /= denom; + cv::Mat disp; + camera_image->convertTo(disp, CV_8U, 255); + + return true; +} + +} // namespace event_camera_simulator diff --git a/event_camera_simulator/esim/src/event_simulator.cpp b/event_camera_simulator/esim/src/event_simulator.cpp new file mode 100644 index 0000000..fa81286 --- /dev/null +++ b/event_camera_simulator/esim/src/event_simulator.cpp @@ -0,0 +1,117 @@ +#include +#include +#include +#include +#include + +namespace event_camera_simulator { + +void EventSimulator::init(const Image &img, Time time) +{ + VLOG(1) << "Initialized event camera simulator with sensor size: " << img.size(); + VLOG(1) << "and contrast thresholds: C+ = " << config_.Cp << " , C- = " << config_.Cm; + is_initialized_ = true; + last_img_ = img.clone(); + ref_values_ = img.clone(); + last_event_timestamp_ = TimestampImage::zeros(img.size()); + current_time_ = time; + size_ = img.size(); +} + +Events EventSimulator::imageCallback(const Image& img, Time time) +{ + CHECK_GE(time, 0); + Image preprocessed_img = img.clone(); + if(config_.use_log_image) + { + LOG_FIRST_N(INFO, 1) << "Converting the image to log image with eps = " << config_.log_eps << "."; + cv::log(config_.log_eps + img, preprocessed_img); + } + + if(!is_initialized_) + { + init(preprocessed_img, time); + return {}; + } + + // For each pixel, check if new events need to be generated since the last image sample + static constexpr ImageFloatType tolerance = 1e-6; + Events events; + Duration delta_t_ns = time - current_time_; + + CHECK_GT(delta_t_ns, 0u); + CHECK_EQ(img.size(), size_); + + for (int y = 0; y < size_.height; ++y) + { + for (int x = 0; x < size_.width; ++x) + { + ImageFloatType itdt = preprocessed_img(y, x); + ImageFloatType it = last_img_(y, x); + ImageFloatType prev_cross = ref_values_(y, x); + + if (std::fabs (it - itdt) > tolerance) + { + ImageFloatType pol = (itdt >= it) ? +1.0 : -1.0; + ImageFloatType C = (pol > 0) ? config_.Cp : config_.Cm; + ImageFloatType sigma_C = (pol > 0) ? 
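+        // noise simulation: the nominal contrast threshold for this event is
+        // perturbed below with zero-mean Gaussian noise of std sigma_C (if > 0)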
config_.sigma_Cp : config_.sigma_Cm; + if(sigma_C > 0) + { + C += ze::sampleNormalDistribution(false, 0, sigma_C); + } + ImageFloatType curr_cross = prev_cross; + bool all_crossings = false; + + do + { + curr_cross += pol * C; + + if ((pol > 0 && curr_cross > it && curr_cross <= itdt) + || (pol < 0 && curr_cross < it && curr_cross >= itdt)) + { + Duration edt = (curr_cross - it) * delta_t_ns / (itdt - it); + Time t = current_time_ + edt; + + // check that pixel (x,y) is not currently in a "refractory" state + // i.e. |t-that last_timestamp(x,y)| >= refractory_period + const Time last_stamp_at_xy = ze::secToNanosec(last_event_timestamp_(y,x)); + CHECK_GE(t, last_stamp_at_xy); + const Duration dt = t - last_stamp_at_xy; + if(last_event_timestamp_(y,x) == 0 || dt >= config_.refractory_period_ns) + { + events.push_back(Event(x, y, t, pol > 0)); + last_event_timestamp_(y,x) = ze::nanosecToSecTrunc(t); + } + else + { + VLOG(3) << "Dropping event because time since last event (" + << dt << " ns) < refractory period (" + << config_.refractory_period_ns << " ns)."; + } + ref_values_(y,x) = curr_cross; + } + else + { + all_crossings = true; + } + } while (!all_crossings); + } // end tolerance + } // end for each pixel + } + + // update simvars for next loop + current_time_ = time; + last_img_ = preprocessed_img.clone(); // it is now the latest image + + // Sort the events by increasing timestamps, since this is what + // most event processing algorithms expect + sort(events.begin(), events.end(), + [](const Event& a, const Event& b) -> bool + { + return a.t < b.t; + }); + + return events; +} + +} // namespace event_camera_simulator diff --git a/event_camera_simulator/esim/src/simulator.cpp b/event_camera_simulator/esim/src/simulator.cpp new file mode 100644 index 0000000..9b95093 --- /dev/null +++ b/event_camera_simulator/esim/src/simulator.cpp @@ -0,0 +1,137 @@ +#include +#include +#include + +namespace event_camera_simulator { + +DECLARE_TIMER(TimerEventSimulator, timers_event_simulator_, + simulate_events, + visualization + ); + +Simulator::~Simulator() +{ + timers_event_simulator_.saveToFile("/tmp", "event_simulator.csv"); +} + +void Simulator::dataProviderCallback(const SimulatorData &sim_data) +{ + CHECK_EQ(event_simulators_.size(), num_cameras_); + + if(sim_data.images_updated) + { + EventsVector events(num_cameras_); + Time time = sim_data.timestamp; + // simulate the events and camera images for every sensor in the rig + { + auto t = timers_event_simulator_[TimerEventSimulator::simulate_events].timeScope(); + for(size_t i=0; i(sim_data.images[i]->size())); + corrupted_camera_images_[i]->setTo(0.); + } + + camera_simulators_[i].imageCallback(*sim_data.images[i], time, corrupted_camera_images_[i]); + } + } + + // publish the simulation data + events + { + auto t = timers_event_simulator_[TimerEventSimulator::visualization].timeScope(); + publishData(sim_data, events, corrupted_camera_images_); + } + } + else + { + { + // just forward the simulation data to the publisher + auto t = timers_event_simulator_[TimerEventSimulator::visualization].timeScope(); + publishData(sim_data, {}, corrupted_camera_images_); + } + } +} + +void Simulator::publishData(const SimulatorData& sim_data, + const EventsVector& events, + const ImagePtrVector& camera_images) +{ + if(publishers_.empty()) + { + LOG_FIRST_N(WARNING, 1) << "No publisher available"; + return; + } + + Time time = sim_data.timestamp; + const Transformation& T_W_B = sim_data.groundtruth.T_W_B; + const TransformationVector& T_W_Cs = 
sim_data.groundtruth.T_W_Cs; + const ze::CameraRig::Ptr& camera_rig = sim_data.camera_rig; + + // Publish the new data (events, images, depth maps, poses, point clouds, etc.) + if(!events.empty()) + { + for(const Publisher::Ptr& publisher : publishers_) + publisher->eventsCallback(events); + } + if(sim_data.poses_updated) + { + for(const Publisher::Ptr& publisher : publishers_) + publisher->poseCallback(T_W_B, T_W_Cs, time); + } + if(sim_data.twists_updated) + { + for(const Publisher::Ptr& publisher : publishers_) + publisher->twistCallback(sim_data.groundtruth.angular_velocities_, + sim_data.groundtruth.linear_velocities_, + time); + } + if(sim_data.imu_updated) + { + for(const Publisher::Ptr& publisher : publishers_) + publisher->imuCallback(sim_data.specific_force_corrupted, sim_data.angular_velocity_corrupted, time); + } + + if(camera_rig) + { + for(const Publisher::Ptr& publisher : publishers_) + publisher->cameraInfoCallback(camera_rig, time); + } + if(sim_data.images_updated) + { + for(const Publisher::Ptr& publisher : publishers_) + { + publisher->imageCallback(sim_data.images, time); + + // the images should be timestamped at mid-exposure (unless it is not possible) + const Time mid_exposure_time = (time >= 0.5 * exposure_time_) ? time - 0.5 * exposure_time_ : time; + publisher->imageCorruptedCallback(camera_images, mid_exposure_time); + } + } + if(sim_data.depthmaps_updated) + { + for(const Publisher::Ptr& publisher : publishers_) + publisher->depthmapCallback(sim_data.depthmaps, time); + } + if(sim_data.optic_flows_updated) + { + for(const Publisher::Ptr& publisher : publishers_) + publisher->opticFlowCallback(sim_data.optic_flows, time); + } + if(sim_data.depthmaps_updated && !events.empty()) + { + PointCloudVector pointclouds(num_cameras_); + for(size_t i=0; iatShared(i)); + } + for(const Publisher::Ptr& publisher : publishers_) + publisher->pointcloudCallback(pointclouds, time); + } +} + +} // namespace event_camera_simulator diff --git a/event_camera_simulator/esim/test/test_event_simulator.cpp b/event_camera_simulator/esim/test/test_event_simulator.cpp new file mode 100644 index 0000000..0137140 --- /dev/null +++ b/event_camera_simulator/esim/test/test_event_simulator.cpp @@ -0,0 +1,237 @@ +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#define USE_OPENCV + +namespace event_camera_simulator +{ + +class CSVImageLoader +{ +public: + CSVImageLoader(const std::string& path_to_data_folder) + : path_to_data_folder_(path_to_data_folder) + { + images_in_str_.open(ze::joinPath(path_to_data_folder, "images.csv")); + CHECK(images_in_str_.is_open()); + } + + bool next(int64_t& stamp, Image& img) + { + std::string line; + if(!getline(images_in_str_, line)) + { + LOG(INFO) << "Finished reading all the images in the folder"; + return false; + } + + if('%' != line.at(0) && '#' != line.at(0)) + { + std::vector items = ze::splitString(line, delimiter_); + stamp = std::stoll(items[0]); + + const std::string& path_to_image + = ze::joinPath(path_to_data_folder_, "frame", "cam_0", items[1]); + + img = cv::imread(path_to_image, 0); + CHECK(img.data) << "Error loading image: " << path_to_image; + + return true; + } + else + { + return next(stamp, img); + } + } + +private: + std::ifstream images_in_str_; + const char delimiter_{','}; + std::string path_to_data_folder_; +}; + +} // namespace event_camera_simulator + +std::string getTestDataDir(const std::string& dataset_name) +{ + using namespace ze; + + const char* datapath_dir = 
std::getenv("ESIM_TEST_DATA_PATH"); + CHECK(datapath_dir != nullptr) + << "Did you download the esim_test_data repository and set " + << "the ESIM_TEST_DATA_PATH environment variable?"; + + std::string path(datapath_dir); + CHECK(isDir(path)) << "Folder does not exist: " << path; + path = path + "/data/" + dataset_name; + CHECK(isDir(path)) << "Dataset does not exist: " << path; + return path; +} + +TEST(EventSimulator, testImageReconstruction) +{ + using namespace event_camera_simulator; + + // Load image sequence from folder + const std::string path_to_data_folder = getTestDataDir("planar_carpet"); + CSVImageLoader reader(path_to_data_folder); + + EventSimulator::Config event_sim_config; + event_sim_config.Cp = 0.05; + event_sim_config.Cm = 0.03; + event_sim_config.sigma_Cp = 0; + event_sim_config.sigma_Cm = 0; + event_sim_config.use_log_image = true; + event_sim_config.log_eps = 0.001; + EventSimulator simulator(event_sim_config); + + LOG(INFO) << "Testing event camera simulator with C+ = " << event_sim_config.Cp + << ", C- = " << event_sim_config.Cm; + + const ImageFloatType max_reconstruction_error + = std::max(event_sim_config.Cp, event_sim_config.Cm); + + bool is_first_image = true; + Image I, L, L_reconstructed; + int64_t stamp; + while(reader.next(stamp, I)) + { + I.convertTo(I, cv::DataType::type, 1./255.); + cv::log(event_sim_config.log_eps + I, L); + + if(is_first_image) + { + // Initialize reconstructed image from the ground truth image + L_reconstructed = L.clone(); + is_first_image = false; + } + + Events events = simulator.imageCallback(I, stamp); + + // Reconstruct next image from previous one using the events in between + for(const Event& e : events) + { + ImageFloatType pol = e.pol ? 1. : -1.; + const ImageFloatType C = e.pol ? event_sim_config.Cp : event_sim_config.Cm; + L_reconstructed(e.y,e.x) += pol * C; + } + + // Check that the reconstruction error is bounded by the contrast thresholds + for(int y=0; y::type, 1./255.); + cv::log(event_sim_config.log_eps + I, L); + + if(is_first_image) + { + // Initialize reconstructed image from the ground truth image + L_reconstructed = L.clone(); + is_first_image = false; + } + + Events events = simulator.imageCallback(I, stamp); + + // Reconstruct next image from previous one using the events in between + for(const Event& e : events) + { + ImageFloatType pol = e.pol ? 1. : -1.; + ImageFloatType C = e.pol ? 
event_sim_config.Cp : event_sim_config.Cm; + C += contrast_bias; + L_reconstructed(e.y,e.x) += pol * C; + } + + // Compute the mean and average reconstruction error over the whole image + Image error; + cv::absdiff(L, L_reconstructed, error); + cv::Scalar mean_error, stddev_error; + cv::meanStdDev(error, mean_error, stddev_error); + VLOG(1) << "Mean error: " << mean_error + << ", Stddev: " << stddev_error; + + times.push_back(ze::nanosecToSecTrunc(stamp)); + mean_errors.push_back(mean_error[0]); + stddev_errors.push_back(stddev_error[0]); + +#ifdef USE_OPENCV + const ImageFloatType vmin = std::log(event_sim_config.log_eps); + const ImageFloatType vmax = std::log(1.0 + event_sim_config.log_eps); + cv::Mat disp = 255.0 * (L_reconstructed - vmin) / (vmax - vmin); + disp.convertTo(disp, CV_8U); + cv::imshow("Reconstructed", disp); + cv::waitKey(1); +#endif + } + + // Plot the mean and stddev reconstruction error over time + ze::plt::plot(times, mean_errors, "r"); + ze::plt::plot(times, stddev_errors, "b--"); + std::stringstream title; + title << "C = " << event_sim_config.Cp + << ", sigma = " << event_sim_config.sigma_Cp + << ", bias = " << contrast_bias; + ze::plt::title(title.str()); + ze::plt::xlabel("Time (s)"); + ze::plt::ylabel("Mean / Stddev reconstruction error"); + ze::plt::grid(true); + ze::plt::save("/tmp/evolution_reconstruction_error.pdf"); + ze::plt::show(); +} + +ZE_UNITTEST_ENTRYPOINT diff --git a/event_camera_simulator/esim_common/CMakeLists.txt b/event_camera_simulator/esim_common/CMakeLists.txt new file mode 100644 index 0000000..9a3b486 --- /dev/null +++ b/event_camera_simulator/esim_common/CMakeLists.txt @@ -0,0 +1,28 @@ +cmake_minimum_required(VERSION 2.8.3) +project(esim_common) + +find_package(catkin_simple REQUIRED) +find_package(OpenCV REQUIRED) +include_directories(${OpenCV_INCLUDE_DIRS}) +catkin_simple() + +set(HEADERS + include/esim/common/types.hpp + include/esim/common/utils.hpp +) + +set(SOURCES + src/utils.cpp +) + +cs_add_library(${PROJECT_NAME} ${SOURCES} ${HEADERS}) + +########## +# GTESTS # +########## + +catkin_add_gtest(test_utils test/test_utils.cpp) +target_link_libraries(test_utils ${PROJECT_NAME} ${OpenCV_LIBRARIES}) + +cs_install() +cs_export() diff --git a/event_camera_simulator/esim_common/include/esim/common/types.hpp b/event_camera_simulator/esim_common/include/esim/common/types.hpp new file mode 100644 index 0000000..e174326 --- /dev/null +++ b/event_camera_simulator/esim_common/include/esim/common/types.hpp @@ -0,0 +1,160 @@ +#pragma once + +#include +#include +#include +#include + +// FloatType defines the floating point accuracy (single or double precision) +// for the geometric operations (computing rotation matrices, point projection, etc.). +// This should typically be double precision (highest accuracy). +#define FloatType ze::real_t + +// ImageFloatType defines the floating point accuracy (single or double precision) +// of the intensity images (and depth images). +// Single precision should be enough there in most cases. 
+#define ImageFloatType float + +namespace event_camera_simulator { + +using Translation = ze::Position; +using Vector3 = ze::Vector3; +using Vector4 = ze::Vector4; +using Vector3i = Eigen::Vector3i; + +using Transformation = ze::Transformation; +using TransformationVector = ze::TransformationVector; +using TransformationPtr = std::shared_ptr; + +using Normal = ze::Vector3; +using CalibrationMatrix = ze::Matrix3; +using RotationMatrix = ze::Matrix3; +using HomographyMatrix = ze::Matrix3; + +using AngularVelocity = ze::Vector3; +using LinearVelocity = ze::Vector3; +using AngularVelocityVector = std::vector; +using LinearVelocityVector = std::vector; + +using uint16_t = ze::uint16_t; + +using Time = ze::int64_t; +using Duration = ze::uint64_t; +using Image = cv::Mat_; +using ImagePtr = std::shared_ptr; +using Depthmap = cv::Mat_; +using OpticFlow = cv::Mat_< cv::Vec >; +using OpticFlowPtr = std::shared_ptr; +using DepthmapPtr = std::shared_ptr; + +using ImagePtrVector = std::vector; +using DepthmapPtrVector = std::vector; +using OpticFlowPtrVector = std::vector; + +using Camera = ze::Camera; + +struct Event +{ + Event(uint16_t x, uint16_t y, Time t, bool pol) + : x(x), + y(y), + t(t), + pol(pol) + { + + } + + uint16_t x; + uint16_t y; + Time t; + bool pol; +}; + +using Events = std::vector; +using EventsVector = std::vector; +using EventsPtr = std::shared_ptr; + +struct PointXYZRGB +{ + PointXYZRGB(FloatType x, FloatType y, FloatType z, + int red, int green, int blue) + : xyz(x, y, z), + rgb(red, green, blue) {} + + PointXYZRGB(const Vector3& xyz) + : xyz(xyz) {} + + PointXYZRGB(const Vector3& xyz, const Vector3i& rgb) + : xyz(xyz), + rgb(rgb) {} + + Vector3 xyz; + Vector3i rgb; +}; + +using PointCloud = std::vector; +using PointCloudVector = std::vector; + +struct SimulatorData +{ + EIGEN_MAKE_ALIGNED_OPERATOR_NEW + + //! Nanosecond timestamp. + Time timestamp; + + //! Camera images. + ImagePtrVector images; + + //! Depth maps. + DepthmapPtrVector depthmaps; + + //! Optic flow maps. + OpticFlowPtrVector optic_flows; + + //! Camera + ze::CameraRig::Ptr camera_rig; + + //! An accelerometer measures the specific force (incl. gravity), + //! corrupted by noise and bias. + Vector3 specific_force_corrupted; + + //! The angular velocity (in the body frame) corrupted by noise and bias. + Vector3 angular_velocity_corrupted; + + //! Groundtruth states. + struct Groundtruth + { + EIGEN_MAKE_ALIGNED_OPERATOR_NEW + + //! Pose of the body (i.e. the IMU) expressed in the world frame. + Transformation T_W_B; + + //! Accelerometer and gyro bias + Vector3 acc_bias; + Vector3 gyr_bias; + + //! Poses of the cameras in the rig expressed in the world frame. + TransformationVector T_W_Cs; + + //! Linear and angular velocities (i.e. twists) of the cameras in the rig, + //! expressed in each camera's local coordinate frame. 
+ LinearVelocityVector linear_velocities_; + AngularVelocityVector angular_velocities_; + + // dynamic objects + std::vector T_W_OBJ_; + std::vector linear_velocity_obj_; + std::vector angular_velocity_obj_; + }; + Groundtruth groundtruth; + + // Flags to indicate whether a value has been updated or not + bool images_updated; + bool depthmaps_updated; + bool optic_flows_updated; + bool twists_updated; + bool poses_updated; + bool imu_updated; +}; + +} // namespace event_camera_simulator diff --git a/event_camera_simulator/esim_common/include/esim/common/utils.hpp b/event_camera_simulator/esim_common/include/esim/common/utils.hpp new file mode 100644 index 0000000..d89e7be --- /dev/null +++ b/event_camera_simulator/esim_common/include/esim/common/utils.hpp @@ -0,0 +1,59 @@ +#pragma once + +#include + +namespace ze { + class Camera; +} + +namespace event_camera_simulator { + +inline double degToRad(double deg) +{ + return deg * CV_PI / 180.0; +} + +inline double hfovToFocalLength(double hfov_deg, int W) +{ + return 0.5 * static_cast(W) / std::tan(0.5 * degToRad(hfov_deg)); +} + +CalibrationMatrix calibrationMatrixFromCamera(const Camera::Ptr& camera); + +PointCloud eventsToPointCloud(const Events& events, const Depthmap& depthmap, const ze::Camera::Ptr& camera); + +FloatType maxMagnitudeOpticFlow(const OpticFlowPtr& flow); + +FloatType maxPredictedAbsBrightnessChange(const ImagePtr& I, const OpticFlowPtr& flow); + +void gaussianBlur(ImagePtr& I, FloatType sigma); + +// Helper class to compute optic flow from a twist vector and depth map +// Precomputes a lookup table for pixel -> bearing vector correspondences +// to accelerate the computation +class OpticFlowHelper +{ +public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW + + ZE_POINTER_TYPEDEFS(OpticFlowHelper); + + OpticFlowHelper(const ze::Camera::Ptr& camera); + + void computeFromDepthAndTwist(const ze::Vector3& w_WC, const ze::Vector3& v_WC, + const DepthmapPtr& depthmap, OpticFlowPtr& flow); + + void computeFromDepthCamTwistAndObjDepthAndTwist(const ze::Vector3& w_WC, const ze::Vector3& v_WC, const DepthmapPtr& depthmap, + const ze::Vector3& r_COBJ, const ze::Vector3& w_WOBJ, const ze::Vector3& v_WOBJ, + OpticFlowPtr& flow); + +private: + + void precomputePixelToBearingLookupTable(); + + ze::Camera::Ptr camera_; + + // Precomputed lookup table from keypoint -> bearing vector + ze::Bearings bearings_C_; +}; +} // namespace event_camera_simulator diff --git a/event_camera_simulator/esim_common/package.xml b/event_camera_simulator/esim_common/package.xml new file mode 100644 index 0000000..b78de16 --- /dev/null +++ b/event_camera_simulator/esim_common/package.xml @@ -0,0 +1,19 @@ + + + esim_common + 0.0.0 + Common data types and utils for the event camera simulator. 
+ + Henri Rebecq + + GPLv3 + + catkin + catkin_simple + + ze_common + ze_cameras + gflags_catkin + glog_catkin + + diff --git a/event_camera_simulator/esim_common/src/utils.cpp b/event_camera_simulator/esim_common/src/utils.cpp new file mode 100644 index 0000000..7b28edc --- /dev/null +++ b/event_camera_simulator/esim_common/src/utils.cpp @@ -0,0 +1,190 @@ +#include +#include +#include + +namespace event_camera_simulator { + +OpticFlowHelper::OpticFlowHelper(const ze::Camera::Ptr& camera) + : camera_(camera) +{ + CHECK(camera_); + precomputePixelToBearingLookupTable(); +} + +void OpticFlowHelper::precomputePixelToBearingLookupTable() +{ + // points_C is a matrix containing the coordinates of each pixel coordinate in the image plane + ze::Keypoints points_C(2, camera_->width() * camera_->height()); + for(int y=0; yheight(); ++y) + { + for(int x=0; xwidth(); ++x) + { + points_C.col(x + camera_->width() * y) = ze::Keypoint(x,y); + } + } + bearings_C_ = camera_->backProjectVectorized(points_C); + bearings_C_.array().rowwise() /= bearings_C_.row(2).array(); +} + +void OpticFlowHelper::computeFromDepthAndTwist(const ze::Vector3& w_WC, const ze::Vector3& v_WC, + const DepthmapPtr& depthmap, OpticFlowPtr& flow) +{ + CHECK(depthmap); + CHECK_EQ(depthmap->rows, camera_->height()); + CHECK_EQ(depthmap->cols, camera_->width()); + CHECK(depthmap->isContinuous()); + + const ze::Vector3 w_CW = -w_WC; // rotation speed of the world wrt the camera + const ze::Vector3 v_CW = -v_WC; // speed of the world wrt the camera + const ze::Matrix33 R_CW = ze::skewSymmetric(w_CW); + + Eigen::Map> depths(depthmap->ptr(), 1, depthmap->rows * depthmap->cols); + ze::Positions Xs = bearings_C_; + Xs.array().rowwise() *= depths.cast().array(); + + ze::Matrix6X dproject_dX = + camera_->dProject_dLandmarkVectorized(Xs); + + for(int y=0; yheight(); ++y) + { + for(int x=0; xwidth(); ++x) + { + const Vector3 X = Xs.col(x + camera_->width() * y); + ze::Matrix31 dXdt = R_CW * X + v_CW; + ze::Vector2 flow_vec + = Eigen::Map(dproject_dX.col(x + camera_->width() * y).data()) * dXdt; + + (*flow)(y,x) = cv::Vec(flow_vec(0), flow_vec(1)); + } + } +} + +void OpticFlowHelper::computeFromDepthCamTwistAndObjDepthAndTwist(const ze::Vector3& w_WC, const ze::Vector3& v_WC, const DepthmapPtr& depthmap, + const ze::Vector3& r_COBJ, const ze::Vector3& w_WOBJ, const ze::Vector3& v_WOBJ, OpticFlowPtr& flow) +{ + CHECK(depthmap); + CHECK_EQ(depthmap->rows, camera_->height()); + CHECK_EQ(depthmap->cols, camera_->width()); + CHECK(depthmap->isContinuous()); + + const ze::Matrix33 w_WC_tilde = ze::skewSymmetric(w_WC); + const ze::Matrix33 w_WOBJ_tilde = ze::skewSymmetric(w_WOBJ); + + Eigen::Map> depths(depthmap->ptr(), 1, depthmap->rows * depthmap->cols); + ze::Positions Xs = bearings_C_; + Xs.array().rowwise() *= depths.cast().array(); + + ze::Matrix6X dproject_dX = + camera_->dProject_dLandmarkVectorized(Xs); + + for(int y=0; yheight(); ++y) + { + for(int x=0; xwidth(); ++x) + { + const Vector3 r_CX = Xs.col(x + camera_->width() * y); + const Vector3 r_OBJX = r_CX - r_COBJ; + + ze::Matrix31 dXdt = v_WOBJ - v_WC - w_WC_tilde*r_CX + w_WOBJ_tilde*r_OBJX; + ze::Vector2 flow_vec + = Eigen::Map(dproject_dX.col(x + camera_->width() * y).data()) * dXdt; + + (*flow)(y,x) = cv::Vec(flow_vec(0), flow_vec(1)); + } + } +} + +FloatType maxMagnitudeOpticFlow(const OpticFlowPtr& flow) +{ + CHECK(flow); + FloatType max_squared_magnitude = 0; + for(int y=0; yrows; ++y) + { + for(int x=0; xcols; ++x) + { + const FloatType squared_magnitude = cv::norm((*flow)(y,x), 
cv::NORM_L2SQR); + if(squared_magnitude > max_squared_magnitude) + { + max_squared_magnitude = squared_magnitude; + } + } + } + return std::sqrt(max_squared_magnitude); +} + +FloatType maxPredictedAbsBrightnessChange(const ImagePtr& I, const OpticFlowPtr& flow) +{ + const size_t height = I->rows; + const size_t width = I->cols; + + Image Ix, Iy; // horizontal/vertical gradients of I + // the factor 1/8 accounts for the scaling introduced by the Sobel filter mask + //cv::Sobel(*I, Ix, cv::DataType::type, 1, 0, 3, 1./8.); + //cv::Sobel(*I, Iy, cv::DataType::type, 0, 1, 3, 1./8.); + + // the factor 1/32 accounts for the scaling introduced by the Scharr filter mask + cv::Scharr(*I, Ix, cv::DataType::type, 1, 0, 1./32.); + cv::Scharr(*I, Iy, cv::DataType::type, 0, 1, 1./32.); + + Image Lx, Ly; // horizontal/vertical gradients of log(I). d(logI)/dx = 1/I * dI/dx + static const ImageFloatType eps = 1e-3; // small additive term to avoid problems at I=0 + cv::divide(Ix, *I+eps, Lx); + cv::divide(Iy, *I+eps, Ly); + + Image dLdt(height, width); + for(int y=0; y + const ImageFloatType dLdt_at_xy = + Lx(y,x) * (*flow)(y,x)[0] + + Ly(y,x) * (*flow)(y,x)[1]; // "-" sign ignored since we are interested in the absolute value... + dLdt(y,x) = std::fabs(dLdt_at_xy); + } + } + double min_dLdt, max_dLdt; + int min_idx, max_idx; + cv::minMaxIdx(dLdt, &min_dLdt, &max_dLdt, + &min_idx, &max_idx); + + return static_cast(max_dLdt); +} + +void gaussianBlur(ImagePtr& I, FloatType sigma) +{ + cv::GaussianBlur(*I, *I, cv::Size(15,15), sigma, sigma); +} + +CalibrationMatrix calibrationMatrixFromCamera(const Camera::Ptr& camera) +{ + CHECK(camera); + const ze::VectorX params = camera->projectionParameters(); + CalibrationMatrix K; + K << params(0), 0, params(2), + 0, params(1), params(3), + 0, 0, 1; + return K; +} + +PointCloud eventsToPointCloud(const Events& events, const Depthmap& depthmap, const ze::Camera::Ptr& camera) +{ + PointCloud pcl_camera; + for(const Event& ev : events) + { + Vector3 X_c = camera->backProject(ze::Keypoint(ev.x,ev.y)); + X_c[0] /= X_c[2]; + X_c[1] /= X_c[2]; + X_c[2] = 1.; + const ImageFloatType z = depthmap(ev.y,ev.x); + Vector3 P_c = z * X_c; + Vector3i rgb; + static const Vector3i red(255, 0, 0); + static const Vector3i blue(0, 0, 255); + rgb = (ev.pol) ? 
blue : red; + PointXYZRGB P_c_intensity(P_c, rgb); + pcl_camera.push_back(P_c_intensity); + } + return pcl_camera; +} + +} // namespace event_camera_simulator diff --git a/event_camera_simulator/esim_common/test/test_utils.cpp b/event_camera_simulator/esim_common/test/test_utils.cpp new file mode 100644 index 0000000..0f744d5 --- /dev/null +++ b/event_camera_simulator/esim_common/test/test_utils.cpp @@ -0,0 +1,213 @@ +#include +#include +#include +#include +#include +#include +#include + +std::string getTestDataDir(const std::string& dataset_name) +{ + using namespace ze; + + const char* datapath_dir = std::getenv("ESIM_TEST_DATA_PATH"); + CHECK(datapath_dir != nullptr) + << "Did you download the esim_test_data repository and set " + << "the ESIM_TEST_DATA_PATH environment variable?"; + + std::string path(datapath_dir); + CHECK(isDir(path)) << "Folder does not exist: " << path; + path = path + "/data/" + dataset_name; + CHECK(isDir(path)) << "Dataset does not exist: " << path; + return path; +} + +namespace event_camera_simulator { + +void computeOpticFlowFiniteDifference(const ze::Camera::Ptr& camera, + const ze::Vector3& angular_velocity, + const ze::Vector3& linear_velocity, + const DepthmapPtr& depth, + OpticFlowPtr& flow) +{ + CHECK(flow); + CHECK_EQ(flow->rows, camera->height()); + CHECK_EQ(flow->cols, camera->width()); + + const FloatType dt = 0.001; + + for(int y=0; yrows; ++y) + { + for(int x=0; xcols; ++x) + { + ze::Keypoint u_t(x,y); + ze::Bearing f = camera->backProject(u_t); + const FloatType z = static_cast((*depth)(y,x)); + ze::Position Xc_t = z * ze::Position(f[0]/f[2], f[1]/f[2], 1.); + + ze::Transformation::Rotation dR = ze::Transformation::Rotation::exp(-angular_velocity * dt); + ze::Transformation::Position dT = -linear_velocity * dt; + + // Transform Xc(t) to Xc(t+dt) + ze::Transformation T_tdt_t; + T_tdt_t.getRotation() = dR; + T_tdt_t.getPosition() = dT; + VLOG(5) << T_tdt_t; + + ze::Position Xc_t_dt = T_tdt_t.transform(Xc_t); + + // Project Xc(t+dt) in the image plane + ze::Keypoint u_t_dt = camera->project(Xc_t_dt); + VLOG(5) << u_t; + VLOG(5) << u_t_dt; + + ze::Vector2 flow_vec = (u_t_dt - u_t) / dt; + (*flow)(y,x) = cv::Vec(flow_vec(0), flow_vec(1)); + } + } +} + +void computeOpticFlowFiniteDifference(const ze::Camera::Ptr& camera, + const ze::Vector3& angular_velocity, + const ze::Vector3& linear_velocity, + const DepthmapPtr& depth, + const ze::Vector3& r_COBJ, + const ze::Vector3& angular_velocity_obj, + const ze::Vector3& linear_velocity_obj, + OpticFlowPtr& flow) +{ + CHECK(flow); + CHECK_EQ(flow->rows, camera->height()); + CHECK_EQ(flow->cols, camera->width()); + + const FloatType dt = 0.001; + + for(int y=0; yrows; ++y) + { + for(int x=0; xcols; ++x) + { + ze::Keypoint u_t(x,y); + ze::Bearing f = camera->backProject(u_t); + const FloatType z = static_cast((*depth)(y,x)); + ze::Position Xc_t = z * ze::Position(f[0]/f[2], f[1]/f[2], 1.); + ze::Position r_OBJX = Xc_t - r_COBJ; + ze::Matrix33 w_WOBJ_tilde = ze::skewSymmetric(angular_velocity_obj); + + ze::Transformation::Rotation dR = ze::Transformation::Rotation::exp(-angular_velocity * dt); + ze::Transformation::Position dT = linear_velocity_obj*dt - linear_velocity * dt + w_WOBJ_tilde*r_OBJX*dt; + + // Transform Xc(t) to Xc(t+dt) + ze::Transformation T_tdt_t; + T_tdt_t.getRotation() = dR; + T_tdt_t.getPosition() = dT; + VLOG(5) << T_tdt_t; + + ze::Position Xc_t_dt = T_tdt_t.transform(Xc_t); + + // Project Xc(t+dt) in the image plane + ze::Keypoint u_t_dt = camera->project(Xc_t_dt); + VLOG(5) << u_t; + 
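+      // finite-difference ground truth: the optic flow at this pixel is approximated
+      // by the displacement of the projected point between t and t+dt, divided by dt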
VLOG(5) << u_t_dt; + + ze::Vector2 flow_vec = (u_t_dt - u_t) / dt; + (*flow)(y,x) = cv::Vec(flow_vec(0), flow_vec(1)); + } + } +} + +} // event_camera_simulator + +TEST(Utils, testOpticFlowComputation) +{ + using namespace event_camera_simulator; + + // Load camera calib from folder + const std::string path_to_data_folder = getTestDataDir("camera_calibs"); + ze::CameraRig::Ptr camera_rig + = ze::cameraRigFromYaml(ze::joinPath(path_to_data_folder, "pinhole_mono.yaml")); + + CHECK(camera_rig); + + const ze::Camera::Ptr camera = camera_rig->atShared(0); + cv::Size sensor_size(camera->width(), camera->height()); + + OpticFlowPtr flow_analytic = + std::make_shared(sensor_size); + + // Sample random depth map + const ImageFloatType z_mean = 5.0; + const ImageFloatType z_stddev = 0.5; + DepthmapPtr depth = std::make_shared(sensor_size); + for(int y=0; y + + esim_data_provider + 0.0.0 + Data providers for the event camera simulator. + + Henri Rebecq + + GPLv3 + + catkin + catkin_simple + + esim_common + esim_rendering + imp_planar_renderer + imp_panorama_renderer + imp_opengl_renderer + imp_unrealcv_renderer + imp_multi_objects_2d + esim_trajectory + ze_vi_simulation + gflags_catkin + glog_catkin + + diff --git a/event_camera_simulator/esim_data_provider/src/data_provider_base.cpp b/event_camera_simulator/esim_data_provider/src/data_provider_base.cpp new file mode 100644 index 0000000..0387394 --- /dev/null +++ b/event_camera_simulator/esim_data_provider/src/data_provider_base.cpp @@ -0,0 +1,33 @@ +#include + +namespace event_camera_simulator { + +DataProviderBase::DataProviderBase(DataProviderType type) + : type_(type) + , signal_handler_(running_) +{} + +void DataProviderBase::spin() +{ + while (ok()) + { + spinOnce(); + } +} + +void DataProviderBase::pause() +{ + running_ = false; +} + +void DataProviderBase::shutdown() +{ + running_ = false; +} + +void DataProviderBase::registerCallback(const Callback& callback) +{ + callback_ = callback; +} + +} // namespace event_camera_simulator diff --git a/event_camera_simulator/esim_data_provider/src/data_provider_factory.cpp b/event_camera_simulator/esim_data_provider/src/data_provider_factory.cpp new file mode 100644 index 0000000..f283d78 --- /dev/null +++ b/event_camera_simulator/esim_data_provider/src/data_provider_factory.cpp @@ -0,0 +1,99 @@ +#include +#include +#include +#include +#include +#include +#include + +DEFINE_int32(data_source, 0, " 0: Online renderer"); + +DEFINE_double(simulation_minimum_framerate, 30.0, + "Minimum frame rate, in Hz" + "Especially useful when the event rate is low, to guarantee" + "that frames are still published at a minimum framerate"); + +DEFINE_double(simulation_imu_rate, 1000.0, + "Fixed IMU sampling frequency, in Hz"); + +DEFINE_int32(simulation_adaptive_sampling_method, 0, + "Method to use for adaptive sampling." + "0: based on predicted absolute brightness change" + "1: based on optic flow"); + +DEFINE_double(simulation_adaptive_sampling_lambda, 0.5, + "Parameter that controls the behavior of the adaptive sampling method." 
+ "The meaning of this value depends on the adaptive sampling method used:" + "...based on predicted absolute brightness change: deltaT = lambda / max(|dL/dt|)" + "...based on optic flow: deltaT = lambda \ max(||du/dt||) where du/dt denotes the 2D optic flow field."); + +DEFINE_string(path_to_data_folder, "", + "Path to folder containing the data."); + + +// Parameters for the DataProviderRosbag +DEFINE_string(bag_filename, "dataset.bag", "Name of bagfile from which to read."); +DEFINE_string(topic_cam0, "/cam0/image_raw", ""); +DEFINE_string(topic_cam1, "/cam1/image_raw", ""); +DEFINE_string(topic_cam2, "/cam2/image_raw", ""); +DEFINE_string(topic_cam3, "/cam3/image_raw", ""); +DEFINE_uint64(num_cams, 1, "Number of normal cameras to read from rosbag."); + +namespace event_camera_simulator { + +DataProviderBase::Ptr loadDataProviderFromGflags() +{ + // Create data provider. + DataProviderBase::Ptr data_provider; + switch (FLAGS_data_source) + { + case 0: // Online Renderer for Moving 3D Camera Rig with IMU + { + data_provider.reset( + new DataProviderOnlineMoving3DCameraRig(FLAGS_simulation_minimum_framerate, + FLAGS_simulation_imu_rate, + FLAGS_simulation_adaptive_sampling_method, + FLAGS_simulation_adaptive_sampling_lambda)); + break; + } + case 1: // Online Renderer Simple + { + data_provider.reset( + new DataProviderOnlineSimple(FLAGS_simulation_minimum_framerate, + FLAGS_simulation_adaptive_sampling_method, + FLAGS_simulation_adaptive_sampling_lambda)); + break; + } + case 2: // Read data from a folder + { + data_provider.reset( + new DataProviderFromFolder(FLAGS_path_to_data_folder)); + break; + } + case 3: // Read data (images) from a rosbag + { + CHECK_LE(FLAGS_num_cams, 4u); + CHECK_EQ(FLAGS_num_cams, 1u) << "Only one camera is supported currently"; + + // Fill camera topics. 
+ std::map cam_topics; + if (FLAGS_num_cams >= 1) cam_topics[FLAGS_topic_cam0] = 0; + if (FLAGS_num_cams >= 2) cam_topics[FLAGS_topic_cam1] = 1; + if (FLAGS_num_cams >= 3) cam_topics[FLAGS_topic_cam2] = 2; + if (FLAGS_num_cams >= 4) cam_topics[FLAGS_topic_cam3] = 3; + data_provider.reset( + new DataProviderRosbag(FLAGS_bag_filename, + cam_topics)); + break; + } + default: + { + LOG(FATAL) << "Data source not known."; + break; + } + } + + return data_provider; +} + +} // namespace ze diff --git a/event_camera_simulator/esim_data_provider/src/data_provider_from_folder.cpp b/event_camera_simulator/esim_data_provider/src/data_provider_from_folder.cpp new file mode 100644 index 0000000..132d589 --- /dev/null +++ b/event_camera_simulator/esim_data_provider/src/data_provider_from_folder.cpp @@ -0,0 +1,88 @@ +#include +#include +#include + +namespace event_camera_simulator { + +DataProviderFromFolder::DataProviderFromFolder(const std::string& path_to_data_folder) + : DataProviderBase(DataProviderType::Folder), + path_to_data_folder_(path_to_data_folder) +{ + // Load CSV image file + images_in_str_.open(ze::joinPath(path_to_data_folder, "images.csv")); + CHECK(images_in_str_.is_open()); + + // Load camera rig + camera_rig_ = ze::cameraRigFromYaml(ze::joinPath(path_to_data_folder, "calib.yaml")); + CHECK(camera_rig_); + CHECK_EQ(camera_rig_->size(), 1u) << "Only one camera in the rig is supported at the moment"; + + // Allocate memory for image data + sim_data_.images.emplace_back(ImagePtr(new Image( + cv::Size(camera_rig_->at(0).width(), + camera_rig_->at(0).height())))); + + sim_data_.camera_rig = camera_rig_; + sim_data_.images_updated = true; + sim_data_.depthmaps_updated = false; + sim_data_.optic_flows_updated = false; + sim_data_.twists_updated = false; + sim_data_.poses_updated = false; + sim_data_.imu_updated = false; +} + +int64_t DataProviderFromFolder::getTimeStamp(const std::string& ts_str) const +{ + return std::stoll(ts_str); +} + +size_t DataProviderFromFolder::numCameras() const +{ + return camera_rig_->size(); +} + +bool DataProviderFromFolder::spinOnce() +{ + std::string line; + if(!getline(images_in_str_, line)) + { + return false; + } + + if('%' != line.at(0) && '#' != line.at(0)) + { + std::vector items = ze::splitString(line, delimiter_); + CHECK_GE(items.size(), num_tokens_in_line_); + int64_t stamp = getTimeStamp(items[0]); + + const std::string& path_to_image = ze::joinPath(path_to_data_folder_, "frame", "cam_0", items[1]); + cv::Mat image = cv::imread(path_to_image, 0); + CHECK(image.data) << "Could not load image from file: " << path_to_image; + CHECK(image.rows == camera_rig_->at(0).height() + && image.cols == camera_rig_->at(0).width()) << "The image size in the data folder and the image size" + "specified in the camera rig do not match"; + + VLOG(3) << "Read image from file: " << path_to_image; + image.convertTo(*sim_data_.images[0], cv::DataType::type, 1./255.); + + if(callback_) + { + sim_data_.timestamp = static_cast
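
For reference, a minimal sketch of how the EventSimulator introduced above can be driven from a plain grayscale image sequence. This is illustrative only and not part of the commit: the frame file names, the threshold values and the 1 ms frame spacing are placeholder assumptions; only the Config fields and the imageCallback(img, time) interface from esim/esim/event_simulator.hpp (plus the types from esim/common/types.hpp) are taken from the code above.

#include <esim/common/types.hpp>
#include <esim/esim/event_simulator.hpp>
#include <opencv2/imgcodecs.hpp>
#include <string>

int main()
{
  using namespace event_camera_simulator;

  EventSimulator::Config cfg;
  cfg.Cp = 0.1;                        // example positive contrast threshold
  cfg.Cm = 0.1;                        // example negative contrast threshold
  cfg.sigma_Cp = cfg.sigma_Cm = 0.0;   // no threshold noise
  cfg.refractory_period_ns = 100000;   // 100 us (placeholder value)
  cfg.use_log_image = true;            // operate on log intensity
  cfg.log_eps = 0.001;

  EventSimulator sim(cfg);

  const Time dt_ns = 1000000;          // 1 ms between frames, i.e. 1 kHz input
  Time t = 0;
  for (int k = 0; k < 100; ++k, t += dt_ns)
  {
    // placeholder file names; any sufficiently high-framerate sequence works
    cv::Mat raw = cv::imread("frame_" + std::to_string(k) + ".png", 0);
    if (raw.empty()) break;

    Image img;
    raw.convertTo(img, cv::DataType<ImageFloatType>::type, 1. / 255.);

    // returns the events generated since the previous frame,
    // sorted by increasing timestamp
    Events events = sim.imageCallback(img, t);
    // ... consume the events here
  }
  return 0;
}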