Compare commits

No commits in common. "3ba975a8301013907b88496adb1bede1719086fc" and "e27602488d6045df5300dea166ffe8d00c9414dc" have entirely different histories.

15 changed files with 23 additions and 109 deletions

View File

@@ -2,4 +2,3 @@
esim_ros/scripts/exp_*
*.swp
*.autosave
.clang-format

View File

@@ -11,16 +11,12 @@ namespace event_camera_simulator {
ZE_POINTER_TYPEDEFS(ImageBuffer);
struct ImageData {
ImageData(
Image img, ImageRGB img_rgb, Time stamp, Duration exposure_time
)
ImageData(Image img, Time stamp, Duration exposure_time)
: image(img),
image_rgb(img_rgb),
stamp(stamp),
exposure_time(exposure_time) {}
Image image;
ImageRGB image_rgb;
Time stamp;
Duration exposure_time; // timestamp since last image (0 if this is
// the first image)
@@ -31,7 +27,7 @@ namespace event_camera_simulator {
// Rolling image buffer of maximum size 'buffer_size_ns'.
ImageBuffer(Duration buffer_size_ns): buffer_size_ns_(buffer_size_ns) {}
void addImage(Time t, const Image& img, const ImageRGB& img_rgb);
void addImage(Time t, const Image& img);
std::deque<ImageData> getRawBuffer() const {
return data_;
@@ -67,10 +63,7 @@ namespace event_camera_simulator {
}
bool imageCallback(
const Image& img,
const ImageRGB& img_rgb,
Time time,
const ImagePtr& camera_image
const Image& img, Time time, const ImagePtr& camera_image
);
private:
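
For orientation, here is a self-contained sketch of the rolling-buffer idea behind ImageBuffer (the "Rolling image buffer" comment above and the std::lower_bound pruning in the .cpp hunks below). Every name in the sketch is illustrative, not the project's; the real class additionally stores the image, its stamp, and the exposure gap per entry as declared above:

#include <algorithm>
#include <cstdint>
#include <deque>

// Illustrative stand-in for ImageBuffer: keep only the frames whose stamps
// fall inside a fixed time window ending at the newest frame.
struct Frame {
    uint64_t stamp_ns; // image timestamp in nanoseconds (payload omitted)
};

class RollingBuffer {
  public:
    explicit RollingBuffer(uint64_t window_ns): window_ns_(window_ns) {}

    void add(const Frame& f) {
        data_.push_back(f);
        // Drop everything older than (newest stamp - window), mirroring the
        // std::lower_bound-based pruning in ImageBuffer::addImage below.
        const uint64_t cutoff = f.stamp_ns - std::min(f.stamp_ns, window_ns_);
        auto first_valid = std::lower_bound(
            data_.begin(),
            data_.end(),
            cutoff,
            [](const Frame& a, uint64_t t) { return a.stamp_ns < t; }
        );
        data_.erase(data_.begin(), first_valid);
    }

  private:
    std::deque<Frame> data_;
    uint64_t window_ns_;
};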

View File

@@ -4,7 +4,6 @@
#include <esim/esim/camera_simulator.hpp>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <ostream>
#include <ze/common/file_utils.hpp>
@@ -13,8 +12,7 @@ static std::ofstream exposures_file_;
namespace event_camera_simulator {
void
ImageBuffer::addImage(Time t, const Image& img, const ImageRGB& img_rgb) {
void ImageBuffer::addImage(Time t, const Image& img) {
if (!data_.empty()) {
// Check that the image timestamps are monotonically increasing
CHECK_GT(t, data_.back().stamp);
@@ -23,9 +21,7 @@ namespace event_camera_simulator {
Duration exposure_time = data_.empty() ? 0 : t - data_.back().stamp;
VLOG(2) << "Adding image to buffer with stamp: " << t
<< " and exposure time " << exposure_time;
data_.push_back(
ImageData(img.clone(), img_rgb.clone(), t, exposure_time)
);
data_.push_back(ImageData(img.clone(), t, exposure_time));
// Remove all the images with timestamp older than t - buffer_size_ns_
auto first_valid_element = std::lower_bound(
@@ -44,16 +40,12 @@ namespace event_camera_simulator {
}
bool CameraSimulator::imageCallback(
const Image& img,
const ImageRGB& img_rgb,
Time time,
const ImagePtr& camera_image
const Image& img, Time time, const ImagePtr& camera_image
) {
CHECK(camera_image);
CHECK_EQ(camera_image->size(), img.size());
CHECK_EQ(img_rgb.size(), img.size());
buffer_->addImage(time, img, img_rgb);
buffer_->addImage(time, img);
static const Time initial_time = time;
if (time - initial_time < exposure_time_) {
@@ -63,15 +55,10 @@ namespace event_camera_simulator {
return false;
}
ImageRGB rgb(img.rows, img.cols, CV_32FC3);
rgb.setTo(0.);
// average all the images in the buffer to simulate motion blur
camera_image->setTo(0);
ze::real_t denom = 0.;
for (const ImageBuffer::ImageData& img : buffer_->getRawBuffer()) {
rgb +=
ze::nanosecToMillisecTrunc(img.exposure_time) * img.image_rgb;
*camera_image +=
ze::nanosecToMillisecTrunc(img.exposure_time) * img.image;
denom += ze::nanosecToMillisecTrunc(img.exposure_time);
@@ -83,7 +70,7 @@ namespace event_camera_simulator {
ss << hdr_output_folder << "/frames/frame_" << std::setfill('0')
<< std::setw(5) << frame_number++ << ".exr";
std::string frame_path = ss.str();
cv::imwrite(frame_path, rgb);
cv::imwrite(frame_path, *camera_image);
// ze::openOutputFileStream(
// ze::joinPath(output_folder, "exposures.csv"),
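
The loop in the hunk above simulates motion blur as an exposure-weighted average of the buffered frames: each image is weighted by its exposure_time (converted to milliseconds) and the weights are accumulated in denom. The visible hunk ends before the normalization, so the final division here is an assumption based on the "average" comment; the Entry struct and the function name are illustrative stand-ins, not the project's types:

#include <deque>
#include <opencv2/core.hpp>

// Illustrative stand-in for ImageBuffer::ImageData.
struct Entry {
    cv::Mat_<float> image; // grayscale frame; all frames share one size
    double exposure_ms;    // gap to the previous frame, in milliseconds
};

// Exposure-weighted average: frames that were "exposed" longer contribute
// more, approximating motion blur over the accumulated exposure window.
cv::Mat_<float> exposureWeightedAverage(const std::deque<Entry>& buffer) {
    CV_Assert(!buffer.empty());
    cv::Mat_<float> acc(buffer.front().image.size(), 0.f);
    double denom = 0.0;
    for (const Entry& e : buffer) {
        acc += e.exposure_ms * e.image;
        denom += e.exposure_ms;
    }
    if (denom > 0.0)
        acc *= 1.0 / denom; // normalize the weighted sum
    return acc;
}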

View File

@@ -46,7 +46,6 @@ namespace event_camera_simulator {
camera_simulator_success =
camera_simulators_[i].imageCallback(
*sim_data.images[i],
*sim_data.images_rgb[i],
time,
corrupted_camera_images_[i]
);

View File

@@ -42,15 +42,12 @@ namespace event_camera_simulator {
using Duration = ze::uint64_t;
using Image = cv::Mat_<ImageFloatType>;
using ImagePtr = std::shared_ptr<Image>;
using ImageRGB = cv::Mat;
using ImageRGBPtr = std::shared_ptr<ImageRGB>;
using Depthmap = cv::Mat_<ImageFloatType>;
using OpticFlow = cv::Mat_<cv::Vec<ImageFloatType, 2>>;
using OpticFlowPtr = std::shared_ptr<OpticFlow>;
using DepthmapPtr = std::shared_ptr<Depthmap>;
using ImagePtrVector = std::vector<ImagePtr>;
using ImageRGBPtrVector = std::vector<ImageRGBPtr>;
using DepthmapPtrVector = std::vector<DepthmapPtr>;
using OpticFlowPtrVector = std::vector<OpticFlowPtr>;
@@ -101,7 +98,6 @@ namespace event_camera_simulator {
//! Camera images.
ImagePtrVector images;
ImageRGBPtrVector images_rgb;
//! Depth maps.
DepthmapPtrVector depthmaps;

View File

@@ -1,11 +1,4 @@
#include "esim/imp_multi_objects_2d/object.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/imgproc/types_c.h"
#include <cstdint>
#include <esim/data_provider/data_provider_from_folder.hpp>
#include <iostream>
#include <opencv2/imgcodecs/imgcodecs.hpp>
#include <ze/cameras/camera_impl.hpp>
#include <ze/common/file_utils.hpp>
@@ -47,11 +40,6 @@ namespace event_camera_simulator {
sim_data_.images.emplace_back(ImagePtr(new Image(
cv::Size(camera_rig_->at(0).width(), camera_rig_->at(0).height())
)));
sim_data_.images_rgb.emplace_back(ImageRGBPtr(new ImageRGB(
camera_rig_->at(0).width(),
camera_rig_->at(0).height(),
CV_32FC3
)));
sim_data_.camera_rig = camera_rig_;
sim_data_.images_updated = true;
@@ -86,17 +74,12 @@ namespace event_camera_simulator {
const std::string& path_to_image =
ze::joinPath(path_to_data_folder_, items[1]);
cv::Mat image = cv::imread(path_to_image);
cv::Mat image = cv::imread(path_to_image, 0);
CHECK(image.data)
<< "Could not load image from file: " << path_to_image;
VLOG(3) << "Read image from file: " << path_to_image;
image.copyTo(*sim_data_.images_rgb[0]);
cv::Mat image_gray = image;
cv::cvtColor(image, image_gray, CV_BGR2GRAY);
image_gray.convertTo(
image.convertTo(
*sim_data_.images[0],
cv::DataType<ImageFloatType>::type,
1. / 255.
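
One detail worth calling out in the hunk above: the 0 passed as the second argument to cv::imread is the numeric value of cv::IMREAD_GRAYSCALE, so the image is loaded directly as single-channel 8-bit data, which is why the separate cvtColor(..., CV_BGR2GRAY) step disappears. An equivalent, more explicit spelling (the helper name and file path are placeholders, not taken from the repository):

#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>
#include <string>

// Load as 8-bit grayscale and rescale to floats in [0, 1], mirroring the
// convertTo(..., 1. / 255.) call in the hunk above.
cv::Mat_<float> loadGrayscaleNormalized(const std::string& path) {
    // Flag 0 == cv::IMREAD_GRAYSCALE; spelled out here for clarity.
    cv::Mat image = cv::imread(path, cv::IMREAD_GRAYSCALE);
    CV_Assert(!image.empty()); // imread returns an empty Mat on failure
    cv::Mat_<float> normalized;
    image.convertTo(normalized, CV_32F, 1.0 / 255.0);
    return normalized;
}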

View File

@@ -103,15 +103,11 @@ namespace event_camera_simulator {
);
sim_data_.images.emplace_back(ImagePtr(new Image(size)));
sim_data_.images_rgb.emplace_back(
ImageRGBPtr(new ImageRGB(size, CV_32FC3))
);
sim_data_.depthmaps.emplace_back(DepthmapPtr(new Depthmap(size)));
sim_data_.optic_flows.emplace_back(OpticFlowPtr(new OpticFlow(size))
);
sim_data_.images[i]->setTo(0);
sim_data_.images_rgb[i]->setTo(0);
sim_data_.depthmaps[i]->setTo(0);
sim_data_.optic_flows[i]->setTo(0);
}
@@ -245,7 +241,6 @@ namespace event_camera_simulator {
sim_data_.groundtruth.linear_velocity_obj_,
sim_data_.groundtruth.angular_velocity_obj_,
sim_data_.images[i],
sim_data_.images_rgb[i],
sim_data_.depthmaps[i],
sim_data_.optic_flows[i]
);
@@ -254,7 +249,6 @@ namespace event_camera_simulator {
sim_data_.groundtruth.T_W_B * camera_rig_->T_B_C(i),
sim_data_.groundtruth.T_W_OBJ_,
sim_data_.images[i],
sim_data_.images_rgb[i],
sim_data_.depthmaps[i]
);
}

View File

@@ -19,7 +19,6 @@ namespace event_camera_simulator {
const Transformation& T_W_C,
const std::vector<Transformation>& T_W_OBJ,
const ImagePtr& out_image,
const ImageRGBPtr& out_image_rgb,
const DepthmapPtr& out_depthmap
) const = 0;
@@ -37,7 +36,6 @@ namespace event_camera_simulator {
const std::vector<LinearVelocity>& linear_velocity_obj,
const std::vector<AngularVelocity>& angular_velocity_obj,
const ImagePtr& out_image,
const ImageRGBPtr& out_image_rgb,
const DepthmapPtr& out_depthmap,
const OpticFlowPtr& optic_flow_map
) const {}

View File

@@ -1,7 +1,7 @@
--data_source=2
# --path_to_output_bag=/tmp/out.bag
--path_to_data_folder=/home/arno/Videos/sponze/frames
--path_to_output_bag=/tmp/out.bag
--path_to_data_folder=/tmp/tests/frames
--hdr_output_folder=/home/arno/sim_ws/out
--ros_publisher_frame_rate=60

View File

@@ -100,14 +100,14 @@ namespace event_camera_simulator {
}
void HdrPublisher::imageCallback(const ImagePtrVector& images, Time t) {
// CHECK_EQ(images.size(), 1);
// static uint frame_number = 0;
// std::stringstream ss;
// ss << output_folder_ << "/frames/frame_" << std::setfill('0')
// << std::setw(5) << frame_number++ << ".exr";
// std::string frame_path = ss.str();
//
// cv::imwrite(frame_path, *images[0]);
CHECK_EQ(images.size(), 1);
static uint frame_number = 0;
std::stringstream ss;
ss << output_folder_ << "/frames/frame_" << std::setfill('0')
<< std::setw(5) << frame_number++ << ".exr";
std::string frame_path = ss.str();
cv::imwrite(frame_path, *images[0]);
}
void HdrPublisher::eventsCallback(const EventsVector& events) {

View File

@@ -24,9 +24,8 @@ namespace event_camera_simulator {
const Transformation& T_W_C,
const std::vector<Transformation>& T_W_OBJ,
const ImagePtr& out_image,
const ImageRGBPtr& out_image_rgb,
const DepthmapPtr& out_depthmap
) const override;
) const;
//! Returns true if the rendering engine can compute optic flow, false
//! otherwise
@@ -44,7 +43,6 @@ namespace event_camera_simulator {
const std::vector<LinearVelocity>& linear_velocity_obj,
const std::vector<AngularVelocity>& angular_velocity_obj,
const ImagePtr& out_image,
const ImageRGBPtr& out_image_rgb,
const DepthmapPtr& out_depthmap,
const OpticFlowPtr& optic_flow_map
) const override;

View File

@@ -17,7 +17,6 @@
#include <esim/imp_opengl_renderer/opengl_renderer.hpp>
#include <glad/glad.h>
#include <iomanip>
#include <learnopengl/shader.h>
#include <learnopengl/model.h>
#include <GLFW/glfw3.h>
@@ -315,23 +314,18 @@ namespace event_camera_simulator {
const Transformation& T_W_C,
const std::vector<Transformation>& T_W_OBJ,
const ImagePtr& out_image,
const ImageRGBPtr& out_image_rgb,
const DepthmapPtr& out_depthmap
) const {
CHECK(is_initialized_) << "Called render() but the renderer was not "
"initialized yet. Have you "
"first called setCamera()?";
CHECK(out_image);
CHECK(out_image_rgb);
CHECK(out_depthmap);
CHECK_EQ(out_image->cols, width_);
CHECK_EQ(out_image_rgb->cols, width_);
CHECK_EQ(out_image->rows, height_);
CHECK_EQ(out_image_rgb->rows, height_);
CHECK_EQ(out_depthmap->cols, width_);
CHECK_EQ(out_depthmap->rows, height_);
CHECK_EQ(out_image->type(), CV_32F);
CHECK_EQ(out_image_rgb->type(), CV_32FC3);
CHECK_EQ(out_depthmap->type(), CV_32F);
// draw to our framebuffer instead of screen
@@ -419,29 +413,6 @@ namespace event_camera_simulator {
// read out what we just rendered
cv::Mat img_color(height_, width_, CV_8UC3);
{
float pixels[height_ * width_ * 3];
glReadPixels(
0,
0,
img_color.cols,
img_color.rows,
GL_RGB,
GL_FLOAT,
pixels
);
cv::Mat rgb(height_, width_, CV_32FC3, pixels);
rgb.copyTo(*out_image_rgb);
}
// static uint frame_number = 0;
// std::stringstream ss;
// ss << "/tmp/tests"
// << "/frames/frame_" << std::setfill('0') << std::setw(5)
// << frame_number++ << ".exr";
// cv::imwrite(ss.str(), rgb);
glPixelStorei(GL_PACK_ALIGNMENT, (img_color.step & 3) ? 1 : 4);
glPixelStorei(
GL_PACK_ROW_LENGTH,
@@ -509,11 +480,10 @@ namespace event_camera_simulator {
const std::vector<LinearVelocity>& linear_velocity_obj,
const std::vector<AngularVelocity>& angular_velocity_obj,
const ImagePtr& out_image,
const ImageRGBPtr& out_image_rgb,
const DepthmapPtr& out_depthmap,
const OpticFlowPtr& optic_flow_map
) const {
render(T_W_C, T_W_OBJ, out_image, out_image_rgb, out_depthmap);
render(T_W_C, T_W_OBJ, out_image, out_depthmap);
// draw to our optic flow framebuffer instead of screen
glBindFramebuffer(GL_FRAMEBUFFER, fbo_of);

View File

@@ -26,7 +26,6 @@ namespace event_camera_simulator {
const Transformation& T_W_C,
const std::vector<Transformation>& T_W_OBJ,
const ImagePtr& out_image,
const ImageRGBPtr& out_image_rgb,
const DepthmapPtr& out_depthmap
) const override {
render(T_W_C, out_image, out_depthmap);

View File

@@ -31,7 +31,6 @@ namespace event_camera_simulator {
const Transformation& T_W_C,
const std::vector<Transformation>& T_W_OBJ,
const ImagePtr& out_image,
const ImageRGBPtr& out_image_rbg,
const DepthmapPtr& out_depthmap
) const override {
render(T_W_C, out_image, out_depthmap);

View File

@@ -21,9 +21,8 @@ namespace event_camera_simulator {
const Transformation& T_W_C,
const std::vector<Transformation>& T_W_OBJ,
const ImagePtr& out_image,
const ImageRGBPtr& out_image_rgb,
const DepthmapPtr& out_depthmap
) const override {
) const {
render(T_W_C, out_image, out_depthmap);
}