initial commit

Henri Rebecq
2018-10-29 17:53:15 +01:00
commit a8c2f0ca43
208 changed files with 554184 additions and 0 deletions

@@ -0,0 +1,31 @@
cmake_minimum_required(VERSION 2.8.3)
project(esim_visualization)
find_package(catkin_simple REQUIRED)
catkin_simple()
# This macro ensures modules and global scripts declared therein get installed
# See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
catkin_python_setup()
set(HEADERS
include/esim/visualization/publisher_interface.hpp
include/esim/visualization/ros_utils.hpp
include/esim/visualization/ros_publisher.hpp
include/esim/visualization/rosbag_writer.hpp
include/esim/visualization/adaptive_sampling_benchmark_publisher.hpp
include/esim/visualization/synthetic_optic_flow_publisher.hpp
)
set(SOURCES
src/ros_utils.cpp
src/ros_publisher.cpp
src/rosbag_writer.cpp
src/adaptive_sampling_benchmark_publisher.cpp
src/synthetic_optic_flow_publisher.cpp
)
cs_add_library(${PROJECT_NAME} ${SOURCES} ${HEADERS})
cs_install()
cs_export()

@@ -0,0 +1,570 @@
{
"keys": {},
"groups": {
"pluginmanager": {
"keys": {
"running-plugins": {
"type": "repr",
"repr": "{u'rqt_plot/Plot': [2], u'rqt_image_view/ImageView': [4, 3, 1, 2]}"
}
},
"groups": {
"plugin__rqt_image_view__ImageView__6": {
"keys": {},
"groups": {
"dock_widget__ImageViewWidget": {
"keys": {
"dockable": {
"type": "repr",
"repr": "u'true'"
},
"parent": {
"type": "repr",
"repr": "None"
},
"dock_widget_title": {
"type": "repr",
"repr": "u'Image View (6)'"
}
},
"groups": {}
},
"plugin": {
"keys": {
"max_range": {
"type": "repr",
"repr": "u'10'"
},
"mouse_pub_topic": {
"type": "repr",
"repr": "u'/dvs/image_raw_mouse_left'"
},
"num_gridlines": {
"type": "repr",
"repr": "u'0'"
},
"toolbar_hidden": {
"type": "repr",
"repr": "u'false'"
},
"zoom1": {
"type": "repr",
"repr": "u'false'"
},
"dynamic_range": {
"type": "repr",
"repr": "u'false'"
},
"topic": {
"type": "repr",
"repr": "u'/dvs/image_raw'"
},
"publish_click_location": {
"type": "repr",
"repr": "u'false'"
}
},
"groups": {}
}
}
},
"plugin__rqt_image_view__ImageView__4": {
"keys": {},
"groups": {
"dock_widget__ImageViewWidget": {
"keys": {
"dockable": {
"type": "repr",
"repr": "True"
},
"parent": {
"type": "repr",
"repr": "None"
},
"dock_widget_title": {
"type": "repr",
"repr": "u'Image View (4)'"
}
},
"groups": {}
},
"plugin": {
"keys": {
"max_range": {
"type": "repr",
"repr": "10.0"
},
"mouse_pub_topic": {
"type": "repr",
"repr": "u'/flow_arrows_mouse_left'"
},
"num_gridlines": {
"type": "repr",
"repr": "0"
},
"toolbar_hidden": {
"type": "repr",
"repr": "False"
},
"zoom1": {
"type": "repr",
"repr": "False"
},
"dynamic_range": {
"type": "repr",
"repr": "True"
},
"topic": {
"type": "repr",
"repr": "u'/flow_arrows'"
},
"publish_click_location": {
"type": "repr",
"repr": "False"
}
},
"groups": {}
}
}
},
"plugin__rqt_image_view__ImageView__5": {
"keys": {},
"groups": {
"dock_widget__ImageViewWidget": {
"keys": {
"dockable": {
"type": "repr",
"repr": "u'true'"
},
"parent": {
"type": "repr",
"repr": "None"
},
"dock_widget_title": {
"type": "repr",
"repr": "u'Image View (5)'"
}
},
"groups": {}
},
"plugin": {
"keys": {
"max_range": {
"type": "repr",
"repr": "u'10'"
},
"mouse_pub_topic": {
"type": "repr",
"repr": "u'/davis/left/rendering/undistorted/pos_mouse_left'"
},
"num_gridlines": {
"type": "repr",
"repr": "u'0'"
},
"toolbar_hidden": {
"type": "repr",
"repr": "u'false'"
},
"zoom1": {
"type": "repr",
"repr": "u'false'"
},
"dynamic_range": {
"type": "repr",
"repr": "u'false'"
},
"topic": {
"type": "repr",
"repr": "u'/davis/left/rendering/undistorted/pos'"
},
"publish_click_location": {
"type": "repr",
"repr": "u'false'"
}
},
"groups": {}
}
}
},
"plugin__rqt_image_view__ImageView__2": {
"keys": {},
"groups": {
"dock_widget__ImageViewWidget": {
"keys": {
"dockable": {
"type": "repr",
"repr": "True"
},
"parent": {
"type": "repr",
"repr": "None"
},
"dock_widget_title": {
"type": "repr",
"repr": "u'Image View (2)'"
}
},
"groups": {}
},
"plugin": {
"keys": {
"max_range": {
"type": "repr",
"repr": "14.0"
},
"mouse_pub_topic": {
"type": "repr",
"repr": "u'/dvs_rendering_mouse_left'"
},
"num_gridlines": {
"type": "repr",
"repr": "0"
},
"toolbar_hidden": {
"type": "repr",
"repr": "False"
},
"zoom1": {
"type": "repr",
"repr": "False"
},
"dynamic_range": {
"type": "repr",
"repr": "False"
},
"topic": {
"type": "repr",
"repr": "u'/dvs_rendering'"
},
"publish_click_location": {
"type": "repr",
"repr": "False"
}
},
"groups": {}
}
}
},
"plugin__rqt_image_view__ImageView__3": {
"keys": {},
"groups": {
"dock_widget__ImageViewWidget": {
"keys": {
"dockable": {
"type": "repr",
"repr": "True"
},
"parent": {
"type": "repr",
"repr": "None"
},
"dock_widget_title": {
"type": "repr",
"repr": "u'Image View (3)'"
}
},
"groups": {}
},
"plugin": {
"keys": {
"max_range": {
"type": "repr",
"repr": "10.0"
},
"mouse_pub_topic": {
"type": "repr",
"repr": "u'/cam0/image_raw_mouse_left'"
},
"num_gridlines": {
"type": "repr",
"repr": "0"
},
"toolbar_hidden": {
"type": "repr",
"repr": "False"
},
"zoom1": {
"type": "repr",
"repr": "False"
},
"dynamic_range": {
"type": "repr",
"repr": "False"
},
"topic": {
"type": "repr",
"repr": "u'/cam0/image_raw'"
},
"publish_click_location": {
"type": "repr",
"repr": "False"
}
},
"groups": {}
}
}
},
"plugin__rqt_image_view__ImageView__1": {
"keys": {},
"groups": {
"dock_widget__ImageViewWidget": {
"keys": {
"dockable": {
"type": "repr",
"repr": "True"
},
"parent": {
"type": "repr",
"repr": "None"
},
"dock_widget_title": {
"type": "repr",
"repr": "u'Image View'"
}
},
"groups": {}
},
"plugin": {
"keys": {
"max_range": {
"type": "repr",
"repr": "10.0"
},
"mouse_pub_topic": {
"type": "repr",
"repr": "u'/flow_color_mouse_left'"
},
"num_gridlines": {
"type": "repr",
"repr": "0"
},
"toolbar_hidden": {
"type": "repr",
"repr": "False"
},
"zoom1": {
"type": "repr",
"repr": "False"
},
"dynamic_range": {
"type": "repr",
"repr": "False"
},
"topic": {
"type": "repr",
"repr": "u'/flow_color'"
},
"publish_click_location": {
"type": "repr",
"repr": "False"
}
},
"groups": {}
}
}
},
"plugin__rqt_plot__Plot__1": {
"keys": {},
"groups": {
"dock_widget__DataPlotWidget": {
"keys": {
"dockable": {
"type": "repr",
"repr": "u'true'"
},
"parent": {
"type": "repr",
"repr": "None"
},
"dock_widget_title": {
"type": "repr",
"repr": "u'MatPlot'"
}
},
"groups": {}
},
"plugin": {
"keys": {
"autoscroll": {
"type": "repr",
"repr": "u'true'"
},
"plot_type": {
"type": "repr",
"repr": "u'1'"
},
"topics": {
"type": "repr",
"repr": "[u'/dvs/imu/linear_acceleration/z', u'/dvs/imu/linear_acceleration/x', u'/dvs/imu/linear_acceleration/y']"
},
"y_limits": {
"type": "repr",
"repr": "[u'-13.594160308518013', u'26.558581681023206']"
},
"x_limits": {
"type": "repr",
"repr": "[u'-1522917844.8259192', u'-1522917843.8259192']"
}
},
"groups": {}
}
}
},
"plugin__rqt_plot__Plot__2": {
"keys": {},
"groups": {
"dock_widget__DataPlotWidget": {
"keys": {
"dockable": {
"type": "repr",
"repr": "True"
},
"parent": {
"type": "repr",
"repr": "None"
},
"dock_widget_title": {
"type": "repr",
"repr": "u'MatPlot (2)'"
}
},
"groups": {}
},
"plugin": {
"keys": {
"autoscroll": {
"type": "repr",
"repr": "True"
},
"plot_type": {
"type": "repr",
"repr": "1"
},
"topics": {
"type": "repr",
"repr": "[u'/imu/linear_acceleration/z', u'/imu/linear_acceleration/x', u'/imu/linear_acceleration/y']"
},
"y_limits": {
"type": "repr",
"repr": "[-24.496623122229014, 26.176058546767358]"
},
"x_limits": {
"type": "repr",
"repr": "[-1523004547.6889791, -1523004546.6889791]"
}
},
"groups": {}
}
}
},
"plugin__rqt_publisher__Publisher__1": {
"keys": {},
"groups": {
"plugin": {
"keys": {
"publishers": {
"type": "repr",
"repr": "u'[]'"
}
},
"groups": {}
}
}
},
"plugin__rqt_service_caller__ServiceCaller__1": {
"keys": {},
"groups": {
"dock_widget__ServiceCallerWidget": {
"keys": {
"dockable": {
"type": "repr",
"repr": "u'true'"
},
"parent": {
"type": "repr",
"repr": "None"
},
"dock_widget_title": {
"type": "repr",
"repr": "u'Service Caller'"
}
},
"groups": {}
},
"plugin": {
"keys": {
"splitter_orientation": {
"type": "repr",
"repr": "u'2'"
}
},
"groups": {}
}
}
},
"plugin__rqt_reconfigure__Param__1": {
"keys": {},
"groups": {
"dock_widget___plugincontainer_top_widget": {
"keys": {
"dockable": {
"type": "repr",
"repr": "u'true'"
},
"parent": {
"type": "repr",
"repr": "None"
},
"dock_widget_title": {
"type": "repr",
"repr": "u'Dynamic Reconfigure'"
}
},
"groups": {}
},
"plugin": {
"keys": {
"splitter": {
"type": "repr(QByteArray.hex)",
"repr(QByteArray.hex)": "QtCore.QByteArray('000000ff0000000100000002000000ae0000006401ffffffff010000000100')",
"pretty-print": " d "
},
"_splitter": {
"type": "repr(QByteArray.hex)",
"repr(QByteArray.hex)": "QtCore.QByteArray('000000ff00000001000000020000012c000000640100000009010000000200')",
"pretty-print": " , d "
}
},
"groups": {}
}
}
}
}
},
"mainwindow": {
"keys": {
"geometry": {
"type": "repr(QByteArray.hex)",
"repr(QByteArray.hex)": "QtCore.QByteArray('01d9d0cb0002000000000041000000180000077f0000043700000041000000180000077f0000043700000001000000000780')",
"pretty-print": " A 7 A 7 "
},
"state": {
"type": "repr(QByteArray.hex)",
"repr(QByteArray.hex)": "QtCore.QByteArray('000000ff00000000fd00000001000000030000073f0000040afc0100000006fc000000000000019b0000000000fffffffc0200000001fb0000005a007200710074005f0069006d006100670065005f0076006900650077005f005f0049006d0061006700650056006900650077005f005f0035005f005f0049006d00610067006500560069006500770057006900640067006500740100000000000002020000000000000000fb00000058007200710074005f007000750062006c00690073006800650072005f005f005000750062006c00690073006800650072005f005f0031005f005f005000750062006c0069007300680065007200570069006400670065007401000001b0000002650000000000000000fc00000000000003ad000001c000fffffffc0100000002fc00000000000001d3000000dd00fffffffc0200000002fb0000005a007200710074005f0069006d006100670065005f0076006900650077005f005f0049006d0061006700650056006900650077005f005f0033005f005f0049006d00610067006500560069006500770057006900640067006500740100000000000001ff0000005100fffffffb0000005a007200710074005f0069006d006100670065005f0076006900650077005f005f0049006d0061006700650056006900650077005f005f0031005f005f0049006d00610067006500560069006500770057006900640067006500740100000205000002050000005100fffffffc000001d9000001d4000000dd00fffffffc0200000002fb0000005a007200710074005f0069006d006100670065005f0076006900650077005f005f0049006d0061006700650056006900650077005f005f0034005f005f0049006d00610067006500560069006500770057006900640067006500740100000000000001ff0000005100fffffffb0000005a007200710074005f0069006d006100670065005f0076006900650077005f005f0049006d0061006700650056006900650077005f005f0032005f005f0049006d00610067006500560069006500770057006900640067006500740100000205000002050000005100fffffffb0000005a007200710074005f0069006d006100670065005f0076006900650077005f005f0049006d0061006700650056006900650077005f005f0036005f005f0049006d006100670065005600690065007700570069006400670065007401000005a40000019b0000000000000000fb00000042007200710074005f0070006c006f0074005f005f0050006c006f0074005f005f0031005f005f00440061007400610050006c006f0074005700690064006700650074010000029a000002220000000000000000fb00000042007200710074005f0070006c006f0074005f005f0050006c006f0074005f005f0032005f005f00440061007400610050006c006f007400570069006400670065007401000003b30000038c0000017800ffffff0000073f0000000000000004000000040000000800000008fc00000001000000030000000100000036004d0069006e0069006d0069007a006500640044006f0063006b00570069006400670065007400730054006f006f006c0062006100720000000000ffffffff0000000000000000')",
"pretty-print": " e Q Q Zrqt_image_view__ImageView__4__ImageViewWidget Zrqt_image_view__ImageView__2__ImageViewWidget Zrqt_image_view__ImageView__6__ImageViewWidget Brqt_plot__Plot__1__DataPlotWidget Brqt_plot__Plot__2__DataPlotWidget 6MinimizedDockWidgetsToolbar "
}
},
"groups": {
"toolbar_areas": {
"keys": {
"MinimizedDockWidgetsToolbar": {
"type": "repr",
"repr": "8"
}
},
"groups": {}
}
}
}
}
}

@@ -0,0 +1,207 @@
Panels:
- Class: rviz/Displays
Help Height: 78
Name: Displays
Property Tree Widget:
Expanded:
- /Grid1/Offset1
- /TF1/Status1
- /TF1/Frames1
- /PointCloud21
- /PointCloud21/Autocompute Value Bounds1
Splitter Ratio: 0.504322767
Tree Height: 449
- Class: rviz/Selection
Name: Selection
- Class: rviz/Tool Properties
Expanded:
- /2D Pose Estimate1
- /2D Nav Goal1
- /Publish Point1
Name: Tool Properties
Splitter Ratio: 0.588679016
- Class: rviz/Views
Expanded:
- /Current View1
Name: Views
Splitter Ratio: 0.5
- Class: rviz/Time
Experimental: false
Name: Time
SyncMode: 0
SyncSource: Image
Visualization Manager:
Class: ""
Displays:
- Alpha: 0.5
Cell Size: 1
Class: rviz/Grid
Color: 160; 160; 164
Enabled: true
Line Style:
Line Width: 0.0299999993
Value: Lines
Name: Grid
Normal Cell Count: 0
Offset:
X: 0
Y: 0
Z: -1
Plane: XY
Plane Cell Count: 10
Reference Frame: <Fixed Frame>
Value: true
- Class: rviz/TF
Enabled: true
Frame Timeout: 15
Frames:
All Enabled: false
body:
Value: false
cam0:
Value: true
map:
Value: false
Marker Scale: 1
Name: TF
Show Arrows: false
Show Axes: true
Show Names: false
Tree:
map:
body:
{}
cam0:
{}
Update Interval: 0
Value: true
- Class: rviz/Image
Enabled: true
Image Topic: /dvs_rendering
Max Value: 1
Median window: 5
Min Value: 0
Name: Image
Normalize Range: true
Queue Size: 2
Transport Hint: raw
Unreliable: false
Value: true
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 1.51484442
Min Value: 1.28932905
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 132; 132; 132
Color Transformer: RGB8
Decay Time: 100
Enabled: true
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 1
Min Color: 0; 0; 0
Min Intensity: 0
Name: PointCloud2
Position Transformer: XYZ
Queue Size: 10
Selectable: true
Size (Pixels): 1
Size (m): 0.00999999978
Style: Points
Topic: /cam0/pointcloud
Unreliable: false
Use Fixed Frame: false
Use rainbow: true
Value: true
- Alpha: 1
Buffer Length: 1
Class: rviz/Path
Color: 25; 255; 0
Enabled: true
Head Diameter: 0.300000012
Head Length: 0.200000003
Length: 0.100000001
Line Style: Billboards
Line Width: 0.0199999996
Name: Path
Offset:
X: 0
Y: 0
Z: 0
Pose Color: 255; 85; 255
Pose Style: None
Radius: 0.0199999996
Shaft Diameter: 0.100000001
Shaft Length: 0.100000001
Topic: /trajectory
Unreliable: false
Value: true
Enabled: true
Global Options:
Background Color: 255; 255; 255
Default Light: true
Fixed Frame: map
Frame Rate: 30
Name: root
Tools:
- Class: rviz/Interact
Hide Inactive Objects: true
- Class: rviz/MoveCamera
- Class: rviz/Select
- Class: rviz/FocusCamera
- Class: rviz/Measure
- Class: rviz/SetInitialPose
Topic: /initialpose
- Class: rviz/SetGoal
Topic: /move_base_simple/goal
- Class: rviz/PublishPoint
Single click: true
Topic: /clicked_point
Value: true
Views:
Current:
Class: rviz/Orbit
Distance: 4.34333372
Enable Stereo Rendering:
Stereo Eye Separation: 0.0599999987
Stereo Focal Distance: 1
Swap Stereo Eyes: false
Value: false
Focal Point:
X: 0.177987233
Y: -0.165689588
Z: -0.522414088
Focal Shape Fixed Size: true
Focal Shape Size: 0.0500000007
Invert Z Axis: false
Name: Current View
Near Clip Distance: 0.00999999978
Pitch: 0.280202091
Target Frame: <Fixed Frame>
Value: Orbit (rviz)
Yaw: 4.70558119
Saved: ~
Window Geometry:
Displays:
collapsed: true
Height: 1056
Hide Left Dock: true
Hide Right Dock: true
Image:
collapsed: false
QMainWindow State: 000000ff00000000fd00000004000000000000016a00000250fc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006100fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073000000002800000250000000d700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000c00430061006d0065007200610000000262000000160000000000000000000000010000010f00000250fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002800000250000000ad00fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073f00000184fc0100000005fb0000000a0049006d00610067006501000000000000073f0000005c00fffffffc000000000000073f0000000000fffffffa000000000200000002fb0000000a0049006d0061006700650100000000ffffffff0000000000000000fb0000000800540069006d006500000003c40000003e0000003b00fffffffb0000000800540069006d00650100000000000004500000000000000000fb000000180049006e00700075007400200074006f002000560049004f010000009d000006a20000000000000000fb00000016004400410056004900530020006600720061006d00650000000000ffffffff00000000000000000000073f0000025000000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
Selection:
collapsed: false
Time:
collapsed: false
Tool Properties:
collapsed: false
Views:
collapsed: true
Width: 1855
X: 1985
Y: 24

@@ -0,0 +1,45 @@
#pragma once
#include <esim/common/types.hpp>
#include <esim/visualization/publisher_interface.hpp>
#include <fstream>
namespace event_camera_simulator {
class AdaptiveSamplingBenchmarkPublisher : public Publisher
{
public:
using PixelLocation = std::pair<int,int>;
using PixelLocations = std::vector<PixelLocation>;
AdaptiveSamplingBenchmarkPublisher(const std::string &benchmark_folder,
const std::string &pixels_to_record_filename);
~AdaptiveSamplingBenchmarkPublisher();
virtual void imageCallback(const ImagePtrVector& images, Time t) override;
virtual void eventsCallback(const EventsVector& events) override;
virtual void opticFlowCallback(const OpticFlowPtrVector& optic_flows, Time t) override;
virtual void imageCorruptedCallback(const ImagePtrVector& corrupted_images, Time t) override {}
virtual void depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) override {}
virtual void poseCallback(const Transformation& T_W_B, const TransformationVector& T_W_Cs, Time t) override {}
virtual void twistCallback(const AngularVelocityVector& ws, const LinearVelocityVector& vs, Time t) override {}
virtual void imuCallback(const Vector3& acc, const Vector3& gyr, Time t) override {}
virtual void cameraInfoCallback(const ze::CameraRig::Ptr& camera_rig, Time t) override {}
virtual void pointcloudCallback(const PointCloudVector& pointclouds, Time t) override {}
static Publisher::Ptr createFromGflags();
private:
std::ofstream events_file_;
std::ofstream images_file_;
std::ofstream pixel_intensities_file_;
std::ofstream optic_flows_file_;
size_t image_index_;
PixelLocations pixels_to_record_;
};
} // namespace event_camera_simulator
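
A minimal construction sketch, assuming the package is built and a "benchmark" output folder already exists; the file and folder names below are made up for illustration. The pixels-to-record file is parsed as whitespace-separated "x y" pairs (see the constructor in the corresponding .cpp file further down in this commit).
#include <esim/visualization/adaptive_sampling_benchmark_publisher.hpp>
#include <fstream>
int main()
{
  // Hypothetical list of pixels whose intensity / optic flow will be recorded.
  std::ofstream pixels_file("pixels_to_record.txt");
  pixels_file << "10 20\n" << "64 48\n";
  pixels_file.close();
  // Writes events.txt, images.txt, pixel_intensities.txt and optic_flows.txt
  // into the "benchmark" folder as the simulator feeds it callbacks.
  event_camera_simulator::AdaptiveSamplingBenchmarkPublisher publisher(
      "benchmark", "pixels_to_record.txt");
  return 0;
}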

@@ -0,0 +1,29 @@
#pragma once
#include <esim/common/types.hpp>
#include <ze/common/macros.hpp>
namespace event_camera_simulator {
class Publisher
{
public:
ZE_POINTER_TYPEDEFS(Publisher);
Publisher() = default;
virtual ~Publisher() = default;
virtual void imageCallback(const ImagePtrVector& images, Time t) = 0;
virtual void imageCorruptedCallback(const ImagePtrVector& corrupted_images, Time t) = 0;
virtual void depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) = 0;
virtual void opticFlowCallback(const OpticFlowPtrVector& optic_flows, Time t) = 0;
virtual void eventsCallback(const EventsVector& events) = 0;
virtual void poseCallback(const Transformation& T_W_B, const TransformationVector& T_W_Cs, Time t) = 0;
virtual void twistCallback(const AngularVelocityVector& ws, const LinearVelocityVector& vs, Time t) = 0;
virtual void imuCallback(const Vector3& acc, const Vector3& gyr, Time t) = 0;
virtual void cameraInfoCallback(const ze::CameraRig::Ptr& camera_rig, Time t) = 0;
virtual void pointcloudCallback(const PointCloudVector& pointclouds, Time t) = 0;
};
} // namespace event_camera_simulator
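
As a sketch of how this interface is meant to be used (an assumption based on the pure-virtual callbacks above, not on code in this commit), a custom sink only has to override every callback; here everything is ignored except the image timestamps, which elsewhere in this commit are nanosecond counts (toRosTime() uses ros::Time::fromNSec()).
#include <esim/visualization/publisher_interface.hpp>
#include <glog/logging.h>
namespace event_camera_simulator {
class LoggingPublisher : public Publisher
{
public:
  // Log the (nanosecond) timestamp of every rendered image set.
  virtual void imageCallback(const ImagePtrVector& images, Time t) override
  {
    LOG(INFO) << "Received " << images.size() << " image(s) at t = " << t << " ns";
  }
  virtual void imageCorruptedCallback(const ImagePtrVector&, Time) override {}
  virtual void depthmapCallback(const DepthmapPtrVector&, Time) override {}
  virtual void opticFlowCallback(const OpticFlowPtrVector&, Time) override {}
  virtual void eventsCallback(const EventsVector&) override {}
  virtual void poseCallback(const Transformation&, const TransformationVector&, Time) override {}
  virtual void twistCallback(const AngularVelocityVector&, const LinearVelocityVector&, Time) override {}
  virtual void imuCallback(const Vector3&, const Vector3&, Time) override {}
  virtual void cameraInfoCallback(const ze::CameraRig::Ptr&, Time) override {}
  virtual void pointcloudCallback(const PointCloudVector&, Time) override {}
};
} // namespace event_camera_simulator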

@@ -0,0 +1,59 @@
#pragma once
#include <esim/common/types.hpp>
#include <esim/visualization/publisher_interface.hpp>
#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <geometry_msgs/TransformStamped.h>
#include <tf/tf.h>
#include <tf/transform_broadcaster.h>
namespace event_camera_simulator {
class RosPublisher : public Publisher
{
public:
RosPublisher(size_t num_cameras);
~RosPublisher();
virtual void imageCallback(const ImagePtrVector& images, Time t) override;
virtual void imageCorruptedCallback(const ImagePtrVector& corrupted_images, Time t) override;
virtual void depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) override;
virtual void opticFlowCallback(const OpticFlowPtrVector& optic_flows, Time t) override;
virtual void eventsCallback(const EventsVector& events) override;
virtual void poseCallback(const Transformation& T_W_B, const TransformationVector& T_W_Cs, Time t) override;
virtual void twistCallback(const AngularVelocityVector& ws, const LinearVelocityVector& vs, Time t) override;
virtual void imuCallback(const Vector3& acc, const Vector3& gyr, Time t) override;
virtual void cameraInfoCallback(const ze::CameraRig::Ptr& camera_rig, Time t) override;
virtual void pointcloudCallback(const PointCloudVector& pointclouds, Time t) override;
private:
size_t num_cameras_;
std::vector<cv::Size> sensor_sizes_;
std::shared_ptr<ros::NodeHandle> nh_;
std::shared_ptr<image_transport::ImageTransport> it_;
std::vector< std::shared_ptr<ros::Publisher> > event_pub_;
std::shared_ptr<ros::Publisher> pose_pub_;
std::shared_ptr<ros::Publisher> imu_pub_;
std::vector< std::shared_ptr<ros::Publisher> > pointcloud_pub_;
std::vector< std::shared_ptr<ros::Publisher> > camera_info_pub_;
std::vector< std::shared_ptr<image_transport::Publisher> > image_pub_;
std::vector< std::shared_ptr<image_transport::Publisher> > image_corrupted_pub_;
std::vector< std::shared_ptr<image_transport::Publisher> > depthmap_pub_;
std::vector< std::shared_ptr<ros::Publisher> > optic_flow_pub_;
std::vector< std::shared_ptr<ros::Publisher> > twist_pub_;
std::shared_ptr<tf::TransformBroadcaster> tf_broadcaster_;
Time last_published_camera_info_time_;
Time last_published_image_time_;
Time last_published_corrupted_image_time_;
Time last_published_depthmap_time_;
Time last_published_optic_flow_time_;
Time last_published_pointcloud_time_;
};
} // namespace event_camera_simulator

@@ -0,0 +1,54 @@
#pragma once
#include <esim/common/types.hpp>
#include <pcl_ros/point_cloud.h>
#include <pcl/point_types.h>
#include <sensor_msgs/Image.h>
#include <dvs_msgs/EventArray.h>
#include <geometry_msgs/PoseStamped.h>
#include <geometry_msgs/TwistStamped.h>
#include <sensor_msgs/Imu.h>
#include <sensor_msgs/CameraInfo.h>
#include <esim_msgs/OpticFlow.h>
namespace event_camera_simulator {
inline std::string getTopicName(int i, const std::string& suffix)
{
std::stringstream ss;
ss << "cam" << i << "/" << suffix;
return ss.str();
}
inline std::string getTopicName(const std::string& prefix, int i, const std::string& suffix)
{
std::stringstream ss;
ss << prefix << "/" << getTopicName(i, suffix);
return ss.str();
}
inline ros::Time toRosTime(Time t)
{
ros::Time ros_time;
ros_time.fromNSec(t);
return ros_time;
}
void pointCloudToMsg(const PointCloud& pointcloud, const std::string& frame_id, Time t,pcl::PointCloud<pcl::PointXYZRGB>::Ptr& msg);
void imageToMsg(const Image& image, Time t, sensor_msgs::ImagePtr& msg);
void depthmapToMsg(const Depthmap& depthmap, Time t, sensor_msgs::ImagePtr& msg);
void opticFlowToMsg(const OpticFlow& flow, Time t, esim_msgs::OpticFlowPtr& msg);
void eventsToMsg(const Events& events, int width, int height, dvs_msgs::EventArrayPtr& msg);
sensor_msgs::Imu imuToMsg(const Vector3& acc, const Vector3& gyr, Time t);
geometry_msgs::TwistStamped twistToMsg(const AngularVelocity& w, const LinearVelocity& v, Time t);
void cameraToMsg(const ze::Camera::Ptr& camera, Time t, sensor_msgs::CameraInfoPtr& msg);
} // namespace event_camera_simulator
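
A small usage sketch for the helpers above; the "/esim" prefix is an arbitrary example, and the printed strings follow directly from the stringstream composition (Time is treated as a nanosecond count, matching toRosTime()'s use of ros::Time::fromNSec()).
#include <esim/visualization/ros_utils.hpp>
#include <glog/logging.h>
void rosUtilsExample()
{
  using namespace event_camera_simulator;
  LOG(INFO) << getTopicName(0, "events");              // prints "cam0/events"
  LOG(INFO) << getTopicName("/esim", 1, "image_raw");  // prints "/esim/cam1/image_raw"
  const ros::Time stamp = toRosTime(1500000000ULL);    // 1.5 s after t = 0
  LOG(INFO) << "stamp = " << stamp.toSec() << " s";
}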

@@ -0,0 +1,45 @@
#pragma once
#include <esim/common/types.hpp>
#include <esim/visualization/publisher_interface.hpp>
#include <rosbag/bag.h>
namespace event_camera_simulator {
class RosbagWriter : public Publisher
{
public:
RosbagWriter(const std::string& path_to_output_bag,
size_t num_cameras);
~RosbagWriter();
virtual void imageCallback(const ImagePtrVector& images, Time t) override;
virtual void imageCorruptedCallback(const ImagePtrVector& corrupted_images, Time t) override;
virtual void depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) override;
virtual void opticFlowCallback(const OpticFlowPtrVector& optic_flows, Time t) override;
virtual void eventsCallback(const EventsVector& events) override;
virtual void poseCallback(const Transformation& T_W_B, const TransformationVector& T_W_Cs, Time t) override;
virtual void twistCallback(const AngularVelocityVector& ws, const LinearVelocityVector& vs, Time t) override;
virtual void imuCallback(const Vector3& acc, const Vector3& gyr, Time t) override;
virtual void cameraInfoCallback(const ze::CameraRig::Ptr& camera_rig, Time t) override;
virtual void pointcloudCallback(const PointCloudVector& pointclouds, Time t) override;
static Publisher::Ptr createBagWriterFromGflags(size_t num_cameras);
private:
size_t num_cameras_;
std::vector<cv::Size> sensor_sizes_;
rosbag::Bag bag_;
const std::string topic_name_prefix_ = "";
Time last_published_camera_info_time_;
Time last_published_image_time_;
Time last_published_corrupted_image_time_;
Time last_published_depthmap_time_;
Time last_published_optic_flow_time_;
Time last_published_pointcloud_time_;
};
} // namespace event_camera_simulator
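
A sketch of the intended creation path (the --path_to_output_bag flag is defined in the rosbag writer's .cpp file further down in this commit; the bag path below is only an example).
#include <esim/visualization/rosbag_writer.hpp>
#include <gflags/gflags.h>
DECLARE_string(path_to_output_bag);
void makeBagWriterExample()
{
  using namespace event_camera_simulator;
  FLAGS_path_to_output_bag = "/tmp/esim_example.bag";  // normally set on the command line
  // Returns nullptr when the flag is empty; otherwise the returned Publisher
  // writes every callback it receives into the bag and closes it on destruction.
  Publisher::Ptr writer = RosbagWriter::createBagWriterFromGflags(1 /* num_cameras */);
}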

@@ -0,0 +1,45 @@
#pragma once
#include <esim/common/types.hpp>
#include <esim/visualization/publisher_interface.hpp>
#include <fstream>
namespace event_camera_simulator {
class SyntheticOpticFlowPublisher : public Publisher
{
public:
SyntheticOpticFlowPublisher(const std::string &output_folder);
~SyntheticOpticFlowPublisher();
virtual void imageCallback(const ImagePtrVector& images, Time t) override {
CHECK_EQ(images.size(), 1);
if(sensor_size_.width == 0 || sensor_size_.height == 0)
{
sensor_size_ = images[0]->size();
}
}
virtual void eventsCallback(const EventsVector& events) override;
virtual void opticFlowCallback(const OpticFlowPtrVector& optic_flows, Time t) override {}
virtual void imageCorruptedCallback(const ImagePtrVector& corrupted_images, Time t) override {}
virtual void depthmapCallback(const DepthmapPtrVector& depthmaps, Time t) override {}
virtual void poseCallback(const Transformation& T_W_B, const TransformationVector& T_W_Cs, Time t) override {}
virtual void twistCallback(const AngularVelocityVector& ws, const LinearVelocityVector& vs, Time t) override {}
virtual void imuCallback(const Vector3& acc, const Vector3& gyr, Time t) override {}
virtual void cameraInfoCallback(const ze::CameraRig::Ptr& camera_rig, Time t) override {}
virtual void pointcloudCallback(const PointCloudVector& pointclouds, Time t) override {}
static Publisher::Ptr createFromGflags();
private:
std::string output_folder_;
cv::Size sensor_size_;
std::ofstream events_file_;
Events events_; // buffer containing all the events since the beginning
};
} // namespace event_camera_simulator

@@ -0,0 +1,31 @@
<?xml version="1.0"?>
<package format="2">
<name>esim_visualization</name>
<version>0.0.0</version>
<description>Visualizers for the event camera simulator.</description>
<maintainer email="rebecq@ifi.uzh.ch">Henri Rebecq</maintainer>
<license>GPLv3</license>
<buildtool_depend>catkin</buildtool_depend>
<buildtool_depend>catkin_simple</buildtool_depend>
<depend>esim_common</depend>
<depend>esim_msgs</depend>
<depend>gflags_catkin</depend>
<depend>glog_catkin</depend>
<depend>roscpp</depend>
<depend>dvs_msgs</depend>
<depend>sensor_msgs</depend>
<depend>cv_bridge</depend>
<depend>geometry_msgs</depend>
<depend>image_transport</depend>
<depend>pcl_ros</depend>
<depend>tf</depend>
<depend>minkindr_conversions</depend>
<depend>rosbag</depend>
<depend>rospy</depend>
<depend>dvs_renderer</depend>
</package>

@@ -0,0 +1,12 @@
#!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup(
packages=['esim_visualization'],
package_dir={'': 'py'},
install_requires=['rospy', 'sensor_msgs', 'esim_msgs'],
)
setup(**d)

@@ -0,0 +1,135 @@
#include <esim/visualization/adaptive_sampling_benchmark_publisher.hpp>
#include <esim/common/utils.hpp>
#include <ze/common/path_utils.hpp>
#include <ze/common/file_utils.hpp>
#include <ze/common/time_conversions.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <gflags/gflags.h>
#include <glog/logging.h>
DEFINE_string(adaptive_sampling_benchmark_folder, "",
"Folder in which to output the results.");
DEFINE_string(adaptive_sampling_benchmark_pixels_to_record_file, "",
"File containing the pixel locations to record.");
namespace event_camera_simulator {
AdaptiveSamplingBenchmarkPublisher::AdaptiveSamplingBenchmarkPublisher(const std::string& benchmark_folder,
const std::string& pixels_to_record_filename)
: image_index_(0)
{
ze::openOutputFileStream(ze::joinPath(benchmark_folder, "events.txt"),
&events_file_);
ze::openOutputFileStream(ze::joinPath(benchmark_folder, "images.txt"),
&images_file_);
ze::openOutputFileStream(ze::joinPath(benchmark_folder, "pixel_intensities.txt"),
&pixel_intensities_file_);
ze::openOutputFileStream(ze::joinPath(benchmark_folder, "optic_flows.txt"),
&optic_flows_file_);
// Load and parse the file containing the list of pixel locations
// whose intensity values to record
std::ifstream pixels_to_record_file;
if(pixels_to_record_filename != "")
{
ze::openFileStream(pixels_to_record_filename, &pixels_to_record_file);
int x, y;
LOG(INFO) << "Pixels that will be recorded: ";
while(pixels_to_record_file >> x >> y)
{
LOG(INFO) << x << " , " << y;
pixels_to_record_.push_back(PixelLocation(x,y));
}
}
}
Publisher::Ptr AdaptiveSamplingBenchmarkPublisher::createFromGflags()
{
if(FLAGS_adaptive_sampling_benchmark_folder == "")
{
LOG(WARNING) << "Empty benchmark folder string: will not write benchmark files";
return nullptr;
}
return std::make_shared<AdaptiveSamplingBenchmarkPublisher>(FLAGS_adaptive_sampling_benchmark_folder,
FLAGS_adaptive_sampling_benchmark_pixels_to_record_file);
}
AdaptiveSamplingBenchmarkPublisher::~AdaptiveSamplingBenchmarkPublisher()
{
// finish writing files
events_file_.close();
images_file_.close();
pixel_intensities_file_.close();
optic_flows_file_.close();
}
void AdaptiveSamplingBenchmarkPublisher::imageCallback(const ImagePtrVector& images, Time t)
{
CHECK_EQ(images.size(), 1);
images_file_ << t << std::endl;
ImagePtr img = images[0];
cv::Mat img_8bit;
img->convertTo(img_8bit, CV_8U, 255);
if(image_index_ == 0)
{
static const std::vector<int> compression_params = {CV_IMWRITE_PNG_COMPRESSION, 0};
std::stringstream ss;
ss << ze::joinPath(FLAGS_adaptive_sampling_benchmark_folder, "image_");
ss << image_index_ << ".png";
LOG(INFO) << ss.str();
cv::imwrite(ss.str(), img_8bit, compression_params);
}
for(const PixelLocation& pixel_loc : pixels_to_record_)
{
// write line in the form "x y I(x,y)"
const int x = pixel_loc.first;
const int y = pixel_loc.second;
pixel_intensities_file_ << x << " "
<< y << " "
<< (*images[0])(y,x) << std::endl;
}
image_index_++;
}
void AdaptiveSamplingBenchmarkPublisher::opticFlowCallback(const OpticFlowPtrVector& optic_flows, Time t)
{
CHECK_EQ(optic_flows.size(), 1);
for(const PixelLocation& pixel_loc : pixels_to_record_)
{
// write line in the form "x y optic_flow(x,y)[0] optic_flow(x,y)[1]"
const int x = pixel_loc.first;
const int y = pixel_loc.second;
optic_flows_file_ << x << " "
<< y << " "
<< (*optic_flows[0])(y,x)[0] << " "
<< (*optic_flows[0])(y,x)[1]
<< std::endl;
}
}
void AdaptiveSamplingBenchmarkPublisher::eventsCallback(const EventsVector& events)
{
CHECK_EQ(events.size(), 1);
for(const Event& e : events[0])
{
events_file_ << e.t << " " << e.x << " " << e.y << " " << (e.pol? 1 : 0) << std::endl;
}
}
} // namespace event_camera_simulator

@@ -0,0 +1,110 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Node that listens to esim_msgs/OpticFlow messages and publishes the flow
both as a color-coded image and as a vector field of arrows.
"""
import rospy
import numpy as np
import cv2
from sensor_msgs.msg import Image
from esim_msgs.msg import OpticFlow
from cv_bridge import CvBridge
class FlowConverterNode:
_p0 = None
def __init__(self):
self._bridge = CvBridge()
rospy.Subscriber("flow", OpticFlow, self._OpticFlowCallback)
self.pub_color = rospy.Publisher("flow_color", Image, queue_size=0)
self.pub_arrows = rospy.Publisher("flow_arrows", Image, queue_size=0)
self.arrows_step = rospy.get_param('~arrows_step', 12)
self.arrows_scale = rospy.get_param('~arrows_scale', 0.1)
self.arrows_upsample_factor = rospy.get_param('~arrows_upsample_factor', 2)
self.publish_rate = rospy.get_param('~publish_rate', 20)
rospy.loginfo('Started flow converter node')
rospy.loginfo('Step size between arrows: {}'.format(self.arrows_step))
rospy.loginfo('Scale factor: {:.2f}'.format(self.arrows_scale))
rospy.loginfo('Upsample factor: x{}'.format(self.arrows_upsample_factor))
rospy.loginfo('Publish rate: {} Hz'.format(self.publish_rate))
self.arrows_color = (0,0,255) # red
self.first_msg_received = False
def reset(self):
self.first_msg_received = False
self.last_msg_stamp = None
def _OpticFlowCallback(self, msg):
if not self.first_msg_received:
rospy.logdebug('Received first message at stamp: {}'.format(msg.header.stamp.to_sec()))
self.last_msg_stamp = msg.header.stamp
self.first_msg_received = True
self.convertAndPublishFlow(msg)
if msg.header.stamp.to_sec() < self.last_msg_stamp.to_sec():
rospy.loginfo('Optic flow converter reset because new stamp is older than the latest stamp')
self.reset()
return
time_since_last_published_msg = (msg.header.stamp - self.last_msg_stamp).to_sec()
rospy.logdebug('Time since last published message: {}'.format(time_since_last_published_msg))
if time_since_last_published_msg >= 1./float(self.publish_rate):
self.last_msg_stamp = msg.header.stamp
self.convertAndPublishFlow(msg)
def convertAndPublishFlow(self, msg):
height, width = msg.height, msg.width
flow_x = np.array(msg.flow_x).reshape((height, width))
flow_y = np.array(msg.flow_y).reshape((height, width))
self.publishColorCodedFlow(flow_x, flow_y, msg.header.stamp)
self.publishArrowFlow(flow_x, flow_y, msg.header.stamp)
def publishColorCodedFlow(self, flow_x, flow_y, stamp):
assert(flow_x.shape == flow_y.shape)
height, width = flow_x.shape
magnitude, angle = cv2.cartToPolar(flow_x, flow_y)
magnitude = cv2.normalize(src=magnitude, dst=magnitude, alpha=0, beta=255, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8U)
hsv = np.zeros((height,width,3), dtype=np.uint8)
self.hsv = hsv.copy()
hsv[...,1] = 255
hsv[...,0] = 0.5 * angle * 180 / np.pi
hsv[...,2] = cv2.normalize(magnitude,None,0,255,cv2.NORM_MINMAX)
bgr = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
img_msg = self._bridge.cv2_to_imgmsg(bgr, 'bgr8')
img_msg.header.stamp = stamp
self.pub_color.publish(img_msg)
def publishArrowFlow(self, flow_x, flow_y, stamp):
assert(flow_x.shape == flow_y.shape)
height, width = flow_x.shape
step = self.arrows_step
scale = self.arrows_scale
ss = self.arrows_upsample_factor
arrow_field = np.zeros((ss * height, ss * width,3), dtype=np.uint8)
for x in np.arange(0, width, step):
for y in np.arange(0, height, step):
vx = flow_x[y,x]
vy = flow_y[y,x]
cv2.arrowedLine(arrow_field, (ss * x, ss * y),
(int(ss * (x + scale * vx)), int(ss * (y + scale * vy))), color=self.arrows_color, thickness=1)
img_msg = self._bridge.cv2_to_imgmsg(arrow_field, 'bgr8')
img_msg.header.stamp = stamp
self.pub_arrows.publish(img_msg)
if __name__ == '__main__':
rospy.init_node('flow_converter_node')
node = FlowConverterNode()
rospy.spin()
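
For reference, the same HSV color-coding as publishColorCodedFlow() above, sketched with the C++ OpenCV API (hue encodes the flow direction, value the normalized magnitude, saturation is fixed at 255); this is a standalone approximation, not part of the package.
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <vector>
cv::Mat colorCodeFlow(const cv::Mat& flow_x, const cv::Mat& flow_y)
{
  CV_Assert(flow_x.size() == flow_y.size() && flow_x.type() == CV_32F && flow_y.type() == CV_32F);
  cv::Mat magnitude, angle;
  cv::cartToPolar(flow_x, flow_y, magnitude, angle);  // angle in radians, [0, 2*pi)
  std::vector<cv::Mat> channels(3);
  angle.convertTo(channels[0], CV_8U, 0.5 * 180.0 / CV_PI);              // hue in [0, 180]
  channels[1] = cv::Mat(flow_x.size(), CV_8U, cv::Scalar(255));          // full saturation
  cv::normalize(magnitude, channels[2], 0, 255, cv::NORM_MINMAX, CV_8U); // value = magnitude
  cv::Mat hsv, bgr;
  cv::merge(channels, hsv);
  cv::cvtColor(hsv, bgr, cv::COLOR_HSV2BGR);
  return bgr;
}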

@@ -0,0 +1,400 @@
#include <esim/visualization/ros_publisher.hpp>
#include <esim/common/utils.hpp>
#include <ze/common/time_conversions.hpp>
#include <esim/visualization/ros_utils.hpp>
#include <minkindr_conversions/kindr_msg.h>
#include <minkindr_conversions/kindr_tf.h>
#include <gflags/gflags.h>
#include <glog/logging.h>
DEFINE_double(ros_publisher_camera_info_rate, 10,
"Camera info (maximum) publish rate, in Hz");
DEFINE_double(ros_publisher_frame_rate, 25,
"(Maximum) frame rate, in Hz");
DEFINE_double(ros_publisher_depth_rate, 25,
"(Maximum) depthmap publish rate, in Hz");
DEFINE_double(ros_publisher_pointcloud_rate, 25,
"(Maximum) point cloud publish rate, in Hz");
DEFINE_double(ros_publisher_optic_flow_rate, 25,
"(Maximum) optic flow map publish rate, in Hz");
namespace event_camera_simulator {
RosPublisher::RosPublisher(size_t num_cameras)
{
CHECK_GE(num_cameras, 1);
num_cameras_ = num_cameras;
sensor_sizes_ = std::vector<cv::Size>(num_cameras_);
// Initialize ROS if it was not initialized before.
if(!ros::isInitialized())
{
VLOG(1) << "Initializing ROS";
int argc = 0;
ros::init(argc, nullptr, std::string("ros_publisher"), ros::init_options::NoSigintHandler);
}
// Create node and subscribe.
nh_.reset(new ros::NodeHandle(""));
it_.reset(new image_transport::ImageTransport(*nh_));
// Setup ROS publishers for images, events, poses, depth maps, camera info, etc.
for(size_t i=0; i<num_cameras_; ++i)
{
event_pub_.emplace_back(
new ros::Publisher(
nh_->advertise<dvs_msgs::EventArray> (getTopicName(i, "events"), 0)));
image_pub_.emplace_back(
new image_transport::Publisher(
it_->advertise(getTopicName(i, "image_raw"), 0)));
image_corrupted_pub_.emplace_back(
new image_transport::Publisher(
it_->advertise(getTopicName(i, "image_corrupted"), 0)));
depthmap_pub_.emplace_back(
new image_transport::Publisher(
it_->advertise(getTopicName(i, "depthmap"), 0)));
optic_flow_pub_.emplace_back(
new ros::Publisher(
nh_->advertise<esim_msgs::OpticFlow> (getTopicName(i, "optic_flow"), 0)));
camera_info_pub_.emplace_back(
new ros::Publisher(
nh_->advertise<sensor_msgs::CameraInfo> (getTopicName(i, "camera_info"), 0)));
twist_pub_.emplace_back(
new ros::Publisher(
nh_->advertise<geometry_msgs::TwistStamped> (getTopicName(i, "twist"), 0)));
pointcloud_pub_.emplace_back(
new ros::Publisher(
nh_->advertise<pcl::PointCloud<pcl::PointXYZ>> (getTopicName(i, "pointcloud"), 0)));
}
pose_pub_.reset(new ros::Publisher(nh_->advertise<geometry_msgs::PoseStamped> ("pose", 0)));
imu_pub_.reset(new ros::Publisher(nh_->advertise<sensor_msgs::Imu> ("imu", 0)));
tf_broadcaster_.reset(new tf::TransformBroadcaster());
last_published_camera_info_time_ = 0;
last_published_image_time_ = 0;
last_published_corrupted_image_time_ = 0;
last_published_depthmap_time_ = 0;
last_published_optic_flow_time_ = 0;
last_published_pointcloud_time_ = 0;
}
RosPublisher::~RosPublisher()
{
for(size_t i=0; i<num_cameras_; ++i)
{
event_pub_[i]->shutdown();
image_pub_[i]->shutdown();
image_corrupted_pub_[i]->shutdown();
depthmap_pub_[i]->shutdown();
optic_flow_pub_[i]->shutdown();
camera_info_pub_[i]->shutdown();
twist_pub_[i]->shutdown();
pointcloud_pub_[i]->shutdown();
}
pose_pub_->shutdown();
ros::shutdown();
}
void RosPublisher::pointcloudCallback(const PointCloudVector& pointclouds, Time t)
{
CHECK_EQ(pointcloud_pub_.size(), num_cameras_);
CHECK_EQ(pointclouds.size(), num_cameras_);
for(size_t i=0; i<num_cameras_; ++i)
{
const PointCloud& pcl_camera = pointclouds[i];
CHECK(pointcloud_pub_[i]);
if(pointcloud_pub_[i]->getNumSubscribers() == 0)
{
continue;
}
Duration min_time_interval_between_published_pointclouds_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_pointcloud_rate);
if(last_published_pointcloud_time_ > 0 && t - last_published_pointcloud_time_ < min_time_interval_between_published_pointclouds_)
{
return;
}
pcl::PointCloud<pcl::PointXYZRGB>::Ptr msg (new pcl::PointCloud<pcl::PointXYZRGB>);
std::stringstream ss; ss << "cam" << i;
pointCloudToMsg(pointclouds[i], ss.str(), t, msg);
pointcloud_pub_[i]->publish(msg);
}
last_published_pointcloud_time_ = t;
}
void RosPublisher::imageCallback(const ImagePtrVector& images, Time t)
{
CHECK_EQ(image_pub_.size(), num_cameras_);
for(size_t i=0; i<num_cameras_; ++i)
{
sensor_sizes_[i] = images[i]->size();
CHECK(image_pub_[i]);
if(image_pub_[i]->getNumSubscribers() == 0)
{
continue;
}
static const Duration min_time_interval_between_published_images_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_frame_rate);
if(last_published_image_time_ > 0 && t - last_published_image_time_ < min_time_interval_between_published_images_)
{
return;
}
if(images[i])
{
sensor_msgs::ImagePtr msg;
imageToMsg(*images[i], t, msg);
image_pub_[i]->publish(msg);
}
}
last_published_image_time_ = t;
}
void RosPublisher::imageCorruptedCallback(const ImagePtrVector& corrupted_images, Time t)
{
CHECK_EQ(image_corrupted_pub_.size(), num_cameras_);
for(size_t i=0; i<num_cameras_; ++i)
{
CHECK(image_corrupted_pub_[i]);
if(image_corrupted_pub_[i]->getNumSubscribers() == 0)
{
continue;
}
static const Duration min_time_interval_between_published_images_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_frame_rate);
if(last_published_corrupted_image_time_ > 0 && t - last_published_corrupted_image_time_ < min_time_interval_between_published_images_)
{
return;
}
if(corrupted_images[i])
{
sensor_msgs::ImagePtr msg;
imageToMsg(*corrupted_images[i], t, msg);
image_corrupted_pub_[i]->publish(msg);
}
}
last_published_corrupted_image_time_ = t;
}
void RosPublisher::depthmapCallback(const DepthmapPtrVector& depthmaps, Time t)
{
CHECK_EQ(depthmap_pub_.size(), num_cameras_);
for(size_t i=0; i<num_cameras_; ++i)
{
CHECK(depthmap_pub_[i]);
if(depthmap_pub_[i]->getNumSubscribers() == 0)
{
continue;
}
static const Duration min_time_interval_between_published_depthmaps_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_depth_rate);
if(last_published_depthmap_time_ > 0 && t - last_published_depthmap_time_ < min_time_interval_between_published_depthmaps_)
{
return;
}
if(depthmaps[i])
{
sensor_msgs::ImagePtr msg;
depthmapToMsg(*depthmaps[i], t, msg);
depthmap_pub_[i]->publish(msg);
}
}
last_published_depthmap_time_ = t;
}
void RosPublisher::opticFlowCallback(const OpticFlowPtrVector& optic_flows, Time t)
{
CHECK_EQ(optic_flow_pub_.size(), num_cameras_);
for(size_t i=0; i<num_cameras_; ++i)
{
CHECK(optic_flow_pub_[i]);
if(optic_flow_pub_[i]->getNumSubscribers() == 0)
{
continue;
}
static const Duration min_time_interval_between_published_optic_flows_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_optic_flow_rate);
if(last_published_optic_flow_time_ > 0 && t - last_published_optic_flow_time_ < min_time_interval_between_published_optic_flows_)
{
return;
}
if(optic_flows[i])
{
esim_msgs::OpticFlow::Ptr msg;
msg.reset(new esim_msgs::OpticFlow);
opticFlowToMsg(*optic_flows[i], t, msg);
optic_flow_pub_[i]->publish(msg);
}
}
last_published_optic_flow_time_ = t;
}
void RosPublisher::eventsCallback(const EventsVector& events)
{
CHECK_EQ(event_pub_.size(), num_cameras_);
for(size_t i=0; i<num_cameras_; ++i)
{
if(sensor_sizes_[i].width == 0 || sensor_sizes_[i].height == 0)
{
continue;
}
if(events[i].empty())
{
continue;
}
CHECK(event_pub_[i]);
if(event_pub_[i]->getNumSubscribers() == 0)
{
continue;
}
dvs_msgs::EventArrayPtr msg;
msg.reset(new dvs_msgs::EventArray);
eventsToMsg(events[i], sensor_sizes_[i].width, sensor_sizes_[i].height, msg);
event_pub_[i]->publish(msg);
}
}
void RosPublisher::poseCallback(const Transformation& T_W_B,
const TransformationVector& T_W_Cs,
Time t)
{
if(T_W_Cs.size() != num_cameras_)
{
LOG(WARNING) << "Number of poses is different than number of cameras."
<< "Will not output poses.";
return;
}
// Publish to tf
tf::StampedTransform bt;
bt.child_frame_id_ = "body";
bt.frame_id_ = "map";
bt.stamp_ = toRosTime(t);
tf::transformKindrToTF(T_W_B, &bt);
tf_broadcaster_->sendTransform(bt);
for(size_t i=0; i<num_cameras_; ++i)
{
std::stringstream ss;
ss << "cam" << i;
tf::StampedTransform bt;
bt.child_frame_id_ = ss.str();
bt.frame_id_ = "map";
bt.stamp_ = toRosTime(t);
tf::transformKindrToTF(T_W_Cs[i], &bt);
tf_broadcaster_->sendTransform(bt);
}
// Publish pose message
geometry_msgs::PoseStamped pose_stamped_msg;
tf::poseStampedKindrToMsg(T_W_B, toRosTime(t), "map", &pose_stamped_msg);
pose_pub_->publish(pose_stamped_msg);
}
void RosPublisher::twistCallback(const AngularVelocityVector &ws, const LinearVelocityVector &vs, Time t)
{
if(ws.size() != num_cameras_
|| vs.size() != num_cameras_)
{
LOG(WARNING) << "Number of twists is different than number of cameras."
<< "Will not output twists.";
return;
}
CHECK_EQ(ws.size(), num_cameras_);
CHECK_EQ(vs.size(), num_cameras_);
CHECK_EQ(twist_pub_.size(), num_cameras_);
for(size_t i=0; i<num_cameras_; ++i)
{
CHECK(twist_pub_[i]);
if(twist_pub_[i]->getNumSubscribers() == 0)
{
continue;
}
const geometry_msgs::TwistStamped msg = twistToMsg(ws[i], vs[i], t);
twist_pub_[i]->publish(msg);
}
}
void RosPublisher::imuCallback(const Vector3& acc, const Vector3& gyr, Time t)
{
if(imu_pub_->getNumSubscribers() == 0)
{
return;
}
const sensor_msgs::Imu msg = imuToMsg(acc, gyr, t);
imu_pub_->publish(msg);
}
void RosPublisher::cameraInfoCallback(const ze::CameraRig::Ptr& camera_rig, Time t)
{
CHECK(camera_rig);
CHECK_EQ(camera_rig->size(), num_cameras_);
static const Duration min_time_interval_between_published_camera_info_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_camera_info_rate);
if(last_published_camera_info_time_ > 0 && t - last_published_camera_info_time_ < min_time_interval_between_published_camera_info_)
{
return;
}
for(size_t i=0; i<num_cameras_; ++i)
{
CHECK(camera_info_pub_[i]);
if(camera_info_pub_[i]->getNumSubscribers() == 0)
{
continue;
}
sensor_msgs::CameraInfoPtr msg;
msg.reset(new sensor_msgs::CameraInfo);
cameraToMsg(camera_rig->atShared(i), t, msg);
camera_info_pub_[i]->publish(msg);
}
last_published_camera_info_time_ = t;
}
} // namespace event_camera_simulator
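
The rate limiting that recurs in the callbacks above, isolated as a sketch (Time and Duration are nanosecond counters here, and ze::secToNanosec() converts the period derived from the flag to nanoseconds); anything arriving less than one period after the last accepted sample is skipped.
#include <esim/common/types.hpp>
#include <ze/common/time_conversions.hpp>
namespace event_camera_simulator {
// Returns true if a sample at time t should be published given the maximum rate.
inline bool shouldPublish(Time t, Time& last_accepted_time, double max_rate_hz)
{
  const Duration min_interval = ze::secToNanosec(1.0 / max_rate_hz);
  if (last_accepted_time > 0 && t - last_accepted_time < min_interval)
  {
    return false;  // too soon after the previously accepted sample
  }
  last_accepted_time = t;
  return true;
}
} // namespace event_camera_simulator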

@@ -0,0 +1,169 @@
#include <esim/visualization/ros_utils.hpp>
#include <esim/common/utils.hpp>
#include <pcl_conversions/pcl_conversions.h>
#include <cv_bridge/cv_bridge.h>
namespace event_camera_simulator {
void pointCloudToMsg(const PointCloud& pointcloud, const std::string& frame_id, Time t, pcl::PointCloud<pcl::PointXYZRGB>::Ptr& msg)
{
CHECK(msg);
msg->header.frame_id = frame_id;
msg->height = pointcloud.size();
msg->width = 1;
for(auto& p_c : pointcloud)
{
pcl::PointXYZRGB p;
p.x = p_c.xyz(0);
p.y = p_c.xyz(1);
p.z = p_c.xyz(2);
p.r = p_c.rgb(0);
p.g = p_c.rgb(1);
p.b = p_c.rgb(2);
msg->points.push_back(p);
}
pcl_conversions::toPCL(toRosTime(t), msg->header.stamp);
}
void imageToMsg(const Image& image, Time t, sensor_msgs::ImagePtr& msg)
{
cv_bridge::CvImage cv_image;
image.convertTo(cv_image.image, CV_8U, 255.0);
cv_image.encoding = "mono8";
cv_image.header.stamp = toRosTime(t);
msg = cv_image.toImageMsg();
}
void depthmapToMsg(const Depthmap& depthmap, Time t, sensor_msgs::ImagePtr& msg)
{
cv_bridge::CvImage cv_depthmap;
depthmap.convertTo(cv_depthmap.image, CV_32FC1);
cv_depthmap.encoding = "32FC1";
cv_depthmap.header.stamp = toRosTime(t);
msg = cv_depthmap.toImageMsg();
}
void opticFlowToMsg(const OpticFlow& flow, Time t, esim_msgs::OpticFlowPtr& msg)
{
CHECK(msg);
msg->header.stamp = toRosTime(t);
const int height = flow.rows;
const int width = flow.cols;
msg->height = height;
msg->width = width;
msg->flow_x.resize(height * width);
msg->flow_y.resize(height * width);
for(int y=0; y<height; ++y)
{
for(int x=0; x<width; ++x)
{
msg->flow_x[x + y * width] = static_cast<float>(flow(y,x)[0]);
msg->flow_y[x + y * width] = static_cast<float>(flow(y,x)[1]);
}
}
}
void eventsToMsg(const Events& events, int width, int height, dvs_msgs::EventArrayPtr& msg)
{
CHECK(msg);
std::vector<dvs_msgs::Event> events_list;
for(const Event& e : events)
{
dvs_msgs::Event ev;
ev.x = e.x;
ev.y = e.y;
ev.ts = toRosTime(e.t);
ev.polarity = e.pol;
events_list.push_back(ev);
}
msg->events = events_list;
msg->height = height;
msg->width = width;
msg->header.stamp = events_list.back().ts;
}
sensor_msgs::Imu imuToMsg(const Vector3& acc, const Vector3& gyr, Time t)
{
sensor_msgs::Imu imu;
imu.header.stamp = toRosTime(t);
imu.linear_acceleration.x = acc(0);
imu.linear_acceleration.y = acc(1);
imu.linear_acceleration.z = acc(2);
imu.angular_velocity.x = gyr(0);
imu.angular_velocity.y = gyr(1);
imu.angular_velocity.z = gyr(2);
return imu;
}
geometry_msgs::TwistStamped twistToMsg(const AngularVelocity& w, const LinearVelocity& v, Time t)
{
geometry_msgs::TwistStamped twist;
twist.header.stamp = toRosTime(t);
twist.twist.angular.x = w(0);
twist.twist.angular.y = w(1);
twist.twist.angular.z = w(2);
twist.twist.linear.x = v(0);
twist.twist.linear.y = v(1);
twist.twist.linear.z = v(2);
return twist;
}
void cameraToMsg(const ze::Camera::Ptr& camera, Time t, sensor_msgs::CameraInfoPtr& msg)
{
CHECK(msg);
msg->width = camera->width();
msg->height = camera->height();
msg->header.stamp = toRosTime(t);
CalibrationMatrix K = calibrationMatrixFromCamera(camera);
boost::array<double, 9> K_vec;
std::vector<double> D_vec;
for(int i=0; i<3; ++i)
{
for(int j=0; j<3; ++j)
{
K_vec[j+i*3] = static_cast<double>(K(i,j));
}
}
switch(camera->type())
{
case ze::CameraType::PinholeRadialTangential:
case ze::CameraType::Pinhole:
msg->distortion_model = "plumb_bob";
break;
case ze::CameraType::PinholeEquidistant:
msg->distortion_model = "equidistant";
break;
case ze::CameraType::PinholeFov:
msg->distortion_model = "fov";
break;
default:
LOG(WARNING) << "Unknown camera distortion model";
msg->distortion_model = "";
break;
}
for(int j=0; j<camera->distortionParameters().rows(); ++j)
{
D_vec.push_back(static_cast<double>(camera->distortionParameters()(j))); // @TODO: use the distortion params from the camera
}
msg->K = K_vec;
msg->D = D_vec;
// TODO: Add R and P
}
} // namespace event_camera_simulator
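
Reading a flow vector back out of an esim_msgs::OpticFlow message produced by opticFlowToMsg() above; as the loops show, the arrays are stored row-major, so pixel (x, y) sits at index x + y * width (sketch only).
#include <esim_msgs/OpticFlow.h>
#include <opencv2/core/core.hpp>
inline cv::Vec2f flowAt(const esim_msgs::OpticFlow& msg, int x, int y)
{
  const int idx = x + y * static_cast<int>(msg.width);
  return cv::Vec2f(msg.flow_x[idx], msg.flow_y[idx]);
}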

@@ -0,0 +1,308 @@
#include <esim/visualization/rosbag_writer.hpp>
#include <esim/common/utils.hpp>
#include <ze/common/time_conversions.hpp>
#include <esim/visualization/ros_utils.hpp>
#include <minkindr_conversions/kindr_msg.h>
#include <minkindr_conversions/kindr_tf.h>
#include <tf/tfMessage.h>
#include <gflags/gflags.h>
#include <glog/logging.h>
DECLARE_double(ros_publisher_camera_info_rate);
DECLARE_double(ros_publisher_frame_rate);
DECLARE_double(ros_publisher_depth_rate);
DECLARE_double(ros_publisher_pointcloud_rate);
DECLARE_double(ros_publisher_optic_flow_rate);
DEFINE_string(path_to_output_bag, "",
"Path to which save the output bag file.");
namespace event_camera_simulator {
RosbagWriter::RosbagWriter(const std::string& path_to_output_bag, size_t num_cameras)
{
CHECK_GE(num_cameras, 1);
num_cameras_ = num_cameras;
sensor_sizes_ = std::vector<cv::Size>(num_cameras_);
try
{
bag_.open(path_to_output_bag, rosbag::bagmode::Write);
}
catch(const rosbag::BagIOException& e)
{
LOG(FATAL) << "Error: could not open rosbag: " << path_to_output_bag << ": " << e.what();
return;
}
LOG(INFO) << "Will write to bag: " << path_to_output_bag;
last_published_camera_info_time_ = 0;
last_published_image_time_ = 0;
last_published_corrupted_image_time_ = 0;
last_published_depthmap_time_ = 0;
last_published_optic_flow_time_ = 0;
last_published_pointcloud_time_ = 0;
}
Publisher::Ptr RosbagWriter::createBagWriterFromGflags(size_t num_cameras)
{
if(FLAGS_path_to_output_bag == "")
{
LOG(INFO) << "Empty output bag string: will not write to rosbag";
return nullptr;
}
return std::make_shared<RosbagWriter>(FLAGS_path_to_output_bag, num_cameras);
}
RosbagWriter::~RosbagWriter()
{
LOG(INFO) << "Finalizing the bag...";
bag_.close();
LOG(INFO) << "Finished writing to bag: " << FLAGS_path_to_output_bag;
}
void RosbagWriter::pointcloudCallback(const PointCloudVector& pointclouds, Time t)
{
CHECK_EQ(pointclouds.size(), num_cameras_);
for(size_t i=0; i<num_cameras_; ++i)
{
Duration min_time_interval_between_published_pointclouds_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_pointcloud_rate);
if(last_published_pointcloud_time_ > 0 && t - last_published_pointcloud_time_ < min_time_interval_between_published_pointclouds_)
{
return;
}
pcl::PointCloud<pcl::PointXYZRGB>::Ptr msg (new pcl::PointCloud<pcl::PointXYZRGB>);
std::stringstream ss; ss << "cam" << i;
pointCloudToMsg(pointclouds[i], ss.str(), t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "pointcloud"),
toRosTime(t), msg);
}
last_published_pointcloud_time_ = t;
}
void RosbagWriter::imageCallback(const ImagePtrVector& images, Time t)
{
for(size_t i=0; i<num_cameras_; ++i)
{
sensor_sizes_[i] = images[i]->size();
static const Duration min_time_interval_between_published_images_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_frame_rate);
if(last_published_image_time_ > 0 && t - last_published_image_time_ < min_time_interval_between_published_images_)
{
return;
}
if(images[i])
{
sensor_msgs::ImagePtr msg;
imageToMsg(*images[i], t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "image_raw"),
msg->header.stamp, msg);
}
}
last_published_image_time_ = t;
}
void RosbagWriter::imageCorruptedCallback(const ImagePtrVector& images_corrupted, Time t)
{
for(size_t i=0; i<num_cameras_; ++i)
{
static const Duration min_time_interval_between_published_images_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_frame_rate);
if(last_published_corrupted_image_time_ > 0 && t - last_published_corrupted_image_time_ < min_time_interval_between_published_images_)
{
return;
}
if(images_corrupted[i])
{
sensor_msgs::ImagePtr msg;
imageToMsg(*images_corrupted[i], t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "image_corrupted"),
msg->header.stamp, msg);
}
}
last_published_corrupted_image_time_ = t;
}
void RosbagWriter::depthmapCallback(const DepthmapPtrVector& depthmaps, Time t)
{
if(depthmaps.size() != num_cameras_)
{
return;
}
for(size_t i=0; i<num_cameras_; ++i)
{
static const Duration min_time_interval_between_published_depthmaps_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_depth_rate);
if(last_published_depthmap_time_ > 0 && t - last_published_depthmap_time_ < min_time_interval_between_published_depthmaps_)
{
return;
}
if(depthmaps[i])
{
sensor_msgs::ImagePtr msg;
depthmapToMsg(*depthmaps[i], t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "depthmap"),
msg->header.stamp, msg);
}
}
last_published_depthmap_time_ = t;
}
void RosbagWriter::opticFlowCallback(const OpticFlowPtrVector& optic_flows, Time t)
{
if(optic_flows.size() != num_cameras_)
{
return;
}
for(size_t i=0; i<num_cameras_; ++i)
{
static const Duration min_time_interval_between_published_optic_flows_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_optic_flow_rate);
if(last_published_optic_flow_time_ > 0 && t - last_published_optic_flow_time_ < min_time_interval_between_published_optic_flows_)
{
return;
}
if(optic_flows[i])
{
esim_msgs::OpticFlow::Ptr msg;
msg.reset(new esim_msgs::OpticFlow);
opticFlowToMsg(*optic_flows[i], t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "optic_flow"),
msg->header.stamp, msg);
}
}
last_published_optic_flow_time_ = t;
}
void RosbagWriter::eventsCallback(const EventsVector& events)
{
for(size_t i=0; i<num_cameras_; ++i)
{
if(sensor_sizes_[i].width == 0 || sensor_sizes_[i].height == 0)
{
continue;
}
if(events[i].empty())
{
continue;
}
dvs_msgs::EventArrayPtr msg;
msg.reset(new dvs_msgs::EventArray);
eventsToMsg(events[i], sensor_sizes_[i].width, sensor_sizes_[i].height, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "events"),
msg->header.stamp, msg);
}
}
void RosbagWriter::poseCallback(const Transformation& T_W_B,
const TransformationVector& T_W_Cs,
Time t)
{
if(T_W_Cs.size() != num_cameras_)
{
LOG(WARNING) << "Number of poses is different than number of cameras."
<< "Will not output poses.";
return;
}
geometry_msgs::PoseStamped pose_stamped_msg;
geometry_msgs::TransformStamped transform_stamped_msg;
transform_stamped_msg.header.frame_id = "map";
transform_stamped_msg.header.stamp = toRosTime(t);
tf::tfMessage tf_msg;
for(size_t i=0; i<num_cameras_; ++i)
{
// Write pose to bag
tf::poseStampedKindrToMsg(T_W_Cs[i], toRosTime(t), "map", &pose_stamped_msg);
bag_.write(getTopicName(topic_name_prefix_, i, "pose"),
toRosTime(t), pose_stamped_msg);
// Write tf transform to bag
std::stringstream ss; ss << "cam" << i;
transform_stamped_msg.child_frame_id = ss.str();
tf::transformKindrToMsg(T_W_Cs[i], &transform_stamped_msg.transform);
tf_msg.transforms.push_back(transform_stamped_msg);
}
transform_stamped_msg.child_frame_id = "body";
tf::transformKindrToMsg(T_W_B, &transform_stamped_msg.transform);
tf_msg.transforms.push_back(transform_stamped_msg);
bag_.write("/tf", toRosTime(t), tf_msg);
}
void RosbagWriter::twistCallback(const AngularVelocityVector &ws, const LinearVelocityVector &vs, Time t)
{
if(ws.size() != num_cameras_
|| vs.size() != num_cameras_)
{
LOG(WARNING) << "Number of twists is different than number of cameras."
<< "Will not output twists.";
return;
}
CHECK_EQ(ws.size(), num_cameras_);
CHECK_EQ(vs.size(), num_cameras_);
for(size_t i=0; i<num_cameras_; ++i)
{
const geometry_msgs::TwistStamped msg = twistToMsg(ws[i], vs[i], t);
bag_.write(getTopicName(topic_name_prefix_, i, "twist"),
msg.header.stamp, msg);
}
}
void RosbagWriter::imuCallback(const Vector3& acc, const Vector3& gyr, Time t)
{
VLOG_EVERY_N(1, 500) << "t = " << ze::nanosecToSecTrunc(t) << " s";
const sensor_msgs::Imu msg = imuToMsg(acc, gyr, t);
const std::string imu_topic = "/imu";
bag_.write(imu_topic,
msg.header.stamp, msg);
}
void RosbagWriter::cameraInfoCallback(const ze::CameraRig::Ptr& camera_rig, Time t)
{
CHECK(camera_rig);
CHECK_EQ(camera_rig->size(), num_cameras_);
static const Duration min_time_interval_between_published_camera_info_
= ze::secToNanosec(1.0 / FLAGS_ros_publisher_camera_info_rate);
if(last_published_camera_info_time_ > 0 && t - last_published_camera_info_time_ < min_time_interval_between_published_camera_info_)
{
return;
}
for(size_t i=0; i<num_cameras_; ++i)
{
sensor_msgs::CameraInfoPtr msg;
msg.reset(new sensor_msgs::CameraInfo);
cameraToMsg(camera_rig->atShared(i), t, msg);
bag_.write(getTopicName(topic_name_prefix_, i, "camera_info"),
msg->header.stamp, msg);
}
last_published_camera_info_time_ = t;
}
} // namespace event_camera_simulator
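
A minimal sketch for inspecting a generated bag offline with the standard rosbag C++ API; the "/imu" topic matches the writer above, while the bag path is whatever was passed to --path_to_output_bag.
#include <rosbag/bag.h>
#include <rosbag/view.h>
#include <sensor_msgs/Imu.h>
#include <glog/logging.h>
#include <string>
void printImuMessages(const std::string& path_to_bag)
{
  rosbag::Bag bag(path_to_bag, rosbag::bagmode::Read);
  rosbag::View view(bag, rosbag::TopicQuery("/imu"));
  for (const rosbag::MessageInstance& m : view)
  {
    sensor_msgs::Imu::ConstPtr imu = m.instantiate<sensor_msgs::Imu>();
    if (imu)
    {
      LOG(INFO) << "t = " << imu->header.stamp << ", acc.z = " << imu->linear_acceleration.z;
    }
  }
  bag.close();
}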

@@ -0,0 +1,107 @@
#include <esim/visualization/synthetic_optic_flow_publisher.hpp>
#include <esim/common/utils.hpp>
#include <ze/common/path_utils.hpp>
#include <ze/common/file_utils.hpp>
#include <ze/common/time_conversions.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <gflags/gflags.h>
#include <glog/logging.h>
DEFINE_string(synthetic_optic_flow_output_folder, "",
"Folder in which to output the events.");
namespace event_camera_simulator {
/**
* This publisher was designed with the purpose of generating simulation data
* with ground truth labels, for the task of optic flow estimation.
*
* It assumes that it will receive a relatively small sequence of events (corresponding, for example,
* to all the events between two frames), and will write all the events to disk in its destructor,
* in three forms:
* - an "events.txt" file that contains all the events in "t x y pol" format (one event per line)
* - an "event_count.png" image whose first two channels contain the number of positive (resp. negative) events at each pixel
* - two "timestamp images" in which each pixel contains the timestamp of the last event that fell on that pixel
* (since the timestamp is a floating-point value, it is split into three 8-bit values so that each timestamp image
* can be saved as a single 3-channel image).
*/
SyntheticOpticFlowPublisher::SyntheticOpticFlowPublisher(const std::string& output_folder)
: output_folder_(output_folder)
{
ze::openOutputFileStream(ze::joinPath(output_folder, "events.txt"),
&events_file_);
}
Publisher::Ptr SyntheticOpticFlowPublisher::createFromGflags()
{
if(FLAGS_synthetic_optic_flow_output_folder == "")
{
LOG(WARNING) << "Empty output folder string: will not write synthetic optic flow files";
return nullptr;
}
return std::make_shared<SyntheticOpticFlowPublisher>(FLAGS_synthetic_optic_flow_output_folder);
}
SyntheticOpticFlowPublisher::~SyntheticOpticFlowPublisher()
{
// Create an event count image using all the events collected
cv::Mat event_count_image = cv::Mat::zeros(sensor_size_, CV_8UC3);
// Create two event timestamps images using all the events collected
cv::Mat timestamps_pos = cv::Mat::zeros(sensor_size_, CV_8UC3);
cv::Mat timestamps_neg = cv::Mat::zeros(sensor_size_, CV_8UC3);
int remapped_timestamp_fraction;
double timestamp_fraction;
for(Event e : events_)
{
event_count_image.at<cv::Vec3b>(e.y,e.x)[int(e.pol)]++;
cv::Mat& curr_timestamp_image = e.pol ? timestamps_pos : timestamps_neg;
// remap value
timestamp_fraction = double(e.t - events_[0].t) / (events_[events_.size()-1].t - events_[0].t);
remapped_timestamp_fraction = timestamp_fraction * std::pow(2,24); // remap 0-1 to 0 - 2^24
// distribute the 24 bit number (remapped_timestamp_fraction) to 3 channel 8 bit image
for (int i=0; i<3; i++)
{
curr_timestamp_image.at<cv::Vec3b>(e.y,e.x)[i] = (int) remapped_timestamp_fraction & 0xFF; // bit mask of 0000 0000 0000 0000 1111 1111
remapped_timestamp_fraction = remapped_timestamp_fraction >> 8; // shifts bits to right by 8
}
}
// Write event count image + the two timestamps images to disk
std::string path_event_count_image = ze::joinPath(output_folder_, "event_count.png");
std::string path_timestamps_pos = ze::joinPath(output_folder_, "event_time_stamps_pos.png");
std::string path_timestamps_neg = ze::joinPath(output_folder_, "event_time_stamps_neg.png");
std::vector<int> compression_params;
compression_params.push_back(CV_IMWRITE_PNG_COMPRESSION);
compression_params.push_back(0);
cv::imwrite(path_event_count_image, event_count_image, compression_params);
cv::imwrite(path_timestamps_pos, timestamps_pos, compression_params);
cv::imwrite(path_timestamps_neg, timestamps_neg, compression_params);
// Finish writing event file
events_file_.close();
}
void SyntheticOpticFlowPublisher::eventsCallback(const EventsVector& events)
{
CHECK_EQ(events.size(), 1);
// Simply aggregate the events into the events_ buffer.
// At the destruction of this object, everything will be saved to disk.
for(const Event& e : events[0])
{
events_file_ << e.t << " " << e.x << " " << e.y << " " << (e.pol? 1 : 0) << std::endl;
events_.push_back(e);
}
}
} // namespace event_camera_simulator
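
A decoding sketch for the timestamp images written in the destructor above: channel 0 holds the least significant byte of the remapped 24-bit value and channel 2 the most significant one, so the normalized timestamp in [0, 1] is recovered as (c0 + 256*c1 + 65536*c2) / 2^24.
#include <opencv2/core/core.hpp>
// Inverse of the encoding loop in ~SyntheticOpticFlowPublisher(): returns the
// fraction of the sequence duration at which the last event fell on pixel (x, y).
inline double decodeTimestampFraction(const cv::Mat& timestamp_image, int x, int y)
{
  const cv::Vec3b& c = timestamp_image.at<cv::Vec3b>(y, x);
  const int remapped = int(c[0]) | (int(c[1]) << 8) | (int(c[2]) << 16);
  return static_cast<double>(remapped) / static_cast<double>(1 << 24);
}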