Carma-platform v4.2.0
CARMA Platform is built on the Robot Operating System (ROS) and utilizes open-source software (OSS) to enable Cooperative Driving Automation (CDA) features that allow Automated Driving Systems to interact and cooperate with infrastructure and other vehicles through communication.
motion_computation_worker.cpp
// Copyright 2019-2023 Leidos
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <wgs84_utils/proj_tools.h>

#include <memory>
#include <string>

#include "motion_computation/motion_computation_worker.hpp"

namespace motion_computation
{
MotionComputationWorker::MotionComputationWorker(
  const PublishObjectCallback & obj_pub,
  rclcpp::node_interfaces::NodeLoggingInterface::SharedPtr logger,
  rclcpp::node_interfaces::NodeClockInterface::SharedPtr node_clock)
: obj_pub_(obj_pub), logger_(logger), node_clock_(node_clock)
{
}

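// Generates predictions for each detected object using the CV or CTRV motion model, synchronizes
// the enabled V2X-derived object queues (BSM, PSM, MobilityPath) to the sensor list timestamp,
// and publishes the combined external object list.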
void MotionComputationWorker::predictionLogic(
  carma_perception_msgs::msg::ExternalObjectList::UniquePtr obj_list)
{
  // Return early if there is nothing to process
  if (!obj_list || obj_list->objects.empty()) {
    return;
  }

  carma_perception_msgs::msg::ExternalObjectList sensor_list;
  sensor_list.header = obj_list->header;

  for (auto obj : obj_list->objects) {
    // Header contains the frame the rest of the fields will use
    // obj.header = obj_list.objects[i].header;

    // Object id. Matching ids on a topic should refer to the same object within
    // some time period, expanded
    // obj.id = obj_list.objects[i].id;

    // Update the object type and generate predictions using the constant velocity (CV) or
    // constant turn rate and velocity (CTRV) vehicle models.
    // If the object is a bicycle or motor vehicle use CTRV otherwise use CV.

    bool use_ctrv_model;

    if (obj.object_type == obj.UNKNOWN) {
      use_ctrv_model = enable_ctrv_for_unknown_obj_;
    } else if (obj.object_type == obj.MOTORCYCLE) {
      use_ctrv_model = enable_ctrv_for_motorcycle_obj_;
    } else if (obj.object_type == obj.SMALL_VEHICLE) {
      use_ctrv_model = enable_ctrv_for_small_vehicle_obj_;
    } else if (obj.object_type == obj.LARGE_VEHICLE) {
      use_ctrv_model = enable_ctrv_for_large_vehicle_obj_;
    } else if (obj.object_type == obj.PEDESTRIAN) {
      use_ctrv_model = enable_ctrv_for_pedestrian_obj_;
    } else {
      obj.object_type = obj.UNKNOWN;
      use_ctrv_model = enable_ctrv_for_unknown_obj_;
    }  // end if-else

    if (use_ctrv_model) {
      obj.predictions = motion_predict::ctrv::predictPeriod(
        obj, prediction_time_step_, prediction_period_, prediction_process_noise_max_,
        prediction_confidence_drop_rate_);
    } else {
      obj.predictions = motion_predict::cv::predictPeriod(
        obj, prediction_time_step_, prediction_period_, cv_x_accel_noise_, cv_y_accel_noise_,
        prediction_process_noise_max_, prediction_confidence_drop_rate_);
    }

    sensor_list.objects.emplace_back(obj);
  }  // end for-loop

  // Synchronize all data to the current sensor data timestamp
  carma_perception_msgs::msg::ExternalObjectList synchronization_base_objects;
  synchronization_base_objects.header =
    sensor_list.header;  // Use the current sensing stamp as the sync point even if sensor data is
                         // not used

  if (enable_sensor_processing_) {
    // If using sensor data add it to the base synchronization list since it
    // already is at the desired time

    synchronization_base_objects.objects = sensor_list.objects;

  } else if (
    enable_bsm_processing_ || enable_psm_processing_ || enable_mobility_path_processing_) {
    // Since we use the new sensor data as the sync point we will still be
    // synchronizing but to an empty list
    synchronization_base_objects.objects.clear();

  } else {
    RCLCPP_WARN_STREAM(
      logger_->get_logger(),
      "Not configured to publish any data; publishing empty "
      "object list. Operating like this is NOT "
      "advised.");

    obj_pub_(synchronization_base_objects);
    bsm_list_.objects.clear();
    bsm_obj_id_map_.clear();
    psm_list_.objects.clear();
    psm_obj_id_map_.clear();
    mobility_path_list_.objects.clear();
    mobility_path_obj_id_map_.clear();

    return;
  }

  // Start synchronizing all the enabled data streams
  if (enable_bsm_processing_) {
    synchronization_base_objects = synchronizeAndAppend(synchronization_base_objects, bsm_list_);
  }

  if (enable_psm_processing_) {
    synchronization_base_objects = synchronizeAndAppend(synchronization_base_objects, psm_list_);
  }

  if (enable_mobility_path_processing_) {
    synchronization_base_objects =
      synchronizeAndAppend(synchronization_base_objects, mobility_path_list_);
  }

  obj_pub_(synchronization_base_objects);

  // Clear the message queues since their contents have been published
  mobility_path_list_.objects.clear();
  mobility_path_obj_id_map_.clear();
  bsm_list_.objects.clear();
  bsm_obj_id_map_.clear();
  psm_list_.objects.clear();
  psm_obj_id_map_.clear();
}

void MotionComputationWorker::georeferenceCallback(const std_msgs::msg::String::UniquePtr msg)
{
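  // The georeference is a PROJ string (e.g. a "+proj=tmerc ... +datum=WGS84" style definition;
  // illustrative form only) that defines the lat/lon -> map frame conversion.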
  // Build projector from proj string
  if (georeference_ != msg->data) {
    georeference_ = msg->data;
    map_projector_ = std::make_shared<lanelet::projection::LocalFrameProjector>(msg->data.c_str());

    std::string axis =
      wgs84_utils::proj_tools::getAxisFromProjString(msg->data);  // Extract axis for orientation

    RCLCPP_INFO_STREAM(logger_->get_logger(), "Extracted Axis: " << axis);

    ned_in_map_rotation_ =
      wgs84_utils::proj_tools::getRotationOfNEDFromProjAxis(axis);  // Extract map rotation

    RCLCPP_DEBUG_STREAM(
      logger_->get_logger(), "Extracted NED in Map Rotation (x,y,z,w) : ( "
                               << ned_in_map_rotation_.getX() << ", " << ned_in_map_rotation_.getY()
                               << ", " << ned_in_map_rotation_.getZ() << ", "
                               << ned_in_map_rotation_.getW() << " )");
  }
}

void MotionComputationWorker::setPredictionTimeStep(double time_step)
{
  prediction_time_step_ = time_step;
}

// Setters for the CV/CTRV prediction parameters used in predictionLogic()
void MotionComputationWorker::setPredictionPeriod(double period) { prediction_period_ = period; }

void MotionComputationWorker::setXAccelerationNoise(double noise) { cv_x_accel_noise_ = noise; }

void MotionComputationWorker::setYAccelerationNoise(double noise) { cv_y_accel_noise_ = noise; }

void MotionComputationWorker::setProcessNoiseMax(double noise_max)
{
  prediction_process_noise_max_ = noise_max;
}

void MotionComputationWorker::setConfidenceDropRate(double drop_rate)
{
  prediction_confidence_drop_rate_ = drop_rate;
}

void MotionComputationWorker::setDetectionInputFlags(
  bool enable_sensor_processing, bool enable_bsm_processing, bool enable_psm_processing,
  bool enable_mobility_path_processing)
{
  enable_sensor_processing_ = enable_sensor_processing;
  enable_bsm_processing_ = enable_bsm_processing;
  enable_psm_processing_ = enable_psm_processing;
  enable_mobility_path_processing_ = enable_mobility_path_processing;
}

void MotionComputationWorker::setDetectionMotionModelFlags(
  bool enable_ctrv_for_unknown_obj, bool enable_ctrv_for_motorcycle_obj,
  bool enable_ctrv_for_small_vehicle_obj, bool enable_ctrv_for_large_vehicle_obj,
  bool enable_ctrv_for_pedestrian_obj)
{
  enable_ctrv_for_unknown_obj_ = enable_ctrv_for_unknown_obj;
  enable_ctrv_for_motorcycle_obj_ = enable_ctrv_for_motorcycle_obj;
  enable_ctrv_for_small_vehicle_obj_ = enable_ctrv_for_small_vehicle_obj;
  enable_ctrv_for_large_vehicle_obj_ = enable_ctrv_for_large_vehicle_obj;
  enable_ctrv_for_pedestrian_obj_ = enable_ctrv_for_pedestrian_obj;
}

void MotionComputationWorker::mobilityPathCallback(
  const carma_v2x_msgs::msg::MobilityPath::UniquePtr msg)
{
  if (!map_projector_) {
    RCLCPP_ERROR(
      logger_->get_logger(), "Map projection not available yet so ignoring MobilityPath messages");
    return;
  }

  if (!enable_mobility_path_processing_) {
    RCLCPP_WARN(
      logger_->get_logger(),
      "enable_mobility_path_processing is false so ignoring MobilityPath messages");
    return;
  }

  carma_perception_msgs::msg::ExternalObject obj_msg;
  conversion::convert(*msg, obj_msg, *map_projector_);

  // Check if this mobility path is from an object already being queued.
  // If so then update the existing object, if not add it to the queue
  if (mobility_path_obj_id_map_.find(obj_msg.id) != mobility_path_obj_id_map_.end()) {
    mobility_path_list_.objects[mobility_path_obj_id_map_[obj_msg.id]] = obj_msg;

  } else {
    // Add the new object to the queue and save its index
    mobility_path_obj_id_map_[obj_msg.id] = mobility_path_list_.objects.size();
    mobility_path_list_.objects.push_back(obj_msg);
  }
}

void MotionComputationWorker::psmCallback(const carma_v2x_msgs::msg::PSM::UniquePtr msg)
{
  if (!map_projector_) {
    RCLCPP_DEBUG_STREAM(
      logger_->get_logger(), "Map projection not available yet so ignoring PSM messages");
    return;
  }

  if (!enable_psm_processing_) {
    RCLCPP_DEBUG_STREAM(
      logger_->get_logger(), "enable_psm_processing is false so ignoring PSM messages");
    return;
  }

  carma_perception_msgs::msg::ExternalObject obj_msg;
  conversion::convert(
    *msg, obj_msg, georeference_, prediction_period_, prediction_time_step_, *map_projector_,
    ned_in_map_rotation_, node_clock_);

  // Check if this PSM is from an object already being queued.
  // If so then update the existing object, if not add it to the queue
  if (psm_obj_id_map_.find(obj_msg.id) != psm_obj_id_map_.end()) {
    psm_list_.objects[psm_obj_id_map_[obj_msg.id]] = obj_msg;

  } else {
    // Add the new object to the queue and save its index
    psm_obj_id_map_[obj_msg.id] = psm_list_.objects.size();
    psm_list_.objects.push_back(obj_msg);
  }
}

void MotionComputationWorker::bsmCallback(const carma_v2x_msgs::msg::BSM::UniquePtr msg)
{
  if (!map_projector_) {
    RCLCPP_DEBUG_STREAM(
      logger_->get_logger(), "Map projection not available yet so ignoring BSM messages");
    return;
  }

  if (!enable_bsm_processing_) {
    RCLCPP_DEBUG_STREAM(
      logger_->get_logger(), "enable_bsm_processing is false so ignoring BSM messages");
    return;
  }

  carma_perception_msgs::msg::ExternalObject obj_msg;
  conversion::convert(
    *msg, obj_msg, georeference_, prediction_period_, prediction_time_step_, *map_projector_,
    ned_in_map_rotation_);

  // Check if this BSM is from an object already being queued.
  // If so then update the existing object, if not add it to the queue
  if (bsm_obj_id_map_.find(obj_msg.id) != bsm_obj_id_map_.end()) {
    bsm_list_.objects[bsm_obj_id_map_[obj_msg.id]] = obj_msg;

  } else {
    // Add the new object to the queue and save its index
    bsm_obj_id_map_[obj_msg.id] = bsm_list_.objects.size();
    bsm_list_.objects.push_back(obj_msg);
  }
}

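// Appends new_objects behind base_objects after synchronizing each new object to the base list's
// timestamp. This does not perform sensor fusion.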
carma_perception_msgs::msg::ExternalObjectList MotionComputationWorker::synchronizeAndAppend(
  const carma_perception_msgs::msg::ExternalObjectList & base_objects,
  carma_perception_msgs::msg::ExternalObjectList new_objects) const
{
  carma_perception_msgs::msg::ExternalObjectList output_list;
  output_list.header = base_objects.header;
  output_list.objects.reserve(base_objects.objects.size() + new_objects.objects.size());

  // Compare time_stamps of first elements of each list as they are guaranteed to be the
  // earliest of the respective lists

  for (auto & obj : new_objects.objects) {
    // interpolate and match timesteps
    obj = matchAndInterpolateTimeStamp(obj, rclcpp::Time(base_objects.header.stamp));
  }

  output_list.objects.insert(
    output_list.objects.begin(), base_objects.objects.begin(), base_objects.objects.end());
  output_list.objects.insert(
    output_list.objects.end(), new_objects.objects.begin(), new_objects.objects.end());
  return output_list;
}

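// Drops the object's prediction points that fall before time_to_match and interpolates a new
// starting state at time_to_match from the surrounding prediction points.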
carma_perception_msgs::msg::ExternalObject MotionComputationWorker::matchAndInterpolateTimeStamp(
  carma_perception_msgs::msg::ExternalObject path, const rclcpp::Time & time_to_match) const
{
  carma_perception_msgs::msg::ExternalObject output = path;
  // empty predictions
  output.predictions = {};

  // add the first point to start of the predictions to easily loop over
  carma_perception_msgs::msg::PredictedState prev_state;
  prev_state.header.stamp = output.header.stamp;
  prev_state.predicted_position.orientation = output.pose.pose.orientation;
  prev_state.predicted_velocity = output.velocity.twist;
  prev_state.predicted_position.position.x = output.pose.pose.position.x;
  prev_state.predicted_position.position.y = output.pose.pose.position.y;
  prev_state.predicted_position.position.z = output.pose.pose.position.z;
  path.predictions.insert(path.predictions.begin(), prev_state);

  rclcpp::Time curr_time_to_match = time_to_match;

  // because of this logic, we would not encounter a mobility path
  // that starts later than the time we are trying to match (which is the starting
  // time of sensed objects)
  bool is_first_point = true;
  carma_perception_msgs::msg::PredictedState new_state;
  for (auto const & curr_state : path.predictions) {
    if (curr_time_to_match > curr_state.header.stamp) {
      prev_state = curr_state;
      continue;
    }

    if (is_first_point)  // the first matched point is stored in the object body, not in predictions
    {
      // reaching here means curr_state starts later than the time we are trying to match
      // copy old unchanged parts
      new_state.header.stamp = curr_time_to_match;
      new_state.predicted_velocity = prev_state.predicted_velocity;
      new_state.predicted_position.orientation = prev_state.predicted_position.orientation;

      // interpolate position
      rclcpp::Duration delta_t = rclcpp::Time(curr_state.header.stamp) - curr_time_to_match;
      rclcpp::Duration pred_delta_t =
        rclcpp::Time(curr_state.header.stamp) - rclcpp::Time(prev_state.header.stamp);
      double ratio;
      if (pred_delta_t.seconds() < 0.00000001) {  // Divide by zero check
        // This can only happen if effectively all 3 points are on top of each other
        // which is extremely unlikely
        ratio = 0.0;
      } else {
        ratio = delta_t.seconds() / pred_delta_t.seconds();
      }
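      // Example with illustrative values: if prev_state is at t = 1.0 s, curr_state is at
      // t = 1.5 s, and curr_time_to_match is 1.2 s, then delta_t = 0.3 s, pred_delta_t = 0.5 s,
      // and ratio = 0.6, i.e. the matched position lies 60% of the way back from curr_state
      // toward prev_state.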

      double delta_x =
        curr_state.predicted_position.position.x - prev_state.predicted_position.position.x;
      double delta_y =
        curr_state.predicted_position.position.y - prev_state.predicted_position.position.y;
      double delta_z =
        curr_state.predicted_position.position.z - prev_state.predicted_position.position.z;
      // we are "stepping back in time" to match the position
      new_state.predicted_position.position.x =
        curr_state.predicted_position.position.x - delta_x * ratio;
      new_state.predicted_position.position.y =
        curr_state.predicted_position.position.y - delta_y * ratio;
      new_state.predicted_position.position.z =
        curr_state.predicted_position.position.z - delta_z * ratio;

      output.header.stamp = curr_time_to_match;
      output.pose.pose.orientation = new_state.predicted_position.orientation;
      output.velocity.twist = new_state.predicted_velocity;
      output.pose.pose.position.x = new_state.predicted_position.position.x;
      output.pose.pose.position.y = new_state.predicted_position.position.y;
      output.pose.pose.position.z = new_state.predicted_position.position.z;
      is_first_point = false;

    } else {
      output.predictions.push_back(curr_state);
    }
  }

  return output;
}

}  // namespace motion_computation
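
As a usage illustration (not part of the file above), a node owning this worker could be wired up roughly as sketched below; the topic name, QoS depth, flag values, and the worker header path are assumptions made for the sketch.

#include <rclcpp/rclcpp.hpp>

#include <carma_perception_msgs/msg/external_object_list.hpp>

#include "motion_computation/motion_computation_worker.hpp"

void configureWorker(rclcpp::Node & node)
{
  // Publisher that forwards the worker's synchronized object lists (placeholder topic name)
  auto publisher =
    node.create_publisher<carma_perception_msgs::msg::ExternalObjectList>("external_objects", 10);

  // The publish callback hands completed object lists back to the ROS graph
  motion_computation::MotionComputationWorker worker(
    [publisher](const carma_perception_msgs::msg::ExternalObjectList & list) {
      publisher->publish(list);
    },
    node.get_node_logging_interface(), node.get_node_clock_interface());

  // Placeholder configuration: process sensor and BSM inputs, and use CTRV for vehicle-like objects
  worker.setDetectionInputFlags(true, true, false, false);
  worker.setDetectionMotionModelFlags(false, true, true, true, false);

  // Incoming ExternalObjectList detections would then be passed to worker.predictionLogic(...),
  // and V2X messages to the bsmCallback/psmCallback/mobilityPathCallback methods.
}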