Carma-platform v4.2.0
CARMA Platform is built on the Robot Operating System (ROS) and uses open-source software (OSS) to enable Cooperative Driving Automation (CDA) features, allowing Automated Driving Systems to interact and cooperate with infrastructure and other vehicles through communication.
psm_to_external_object_convertor.cpp
// Copyright 2019-2023 Leidos
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <lanelet2_extension/regulatory_elements/CarmaTrafficSignal.h>
// Assumed include (two include lines were dropped during extraction):
// lanelet::projection::LocalFrameProjector is used below and needs its header.
#include <lanelet2_extension/projection/local_frame_projector.h>
#include <tf2/LinearMath/Transform.h>
#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
#include <wgs84_utils/wgs84_utils.h>

#include <algorithm>
#include <string>
#include <vector>

#include <boost/date_time/posix_time/posix_time.hpp>

#include <rclcpp/logger.hpp>
#include <rclcpp/logging.hpp>
#include <rclcpp/rclcpp.hpp>

#include <carma_perception_msgs/msg/external_object.hpp>
#include <carma_v2x_msgs/msg/psm.hpp>
namespace motion_computation
{
namespace conversion
{
void convert(
  const carma_v2x_msgs::msg::PSM & in_msg, carma_perception_msgs::msg::ExternalObject & out_msg,
  const std::string & map_frame_id, double pred_period, double pred_step_size,
  const lanelet::projection::LocalFrameProjector & map_projector,
  const tf2::Quaternion & ned_in_map_rotation,
  rclcpp::node_interfaces::NodeClockInterface::SharedPtr node_clock)
{
  out_msg.dynamic_obj = true;  // If a PSM is sent then the object is dynamic,
                               // since it's a living thing
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::DYNAMIC_OBJ_PRESENCE;

  // Generate a unique object id from the PSM id
  out_msg.id = 0;
  for (int i = in_msg.id.id.size() - 1; i >= 0;
       i--) {  // using a signed iterator to handle the empty case
    // Each byte of the PSM id gets placed in one byte of the object id.
    // This should result in very large numbers which will be unlikely to
    // conflict with standard detections
    out_msg.id |= in_msg.id.id[i] << (8 * i);
  }
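  // Worked example (illustrative): a 4-byte PSM id {0x01, 0x02, 0x03, 0x04}
  // packs little-endian into the object id as
  // 0x01 | (0x02 << 8) | (0x03 << 16) | (0x04 << 24) = 0x04030201.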
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::ID_PRESENCE_VECTOR;

  // Additionally, store the id in the bsm_id field
  out_msg.bsm_id = in_msg.id.id;
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::BSM_ID_PRESENCE_VECTOR;

  // Compute the timestamp
  auto psm_timestamp = impl::get_psm_timestamp(in_msg, node_clock->get_clock());
  out_msg.header.stamp = builtin_interfaces::msg::Time(psm_timestamp);
  out_msg.header.frame_id = map_frame_id;

  // Set the type
  if (
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::A_PEDESTRIAN ||
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::A_PUBLIC_SAFETY_WORKER ||
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::AN_ANIMAL)
  // Treat animals like people since we have no internal class for them
  {
    out_msg.object_type = carma_perception_msgs::msg::ExternalObject::PEDESTRIAN;

    // Default pedestrian size.
    // ExternalObject dimensions are half the actual size;
    // here we assume a 1.0 x 1.0 x 2.0 m pedestrian.
    out_msg.size.x = 0.5;
    out_msg.size.y = 0.5;
    out_msg.size.z = 1.0;

  } else if (
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::A_PEDALCYCLIST) {
    out_msg.object_type = carma_perception_msgs::msg::ExternalObject::
      MOTORCYCLE;  // Currently ExternalObject cannot represent bicycles,
                   // but motorcycle seems like the next best choice

    // Default bicycle size
    out_msg.size.x = 1.0;
    out_msg.size.y = 0.5;
    out_msg.size.z = 1.0;
  } else {
    out_msg.object_type = carma_perception_msgs::msg::ExternalObject::UNKNOWN;

    // Default to pedestrian size
    out_msg.size.x = 0.5;
    out_msg.size.y = 0.5;
    out_msg.size.z = 1.0;
  }
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::SIZE_PRESENCE_VECTOR;
117
119 // Set the velocity
120
121 out_msg.velocity.twist.linear.x = in_msg.speed.velocity;
122 out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::VELOCITY_PRESENCE_VECTOR;
123 // NOTE: The velocity covariance is not provided in the PSM. In order to
124 // compute it you need at least two PSM messages
125 // Tracking and associating PSM messages would be an increase in
126 // complexity for this conversion which is not warranted without an
127 // existing use case for the velocity covariance. If a use case is
128 // presented for it, such an addition can be made at that time.
129
  double largest_position_std = 0.0;
  double lat_variance = 0.0;
  double lon_variance = 0.0;
  double heading_variance = 0.0;
  double position_confidence = 0.0;  // Currently unused:
                                     // out_msg.confidence is computed directly
                                     // from the position accuracy below

  // A standard deviation which is larger than the acceptable value to give a
  // 95% confidence interval on fitting the pedestrian within one 3.7 m lane
  constexpr double MAX_POSITION_STD = 1.85;
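  // Rationale (illustrative): a 95% interval is roughly +/- 2 standard
  // deviations, and 2 * 1.85 m = 3.7 m, the width of one standard lane.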

  if (
    (in_msg.accuracy.presence_vector &
     carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_AVAILABLE) &&
    (in_msg.accuracy.presence_vector &
     carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_ORIENTATION_AVAILABLE)) {
    // Both accuracies available

    // Compute the position covariance.
    // For computing confidence we will use the largest provided standard
    // deviation of position
    largest_position_std = std::max(in_msg.accuracy.semi_major, in_msg.accuracy.semi_minor);
    lat_variance = in_msg.accuracy.semi_minor * in_msg.accuracy.semi_minor;
    lon_variance = in_msg.accuracy.semi_major * in_msg.accuracy.semi_major;
    heading_variance = in_msg.accuracy.orientation * in_msg.accuracy.orientation;

    // NOTE: ExternalObject.msg does not clearly define what is meant by
    // position confidence. Here we provide a linear scale based on the
    // positional accuracy, where 0 confidence denotes a standard deviation at
    // or above the acceptable value (MAX_POSITION_STD) for fitting the
    // pedestrian within one 3.7 m lane with 95% confidence.
    // Without a way of getting the velocity confidence from the PSM, we use
    // the position confidence for both.
    out_msg.confidence = 1.0 - std::min(1.0, fabs(largest_position_std / MAX_POSITION_STD));
    out_msg.presence_vector |=
      carma_perception_msgs::msg::ExternalObject::CONFIDENCE_PRESENCE_VECTOR;
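    // Worked example (illustrative): semi_major = 0.925 m and semi_minor = 0.5 m
    // give largest_position_std = 0.925, so
    // confidence = 1.0 - 0.925 / 1.85 = 0.5.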
  } else if (
    in_msg.accuracy.presence_vector & carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_AVAILABLE) {
    // Position accuracy available

    largest_position_std = std::max(in_msg.accuracy.semi_major, in_msg.accuracy.semi_minor);
    lat_variance = in_msg.accuracy.semi_minor * in_msg.accuracy.semi_minor;
    lon_variance = in_msg.accuracy.semi_major * in_msg.accuracy.semi_major;
    heading_variance = 1.0;  // Yaw variance is not available,
                             // so fall back to a default of 1.0

    // Same calculation as in the condition above. See that for a description.
    out_msg.confidence = 1.0 - std::min(1.0, fabs(largest_position_std / MAX_POSITION_STD));
    out_msg.presence_vector |=
      carma_perception_msgs::msg::ExternalObject::CONFIDENCE_PRESENCE_VECTOR;

  } else if (
    in_msg.accuracy.presence_vector &
    carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_ORIENTATION_AVAILABLE) {
    // Orientation accuracy available

    lat_variance = 1.0;
    lon_variance = 1.0;

    heading_variance = in_msg.accuracy.orientation * in_msg.accuracy.orientation;
  } else {  // No accuracies available
    lat_variance = 1.0;
    lon_variance = 1.0;
    heading_variance = 1.0;
  }

  // Compute the pose
  lanelet::GPSPoint gps_point(
    {in_msg.position.latitude, in_msg.position.longitude, in_msg.position.elevation});
  out_msg.pose = impl::pose_from_gnss(
    map_projector, ned_in_map_rotation, gps_point, in_msg.heading.heading, lat_variance,
    lon_variance, heading_variance);
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::POSE_PRESENCE_VECTOR;

  // Compute predictions.
  // For prediction, if a path prediction is available we will sample it.
  // If not, then assume linear motion.
  std::vector<geometry_msgs::msg::Pose> predicted_poses;

  if (in_msg.presence_vector & carma_v2x_msgs::msg::PSM::HAS_PATH_PREDICTION) {
    // Based on the vehicle frame used in J2735, positive should be to the
    // right and negative to the left
    predicted_poses = impl::sample_2d_path_from_radius(
      out_msg.pose.pose, out_msg.velocity.twist.linear.x,
      -in_msg.path_prediction.radius_of_curvature, pred_period, pred_step_size);
  } else {
    predicted_poses = impl::sample_2d_linear_motion(
      out_msg.pose.pose, out_msg.velocity.twist.linear.x, pred_period, pred_step_size);
  }

  out_msg.predictions = impl::predicted_poses_to_predicted_state(
    predicted_poses, out_msg.velocity.twist.linear.x, rclcpp::Time(out_msg.header.stamp),
    rclcpp::Duration(pred_step_size * 1e9), map_frame_id, out_msg.confidence, out_msg.confidence);
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::PREDICTION_PRESENCE_VECTOR;
}
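
// Usage sketch (illustrative only; the georeference string and NED rotation
// below are assumptions, not values from this file):
//
//   carma_v2x_msgs::msg::PSM psm_msg;                // received PSM
//   carma_perception_msgs::msg::ExternalObject obj;  // conversion output
//   lanelet::projection::LocalFrameProjector projector(
//     "+proj=tmerc +lat_0=38.95 +lon_0=-77.15 +ellps=WGS84");  // example only
//   tf2::Quaternion ned_in_map(0, 0, 0, 1);          // example NED->map rotation
//   conversion::convert(
//     psm_msg, obj, "map", 2.0 /* pred_period, s */, 0.1 /* pred_step_size, s */,
//     projector, ned_in_map, node->get_node_clock_interface());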

namespace impl
{
std::vector<geometry_msgs::msg::Pose> sample_2d_path_from_radius(
  const geometry_msgs::msg::Pose & pose, double velocity, double radius_of_curvature,
  double period, double step_size)
{
  std::vector<geometry_msgs::msg::Pose> output;
  output.reserve((period / step_size) + 1);

  tf2::Vector3 pose_in_map_translation(pose.position.x, pose.position.y, pose.position.z);
  tf2::Quaternion pose_in_map_quat(
    pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w);
  tf2::Transform pose_in_map(pose_in_map_quat, pose_in_map_translation);

  // The radius of curvature originates from the frame of the provided pose,
  // so the turning center is at (0, r)
  double center_x_in_pose = 0;
  double center_y_in_pose = radius_of_curvature;
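
  // Geometry note (added for clarity): for a circle of signed radius r
  // centered at (0, r), a point that starts at the origin heading along +x
  // and sweeps a turning angle theta sits at
  //   (r * sin(theta), r - r * cos(theta)),
  // which is (0, 0) when theta = 0.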

  double total_dt = 0;

  while (total_dt < period) {
    // Compute the 2d position and orientation in the pose frame
    total_dt += step_size;
    double delta_arc_length = velocity * total_dt;  // Assumes perfect point motion along the curve

    double turning_angle = delta_arc_length / radius_of_curvature;
    double dx_from_center = radius_of_curvature * sin(turning_angle);
    double dy_from_center = -radius_of_curvature * cos(turning_angle);

    double x = center_x_in_pose + dx_from_center;
    double y = center_y_in_pose + dy_from_center;

    tf2::Vector3 position(x, y, 0);

    tf2::Quaternion quat;
    quat.setRPY(0, 0, turning_angle);

    // Convert the position and orientation in the pose frame to the map frame
    tf2::Transform pose_to_sample(quat, position);
    tf2::Transform map_to_sample = pose_in_map * pose_to_sample;

    geometry_msgs::msg::Pose sample_pose;

    sample_pose.position.x = map_to_sample.getOrigin().x();
    sample_pose.position.y = map_to_sample.getOrigin().y();
    sample_pose.position.z = pose.position.z;  // Reuse the z position from the initial pose

    sample_pose.orientation.x = map_to_sample.getRotation().x();
    sample_pose.orientation.y = map_to_sample.getRotation().y();
    sample_pose.orientation.z = map_to_sample.getRotation().z();
    sample_pose.orientation.w = map_to_sample.getRotation().w();

    output.emplace_back(sample_pose);
  }

  return output;
}

std::vector<geometry_msgs::msg::Pose> sample_2d_linear_motion(
  const geometry_msgs::msg::Pose & pose, double velocity, double period, double step_size)
{
  std::vector<geometry_msgs::msg::Pose> output;
  output.reserve((period / step_size) + 1);

  tf2::Vector3 pose_in_map_translation(pose.position.x, pose.position.y, pose.position.z);
  tf2::Quaternion pose_in_map_quat(
    pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w);

  tf2::Transform pose_in_map(pose_in_map_quat, pose_in_map_translation);

  double total_dt = 0;

  while (total_dt < period) {
    // Compute the 2d position and orientation in the pose frame
    total_dt += step_size;
    double dx_from_start = velocity * total_dt;  // Assuming linear motion in the pose frame

    // The offset is expressed in the pose frame; pose_in_map already carries
    // the map-frame translation
    tf2::Vector3 position(dx_from_start, 0, 0);

    // Convert the position and orientation in the pose frame to the map frame
    tf2::Transform pose_to_sample(tf2::Quaternion::getIdentity(), position);
    tf2::Transform map_to_sample = pose_in_map * pose_to_sample;

    geometry_msgs::msg::Pose sample_pose;

    sample_pose.position.x = map_to_sample.getOrigin().x();
    sample_pose.position.y = map_to_sample.getOrigin().y();
    sample_pose.position.z = map_to_sample.getOrigin().z();

    sample_pose.orientation.x = map_to_sample.getRotation().x();
    sample_pose.orientation.y = map_to_sample.getRotation().y();
    sample_pose.orientation.z = map_to_sample.getRotation().z();
    sample_pose.orientation.w = map_to_sample.getRotation().w();

    output.emplace_back(sample_pose);
  }

  return output;
}
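
// Worked example (illustrative): with velocity = 2.0 m/s, period = 1.0 s and
// step_size = 0.25 s, samples land 0.5 m, 1.0 m, 1.5 m and 2.0 m ahead of the
// initial pose along its heading.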

// NOTE: heading will need to be set after calling this

geometry_msgs::msg::PoseWithCovariance pose_from_gnss(
  const lanelet::projection::LocalFrameProjector & projector,
  const tf2::Quaternion & ned_in_map_rotation, const lanelet::GPSPoint & gps_point,
  const double & heading, const double lat_variance, const double lon_variance,
  const double heading_variance)
{
  lanelet::BasicPoint3d map_point = projector.forward(gps_point);

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "map_point: " << map_point.x() << ", " << map_point.y() << ", " << map_point.z());

  if (
    fabs(map_point.x()) > 10000.0 ||
    fabs(map_point.y()) > 10000.0) {  // More than 10 km from the map origin, where earth
                                      // curvature will start to negatively impact system
                                      // performance

    RCLCPP_WARN_STREAM(
      rclcpp::get_logger("motion_computation::conversion"),
      "Distance from map origin is larger than supported by system "
      "assumptions. Strongly advise an alternative map origin be used.");
  }

  // This logic assumes that the orientation difference between an NED frame
  // located at the map origin and an NED frame located at the GNSS point is
  // sufficiently small that it can be ignored. It is therefore assumed that
  // the heading reported by the GNSS system will give the same result
  // regardless of its position in the map, as long as its orientation is
  // unchanged.

  tf2::Quaternion R_m_n(ned_in_map_rotation);  // Rotation of NED frame in map frame
  tf2::Quaternion R_n_h;                       // Rotation of sensor heading report in NED frame
  R_n_h.setRPY(0, 0, heading * wgs84_utils::DEG2RAD);

  tf2::Quaternion R_m_s = R_m_n * R_n_h;  // Rotation of sensor in map frame, under the
                                          // assumption that the distance from the map origin
                                          // is sufficiently small so as to ignore local
                                          // changes in NED orientation

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "R_m_n (x,y,z,w) : ( " << R_m_n.x() << ", " << R_m_n.y() << ", " << R_m_n.z() << ", "
                           << R_m_n.w() << ")");

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "R_n_h (x,y,z,w) : ( " << R_n_h.x() << ", " << R_n_h.y() << ", " << R_n_h.z() << ", "
                           << R_n_h.w() << ")");

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "R_m_s (x,y,z,w) : ( " << R_m_s.x() << ", " << R_m_s.y() << ", " << R_m_s.z() << ", "
                           << R_m_s.w() << ")");

  tf2::Transform T_m_s(
    R_m_s, tf2::Vector3(
             map_point.x(), map_point.y(),
             map_point.z()));  // Reported position and orientation
                               // of sensor frame in map frame

  tf2::Transform T_m_n_no_heading(
    R_m_n, tf2::Vector3(0, 0, 0));  // Used to transform the covariance

  // This covariance represents the covariance of the NED frame
  // (N = lat, E = lon, heading = angle east of north). This means the
  // covariance is in the NED frame and needs to be transformed to the
  // map frame.
  // clang-format off
  std::array<double, 36> input_covariance = {
    lat_variance, 0, 0, 0, 0, 0,
    0, lon_variance, 0, 0, 0, 0,
    0, 0, 1, 0, 0, 0,
    0, 0, 0, 1, 0, 0,
    0, 0, 0, 0, 1, 0,
    0, 0, 0, 0, 0, heading_variance
  };
  // clang-format on

  std::array<double, 36> new_cov = tf2::transformCovariance(
    input_covariance,
    // Per the usage of transformCovariance in tf2_geometry_msgs,
    // this frame should be the transform between the frame the pose
    // is in and the target frame.
    T_m_n_no_heading);
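
  // For reference (added note): transformCovariance applies the similarity
  // transform Sigma_map = R * Sigma_ned * R^T using only the rotation R of
  // the supplied transform, which is why T_m_n_no_heading carries a zero
  // translation.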

  // Populate message
  geometry_msgs::msg::PoseWithCovariance pose;
  pose.pose.position.x = T_m_s.getOrigin().getX();
  pose.pose.position.y = T_m_s.getOrigin().getY();
  pose.pose.position.z = T_m_s.getOrigin().getZ();

  pose.pose.orientation.x = T_m_s.getRotation().getX();
  pose.pose.orientation.y = T_m_s.getRotation().getY();
  pose.pose.orientation.z = T_m_s.getRotation().getZ();
  pose.pose.orientation.w = T_m_s.getRotation().getW();

  pose.covariance = new_cov;

  return pose;
}

std::vector<carma_perception_msgs::msg::PredictedState> predicted_poses_to_predicted_state(
  const std::vector<geometry_msgs::msg::Pose> & poses, double constant_velocity,
  const rclcpp::Time & start_time, const rclcpp::Duration & step_size, const std::string & frame,
  double initial_pose_confidence, double initial_vel_confidence)
{
  std::vector<carma_perception_msgs::msg::PredictedState> output;
  output.reserve(poses.size());

  rclcpp::Time time(start_time);

  for (const auto & p : poses) {
    time += step_size;
    carma_perception_msgs::msg::PredictedState pred_state;
    pred_state.header.stamp = time;
    pred_state.header.frame_id = frame;

    pred_state.predicted_position = p;
    pred_state.predicted_position_confidence =
      0.9 * initial_pose_confidence;  // Reduce confidence by 10% per timestep
    initial_pose_confidence = pred_state.predicted_position_confidence;

    pred_state.predicted_velocity.linear.x = constant_velocity;
    pred_state.predicted_velocity_confidence =
      0.9 * initial_vel_confidence;  // Reduce confidence by 10% per timestep
    initial_vel_confidence = pred_state.predicted_velocity_confidence;

    output.push_back(pred_state);
  }

  return output;
}
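
// Decay example (illustrative): an initial confidence of 1.0 becomes 0.9,
// 0.81, 0.729, ... over successive prediction steps.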

rclcpp::Time get_psm_timestamp(
  const carma_v2x_msgs::msg::PSM & in_msg, rclcpp::Clock::SharedPtr clock)
{
  boost::posix_time::ptime utc_time_of_current_psm;

  // Get the UTC epoch start time
  static const boost::posix_time::ptime inception_boost(
    boost::posix_time::time_from_string("1970-01-01 00:00:00.000"));

  // Determine if the UTC time of the path history can be used instead of the
  // sec_mark. The sec_mark is susceptible to large error on minute transitions
  // due to the missing "minute of the year" field. If the second mark in the
  // path history is identical and the full UTC time is provided with ms
  // resolution, then it can be assumed the initial_position is the same as the
  // PSM data and the utc_time can be used instead of sec_mark.
  // clang-format off

  const auto initial_position_time{in_msg.path_history.initial_position.utc_time};
  if ((in_msg.presence_vector &
       carma_v2x_msgs::msg::PSM::HAS_PATH_HISTORY) &&
      (in_msg.path_history.presence_vector &
       carma_v2x_msgs::msg::PathHistory::HAS_INITIAL_POSITION) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::YEAR) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::MONTH) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::DAY) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::HOUR) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::MINUTE) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::SECOND) &&
      (in_msg.sec_mark.millisecond == initial_position_time.second.millisecond)) {
    // clang-format on
    RCLCPP_DEBUG_STREAM(
      rclcpp::get_logger("motion_computation::conversion"),
      "Using UTC time of path history to determine PSM "
      "timestamp. Assumed valid since UTC is fully specified "
      "and sec_mark == utc_time.seconds in this message.");

    boost::posix_time::time_duration time_of_day =
      boost::posix_time::hours(in_msg.path_history.initial_position.utc_time.hour.hour) +
      boost::posix_time::minutes(in_msg.path_history.initial_position.utc_time.minute.minute) +
      boost::posix_time::milliseconds(
        in_msg.path_history.initial_position.utc_time.second.millisecond);

    boost::gregorian::date utc_day(
      in_msg.path_history.initial_position.utc_time.year.year,
      in_msg.path_history.initial_position.utc_time.month.month,
      in_msg.path_history.initial_position.utc_time.day.day);

    utc_time_of_current_psm = boost::posix_time::ptime(utc_day) + time_of_day;
  } else {  // If the UTC time of the path history cannot be used to account
            // for minute change over, then we have to default to the sec_mark

    RCLCPP_WARN_STREAM_THROTTLE(
      rclcpp::get_logger("motion_computation::conversion"), *clock.get(),
      rclcpp::Duration(5, 0).nanoseconds(),
      "PSM PathHistory UTC timestamp does not match "
      "sec_mark. Unable to determine the minute of "
      "the year used for PSM data. Assuming local "
      "clock is exactly synced. This is NOT "
      "ADVISED.");

    // Get the current ROS time
    auto current_time = clock->now();

    // Convert the ROS time to a boost duration
    boost::posix_time::time_duration duration_since_inception(
      lanelet::time::durationFromSec(current_time.seconds()));

    // Get the current ROS time in UTC
    auto curr_time_boost = inception_boost + duration_since_inception;

    // Get the duration of the current day so far
    auto duration_in_day_till_current_time = curr_time_boost.time_of_day();

    // Extract hours and minutes
    auto hour_count_in_day = duration_in_day_till_current_time.hours();
    auto minute_count_in_hour = duration_in_day_till_current_time.minutes();

    // Get the offset of the start of the current minute within the day
    auto start_of_minute_in_day = boost::posix_time::hours(hour_count_in_day) +
                                  boost::posix_time::minutes(minute_count_in_hour);

    // Get the start of the day in ROS time
    boost::posix_time::ptime start_of_day(curr_time_boost.date());

    // Get the start of the current minute in UTC
    boost::posix_time::ptime utc_start_of_current_minute = start_of_day + start_of_minute_in_day;

    // Compute the UTC PSM stamp from the sec_mark using ROS time as the clock
    boost::posix_time::time_duration s_in_cur_minute =
      boost::posix_time::milliseconds(in_msg.sec_mark.millisecond);

    utc_time_of_current_psm = utc_start_of_current_minute + s_in_cur_minute;
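
    // Worked example (illustrative): if the local clock reads
    // 2023-06-01 12:34:56.700 UTC and sec_mark = 45500 ms, the PSM is stamped
    // 2023-06-01 12:34:45.500, i.e. 45.5 s past the start of the current
    // minute.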
  }

  boost::posix_time::time_duration nsec_since_epoch = utc_time_of_current_psm - inception_boost;

  if (nsec_since_epoch.is_special()) {
    RCLCPP_ERROR_STREAM(
      rclcpp::get_logger("motion_computation::conversion"),
      "Computed PSM nsec_since_epoch is special (computation "
      "failed). Value effectively undefined.");
  }

  return rclcpp::Time(nsec_since_epoch.total_nanoseconds());
}
}  // namespace impl

}  // namespace conversion
}  // namespace motion_computation