Carma-platform v4.2.0
CARMA Platform is built on the Robot Operating System (ROS) and uses open-source software (OSS) to enable Cooperative Driving Automation (CDA) features, allowing Automated Driving Systems to interact and cooperate with infrastructure and other vehicles through communication.
psm_to_external_object_convertor.cpp
// Copyright 2019-2023 Leidos
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <lanelet2_extension/regulatory_elements/CarmaTrafficSignal.h>
#include <tf2/LinearMath/Transform.h>
#include <wgs84_utils/wgs84_utils.h>

#include <algorithm>
#include <cmath>  // for sin, cos, fabs used below
#include <string>
#include <vector>

#include <boost/date_time/posix_time/posix_time.hpp>
#include <rclcpp/logger.hpp>
#include <rclcpp/logging.hpp>
#include <rclcpp/rclcpp.hpp>

#include <carma_perception_msgs/msg/external_object.hpp>
#include <carma_v2x_msgs/msg/psm.hpp>
#include <tf2_geometry_msgs/tf2_geometry_msgs.hpp>
namespace motion_computation
{
namespace conversion
{
void convert(
  const carma_v2x_msgs::msg::PSM & in_msg, carma_perception_msgs::msg::ExternalObject & out_msg,
  const std::string & map_frame_id, double pred_period, double pred_step_size,
  const lanelet::projection::LocalFrameProjector & map_projector,
  const tf2::Quaternion & ned_in_map_rotation,
  rclcpp::node_interfaces::NodeClockInterface::SharedPtr node_clock)
{
  out_msg.dynamic_obj = true;  // A PSM always describes a living road user,
                               // so the object is dynamic
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::DYNAMIC_OBJ_PRESENCE;

  // Generate a unique object id from the PSM id
  out_msg.id = 0;
  for (int i = in_msg.id.id.size() - 1; i >= 0;
       i--) {  // Using a signed index to handle the empty case
    // Each byte of the PSM id is placed in one byte of the object id.
    // This should result in very large numbers which are unlikely to
    // conflict with standard detections
    out_msg.id |= in_msg.id.id[i] << (8 * i);
  }
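  // Worked example: an id of {0x01, 0x02, 0x03, 0x04} packs to
  // 0x01 | (0x02 << 8) | (0x03 << 16) | (0x04 << 24) = 0x04030201
  // (67305985 in decimal)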
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::ID_PRESENCE_VECTOR;

  // Additionally, store the id in the bsm_id field
  out_msg.bsm_id = in_msg.id.id;
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::BSM_ID_PRESENCE_VECTOR;

  // Compute the timestamp

  auto psm_timestamp = impl::get_psm_timestamp(in_msg, node_clock->get_clock());
  out_msg.header.stamp = builtin_interfaces::msg::Time(psm_timestamp);
  out_msg.header.frame_id = map_frame_id;

  // Set the type

  if (
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::A_PEDESTRIAN ||
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::A_PUBLIC_SAFETY_WORKER ||
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::AN_ANIMAL)
  // Treat animals like people since we have no internal class for them
  {
    out_msg.object_type = carma_perception_msgs::msg::ExternalObject::PEDESTRIAN;

    // Default pedestrian size.
    // ExternalObject dimensions are half the actual size,
    // so these values assume a 1.0 x 1.0 x 2.0 m pedestrian
    out_msg.size.x = 0.5;
    out_msg.size.y = 0.5;
    out_msg.size.z = 1.0;

  } else if (
    in_msg.basic_type.type == j2735_v2x_msgs::msg::PersonalDeviceUserType::A_PEDALCYCLIST) {
    out_msg.object_type = carma_perception_msgs::msg::ExternalObject::
      MOTORCYCLE;  // ExternalObject cannot currently represent bicycles,
                   // but motorcycle is the next best choice

    // Default bicycle size
    out_msg.size.x = 1.0;
    out_msg.size.y = 0.5;
    out_msg.size.z = 1.0;
  } else {
    out_msg.object_type = carma_perception_msgs::msg::ExternalObject::UNKNOWN;

    // Default pedestrian size
    out_msg.size.x = 0.5;
    out_msg.size.y = 0.5;
    out_msg.size.z = 1.0;
  }
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::SIZE_PRESENCE_VECTOR;

  // Set the velocity

  out_msg.velocity.twist.linear.x = in_msg.speed.velocity;
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::VELOCITY_PRESENCE_VECTOR;
  // NOTE: The velocity covariance is not provided in the PSM. Computing it
  // would require at least two PSM messages. Tracking and associating PSM
  // messages would add complexity to this conversion that is not warranted
  // without an existing use case for the velocity covariance. If such a use
  // case is presented, the addition can be made at that time.

  double largest_position_std = 0.0;
  double lat_variance = 0.0;
  double lon_variance = 0.0;
  double heading_variance = 0.0;  // Defaults; updated below based on which
                                  // accuracy fields are available
  double position_confidence = 0.0;  // NOTE: currently unused; the confidence
                                     // is written directly to out_msg below

  // A standard deviation which is larger than the acceptable value to give a
  // 95% confidence interval on fitting the pedestrian within one 3.7 m lane
  constexpr double MAX_POSITION_STD = 1.85;
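  // (1.85 m is half of a 3.7 m lane width, so a pedestrian whose position
  // standard deviation exceeds this cannot confidently be placed in a lane)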

  if (
    (in_msg.accuracy.presence_vector &
     carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_AVAILABLE) &&
    (in_msg.accuracy.presence_vector &
     carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_ORIENTATION_AVAILABLE)) {
    // Both accuracies available
    // NOTE: The presence flags must be tested with bitwise AND (&);
    // bitwise OR (|) would make these conditions always true

    // Compute the position covariance
    // For computing confidence we will use the largest provided standard
    // deviation of position
    largest_position_std = std::max(in_msg.accuracy.semi_major, in_msg.accuracy.semi_minor);
    lat_variance = in_msg.accuracy.semi_minor * in_msg.accuracy.semi_minor;
    lon_variance = in_msg.accuracy.semi_major * in_msg.accuracy.semi_major;
    heading_variance = in_msg.accuracy.orientation * in_msg.accuracy.orientation;

    // NOTE: ExternalObject.msg does not clearly define what is meant by
    // position confidence. Here we provide a linear scale based on the
    // positional accuracy, where 0 confidence denotes a standard deviation
    // at or above MAX_POSITION_STD.
    // Without a way of getting the velocity confidence from the PSM we use
    // the position confidence for both
    out_msg.confidence = 1.0 - std::min(1.0, fabs(largest_position_std / MAX_POSITION_STD));
    out_msg.presence_vector |=
      carma_perception_msgs::msg::ExternalObject::CONFIDENCE_PRESENCE_VECTOR;
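    // Worked example: semi_major = semi_minor = 0.5 m gives
    // confidence = 1.0 - 0.5 / 1.85 ~= 0.73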
  } else if (
    in_msg.accuracy.presence_vector & carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_AVAILABLE) {
    // Position accuracy available

    largest_position_std = std::max(in_msg.accuracy.semi_major, in_msg.accuracy.semi_minor);
    lat_variance = in_msg.accuracy.semi_minor * in_msg.accuracy.semi_minor;
    lon_variance = in_msg.accuracy.semi_major * in_msg.accuracy.semi_major;
    heading_variance = 1.0;  // Yaw variance is not available, so fall back to
                             // a unit variance rather than claiming a perfect
                             // (zero-variance) heading

    // Same calculation as in the branch above; see that for a description
    out_msg.confidence = 1.0 - std::min(1.0, fabs(largest_position_std / MAX_POSITION_STD));
    out_msg.presence_vector |=
      carma_perception_msgs::msg::ExternalObject::CONFIDENCE_PRESENCE_VECTOR;

  } else if (
    in_msg.accuracy.presence_vector &
    carma_v2x_msgs::msg::PositionalAccuracy::ACCURACY_ORIENTATION_AVAILABLE) {
    // Orientation accuracy available

    lat_variance = 1.0;
    lon_variance = 1.0;

    heading_variance = in_msg.accuracy.orientation * in_msg.accuracy.orientation;
  } else {  // No accuracies available
    lat_variance = 1.0;
    lon_variance = 1.0;
    heading_variance = 1.0;
  }

  // Compute the pose
  lanelet::GPSPoint gps_point(
    {in_msg.position.latitude, in_msg.position.longitude, in_msg.position.elevation});
  out_msg.pose = impl::pose_from_gnss(
    map_projector, ned_in_map_rotation, gps_point, in_msg.heading.heading, lat_variance,
    lon_variance, heading_variance);
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::POSE_PRESENCE_VECTOR;

  // Compute predictions
  // If a path prediction is available we sample it;
  // otherwise we assume linear motion

  std::vector<geometry_msgs::msg::Pose> predicted_poses;

  if (in_msg.presence_vector & carma_v2x_msgs::msg::PSM::HAS_PATH_PREDICTION) {
    // Based on the vehicle frame used in J2735, a positive radius is to the
    // right and a negative radius to the left
    predicted_poses = impl::sample_2d_path_from_radius(
      out_msg.pose.pose, out_msg.velocity.twist.linear.x,
      -in_msg.path_prediction.radius_of_curvature, pred_period, pred_step_size);
  } else {
    predicted_poses = impl::sample_2d_linear_motion(
      out_msg.pose.pose, out_msg.velocity.twist.linear.x, pred_period, pred_step_size);
  }

  out_msg.predictions = impl::predicted_poses_to_predicted_state(
    predicted_poses, out_msg.velocity.twist.linear.x, rclcpp::Time(out_msg.header.stamp),
    rclcpp::Duration::from_nanoseconds(pred_step_size * 1e9), map_frame_id, out_msg.confidence,
    out_msg.confidence);
  out_msg.presence_vector |= carma_perception_msgs::msg::ExternalObject::PREDICTION_PRESENCE_VECTOR;
}
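
// Example usage (sketch, not part of the original file): a motion_computation
// node callback might invoke the conversion roughly as follows, where the
// projector, NED rotation, and prediction parameters come from node
// configuration (the names here are hypothetical):
//
//   carma_perception_msgs::msg::ExternalObject obj;
//   conversion::convert(psm_msg, obj, "map", /*pred_period=*/2.0,
//                       /*pred_step_size=*/0.1, map_projector,
//                       ned_in_map_rotation, node->get_node_clock_interface());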

namespace impl
{
std::vector<geometry_msgs::msg::Pose> sample_2d_path_from_radius(
  const geometry_msgs::msg::Pose & pose, double velocity, double radius_of_curvature, double period,
  double step_size)
{
  std::vector<geometry_msgs::msg::Pose> output;
  output.reserve((period / step_size) + 1);

  tf2::Vector3 pose_in_map_translation(pose.position.x, pose.position.y, pose.position.z);
  tf2::Quaternion pose_in_map_quat(
    pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w);
  tf2::Transform pose_in_map(pose_in_map_quat, pose_in_map_translation);

  // The radius of curvature originates from the frame of the provided pose,
  // so the turning center is at (0, r)
  double center_x_in_pose = 0;
  double center_y_in_pose = radius_of_curvature;

  double total_dt = 0;

  while (total_dt < period) {
    // Compute the 2d position and orientation in the pose frame
    total_dt += step_size;
    double delta_arc_length = velocity * total_dt;  // Assumes perfect point motion along the curve

    double turning_angle = delta_arc_length / radius_of_curvature;
    // The sample's offset from the turning center is (r*sin(theta), -r*cos(theta)),
    // so the first sample (theta -> 0) starts at the pose origin
    double dx_from_center = radius_of_curvature * sin(turning_angle);
    double dy_from_center = -radius_of_curvature * cos(turning_angle);

    double x = center_x_in_pose + dx_from_center;
    double y = center_y_in_pose + dy_from_center;

    tf2::Vector3 position(x, y, 0);

    tf2::Quaternion quat;
    quat.setRPY(0, 0, turning_angle);

    // Convert the position and orientation in the pose frame to the map frame
    tf2::Transform pose_to_sample(quat, position);
    tf2::Transform map_to_sample = pose_in_map * pose_to_sample;

    geometry_msgs::msg::Pose sample_pose;

    sample_pose.position.x = map_to_sample.getOrigin().x();
    sample_pose.position.y = map_to_sample.getOrigin().y();
    sample_pose.position.z = pose.position.z;  // Reuse the z position from the initial pose

    sample_pose.orientation.x = map_to_sample.getRotation().x();
    sample_pose.orientation.y = map_to_sample.getRotation().y();
    sample_pose.orientation.z = map_to_sample.getRotation().z();
    sample_pose.orientation.w = map_to_sample.getRotation().w();

    output.emplace_back(sample_pose);
  }

  return output;
}
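
// Spot check (using the corrected center offset above): radius_of_curvature = 10 m,
// velocity = 1 m/s, and step_size = 1 s give turning_angle = 0.1 rad at the first
// sample, i.e. (x, y) ~= (0.998, 0.050) in the pose frame: nearly straight ahead
// with a slight drift toward the turning center.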

std::vector<geometry_msgs::msg::Pose> sample_2d_linear_motion(
  const geometry_msgs::msg::Pose & pose, double velocity, double period, double step_size)
{
  std::vector<geometry_msgs::msg::Pose> output;
  output.reserve((period / step_size) + 1);

  tf2::Vector3 pose_in_map_translation(pose.position.x, pose.position.y, pose.position.z);
  tf2::Quaternion pose_in_map_quat(
    pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w);

  tf2::Transform pose_in_map(pose_in_map_quat, pose_in_map_translation);

  double total_dt = 0;

  while (total_dt < period) {
    // Compute the 2d position and orientation in the pose frame
    total_dt += step_size;
    double dx_from_start = velocity * total_dt;  // Assuming linear motion in the pose frame

    // The sample position is expressed in the pose frame, so only the forward
    // offset applies here; the map-frame placement comes from pose_in_map below
    tf2::Vector3 position(dx_from_start, 0, 0);

    // Convert the position and orientation in the pose frame to the map frame
    tf2::Transform pose_to_sample(tf2::Quaternion::getIdentity(), position);
    tf2::Transform map_to_sample = pose_in_map * pose_to_sample;

    geometry_msgs::msg::Pose sample_pose;

    sample_pose.position.x = map_to_sample.getOrigin().x();
    sample_pose.position.y = map_to_sample.getOrigin().y();
    sample_pose.position.z = map_to_sample.getOrigin().z();

    sample_pose.orientation.x = map_to_sample.getRotation().x();
    sample_pose.orientation.y = map_to_sample.getRotation().y();
    sample_pose.orientation.z = map_to_sample.getRotation().z();
    sample_pose.orientation.w = map_to_sample.getRotation().w();

    output.emplace_back(sample_pose);
  }

  return output;
}
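
// For example, velocity = 2 m/s with step_size = 0.5 s yields samples at
// x = 1.0, 2.0, 3.0, ... m along the +x axis of the pose frame, which the
// pose_in_map transform then places in the map frame.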

// NOTE: The heading will need to be set after calling this function

geometry_msgs::msg::PoseWithCovariance pose_from_gnss(
  const lanelet::projection::LocalFrameProjector & projector,
  const tf2::Quaternion & ned_in_map_rotation, const lanelet::GPSPoint & gps_point,
  const double & heading, const double lat_variance, const double lon_variance,
  const double heading_variance)
{
  lanelet::BasicPoint3d map_point = projector.forward(gps_point);

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "map_point: " << map_point.x() << ", " << map_point.y() << ", " << map_point.z());

  if (
    fabs(map_point.x()) > 10000.0 ||
    fabs(map_point.y()) > 10000.0) {  // Beyond 10 km from the map origin, earth
                                      // curvature starts to negatively impact
                                      // system performance

    RCLCPP_WARN_STREAM(
      rclcpp::get_logger("motion_computation::conversion"),
      "Distance from map origin is larger than supported by system "
      "assumptions. Strongly advise an alternative map origin be used.");
  }

  // This logic assumes that the orientation difference between an NED frame
  // located at the map origin and an NED frame located at the GNSS point is
  // sufficiently small that it can be ignored. It is therefore assumed that
  // the GNSS heading report will give the same result regardless of the
  // sensor's position in the map, so long as its orientation is unchanged.

  tf2::Quaternion R_m_n(ned_in_map_rotation);  // Rotation of the NED frame in the map frame
  tf2::Quaternion R_n_h;  // Rotation of the sensor heading report in the NED frame
  R_n_h.setRPY(0, 0, heading * wgs84_utils::DEG2RAD);

  tf2::Quaternion R_m_s = R_m_n * R_n_h;  // Rotation of the sensor in the map frame, under the
                                          // assumption that the distance from the map origin is
                                          // small enough to ignore local changes in NED orientation

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "R_m_n (x,y,z,w) : ( " << R_m_n.x() << ", " << R_m_n.y() << ", " << R_m_n.z() << ", "
                           << R_m_n.w() << ")");

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "R_n_h (x,y,z,w) : ( " << R_n_h.x() << ", " << R_n_h.y() << ", " << R_n_h.z() << ", "
                           << R_n_h.w() << ")");

  RCLCPP_DEBUG_STREAM(
    rclcpp::get_logger("motion_computation::conversion"),
    "R_m_s (x,y,z,w) : ( " << R_m_s.x() << ", " << R_m_s.y() << ", " << R_m_s.z() << ", "
                           << R_m_s.w() << ")");

  tf2::Transform T_m_s(
    R_m_s,
    tf2::Vector3(map_point.x(), map_point.y(), map_point.z()));  // Reported position and
                                                                 // orientation of the sensor
                                                                 // frame in the map frame

  tf2::Transform T_m_n_no_heading(
    R_m_n, tf2::Vector3(0, 0, 0));  // Used to transform the covariance

  // This covariance represents the covariance of the NED frame (N = lat,
  // E = lon, heading = angle east of north). Since it is expressed in the
  // NED frame it needs to be transformed into the map frame
  // clang-format off
  std::array<double, 36> input_covariance = {
    lat_variance, 0, 0, 0, 0, 0,
    0, lon_variance, 0, 0, 0, 0,
    0, 0, 1, 0, 0, 0,
    0, 0, 0, 1, 0, 0,
    0, 0, 0, 0, 1, 0,
    0, 0, 0, 0, 0, heading_variance
  };
  // clang-format on
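  // Layout note: this follows the geometry_msgs/PoseWithCovariance convention
  // of a row-major 6x6 matrix over (x, y, z, rot_x, rot_y, rot_z), so the
  // lat/lon variances occupy the x/y slots and heading_variance the yaw slot.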

  std::array<double, 36> new_cov = tf2::transformCovariance(
    input_covariance,
    // Per the usage of transformCovariance in tf2_geometry_msgs,
    // this should be the transform between the frame the pose is
    // in and the target frame
    T_m_n_no_heading);

  // Populate message
  geometry_msgs::msg::PoseWithCovariance pose;
  pose.pose.position.x = T_m_s.getOrigin().getX();
  pose.pose.position.y = T_m_s.getOrigin().getY();
  pose.pose.position.z = T_m_s.getOrigin().getZ();

  pose.pose.orientation.x = T_m_s.getRotation().getX();
  pose.pose.orientation.y = T_m_s.getRotation().getY();
  pose.pose.orientation.z = T_m_s.getRotation().getZ();
  pose.pose.orientation.w = T_m_s.getRotation().getW();

  pose.covariance = new_cov;

  return pose;
}

std::vector<carma_perception_msgs::msg::PredictedState> predicted_poses_to_predicted_state(
  const std::vector<geometry_msgs::msg::Pose> & poses, double constant_velocity,
  const rclcpp::Time & start_time, const rclcpp::Duration & step_size, const std::string & frame,
  double initial_pose_confidence, double initial_vel_confidence)
{
  std::vector<carma_perception_msgs::msg::PredictedState> output;
  output.reserve(poses.size());

  rclcpp::Time time(start_time);

  for (const auto & p : poses) {
    time += step_size;
    carma_perception_msgs::msg::PredictedState pred_state;
    pred_state.header.stamp = time;
    pred_state.header.frame_id = frame;

    pred_state.predicted_position = p;
    pred_state.predicted_position_confidence =
      0.9 * initial_pose_confidence;  // Reduce confidence by 10% per timestep
    initial_pose_confidence = pred_state.predicted_position_confidence;

    pred_state.predicted_velocity.linear.x = constant_velocity;
    pred_state.predicted_velocity_confidence =
      0.9 * initial_vel_confidence;  // Reduce confidence by 10% per timestep
    initial_vel_confidence = pred_state.predicted_velocity_confidence;

    output.push_back(pred_state);
  }

  return output;
}
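
// The confidence decay is geometric: an initial confidence of 0.8 yields
// 0.72, 0.648, 0.5832, ... (0.8 * 0.9^k) over successive timesteps.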

rclcpp::Time get_psm_timestamp(
  const carma_v2x_msgs::msg::PSM & in_msg, rclcpp::Clock::SharedPtr clock)
{
  boost::posix_time::ptime utc_time_of_current_psm;

  // Get the UTC epoch start time
  static const boost::posix_time::ptime inception_boost(
    boost::posix_time::time_from_string("1970-01-01 00:00:00.000"));

  // Determine whether the UTC time of the path history can be used instead of
  // the sec_mark. The sec_mark is susceptible to large error on minute
  // transitions due to the missing "minute of the year" field. If the second
  // mark in the path history is identical and the full UTC time is provided
  // with millisecond resolution, then it can be assumed the initial_position
  // is the same as the PSM data and the utc_time can be used instead of
  // sec_mark
  // clang-format off

  const auto initial_position_time{in_msg.path_history.initial_position.utc_time};
  if ((in_msg.presence_vector &
       carma_v2x_msgs::msg::PSM::HAS_PATH_HISTORY) &&
      (in_msg.path_history.presence_vector &
       carma_v2x_msgs::msg::PathHistory::HAS_INITIAL_POSITION) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::YEAR) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::MONTH) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::DAY) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::HOUR) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::MINUTE) &&
      (initial_position_time.presence_vector & j2735_v2x_msgs::msg::DDateTime::SECOND) &&
      (in_msg.sec_mark.millisecond == initial_position_time.second.millisecond)) {
    // clang-format on
    RCLCPP_DEBUG_STREAM(
      rclcpp::get_logger("motion_computation::conversion"),
      "Using UTC time of path history to determine PSM "
      "timestamp. Assumed valid since UTC is fully specified "
      "and sec_mark == utc_time.seconds in this message.");

    boost::posix_time::time_duration time_of_day =
      boost::posix_time::hours(in_msg.path_history.initial_position.utc_time.hour.hour) +
      boost::posix_time::minutes(in_msg.path_history.initial_position.utc_time.minute.minute) +
      boost::posix_time::milliseconds(
        in_msg.path_history.initial_position.utc_time.second.millisecond);

    boost::gregorian::date utc_day(
      in_msg.path_history.initial_position.utc_time.year.year,
      in_msg.path_history.initial_position.utc_time.month.month,
      in_msg.path_history.initial_position.utc_time.day.day);

    utc_time_of_current_psm = boost::posix_time::ptime(utc_day) + time_of_day;
  } else {  // If the UTC time of the path history cannot be used to account
            // for minute rollover, then we have to default to the sec_mark

    RCLCPP_WARN_STREAM_THROTTLE(
      rclcpp::get_logger("motion_computation::conversion"), *clock.get(),
      rclcpp::Duration(5, 0).nanoseconds(),
      "PSM PathHistory UTC timestamp does not match "
      "sec_mark. Unable to determine the minute of "
      "the year used for PSM data. Assuming the local "
      "clock is exactly synced. This is NOT "
      "ADVISED.");

    // Get the current ROS time
    auto current_time = clock->now();

    // Convert the ROS time to a boost duration
    boost::posix_time::time_duration duration_since_inception(
      lanelet::time::durationFromSec(current_time.seconds()));

    // Get the current ROS time in UTC
    auto curr_time_boost = inception_boost + duration_since_inception;

    // Get the elapsed duration of the current day
    auto duration_in_day_till_current_time = curr_time_boost.time_of_day();

    // Extract hours and minutes
    auto hour_count_in_day = duration_in_day_till_current_time.hours();
    auto minute_count_in_hour = duration_in_day_till_current_time.minutes();

    // Get the duration from the start of the day to the start of the current minute
    auto start_of_minute_in_day = boost::posix_time::hours(hour_count_in_day) +
                                  boost::posix_time::minutes(minute_count_in_hour);

    // Get the start of the day in ROS time
    boost::posix_time::ptime start_of_day(curr_time_boost.date());

    // Get the start of the current minute in ROS time
    boost::posix_time::ptime utc_start_of_current_minute = start_of_day + start_of_minute_in_day;

    // Compute the UTC PSM stamp from the sec_mark using ROS time as the clock

    boost::posix_time::time_duration s_in_cur_minute =
      boost::posix_time::milliseconds(in_msg.sec_mark.millisecond);

    utc_time_of_current_psm = utc_start_of_current_minute + s_in_cur_minute;
  }

  boost::posix_time::time_duration nsec_since_epoch = utc_time_of_current_psm - inception_boost;

  if (nsec_since_epoch.is_special()) {
    RCLCPP_ERROR_STREAM(
      rclcpp::get_logger("motion_computation::conversion"),
      "Computed PSM nsec_since_epoch is special (computation "
      "failed). Value effectively undefined.");
  }

  return rclcpp::Time(nsec_since_epoch.total_nanoseconds());
}
}  // namespace impl

}  // namespace conversion
}  // namespace motion_computation